diff --git a/.github/ISSUE_TEMPLATE/Bug_Report.md b/.github/ISSUE_TEMPLATE/Bug_Report.md index f546850f2097..9764b24aa75f 100644 --- a/.github/ISSUE_TEMPLATE/Bug_Report.md +++ b/.github/ISSUE_TEMPLATE/Bug_Report.md @@ -58,11 +58,11 @@ To obtain the debug output, see the [Terraform documentation on debugging](https -### Expected Behavior +### Expected Behaviour -### Actual Behavior +### Actual Behaviour diff --git a/.github/ISSUE_TEMPLATE/Question.md b/.github/ISSUE_TEMPLATE/Question.md deleted file mode 100644 index 3e71b8399611..000000000000 --- a/.github/ISSUE_TEMPLATE/Question.md +++ /dev/null @@ -1,15 +0,0 @@ ---- -name: 💬 Question -about: If you have a question, please check out our other community resources! - ---- - -Issues on GitHub are intended to be related to bugs or feature requests with provider codebase, -so we recommend using our other community resources instead of asking here 👍. - ---- - -If you have a support request or question please submit them to one of these resources: - -* [HashiCorp Community Forums](https://discuss.hashicorp.com/c/terraform-providers) -* [HashiCorp support](https://support.hashicorp.com) (Terraform Enterprise customers) diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 3ba13e0cec6c..8fdb9d63ca4f 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -1 +1,11 @@ blank_issues_enabled: false +contact_links: + - name: Terraform Azure Provider Questions + url: https://discuss.hashicorp.com/c/terraform-providers/tf-azure + about: GitHub issues in this repository are only intended for bug reports and feature requests. Other issues will be closed. Please ask and answer questions through the Terraform Azure Provider Community Forum. + - name: Terraform Core Bug Reports and Feature Requests + url: https://github.com/hashicorp/terraform/issues/new/choose + about: Terraform Core, which handles the Terraform configuration language, CLI commands, and resource dependency graph, has its own codebase. Bug reports and feature requests for those pieces of functionality should be directed to that repository. + - name: Terraform Language or Workflow Questions + url: https://discuss.hashicorp.com/c/terraform-core + about: Please ask and answer language or workflow related questions through the Terraform Core Community Forum. 
diff --git a/.github/workflows/depscheck.yaml b/.github/workflows/depscheck.yaml index c7f84fb686d6..fa9799281747 100644 --- a/.github/workflows/depscheck.yaml +++ b/.github/workflows/depscheck.yaml @@ -2,7 +2,7 @@ name: Vendor Dependencies Check on: pull_request: - types: ['opened', 'edited', 'reopened', 'synchronize'] + types: ['opened', 'synchronize'] paths: - '**.go' - 'vendor/**' @@ -17,7 +17,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-go@v2 with: - go-version: '1.14.5' + go-version: '1.15.5' - run: bash scripts/gogetcookie.sh - run: make tools - run: make depscheck diff --git a/.github/workflows/gencheck.yaml b/.github/workflows/gencheck.yaml new file mode 100644 index 000000000000..c328217e0c53 --- /dev/null +++ b/.github/workflows/gencheck.yaml @@ -0,0 +1,21 @@ +--- +name: Generation Check +on: + pull_request: + types: ['opened', 'synchronize'] + paths: + - '**.go' + - 'azurerm/**' + - '.github/workflows/**' + +jobs: + gencheck: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-go@v2 + with: + go-version: '1.15.5' + - run: bash scripts/gogetcookie.sh + - run: make tools + - run: make gencheck diff --git a/.github/workflows/golint.yaml b/.github/workflows/golint.yaml new file mode 100644 index 000000000000..f8317a4f9acf --- /dev/null +++ b/.github/workflows/golint.yaml @@ -0,0 +1,24 @@ +--- +name: GoLang Linting +on: + pull_request: + types: ['opened', 'synchronize'] + paths: + - '**.go' + - 'vendor/**' + - '.github/workflows/**' + +jobs: + golint: + runs-on: ubuntu-latest + strategy: + fail-fast: true + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-go@v2 + with: + go-version: '1.15.5' + - uses: golangci/golangci-lint-action@v2 + with: + version: 'v1.32' + args: --timeout=30m0s diff --git a/.github/workflows/lintrest.yaml b/.github/workflows/lintrest.yaml deleted file mode 100644 index f50deecd5020..000000000000 --- a/.github/workflows/lintrest.yaml +++ /dev/null @@ -1,23 +0,0 @@ ---- -name: GoLang Linting -on: - pull_request: - types: ['opened', 'edited', 'reopened', 'synchronize'] - paths: - - '**.go' - - 'vendor/**' - - '.github/workflows/**' - -jobs: - lintrest: - runs-on: ubuntu-latest - strategy: - fail-fast: true - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-go@v2 - with: - go-version: '1.14.5' - - run: bash scripts/gogetcookie.sh - - run: make tools - - run: GOGC=5 make lintrest diff --git a/.github/workflows/teamcity-test.yaml b/.github/workflows/teamcity-test.yaml new file mode 100644 index 000000000000..f4bd764c996d --- /dev/null +++ b/.github/workflows/teamcity-test.yaml @@ -0,0 +1,29 @@ +--- +name: TeamCity Config Test +on: + pull_request: + types: ['opened', 'synchronize'] + paths: + - '!.teamcity/components/generated/**' + - '!.teamcity/target/**' + - '.teamcity/**' + - '.github/workflows/**' + +jobs: + teamcity-test: + runs-on: ubuntu-latest + strategy: + fail-fast: true + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-java@v1 + with: + java-version: '13' + java-package: jdk + - uses: actions/cache@v2 + with: + path: ~/.m2/repository + key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} + restore-keys: | + ${{ runner.os }}-maven- + - run: make teamcity-test diff --git a/.github/workflows/tflint.yaml b/.github/workflows/tflint.yaml index ee5fd9281f4e..9faa908c73f9 100644 --- a/.github/workflows/tflint.yaml +++ b/.github/workflows/tflint.yaml @@ -2,7 +2,7 @@ name: Terraform Schema Linting on: pull_request: - types: ['opened', 'edited', 'reopened', 'synchronize'] 
+ types: ['opened', 'synchronize'] paths: - '**.go' - 'vendor/**' @@ -17,7 +17,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-go@v2 with: - go-version: '1.14.5' + go-version: '1.15.5' - run: bash scripts/gogetcookie.sh - run: make tools - run: make tflint diff --git a/.github/workflows/thirty-two-bit.yaml b/.github/workflows/thirty-two-bit.yaml new file mode 100644 index 000000000000..c389a5130f33 --- /dev/null +++ b/.github/workflows/thirty-two-bit.yaml @@ -0,0 +1,23 @@ +--- +name: 32 Bit Build +on: + pull_request: + types: ['opened', 'synchronize'] + paths: + - '**.go' + - 'vendor/**' + - '.github/workflows/**' + +jobs: + compatability-32bit-test: + runs-on: ubuntu-latest + strategy: + fail-fast: true + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-go@v2 + with: + go-version: '1.15.5' + - run: bash scripts/gogetcookie.sh + - run: make tools + - run: GOARCH=386 GOOS=linux go build -o 32bitbuild . diff --git a/.github/workflows/unit-test.yaml b/.github/workflows/unit-test.yaml index 48f857fbecd0..e96643b1d1cd 100644 --- a/.github/workflows/unit-test.yaml +++ b/.github/workflows/unit-test.yaml @@ -2,7 +2,7 @@ name: Unit Tests on: pull_request: - types: ['opened', 'edited', 'reopened', 'synchronize'] + types: ['opened', 'synchronize'] paths: - '**.go' - 'vendor/**' @@ -17,8 +17,6 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-go@v2 with: - go-version: '1.14.5' + go-version: '1.15.5' - run: bash scripts/gogetcookie.sh - - run: make tools - run: make test - diff --git a/.github/workflows/website-lint.yaml b/.github/workflows/website-lint.yaml index 3dba5ab8bd10..bf29f66b8737 100644 --- a/.github/workflows/website-lint.yaml +++ b/.github/workflows/website-lint.yaml @@ -2,7 +2,7 @@ name: Website Linting on: pull_request: - types: ['opened', 'edited', 'reopened', 'synchronize'] + types: ['opened', 'synchronize'] paths: - 'website/**' - '.github/workflows/**' @@ -16,7 +16,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-go@v2 with: - go-version: '1.14.5' + go-version: '1.15.5' - run: bash scripts/gogetcookie.sh - run: make tools - run: make website-lint diff --git a/.github/workflows/website-test.yaml b/.github/workflows/website-test.yaml index 4fdf8dae9d2e..dc03cd4f215f 100644 --- a/.github/workflows/website-test.yaml +++ b/.github/workflows/website-test.yaml @@ -2,7 +2,7 @@ name: Website checks on: pull_request: - types: ['opened', 'edited', 'reopened', 'synchronize'] + types: ['opened', 'synchronize'] paths: - 'website/**' - '.github/workflows/**' @@ -16,7 +16,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-go@v2 with: - go-version: '1.14.5' + go-version: '1.15.5' - run: bash scripts/gogetcookie.sh - run: make tools - run: make website-test diff --git a/.go-version b/.go-version index 24a57f28a415..d32434904bcb 100644 --- a/.go-version +++ b/.go-version @@ -1 +1 @@ -1.14.5 +1.15.5 diff --git a/.golangci-travis.yml b/.golangci-travis.yml deleted file mode 100644 index cca9f2365f5d..000000000000 --- a/.golangci-travis.yml +++ /dev/null @@ -1,39 +0,0 @@ -run: - deadline: 60m10s - modules-download-mode: vendor - -issues: - max-per-linter: 0 - max-same-issues: 0 - -linters: - disable-all: true - enable: - - deadcode - - errcheck - - gocritic - - gofmt - - goimports - - gosimple - - govet - - ineffassign - - interfacer - - nakedret - - misspell - - staticcheck - - structcheck - - typecheck - - unconvert - - unparam - - varcheck - - vet - - vetshadow - - whitespace - -linters-settings: - errcheck: - ignore: 
github.com/hashicorp/terraform-plugin-sdk/helper/schema:ForceNew|Set,fmt:.*,io:Close - misspell: - ignore-words: - - hdinsight - - exportfs diff --git a/.golangci.yml b/.golangci.yml index 71e500dba569..e0f08aad21dc 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -1,6 +1,8 @@ run: - deadline: 10m10s + deadline: 12m10s modules-download-mode: vendor + skip-dirs: + - vendor issues: max-per-linter: 0 @@ -9,6 +11,7 @@ issues: linters: disable-all: true enable: + - asciicheck - deadcode - errcheck - gocritic diff --git a/.teamcity/components/generated/services.kt b/.teamcity/components/generated/services.kt index b2c42e39e238..0358f33ce913 100644 --- a/.teamcity/components/generated/services.kt +++ b/.teamcity/components/generated/services.kt @@ -1,15 +1,15 @@ // NOTE: this is Generated from the Service Definitions - manual changes will be lost // to re-generate this file, run 'make generate' in the root of the repository var services = mapOf( + "apimanagement" to "API Management", "advisor" to "Advisor", "analysisservices" to "Analysis Services", - "apimanagement" to "API Management", "appconfiguration" to "App Configuration", - "appplatform" to "App Platform", "applicationinsights" to "Application Insights", "attestation" to "Attestation", "authorization" to "Authorization", "automation" to "Automation", + "azurestackhci" to "Azure Stack HCI", "batch" to "Batch", "blueprints" to "Blueprints", "bot" to "Bot", @@ -20,40 +20,42 @@ var services = mapOf( "cosmos" to "CosmosDB", "costmanagement" to "Cost Management", "customproviders" to "Custom Providers", - "databricks" to "DataBricks", + "dns" to "DNS", "datafactory" to "Data Factory", "datalake" to "Data Lake", - "databasemigration" to "Database Migration", "datashare" to "Data Share", + "databricks" to "DataBricks", + "databasemigration" to "Database Migration", "desktopvirtualization" to "Desktop Virtualization", - "devspace" to "DevSpaces", "devtestlabs" to "Dev Test", - "dns" to "DNS", + "digitaltwins" to "Digital Twins", "eventgrid" to "EventGrid", "eventhub" to "EventHub", + "firewall" to "Firewall", "frontdoor" to "FrontDoor", + "hdinsight" to "HDInsight", "hpccache" to "HPC Cache", "hsm" to "Hardware Security Module", - "hdinsight" to "HDInsight", "healthcare" to "Health Care", - "iothub" to "IoT Hub", "iotcentral" to "IoT Central", + "iothub" to "IoT Hub", "keyvault" to "KeyVault", "kusto" to "Kusto", + "lighthouse" to "Lighthouse", + "loadbalancer" to "Load Balancer", "loganalytics" to "Log Analytics", "logic" to "Logic", "machinelearning" to "Machine Learning", "maintenance" to "Maintenance", "managedapplications" to "Managed Applications", - "lighthouse" to "Lighthouse", + "msi" to "Managed Service Identities", "managementgroup" to "Management Group", "maps" to "Maps", "mariadb" to "MariaDB", "media" to "Media", + "mssql" to "Microsoft SQL Server / SQL Azure", "mixedreality" to "Mixed Reality", "monitor" to "Monitor", - "msi" to "Managed Service Identities", - "mssql" to "Microsoft SQL Server / SQL Azure", "mysql" to "MySQL", "netapp" to "NetApp", "network" to "Network", @@ -67,14 +69,15 @@ var services = mapOf( "redis" to "Redis", "relay" to "Relay", "resource" to "Resources", + "sql" to "SQL", "search" to "Search", "securitycenter" to "Security Center", "sentinel" to "Sentinel", - "servicebus" to "ServiceBus", "servicefabric" to "Service Fabric", "servicefabricmesh" to "Service Fabric Mesh", + "servicebus" to "ServiceBus", "signalr" to "SignalR", - "sql" to "SQL", + "springcloud" to "Spring Cloud", "storage" to "Storage", 
"streamanalytics" to "Stream Analytics", "subscription" to "Subscription", diff --git a/.teamcity/components/settings.kt b/.teamcity/components/settings.kt index b32bbe5c68cc..415b8db49289 100644 --- a/.teamcity/components/settings.kt +++ b/.teamcity/components/settings.kt @@ -19,15 +19,21 @@ var runNightly = mapOf( // specifies a list of services which should be run with a custom test configuration var serviceTestConfigurationOverrides = mapOf( + // Spring Cloud only allows a max of 10 provisioned + "appplatform" to testConfiguration(5, defaultStartHour), + // The AKS API has a low rate limit "containers" to testConfiguration(5, defaultStartHour), // Data Lake has a low quota "datalake" to testConfiguration(2, defaultStartHour), - //HSM has low quota and potentially slow recycle time + // HSM has low quota and potentially slow recycle time "hsm" to testConfiguration(1, defaultStartHour), + // Log Analytics Clusters have a max deployments of 2 - parallelism set to 1 or `importTest` fails + "loganalytics" to testConfiguration(1, defaultStartHour), + // servicebus quotas are limited and we experience failures if tests // execute too quickly as we run out of namespaces in the sub "servicebus" to testConfiguration(10, defaultStartHour), diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 20008eb7ed25..000000000000 --- a/.travis.yml +++ /dev/null @@ -1,46 +0,0 @@ -dist: xenial -sudo: required -services: -- docker -language: go -go: -- "1.14.x" - -branches: - only: - - master - -env: - global: - GOFLAGS=-mod=vendor - -install: - # This script is used by the Travis build to install a cookie for - # go.googlesource.com so rate limits are higher when using `go get` to fetch - # packages that live there. - # See: https://github.com/golang/go/issues/12933 - - bash scripts/gogetcookie.sh - - make tools - -matrix: - fast_finish: true - allow_failures: - - go: tip - include: - - name: "make lintrest" - script: - - TRAVIS=ci GOGC=5 make lintrest - # results in a OOM error - #- name: "make lintunused" - # script: GOGC=5 make lintunused - - name: "make tflint" - script: TRAVIS=ci make tflint - - name: "make test" - script: TRAVIS=ci make test - - name: "make depscheck" - script: make depscheck - - name: "make website-lint" - script: make website-lint - - name: "make website-test" - script: make website-test - diff --git a/CHANGELOG-v1.md b/CHANGELOG-v1.md index 0f43fc5ebe58..a956bbae3ec8 100644 --- a/CHANGELOG-v1.md +++ b/CHANGELOG-v1.md @@ -1232,7 +1232,7 @@ IMPROVEMENTS: * `azurerm_policy_assignment` - support exclusions with the `not_scopes` property ([#2620](https://github.com/terraform-providers/terraform-provider-azurerm/issues/2620)) * `azurerm_policy_definition` - polices can now be assigned to a management group ([#2490](https://github.com/terraform-providers/terraform-provider-azurerm/issues/2490)) * `azurerm_policy_set_definition` - policy sets can now be assigned to a management group ([#2618](https://github.com/terraform-providers/terraform-provider-azurerm/issues/2618)) -* `azurerm_public_ip` - deprecated `public_ip_address_allocation` in favor of `allocation_method` to better match the SDK ([#2576](https://github.com/terraform-providers/terraform-provider-azurerm/issues/2576)) +* `azurerm_public_ip` - deprecated `public_ip_address_allocation` in favour of `allocation_method` to better match the SDK ([#2576](https://github.com/terraform-providers/terraform-provider-azurerm/issues/2576)) * `azurerm_redis_cache` - add availability zone support 
([#2580](https://github.com/terraform-providers/terraform-provider-azurerm/issues/2580)) * `azurerm_service_fabric_cluster` - support for `azure_active_directory` ([#2553](https://github.com/terraform-providers/terraform-provider-azurerm/issues/2553)) * `azurerm_service_fabric_cluster` - support for `reverse_proxy_certificate` ([#2544](https://github.com/terraform-providers/terraform-provider-azurerm/issues/2544)) @@ -1757,7 +1757,7 @@ BUG FIXES: IMPROVEMENTS: * `azurerm_automation_schedule` - adding the `interval` property and supporting recurring schedules ([#1384](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1384)) -* `azurerm_dns_ns_record` - deprecated `record` properties in favor of a `records` list ([#991](https://github.com/terraform-providers/terraform-provider-azurerm/issues/991)) +* `azurerm_dns_ns_record` - deprecated `record` properties in favour of a `records` list ([#991](https://github.com/terraform-providers/terraform-provider-azurerm/issues/991)) * `azurerm_function_app` - adding the `identity` property ([#1369](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1369)) * `azurerm_role_definition` - the `role_definition_id` property is now optional. The resource will now generate a random UUID if it is ommited ([#1378](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1378)) * `azurerm_storage_account` - adding the `network_rules` property ([#1334](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1334)) @@ -1795,4 +1795,576 @@ BUG FIXES: * `azurerm_app_service_custom_hostname_binding` - adding validation to import ([#5107](https://github.com/terraform-providers/terraform-provider-azurerm/issues/5107)) * `azurerm_app_service_plan` - adding validation to import ([#5107](https://github.com/terraform-providers/terraform-provider-azurerm/issues/5107)) * `azurerm_app_service_slot` - adding validation to import ([#5107](https://github.com/terraform-providers/terraform-provider-azurerm/issues/5107)) -* `azurerm_app_service_source_control_token` - adding validation to import ([#5107](https://github.com/terraform-providers/terraform-provider-azurerm/issues/5107)) \ No newline at end of file +* `azurerm_app_service_source_control_token` - adding validation to import ([#5107](https://github.com/terraform-providers/terraform-provider-azurerm/issues/5107)) + +## 1.5.0 (May 14, 2018) + +UPGRADE NOTES: + +~> **Please Note:** Prior to v1.5 Data Sources in the AzureRM Provider returned `nil` rather than an error message when a Resource didn't exist, which was a bug. In order to bring this into line with other Providers - starting in v1.5 the AzureRM Provider will return an error message when a resource doesn't exist. + +~> **Please Note:** This release fixes a bug in the `azurerm_redis_cache` resource where changes to fields weren't detected; as such you may see changes in the `redis_configuration` block, particularly with the `rdb_storage_connection_string` field. There's a bug tracking this inconsistency in [the Azure Rest API Specs Repository](https://github.com/Azure/azure-rest-api-specs/issues/3037). 
+ +FEATURES: + +* **New Data Source:** `azurerm_cosmosdb_account` ([#1056](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1056)) +* **New Data Source:** `azurerm_kubernetes_cluster` ([#1204](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1204)) +* **New Data Source:** `azurerm_key_vault` ([#1202](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1202)) +* **New Data Source:** `azurerm_key_vault_secret` ([#1202](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1202)) +* **New Data Source:** `azurerm_route_table` ([#1203](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1203)) + +BUG FIXES: + +* `azurerm_redis_cache` - changes to the `redis_configuration` block are now detected - please see the note above for more information ([#1211](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1211)) + +IMPROVEMENTS: + +* dependencies - upgrading to v16.2.1 of `Azure/azure-sdk-for-go` ([#1198](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1198)) +* dependencies - upgrading to v10.8.1 of `Azure/go-autorest` ([#1198](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1198)) +* `azurerm_app_service` - support for HTTP2 ([#1188](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1188)) +* `azurerm_app_service` - support for Managed Service Identity ([#1130](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1130)) +* `azurerm_app_service_slot` - support for HTTP2 ([#1205](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1205)) +* `azurerm_cosmosdb_account` - added support for the `connection_strings` property ([#1194](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1194)) +* `azurerm_key_vault_certificate` - exposing the `certificate_data` ([#1200](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1200)) +* `azurerm_kubernetes_cluster` - making `kube_config_raw` a sensitive field ([#1225](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1225)) +* `azurerm_redis_cache` - Redis Caches can now be Imported ([#1211](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1211)) +* `azurerm_redis_firewall_rule` - Redis Firewall Rules can now be Imported ([#1211](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1211)) +* `azurerm_virtual_network` - guarding against nil-objects in the response ([#1208](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1208)) +* `azurerm_virtual_network_gateway` - ignoring the case of the `GatewaySubnet` ([#1141](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1141)) + +## 1.4.0 (April 26, 2018) + +UPGRADE NOTES: + +* `azurerm_cosmosdb_account` - the field `failover_policy` has been deprecated in favour of `geo_locations` to better match Azure + +FEATURES: + +* **New Data Source:** `azurerm_recovery_services_vault` ([#995](https://github.com/terraform-providers/terraform-provider-azurerm/issues/995)) +* **New Resource:** `azurerm_recovery_services_vault` ([#995](https://github.com/terraform-providers/terraform-provider-azurerm/issues/995)) +* **New Resource:** `azurerm_servicebus_subscription_rule` ([#1124](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1124)) + +IMPROVEMENTS: + +* `azurerm_app_service` - support for updating in-place 
([#1125](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1125)) +* `azurerm_app_service_plan` - support for `kind` being `app` ([#1156](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1156)) +* `azurerm_cosmosdb_account` - support for `enable_automatic_failover` ([#1055](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1055)) +* `azurerm_cosmosdb_account` - support for the `ConsistentPrefix` consistency level ([#1055](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1055)) +* `azurerm_cosmosdb_account` - `prefixes` can now be configured for locations ([#1055](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1055)) +* `azurerm_function_app` - support for updating in-place ([#1125](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1125)) +* `azurerm_key_vault` - adding cert permissions for `Purge` and `Recover` ([#1132](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1132)) +* `azurerm_key_vault` - polling to ensure the Key Vault is resolvable via DNS ([#1081](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1081)] [[#1164](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1164)) +* `azurerm_kubernetes_cluster` - only setting the Subnet ID when it's not an empty string ([#1158](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1158)) +* `azurerm_kubernetes_cluster` - exposing the cluster's credentials as `kube_config` ([#953](https://github.com/terraform-providers/terraform-provider-azurerm/issues/953)) +* `azurerm_metric_alertrule` - filtering out tags prefixed with `$type` ([#1107](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1107)) +* `azurerm_virtual_machine` - loading managed disk information from Azure when the machine is stopped ([#1100](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1100)) +* `azurerm_virtual_machine` - make the `vm_size` property case insensitive ([#1131](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1131)) + +BUG FIXES: + +* `azurerm_cosmosdb_account` - locations can now be modified in-place (without requiring multiple applies) ([#1055](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1055)) + + +## 1.3.3 (April 17, 2018) + +FEATURES: + +* **New Data Source:** `azurerm_app_service` ([#1071](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1071)) +* **New Resource:** `azurerm_app_service_custom_hostname_binding` ([#1087](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1087)) + +IMPROVEMENTS: + +* dependencies: upgrading to `v15.1.0` of `Azure/azure-sdk-for-go` ([#1099](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1099)) +* dependencies: upgrading to `v10.6.0` of `Azure/go-autorest` ([#1077](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1077)) +* `azurerm_app_service` - added support for the `https_only` field ([#1080](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1080)) +* `azurerm_app_service_slot` - added support for the `https_only` field ([#1080](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1080)) +* `azurerm_function_app` - added support for the `https_only` field ([#1080](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1080)) +* 
`azurerm_key_vault_certificate` - exposing the certificate's associated `secret_id` ([#1096](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1096)) +* `azurerm_redis_cache` - support for clusters on the internal network ([#1086](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1086)) +* `azurerm_servicebus_queue` - support for setting `requires_session` ([#1111](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1111)) +* `azurerm_sql_database` - changes to `collation` force a new resource ([#1066](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1066)) + +## 1.3.2 (April 04, 2018) + +FEATURES: + +* **New Resource:** `azurerm_packet_capture` ([#1044](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1044)) +* **New Resource:** `azurerm_policy_assignment` ([#1051](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1051)) + +IMPROVEMENTS: + +* `azurerm_virtual_machine_scale_set` - adds support for MSI ([#1018](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1018)) + +## 1.3.1 (March 29, 2018) + +FEATURES: + +* **New Data Source:** `azurerm_scheduler_job_collection` ([#990](https://github.com/terraform-providers/terraform-provider-azurerm/issues/990)) +* **New Data Source:** `azurerm_traffic_manager_geographical_location` ([#987](https://github.com/terraform-providers/terraform-provider-azurerm/issues/987)) +* **New Resource:** `azurerm_express_route_circuit_authorization` ([#992](https://github.com/terraform-providers/terraform-provider-azurerm/issues/992)) +* **New Resource:** `azurerm_express_route_circuit_peering` ([#1033](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1033)) +* **New Resource:** `azurerm_iothub` ([#887](https://github.com/terraform-providers/terraform-provider-azurerm/issues/887)) +* **New Resource:** `azurerm_policy_definition` ([#1010](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1010)) +* **New Resource:** `azurerm_sql_virtual_network_rule` ([#978](https://github.com/terraform-providers/terraform-provider-azurerm/issues/978)) + +IMPROVEMENTS: + +* `azurerm_app_service` - allow changing `client_affinity_enabled` without requiring a resource recreation ([#993](https://github.com/terraform-providers/terraform-provider-azurerm/issues/993)) +* `azurerm_app_service` - support for configuring `LocalSCM` source control ([#826](https://github.com/terraform-providers/terraform-provider-azurerm/issues/826)) +* `azurerm_app_service` - returning a clearer error message when the name (which needs to be globally unique) is in use ([#1037](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1037)) +* `azurerm_cosmosdb_account` - increasing the maximum value for `max_interval_in_seconds` from 100s to 86400s (1 day) [[#1000](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1000)] +* `azurerm_function_app` - returning a clearer error message when the name (which needs to be globally unique) is in use ([#1037](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1037)) +* `azurerm_network_interface` - support for attaching to Application Gateways ([#1027](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1027)) +* `azurerm_traffic_manager_endpoint` - adding support for `geo_mappings` ([#986](https://github.com/terraform-providers/terraform-provider-azurerm/issues/986)) +* 
`azurerm_traffic_manager_profile` - adding support for the `traffic_routing_method` `Geographic` ([#986](https://github.com/terraform-providers/terraform-provider-azurerm/issues/986)) +* `azurerm_virtual_machine_scale_sets` - support for attaching to Application Gateways ([#1027](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1027)) +* `azurerm_virtual_network_gateway` - changes to `peering_address` now force a new resource ([#1040](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1040)) + +## 1.3.0 (March 15, 2018) + +FEATURES: + +* **New Data Source:** `azurerm_cdn_profile` ([#950](https://github.com/terraform-providers/terraform-provider-azurerm/issues/950)) +* **New Data Source:** `azurerm_network_interface` ([#854](https://github.com/terraform-providers/terraform-provider-azurerm/issues/854)) +* **New Data Source:** `azurerm_public_ips` ([#304](https://github.com/terraform-providers/terraform-provider-azurerm/issues/304)) +* **New Data Source:** `azurerm_subscriptions` ([#940](https://github.com/terraform-providers/terraform-provider-azurerm/issues/940)) +* **New Resource:** `azurerm_log_analytics_solution` ([#952](https://github.com/terraform-providers/terraform-provider-azurerm/issues/952)) +* **New Resource:** `azurerm_sql_active_directory_administrator` ([#765](https://github.com/terraform-providers/terraform-provider-azurerm/issues/765)) +* **New Resource:** `azurerm_scheduler_job_collection` ([#963](https://github.com/terraform-providers/terraform-provider-azurerm/issues/963)) + +BUG FIXES: + +* `azurerm_application_gateway` - fixes a crash where `ssl_policy` isn't returned from the Azure API when importing existing resources ([#935](https://github.com/terraform-providers/terraform-provider-azurerm/issues/935)) +* `azurerm_app_service` - supporting `client_affinity_enabled` being `false` ([#973](https://github.com/terraform-providers/terraform-provider-azurerm/issues/973)) +* `azurerm_kubernetes_cluster` - exporting the FQDN ([#907](https://github.com/terraform-providers/terraform-provider-azurerm/issues/907)) +* `azurerm_sql_elasticpool` - fixing a crash where `location` isn't returned for legacy resources ([#982](https://github.com/terraform-providers/terraform-provider-azurerm/issues/982)) + +IMPROVEMENTS: + +* Data Source: `azurerm_builtin_role_definition` - loading available role definitions from Azure ([#770](https://github.com/terraform-providers/terraform-provider-azurerm/issues/770)) +* Data Source: `azurerm_managed_disk` - adding support for Availability Zones ([#811](https://github.com/terraform-providers/terraform-provider-azurerm/issues/811)) +* Data Source: `azurerm_network_security_group` - support for security rules including Application Security Groups ([#925](https://github.com/terraform-providers/terraform-provider-azurerm/issues/925)) +* `azurerm_app_service_plan` - support for provisioning Consumption Plans ([#981](https://github.com/terraform-providers/terraform-provider-azurerm/issues/981)) +* `azurerm_cdn_endpoint` - adding support for GeoFilters, ProbePaths ([#967](https://github.com/terraform-providers/terraform-provider-azurerm/issues/967)) +* `azurerm_cdn_endpoint` - making the `origin` block ForceNew to match Azure ([#967](https://github.com/terraform-providers/terraform-provider-azurerm/issues/967)) +* `azurerm_function_app` - adding `client_affinity_enabled`, `use_32_bit_worker_process` and `websockets_enabled` ([#886](https://github.com/terraform-providers/terraform-provider-azurerm/issues/886)) +* 
`azurerm_load_balancer` - adding support for Availability Zones ([#811](https://github.com/terraform-providers/terraform-provider-azurerm/issues/811)) +* `azurerm_managed_disk` - adding support for Availability Zones ([#811](https://github.com/terraform-providers/terraform-provider-azurerm/issues/811)) +* `azurerm_network_interface` - setting `internal_fqdn` if it's not nil ([#977](https://github.com/terraform-providers/terraform-provider-azurerm/issues/977)) +* `azurerm_network_security_group` - support for security rules including Application Security Groups ([#925](https://github.com/terraform-providers/terraform-provider-azurerm/issues/925)) +* `azurerm_network_security_rule` - support for security rules including Application Security Groups ([#925](https://github.com/terraform-providers/terraform-provider-azurerm/issues/925)) +* `azurerm_public_ip` - adding support for Availability Zones ([#811](https://github.com/terraform-providers/terraform-provider-azurerm/issues/811)) +* `azurerm_redis_cache` - add support for `notify-keyspace-events` ([#949](https://github.com/terraform-providers/terraform-provider-azurerm/issues/949)) +* `azurerm_template_deployment` - support for specifying parameters via `parameters_body` ([#404](https://github.com/terraform-providers/terraform-provider-azurerm/issues/404)) +* `azurerm_virtual_machine` - adding support for Availability Zones ([#811](https://github.com/terraform-providers/terraform-provider-azurerm/issues/811)) +* `azurerm_virtual_machine_scale_set` - adding support for Availability Zones ([#811](https://github.com/terraform-providers/terraform-provider-azurerm/issues/811)) + +## 1.2.0 (March 02, 2018) + +FEATURES: + +* **New Data Source:** `azurerm_application_security_group` ([#914](https://github.com/terraform-providers/terraform-provider-azurerm/issues/914)) +* **New Resource:** `azurerm_application_security_group` ([#905](https://github.com/terraform-providers/terraform-provider-azurerm/issues/905)) +* **New Resource:** `azurerm_servicebus_topic_authorization_rule` ([#736](https://github.com/terraform-providers/terraform-provider-azurerm/issues/736)) + +BUG FIXES: + +* `azurerm_kubernetes_cluster` - an empty `linux_profile.ssh_key.keydata` no longer causes a crash ([#903](https://github.com/terraform-providers/terraform-provider-azurerm/issues/903)) +* `azurerm_kubernetes_cluster` - the `linux_profile.admin_username` and `linux_profile.ssh_key.keydata` fields now force a new resource ([#895](https://github.com/terraform-providers/terraform-provider-azurerm/issues/895)) +* `azurerm_network_interface` - the `subnet_id` field is now case insensitive ([#866](https://github.com/terraform-providers/terraform-provider-azurerm/issues/866)) +* `azurerm_network_security_group` - reverting `security_rules` to a set to fix an ordering issue ([#893](https://github.com/terraform-providers/terraform-provider-azurerm/issues/893)) +* `azurerm_virtual_machine_scale_set` - the `computer_name_prefix` field now forces a new resource ([#871](https://github.com/terraform-providers/terraform-provider-azurerm/issues/871)) + +IMPROVEMENTS: + +* authentication: adding support for Managed Service Identity ([#639](https://github.com/terraform-providers/terraform-provider-azurerm/issues/639)) +* `azurerm_container_group` - added `dns_name_label` and `FQDN` properties ([#877](https://github.com/terraform-providers/terraform-provider-azurerm/issues/877)) +* `azurerm_network_interface` - support for attaching to Application Security Groups 
([#911](https://github.com/terraform-providers/terraform-provider-azurerm/issues/911)) +* `azurerm_network_security_group` - support for augmented security rules ([#781](https://github.com/terraform-providers/terraform-provider-azurerm/issues/781)) +* `azurerm_servicebus_subscription` - added support for the `forward_to` property ([#861](https://github.com/terraform-providers/terraform-provider-azurerm/issues/861)) +* `azurerm_storage_account` - adding support for `account_kind` being `StorageV2` ([#851](https://github.com/terraform-providers/terraform-provider-azurerm/issues/851)) +* `azurerm_virtual_network_gateway_connection` - support for IPsec/IKE Policies ([#834](https://github.com/terraform-providers/terraform-provider-azurerm/issues/834)) + +## 1.1.2 (February 19, 2018) + +FEATURES: + +* **New Resource:** `azurerm_kubernetes_cluster` ([#693](https://github.com/terraform-providers/terraform-provider-azurerm/issues/693)) +* **New Resource:** `azurerm_app_service_active_slot` ([#818](https://github.com/terraform-providers/terraform-provider-azurerm/issues/818)) +* **New Resource:** `azurerm_app_service_slot` ([#818](https://github.com/terraform-providers/terraform-provider-azurerm/issues/818)) + +BUG FIXES: + +* **Data Source:** `azurerm_app_service_plan`: handling a 404 not being returned as an error ([#849](https://github.com/terraform-providers/terraform-provider-azurerm/issues/849)) +* **Data Source:** `azurerm_virtual_network` - Fixing a crash when the DhcpOptions aren't specified ([#803](https://github.com/terraform-providers/terraform-provider-azurerm/issues/803)) +* `azurerm_application_gateway` - fixing crashes due to schema mismatches for existing resources ([#848](https://github.com/terraform-providers/terraform-provider-azurerm/issues/848)) +* `azurerm_storage_container` - add a retry for creation ([#846](https://github.com/terraform-providers/terraform-provider-azurerm/issues/846)) + +IMPROVEMENTS: + +* authentication: pulling the `Environment` key from the Azure CLI Config ([#842](https://github.com/terraform-providers/terraform-provider-azurerm/issues/842)) +* core: upgrading to `v12.5.0-beta` of the Azure SDK for Go ([#830](https://github.com/terraform-providers/terraform-provider-azurerm/issues/830)) +* compute: upgrading to use the `2017-12-01` API Version ([#797](https://github.com/terraform-providers/terraform-provider-azurerm/issues/797)) +* `azurerm_app_service_plan`: support for attaching to an App Service Environment ([#850](https://github.com/terraform-providers/terraform-provider-azurerm/issues/850)) +* `azurerm_container_group` - adding `restart_policy` ([#827](https://github.com/terraform-providers/terraform-provider-azurerm/issues/827)) +* `azurerm_managed_disk` - updated the validation on `disk_size_gb` / made it computed ([#800](https://github.com/terraform-providers/terraform-provider-azurerm/issues/800)) +* `azurerm_role_assignment` - add `role_definition_name` ([#775](https://github.com/terraform-providers/terraform-provider-azurerm/issues/775)) +* `azurerm_subnet` - add support for Service Endpoints ([#786](https://github.com/terraform-providers/terraform-provider-azurerm/issues/786)) +* `azurerm_virtual_machine` - changing `managed_disk_id` and `create_option` to be not ForceNew ([#813](https://github.com/terraform-providers/terraform-provider-azurerm/issues/813)) + + +## 1.1.1 (February 06, 2018) + +BUG FIXES: + +* `azurerm_public_ip` - Setting the `ip_address` field regardless of the DNS Settings 
([#772](https://github.com/terraform-providers/terraform-provider-azurerm/issues/772)) +* `azurerm_virtual_machine` - ignores the case of the Managed Data Disk ID's to work around an Azure Portal bug ([#792](https://github.com/terraform-providers/terraform-provider-azurerm/issues/792)) + +FEATURES: + +* **New Data Source:** `azurerm_storage_account` ([#794](https://github.com/terraform-providers/terraform-provider-azurerm/issues/794)) +* **New Data Source:** `azurerm_virtual_network_gateway` ([#796](https://github.com/terraform-providers/terraform-provider-azurerm/issues/796)) + +## 1.1.0 (January 26, 2018) + +UPGRADE NOTES: + +* Data Source: `azurerm_builtin_role_definition` - now returns the correct UUID/GUID for the `Virtual Machines Contributor` role (previously the ID for the `Classic Virtual Machine Contributor` role was returned) ([#762](https://github.com/terraform-providers/terraform-provider-azurerm/issues/762)) +* `azurerm_snapshot` - `source_uri` now forces a new resource on changes due to behavioural changes in the Azure API ([#744](https://github.com/terraform-providers/terraform-provider-azurerm/issues/744)) + +FEATURES: + +* **New Data Source:** `azurerm_dns_zone` ([#702](https://github.com/terraform-providers/terraform-provider-azurerm/issues/702)) +* **New Resource:** `azurerm_metric_alertrule` ([#478](https://github.com/terraform-providers/terraform-provider-azurerm/issues/478)) +* **New Resource:** `azurerm_virtual_network_gateway` ([#133](https://github.com/terraform-providers/terraform-provider-azurerm/issues/133)) +* **New Resource:** `azurerm_virtual_network_gateway_connection` ([#133](https://github.com/terraform-providers/terraform-provider-azurerm/issues/133)) + +IMPROVEMENTS: + +* core: upgrading to `v12.2.0-beta` of `Azure/azure-sdk-for-go` ([#684](https://github.com/terraform-providers/terraform-provider-azurerm/issues/684)) +* core: upgrading to `v9.7.0` of `Azure/go-autorest` ([#684](https://github.com/terraform-providers/terraform-provider-azurerm/issues/684)) +* Data Source: `azurerm_builtin_role_definition` - adding extra role definitions ([#762](https://github.com/terraform-providers/terraform-provider-azurerm/issues/762)) +* `azurerm_app_service` - exposing the `outbound_ip_addresses` field ([#700](https://github.com/terraform-providers/terraform-provider-azurerm/issues/700)) +* `azurerm_function_app` - exposing the `outbound_ip_addresses` field ([#706](https://github.com/terraform-providers/terraform-provider-azurerm/issues/706)) +* `azurerm_function_app` - add support for the `always_on` and `connection_string` fields ([#695](https://github.com/terraform-providers/terraform-provider-azurerm/issues/695)) +* `azurerm_image` - add support for filtering images by a regex on the name ([#642](https://github.com/terraform-providers/terraform-provider-azurerm/issues/642)) +* `azurerm_lb` - adding support for the `Standard` SKU (in Preview) ([#665](https://github.com/terraform-providers/terraform-provider-azurerm/issues/665)) +* `azurerm_public_ip` - adding support for the `Standard` SKU (in Preview) ([#665](https://github.com/terraform-providers/terraform-provider-azurerm/issues/665)) +* `azurerm_network_security_rule` - add support for augmented security rules ([#692](https://github.com/terraform-providers/terraform-provider-azurerm/issues/692)) +* `azurerm_role_assignment` - generating a name if one isn't specified ([#685](https://github.com/terraform-providers/terraform-provider-azurerm/issues/685)) +* `azurerm_traffic_manager_profile` - adding support for 
setting `protocol` to `TCP` ([#742](https://github.com/terraform-providers/terraform-provider-azurerm/issues/742)) + +## 1.0.1 (January 12, 2018) + +FEATURES: + +* **New Data Source:** `azurerm_app_service_plan` ([#668](https://github.com/terraform-providers/terraform-provider-azurerm/issues/668)) +* **New Data Source:** `azurerm_eventhub_namespace` ([#673](https://github.com/terraform-providers/terraform-provider-azurerm/issues/673)) +* **New Resource:** `azurerm_function_app` ([#647](https://github.com/terraform-providers/terraform-provider-azurerm/issues/647)) + +IMPROVEMENTS: + +* core: adding a cache to the Storage Account Keys ([#634](https://github.com/terraform-providers/terraform-provider-azurerm/issues/634)) +* `azurerm_eventhub` - added support for `capture_description` ([#681](https://github.com/terraform-providers/terraform-provider-azurerm/issues/681)) +* `azurerm_eventhub_consumer_group` - adding validation for the user metadata field ([#641](https://github.com/terraform-providers/terraform-provider-azurerm/issues/641)) +* `azurerm_lb` - adding the computed field `public_ip_addresses` ([#633](https://github.com/terraform-providers/terraform-provider-azurerm/issues/633)) +* `azurerm_local_network_gateway` - add support for `tags` ([#638](https://github.com/terraform-providers/terraform-provider-azurerm/issues/638)) +* `azurerm_network_interface` - support for Accelerated Networking ([#672](https://github.com/terraform-providers/terraform-provider-azurerm/issues/672)) +* `azurerm_storage_account` - expose `primary_connection_string` and `secondary_connection_string` ([#647](https://github.com/terraform-providers/terraform-provider-azurerm/issues/647)) + +## 1.0.0 (December 15, 2017) + +FEATURES: + +* **New Data Source:** `azurerm_network_security_group` ([#623](https://github.com/terraform-providers/terraform-provider-azurerm/issues/623)) +* **New Data Source:** `azurerm_virtual_network` ([#533](https://github.com/terraform-providers/terraform-provider-azurerm/issues/533)) +* **New Resource:** `azurerm_management_lock` ([#575](https://github.com/terraform-providers/terraform-provider-azurerm/issues/575)) +* **New Resource:** `azurerm_network_watcher` ([#571](https://github.com/terraform-providers/terraform-provider-azurerm/issues/571)) + +IMPROVEMENTS: + +* authentication - add support for the latest Azure CLI configuration ([#573](https://github.com/terraform-providers/terraform-provider-azurerm/issues/573)) +* authentication - conditional loading of the Subscription ID / Tenant ID / Environment ([#574](https://github.com/terraform-providers/terraform-provider-azurerm/issues/574)) +* core - appending additions to the User Agent, so we don't overwrite the Go SDK User Agent info ([#587](https://github.com/terraform-providers/terraform-provider-azurerm/issues/587)) +* core - Upgrading `Azure/azure-sdk-for-go` to v11.2.2-beta ([#594](https://github.com/terraform-providers/terraform-provider-azurerm/issues/594)) +* core - upgrading `Azure/go-autorest` to v9.5.2 ([#617](https://github.com/terraform-providers/terraform-provider-azurerm/issues/617)) +* core - skipping Resource Provider Registration in AutoRest when opted-out ([#630](https://github.com/terraform-providers/terraform-provider-azurerm/issues/630)) +* `azurerm_app_service` - exposing the Default Hostname as a Computed field + +## 0.3.3 (November 14, 2017) + +FEATURES: + +* **New Resource:** `azurerm_redis_firewall_rule` ([#529](https://github.com/terraform-providers/terraform-provider-azurerm/issues/529)) + +IMPROVEMENTS: 
+ +* authentication: allow using multiple subscriptions for Azure CLI auth ([#445](https://github.com/terraform-providers/terraform-provider-azurerm/issues/445)) +* core: appending the CloudShell version to the user agent when running within CloudShell ([#483](https://github.com/terraform-providers/terraform-provider-azurerm/issues/483)) +* `azurerm_app_service` / `azurerm_app_service_plan` - adding validation for the `name` fields ([#528](https://github.com/terraform-providers/terraform-provider-azurerm/issues/528)) +* `azurerm_container_registry` - Migration: Fixing a crash when the storage_account block is nil ([#551](https://github.com/terraform-providers/terraform-provider-azurerm/issues/551)) +* `azurerm_lb_nat_rule`: support for floating IP's ([#542](https://github.com/terraform-providers/terraform-provider-azurerm/issues/542)) +* `azurerm_public_ip` - Clarify the error message for the validation of domain name label ([#485](https://github.com/terraform-providers/terraform-provider-azurerm/issues/485)) +* `azurerm_network_security_group` - fixing a crash when changes were made outside of Terraform ([#492](https://github.com/terraform-providers/terraform-provider-azurerm/issues/492)) +* `azurerm_redis_cache`: support for Patch Schedules ([#540](https://github.com/terraform-providers/terraform-provider-azurerm/issues/540)) +* `azurerm_virtual_machine` - ensuring `vhd_uri` is validated ([#470](https://github.com/terraform-providers/terraform-provider-azurerm/issues/470)) +* `azurerm_virtual_machine_scale_set`: fixing a crash where accelerated networking isn't returned by the API ([#480](https://github.com/terraform-providers/terraform-provider-azurerm/issues/480)) + +## 0.3.2 (October 30, 2017) + +FEATURES: + +* **New Resource:** `azurerm_application_gateway` ([#413](https://github.com/terraform-providers/terraform-provider-azurerm/issues/413)) + +IMPROVEMENTS: + + - `azurerm_virtual_machine_scale_set` - Add nil check to os disk ([#436](https://github.com/terraform-providers/terraform-provider-azurerm/issues/436)) + + - `azurerm_key_vault` - Increased timeout on dns availability ([#457](https://github.com/terraform-providers/terraform-provider-azurerm/issues/457)) + + - `azurerm_route_table` - Fix issue when routes are computed ([#450](https://github.com/terraform-providers/terraform-provider-azurerm/issues/450)) + +## 0.3.1 (October 21, 2017) + +IMPROVEMENTS: + + - `azurerm_virtual_machine_scale_set` - Updating this resource with the v11 of the Azure SDK for Go ([#448](https://github.com/terraform-providers/terraform-provider-azurerm/issues/448)) + +## 0.3.0 (October 17, 2017) + +UPGRADE NOTES: + + - `azurerm_automation_account` - the SKU `Free` has been replaced with `Basic`. + - `azurerm_container_registry` - Azure has updated the SKU from `Basic` to `Classic`, with new `Basic`, `Standard` and `Premium` SKU's introduced. + - `azurerm_container_registry` - the `storage_account` block is now `storage_account_id` and is only required for `Classic` SKU's + - `azurerm_key_vault` - `certificate_permissions`, `key_permissions` and `secret_permissions` have all had the `All` option removed by Azure. Each permission now needs to be specified manually. + * `azurerm_route_table` - `route` is no longer computed + - `azurerm_servicebus_namespace` - The `capacity` field can only be set for `Premium` SKU's + - `azurerm_servicebus_queue` - The `enable_batched_operations` and `support_ordering` fields have been deprecated by Azure. 
+ - `azurerm_servicebus_subscription` - The `dead_lettering_on_filter_evaluation_exceptions` has been removed by Azure. + - `azurerm_servicebus_topic` - The `enable_filtering_messages_before_publishing` field has been removed by Azure. + +FEATURES: + +* **New Data Source:** `azurerm_builtin_role_definition` ([#384](https://github.com/terraform-providers/terraform-provider-azurerm/issues/384)) +* **New Data Source:** `azurerm_image` ([#382](https://github.com/terraform-providers/terraform-provider-azurerm/issues/382)) +* **New Data Source:** `azurerm_key_vault_access_policy` ([#423](https://github.com/terraform-providers/terraform-provider-azurerm/issues/423)) +* **New Data Source:** `azurerm_platform_image` ([#375](https://github.com/terraform-providers/terraform-provider-azurerm/issues/375)) +* **New Data Source:** `azurerm_role_definition` ([#414](https://github.com/terraform-providers/terraform-provider-azurerm/issues/414)) +* **New Data Source:** `azurerm_snapshot` ([#420](https://github.com/terraform-providers/terraform-provider-azurerm/issues/420)) +* **New Data Source:** `azurerm_subnet` ([#411](https://github.com/terraform-providers/terraform-provider-azurerm/issues/411)) +* **New Resource:** `azurerm_key_vault_certificate` ([#408](https://github.com/terraform-providers/terraform-provider-azurerm/issues/408)) +* **New Resource:** `azurerm_role_assignment` ([#414](https://github.com/terraform-providers/terraform-provider-azurerm/issues/414)) +* **New Resource:** `azurerm_role_definition` ([#414](https://github.com/terraform-providers/terraform-provider-azurerm/issues/414)) +* **New Resource:** `azurerm_snapshot` ([#420](https://github.com/terraform-providers/terraform-provider-azurerm/issues/420)) + +IMPROVEMENTS: + +* Upgrading to v11 of the Azure SDK for Go ([#367](https://github.com/terraform-providers/terraform-provider-azurerm/issues/367)) +* `azurerm_client_config` - updating the data source to work when using AzureCLI auth ([#393](https://github.com/terraform-providers/terraform-provider-azurerm/issues/393)) +* `azurerm_container_group` - add support for volume mounts ([#366](https://github.com/terraform-providers/terraform-provider-azurerm/issues/366)) +* `azurerm_key_vault` - fix a crash when no certificate_permissions are defined ([#374](https://github.com/terraform-providers/terraform-provider-azurerm/issues/374)) +* `azurerm_key_vault` - waiting for the DNS to propagate ([#401](https://github.com/terraform-providers/terraform-provider-azurerm/issues/401)) +* `azurerm_managed_disk` - support for creating Managed Disks from Platform Images by supporting "FromImage" ([#399](https://github.com/terraform-providers/terraform-provider-azurerm/issues/399)) +* `azurerm_managed_disk` - support for creating Encrypted Managed Disks ([#399](https://github.com/terraform-providers/terraform-provider-azurerm/issues/399)) +* `azurerm_mysql_*` - Ensuring we register the MySQL Resource Provider ([#397](https://github.com/terraform-providers/terraform-provider-azurerm/issues/397)) +* `azurerm_network_interface` - exposing all of the Private IP Addresses assigned to the NIC ([#409](https://github.com/terraform-providers/terraform-provider-azurerm/issues/409)) +* `azurerm_network_security_group` / `azurerm_network_security_rule` - refactoring ([#405](https://github.com/terraform-providers/terraform-provider-azurerm/issues/405)) +* `azurerm_route_table` - removing routes when none are specified ([#403](https://github.com/terraform-providers/terraform-provider-azurerm/issues/403)) +* 
`azurerm_route_table` - refactoring `route` from a Set to a List ([#402](https://github.com/terraform-providers/terraform-provider-azurerm/issues/402)) +* `azurerm_route` - refactoring `route` from a Set to a List ([#402](https://github.com/terraform-providers/terraform-provider-azurerm/issues/402)) +* `azurerm_storage_account` - support for File Encryption ([#363](https://github.com/terraform-providers/terraform-provider-azurerm/issues/363)) +* `azurerm_storage_account` - support for Custom Domain ([#363](https://github.com/terraform-providers/terraform-provider-azurerm/issues/363)) +* `azurerm_storage_account` - splitting the storage account Tier and Replication out into separate fields ([#363](https://github.com/terraform-providers/terraform-provider-azurerm/issues/363)) +- `azurerm_storage_account` - returning a user friendly error when trying to provision a Blob Storage Account with ZRS redundancy ([#421](https://github.com/terraform-providers/terraform-provider-azurerm/issues/421)) +* `azurerm_subnet` - making it possible to remove Network Security Groups / Route Tables ([#411](https://github.com/terraform-providers/terraform-provider-azurerm/issues/411)) +* `azurerm_virtual_machine` - fixing a bug where `additional_unattend_config.content` was being updated unintentionally ([#377](https://github.com/terraform-providers/terraform-provider-azurerm/issues/377)) +* `azurerm_virtual_machine` - switching to use Lists instead of Sets ([#426](https://github.com/terraform-providers/terraform-provider-azurerm/issues/426)) +* `azurerm_virtual_machine_scale_set` - fixing a bug where `additional_unattend_config.content` was being updated unintentionally ([#377](https://github.com/terraform-providers/terraform-provider-azurerm/issues/377)) +* `azurerm_virtual_machine_scale_set` - support for multiple network profiles ([#378](https://github.com/terraform-providers/terraform-provider-azurerm/issues/378)) + +## 0.2.2 (September 28, 2017) + +FEATURES: + +* **New Resource:** `azurerm_key_vault_key` ([#356](https://github.com/terraform-providers/terraform-provider-azurerm/issues/356)) +* **New Resource:** `azurerm_log_analytics_workspace` ([#331](https://github.com/terraform-providers/terraform-provider-azurerm/issues/331)) +* **New Resource:** `azurerm_mysql_configuration` ([#352](https://github.com/terraform-providers/terraform-provider-azurerm/issues/352)) +* **New Resource:** `azurerm_mysql_database` ([#352](https://github.com/terraform-providers/terraform-provider-azurerm/issues/352)) +* **New Resource:** `azurerm_mysql_firewall_rule` ([#352](https://github.com/terraform-providers/terraform-provider-azurerm/issues/352)) +* **New Resource:** `azurerm_mysql_server` ([#352](https://github.com/terraform-providers/terraform-provider-azurerm/issues/352)) + +IMPROVEMENTS: + +* Updating the provider initialization & adding a `skip_credentials_validation` field to the provider for some advanced scenarios ([#322](https://github.com/terraform-providers/terraform-provider-azurerm/issues/322)) + +## 0.2.1 (September 25, 2017) + +FEATURES: + +* **New Resource:** `azurerm_automation_account` ([#257](https://github.com/terraform-providers/terraform-provider-azurerm/issues/257)) +* **New Resource:** `azurerm_automation_credential` ([#257](https://github.com/terraform-providers/terraform-provider-azurerm/issues/257)) +* **New Resource:** `azurerm_automation_runbook` ([#257](https://github.com/terraform-providers/terraform-provider-azurerm/issues/257)) +* **New Resource:** `azurerm_automation_schedule` 
([#257](https://github.com/terraform-providers/terraform-provider-azurerm/issues/257)) +* **New Resource:** `azurerm_app_service` ([#344](https://github.com/terraform-providers/terraform-provider-azurerm/issues/344)) + +IMPROVEMENTS: + +* `azurerm_client_config` - adding `service_principal_application_id` ([#348](https://github.com/terraform-providers/terraform-provider-azurerm/issues/348)) +* `azurerm_key_vault` - adding `application_id` and `certificate_permissions` ([#348](https://github.com/terraform-providers/terraform-provider-azurerm/issues/348)) + +BUG FIXES: + +* `azurerm_virtual_machine_scale_set` - fix panic with `additional_unattend_config` block ([#266](https://github.com/terraform-providers/terraform-provider-azurerm/issues/266)) + +## 0.2.0 (September 15, 2017) + +FEATURES: + +* **Support for authenticating using the Azure CLI** ([#316](https://github.com/terraform-providers/terraform-provider-azurerm/issues/316)) +* **New Resource:** `azurerm_container_group` ([#333](https://github.com/terraform-providers/terraform-provider-azurerm/issues/333)] [[#311](https://github.com/terraform-providers/terraform-provider-azurerm/issues/311)] [[#338](https://github.com/terraform-providers/terraform-provider-azurerm/issues/338)) + +IMPROVEMENTS: + +* `azurerm_app_service_plan` - support for Linux App Service Plans ([#332](https://github.com/terraform-providers/terraform-provider-azurerm/issues/332)) +* `azurerm_postgresql_server` - supporting additional storage sizes ([#239](https://github.com/terraform-providers/terraform-provider-azurerm/issues/239)) +* `azurerm_public_ip` - verifying the ID is valid before importing ([#320](https://github.com/terraform-providers/terraform-provider-azurerm/issues/320)) +* `azurerm_sql_server` - verifying the name is valid before creating ([#323](https://github.com/terraform-providers/terraform-provider-azurerm/issues/323)) +* `resource_group_name` - validation has been added to all resources that use this attribute ([#330](https://github.com/terraform-providers/terraform-provider-azurerm/issues/330)) + +## 0.1.7 (September 11, 2017) + +FEATURES: + +* **New Resource:** `azurerm_postgresql_configuration` ([#210](https://github.com/terraform-providers/terraform-provider-azurerm/issues/210)) +* **New Resource:** `azurerm_postgresql_database` ([#210](https://github.com/terraform-providers/terraform-provider-azurerm/issues/210)) +* **New Resource:** `azurerm_postgresql_firewall_rule` ([#210](https://github.com/terraform-providers/terraform-provider-azurerm/issues/210)) +* **New Resource:** `azurerm_postgresql_server` ([#210](https://github.com/terraform-providers/terraform-provider-azurerm/issues/210)) + +IMPROVEMENTS: + +* `azurerm_cdn_endpoint` - defaulting the `http_port` and `https_port` ([#301](https://github.com/terraform-providers/terraform-provider-azurerm/issues/301)) +* `azurerm_cosmos_db_account`: allow setting the Kind to MongoDB/GlobalDocumentDB ([#299](https://github.com/terraform-providers/terraform-provider-azurerm/issues/299)) + +## 0.1.6 (August 31, 2017) + +FEATURES: + +* **New Data Source**: `azurerm_subscription` ([#285](https://github.com/terraform-providers/terraform-provider-azurerm/issues/285)) +* **New Resource:** `azurerm_app_service_plan` ([#1](https://github.com/terraform-providers/terraform-provider-azurerm/issues/1)) +* **New Resource:** `azurerm_eventgrid_topic` ([#260](https://github.com/terraform-providers/terraform-provider-azurerm/issues/260)) +* **New Resource:** `azurerm_key_vault_secret` 
([#269](https://github.com/terraform-providers/terraform-provider-azurerm/issues/269)) + +IMPROVEMENTS: + +* `azurerm_image` - added a default to the `caching` field ([#259](https://github.com/terraform-providers/terraform-provider-azurerm/issues/259)) +* `azurerm_key_vault` - validation for the `name` field ([#270](https://github.com/terraform-providers/terraform-provider-azurerm/issues/270)) +* `azurerm_network_interface` - support for multiple IP Configurations / setting the Primary IP Configuration ([#245](https://github.com/terraform-providers/terraform-provider-azurerm/issues/245)) +* `azurerm_resource_group` - poll until the resource group is created (by migrating to the Azure SDK for Go) ([#289](https://github.com/terraform-providers/terraform-provider-azurerm/issues/289)) +* `azurerm_search_service` - migrating to use the Azure SDK for Go ([#283](https://github.com/terraform-providers/terraform-provider-azurerm/issues/283)) +* `azurerm_sql_*` - ensuring deleted resources are detected ([#289](https://github.com/terraform-providers/terraform-provider-azurerm/issues/289)] / [[#255](https://github.com/terraform-providers/terraform-provider-azurerm/issues/255)) +* `azurerm_sql_database` - Import Support ([#289](https://github.com/terraform-providers/terraform-provider-azurerm/issues/289)) +* `azurerm_sql_database` - migrating to using the Azure SDK for Go ([#289](https://github.com/terraform-providers/terraform-provider-azurerm/issues/289)) +* `azurerm_sql_firewall_rule` - migrating to using the Azure SDK for Go ([#289](https://github.com/terraform-providers/terraform-provider-azurerm/issues/289)) +* `azurerm_sql_server` - added checks to handle `name` not being globally unique ([#189](https://github.com/terraform-providers/terraform-provider-azurerm/issues/189)) +* `azurerm_sql_server` - making `administrator_login` `ForceNew` ([#189](https://github.com/terraform-providers/terraform-provider-azurerm/issues/189)) +* `azurerm_sql_server` - migrate to using the azure-sdk-for-go ([#189](https://github.com/terraform-providers/terraform-provider-azurerm/issues/189)) +* `azurerm_virtual_machine` - Force recreation if `storage_data_disk`.`create_option` changes ([#240](https://github.com/terraform-providers/terraform-provider-azurerm/issues/240)) +* `azurerm_virtual_machine_scale_set` - Fix address issue when setting the `winrm` block ([#271](https://github.com/terraform-providers/terraform-provider-azurerm/issues/271)) +* updating to `v10.3.0-beta` of the Azure SDK for Go ([#258](https://github.com/terraform-providers/terraform-provider-azurerm/issues/258)) +* Removing the (now unused) Riviera SDK ([#289](https://github.com/terraform-providers/terraform-provider-azurerm/issues/289)] [[#291](https://github.com/terraform-providers/terraform-provider-azurerm/issues/291)) + +BUG FIXES: + +* `azurerm_cosmosdb_account` - fixing the validation on the name field ([#263](https://github.com/terraform-providers/terraform-provider-azurerm/issues/263)) +* `azurerm_sql_server` - handle deleted servers correctly ([#189](https://github.com/terraform-providers/terraform-provider-azurerm/issues/189)) +* Fixing the `Microsoft.Insights` Resource Provider Registration ([#282](https://github.com/terraform-providers/terraform-provider-azurerm/issues/282)) + +## 0.1.5 (August 09, 2017) + +IMPROVEMENTS: + +* `azurerm_sql_*` - upgrading to version `2014-04-01` of the SQL API's ([#201](https://github.com/terraform-providers/terraform-provider-azurerm/issues/201)) +* `azurerm_virtual_machine` - support for the 
`Windows_Client` Hybrid Use Benefit type ([#212](https://github.com/terraform-providers/terraform-provider-azurerm/issues/212))
+* `azurerm_virtual_machine_scale_set` - support for custom images and managed disks ([#203](https://github.com/terraform-providers/terraform-provider-azurerm/issues/203))
+
+BUG FIXES:
+
+* `azurerm_sql_database` - fixing creating a DB with a PointInTimeRestore ([#197](https://github.com/terraform-providers/terraform-provider-azurerm/issues/197))
+* `azurerm_virtual_machine` - fix a crash when the properties for a network interface aren't returned ([#208](https://github.com/terraform-providers/terraform-provider-azurerm/issues/208))
+* `azurerm_virtual_machine` - changes to custom data should force new resource ([#211](https://github.com/terraform-providers/terraform-provider-azurerm/issues/211))
+* `azurerm_virtual_machine` - fixes a crash caused by an empty `os_profile_windows_config` block ([#222](https://github.com/terraform-providers/terraform-provider-azurerm/issues/222))
+* Checking to ensure the HTTP Response isn't `nil` before accessing it (fixes [#200](https://github.com/terraform-providers/terraform-provider-azurerm/issues/200)) ([#204](https://github.com/terraform-providers/terraform-provider-azurerm/issues/204))
+
+## 0.1.4 (July 26, 2017)
+
+BUG FIXES:
+
+* `azurerm_dns_*` - upgrading to version `2016-04-01` of the Azure DNS API by switching from Riviera -> Azure SDK for Go ([#192](https://github.com/terraform-providers/terraform-provider-azurerm/issues/192))
+
+## 0.1.3 (July 21, 2017)
+
+FEATURES:
+
+* **New Resource:** `azurerm_dns_ptr_record` ([#141](https://github.com/terraform-providers/terraform-provider-azurerm/issues/141))
+* **New Resource:** `azurerm_image` ([#8](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8))
+* **New Resource:** `azurerm_servicebus_queue` ([#151](https://github.com/terraform-providers/terraform-provider-azurerm/issues/151))
+
+IMPROVEMENTS:
+
+* `azurerm_client_config` - added a `service_principal_object_id` attribute to the data source ([#175](https://github.com/terraform-providers/terraform-provider-azurerm/issues/175))
+* `azurerm_search_service` - added import support ([#172](https://github.com/terraform-providers/terraform-provider-azurerm/issues/172))
+* `azurerm_servicebus_topic` - added a `status` field to allow disabling the topic ([#150](https://github.com/terraform-providers/terraform-provider-azurerm/issues/150))
+* `azurerm_storage_account` - Added support for Require secure transfer ([#167](https://github.com/terraform-providers/terraform-provider-azurerm/issues/167))
+* `azurerm_storage_table` - updating the name validation ([#143](https://github.com/terraform-providers/terraform-provider-azurerm/issues/143))
+* `azurerm_virtual_machine` - making `admin_password` optional for Linux VMs ([#154](https://github.com/terraform-providers/terraform-provider-azurerm/issues/154))
+* `azurerm_virtual_machine_scale_set` - adding a `plan` block for Marketplace images ([#161](https://github.com/terraform-providers/terraform-provider-azurerm/issues/161))
+
+## 0.1.2 (June 29, 2017)
+
+FEATURES:
+
+* **New Data Source:** `azurerm_managed_disk` ([#121](https://github.com/terraform-providers/terraform-provider-azurerm/issues/121))
+* **New Resource:** `azurerm_application_insights` ([#3](https://github.com/terraform-providers/terraform-provider-azurerm/issues/3))
+* **New Resource:** `azurerm_cosmosdb_account` 
([#108](https://github.com/terraform-providers/terraform-provider-azurerm/issues/108))
+* `azurerm_network_interface` now supports import ([#119](https://github.com/terraform-providers/terraform-provider-azurerm/issues/119))
+
+IMPROVEMENTS:
+
+* Ensuring consistency when storing the `location` field in the state for the `azurerm_availability_set`, `azurerm_express_route_circuit`, `azurerm_load_balancer`, `azurerm_local_network_gateway`, `azurerm_managed_disk`, `azurerm_network_security_group`,
+`azurerm_public_ip`, `azurerm_resource_group`, `azurerm_route_table`, `azurerm_storage_account`, `azurerm_virtual_machine` and `azurerm_virtual_network` resources ([#123](https://github.com/terraform-providers/terraform-provider-azurerm/issues/123))
+* `azurerm_redis_cache` - now supports backup settings for Premium Redis Caches ([#130](https://github.com/terraform-providers/terraform-provider-azurerm/issues/130))
+* `azurerm_storage_account` - exposing a formatted Connection String for Blob access ([#142](https://github.com/terraform-providers/terraform-provider-azurerm/issues/142))
+
+BUG FIXES:
+
+* `azurerm_cdn_endpoint` - fixing update of the `origin_host_header` ([#134](https://github.com/terraform-providers/terraform-provider-azurerm/issues/134))
+* `azurerm_container_service` - exposes the FQDN of the `master_profile` as a computed field ([#125](https://github.com/terraform-providers/terraform-provider-azurerm/issues/125))
+* `azurerm_key_vault` - fixing import / the validation on Access Policies ([#124](https://github.com/terraform-providers/terraform-provider-azurerm/issues/124))
+* `azurerm_network_interface` - Normalizing the location field in the state ([#122](https://github.com/terraform-providers/terraform-provider-azurerm/issues/122))
+* `azurerm_network_interface` - fixing a crash when importing a NIC with a Public IP ([#128](https://github.com/terraform-providers/terraform-provider-azurerm/issues/128))
+* `azurerm_network_security_rule`: `network_security_group_name` is now `ForceNew` ([#138](https://github.com/terraform-providers/terraform-provider-azurerm/issues/138))
+* `azurerm_subnet` now correctly detects changes to Network Security Groups and Routing Tables ([#113](https://github.com/terraform-providers/terraform-provider-azurerm/issues/113))
+* `azurerm_virtual_machine_scale_set` - making `storage_profile_os_disk`.`name` optional ([#129](https://github.com/terraform-providers/terraform-provider-azurerm/issues/129))
+
+## 0.1.1 (June 21, 2017)
+
+BUG FIXES:
+
+* Sort ResourceID.Path keys for consistent output ([#116](https://github.com/terraform-providers/terraform-provider-azurerm/issues/116))
+
+## 0.1.0 (June 20, 2017)
+
+BACKWARDS INCOMPATIBILITIES / NOTES:
+
+FEATURES:
+
+* **New Data Source:** `azurerm_resource_group` ([#15022](https://github.com/hashicorp/terraform/pull/15022))
+
+IMPROVEMENTS:
+
+* Add diff suppress func to `endpoint_location` ([#15094](https://github.com/hashicorp/terraform/pull/15094))
+
+BUG FIXES:
+
+* Fixing the Deadlock issue ([#6](https://github.com/terraform-providers/terraform-provider-azurerm/issues/6))
diff --git a/CHANGELOG-v2.md b/CHANGELOG-v2.md
index cadd15280513..fe222e331024 100644
--- a/CHANGELOG-v2.md
+++ b/CHANGELOG-v2.md
@@ -1,3 +1,433 @@
+## 2.40.0 (December 10, 2020)
+
+FEATURES:
+
+* **New Resource:** `azurerm_app_service_certificate_binding` 
([#9415](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9415)) +* **New Resource:** `azurerm_digital_twins_endpoint_eventhub` ([#9673](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9673)) +* **New Resource:** `azurerm_digital_twins_endpoint_servicebus` ([#9702](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9702)) +* **New Resource:** `azurerm_media_asset` ([#9387](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9387)) +* **New Resource:** `azurerm_media_transform` ([#9663](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9663)) +* **New Resource:** `azurerm_resource_provider` ([#7951](https://github.com/terraform-providers/terraform-provider-azurerm/issues/7951)) +* **New Resource:** `azurerm_stack_hci_cluster` ([#9134](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9134)) +* **New Resource:** `azurerm_storage_share_file` ([#9406](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9406)) +* **New Resource:** `azurerm_storage_sync_cloud_endpoint` ([#8540](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8540)) + +IMPROVEMENTS: + +* dependencies: upgrading `github.com/Azure/go-autorest/validation` to `v0.3.1` ([#9783](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9783)) +* dependencies: updating Log Analytics to API version `2020-08-01` ([#9764](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9764)) +* internal: disabling the Azure SDK's validation since it's superfluous ([#9783](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9783)) +* `azurerm_app_service` - support for PHP version `7.4` ([#9727](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9727)) +* `azurerm_bot_channel_directline` - support for enhanced import validation ([#9690](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9690)) +* `azurerm_bot_channel_email` - support for enhanced import validation ([#9690](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9690)) +* `azurerm_bot_channel_ms_teams` - support for enhanced import validation ([#9690](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9690)) +* `azurerm_bot_channel_slack` - support for enhanced import validation ([#9690](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9690)) +* `azurerm_bot_channels_registration` - support for enhanced import validation ([#9690](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9690)) +* `azurerm_bot_connection` - support for enhanced import validation ([#9690](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9690)) +* `azurerm_bot_web_app` - support for enhanced import validation ([#9690](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9690)) +* `azurerm_cosmosdb_sql_container` - support for the `partition_key_version` property ([#9496](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9496)) +* `azurerm_kusto_cluster` - support for the `engine` property ([#9696](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9696)) +* `azurerm_kusto_eventhub_data_connection` - support for `compression` ([#9692](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9692)) +* `azurerm_iothub` - support for the 
`min_tls_version` property ([#9670](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9670)) +* `azurerm_recovery_services_vault` - support for the `identity` block ([#9689](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9689)) +* `azurerm_redis_cache` - adding enhanced import validation ([#9771](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9771)) +* `azurerm_redis_cache` - adding validation that `subnet_id` is a valid Subnet ID ([#9771](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9771)) +* `azurerm_redis_firewall_rule` - adding enhanced import validation ([#9771](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9771)) +* `azurerm_redis_linked_server` - adding enhanced import validation ([#9771](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9771)) +* `azurerm_redis_linked_server` - adding validation that `linked_redis_cache_id` is a valid Redis Cache ID ([#9771](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9771)) +* `azurerm_security_center_automation` - support for the `description` and `tags` properties ([#9676](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9676)) +* `azurerm_stream_analytics_reference_input_blob` - support for enhanced import validation ([#9735](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9735)) +* `azurerm_stream_analytics_stream_input_blob` - support for enhanced import validation ([#9735](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9735)) +* `azurerm_stream_analytics_stream_input_iothub` - support for enhanced import validation ([#9735](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9735)) +* `azurerm_stream_analytics_stream_input_eventhub` - support for enhanced import validation ([#9735](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9735)) +* `azurerm_storage_account` - enable the `allow_blob_public_access` and `azurerm_storage_account` properties in US Government Cloud ([#9540](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9540)) + +BUG FIXES: + +* `azurerm_app_service_managed_certificate` - create certificate in service plan resource group to prevent diff loop ([#9701](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9701)) +* `azurerm_bot_channel_directline` - the field `bot_name` is now ForceNew to match the documentation/API behaviour ([#9690](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9690)) +* `azurerm_bot_channel_ms_teams` - the field `bot_name` is now ForceNew to match the documentation/API behaviour ([#9690](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9690)) +* `azurerm_bot_channel_slack` - the field `bot_name` is now ForceNew to match the documentation/API behaviour ([#9690](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9690)) +* `azurerm_bot_connection` - the field `bot_name` is now ForceNew to match the documentation/API behaviour ([#9690](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9690)) +* `azurerm_frontdoor` - working around an upstream API issue by rewriting the returned ID's within Terraform ([#9750](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9750)) +* `azurerm_frontdoor_custom_https_configuration` - working around an upstream API issue 
by rewriting the returned ID's within Terraform ([#9750](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9750))
+* `azurerm_frontdoor_firewall_policy` - working around an upstream API issue by rewriting the returned ID's within Terraform ([#9750](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9750))
+* `azurerm_media_services_account` - fixing a bug where `storage_authentication_type` wasn't set ([#9663](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9663))
+* `azurerm_media_services_account` - checking for the presence of an existing account during creation ([#9802](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9802))
+* `azurerm_postgresql_server` - changing the `geo_redundant_backup_enabled` property now forces a new resource ([#9694](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9694))
+* `azurerm_postgresql_server` - fix issue when specifying empty threat detection list attributes ([#9739](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9739))
+* `azurerm_signalr_service` - having an empty `allowed_origins` in the `cors` block will no longer cause a panic ([#9671](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9671))
+
+## 2.39.0 (December 04, 2020)
+
+FEATURES:
+
+* **New Resource:** `azurerm_api_management_policy` ([#9215](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9215))
+* **New Resource:** `azurerm_digital_twins_endpoint_eventgrid` ([#9489](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9489))
+* **New Resource:** `azurerm_iot_time_series_insights_gen2_environment` ([#9616](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9616))
+
+IMPROVEMENTS:
+
+* `azurerm_dashboard` - adding validation at import time to ensure the ID is for a Dashboard ([#9530](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9530))
+* `azurerm_key_vault_certificate` - add `3072` to allowed values for `key_size` ([#9524](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9524))
+* `azurerm_media_services_account` - support for the `identity`, `tags`, and `storage_authentication` properties ([#9457](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9457))
+* `azurerm_notification_hub_authorization_rule` - adding validation at import time to ensure the ID is for a Notification Hub Authorization Rule ([#9529](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9529))
+* `azurerm_notification_hub_namespace` - adding validation at import time to ensure the ID is for a Notification Hub Namespace ([#9529](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9529))
+* `azurerm_postgresql_active_directory_administrator` - validating during import that the ID is for a PostgreSQL Active Directory Administrator ([#9532](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9532))
+* `azurerm_postgresql_configuration` - validating during import that the ID is for a PostgreSQL Configuration ([#9532](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9532))
+* `azurerm_postgresql_database` - validating during import that the ID is for a PostgreSQL Database ([#9532](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9532))
+* `azurerm_postgresql_firewall_rule` - validating during import that the ID is 
for a PostgreSQL Firewall Rule ([#9532](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9532))
+* `azurerm_postgresql_virtual_network_rule` - validating during import that the ID is for a PostgreSQL Virtual Network Rule ([#9532](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9532))
+* `azurerm_traffic_manager_profile` - allow up to `2147483647` for the `ttl` property ([#9522](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9522))
+
+BUG FIXES:
+
+* `azurerm_security_center_workspace` - fixing the casing on the `workspace_id` ([#9651](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9651))
+* `azurerm_eventhub_dedicated_cluster` - the `sku_name` capacity can be greater than `1` ([#9649](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9649))
+
+## 2.38.0 (November 27, 2020)
+
+FEATURES:
+
+* **New Resource:** `azurerm_app_service_managed_certificate` ([#9378](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9378))
+* **New Data Source:** `azurerm_digital_twins_instance` ([#9430](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9430))
+* **New Data Source:** `azurerm_virtual_wan` ([#9382](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9382))
+* **New Resource:** `azurerm_digital_twins_instance` ([#9430](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9430))
+
+IMPROVEMENTS:
+
+* dependencies: updating App Service to API version `2020-06-01` ([#9409](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9409))
+* Data Source `azurerm_app_service` now exports the `custom_domain_verification_id` attribute ([#9378](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9378))
+* Data Source `azurerm_function_app` now exports the `custom_domain_verification_id` attribute ([#9378](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9378))
+* Data Source: `azurerm_spring_cloud_service` - now exports the `outbound_public_ip_addresses` attribute ([#9261](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9261))
+* `azurerm_app_service` now exports `custom_domain_verification_id` ([#9378](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9378))
+* `azurerm_application_insights` - validating the resource ID is correct during import ([#9446](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9446))
+* `azurerm_application_insights_web_test` - validating the resource ID is correct during import ([#9446](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9446))
+* `azurerm_express_route_circuit_peering` - support for the `ipv6` block ([#9235](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9235))
+* `azurerm_function_app` now exports the `custom_domain_verification_id` attribute ([#9378](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9378))
+* `azurerm_vpn_server_configuration` - deprecate the `radius_server` block in favour of the `radius` block which supports multiple servers ([#9308](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9308))
+* `azurerm_spring_cloud_service` - now exports the `outbound_public_ip_addresses` attribute ([#9261](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9261))
+* `azurerm_virtual_network_gateway` - support for 
the `dpd_timeout_seconds` and `local_azure_ip_address_enabled` properties ([#9330](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9330))
+* `azurerm_virtual_network_gateway_connection` - support for the `private_ip_address_enabled` property and the `custom_route` block ([#9330](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9330))
+
+BUG FIXES:
+
+* `azurerm_api_management` - fixing an issue where developer portal certificates are updated on every apply ([#7299](https://github.com/terraform-providers/terraform-provider-azurerm/issues/7299))
+* `azurerm_cosmosdb_account` - correctly updates the `zone_redundant` property during updates ([#9485](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9485))
+* `azurerm_search_service` - `allowed_ips` now supports specifying a CIDR Block in addition to an IPv4 address ([#9493](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9493))
+* `azurerm_virtual_desktop_application_group` - adding a state migration to avoid a breaking change when upgrading from `v2.35.0` or later ([#9495](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9495))
+* `azurerm_virtual_desktop_host_pool` - adding a state migration to avoid a breaking change when upgrading from `v2.35.0` or later ([#9495](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9495))
+* `azurerm_virtual_desktop_workspace` - adding a state migration to avoid a breaking change when upgrading from `v2.35.0` or later ([#9495](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9495))
+* `azurerm_virtual_desktop_workspace_application_group_association` - adding a state migration to avoid a breaking change when upgrading from `v2.35.0` or later ([#9495](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9495))
+* `azurerm_windows_virtual_machine` - no longer sets `patch_mode` on creation if it is the default value ([#9495](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9432))
+
+## 2.37.0 (November 20, 2020)
+
+FEATURES:
+
+* **New Data Source:** `azurerm_servicebus_subscription` ([#9272](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9272))
+* **New Data Source:** `azurerm_storage_encryption_scope` ([#8894](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8894))
+* **New Resource:** `azurerm_log_analytics_cluster` ([#8946](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8946))
+* **New Resource:** `azurerm_log_analytics_cluster_customer_managed_key` ([#8946](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8946))
+* **New Resource:** `azurerm_security_center_automation` ([#8781](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8781))
+* **New Resource:** `azurerm_storage_data_lake_gen2_path` ([#7521](https://github.com/terraform-providers/terraform-provider-azurerm/issues/7521))
+* **New Resource:** `azurerm_storage_encryption_scope` ([#8894](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8894))
+* **New Resource:** `azurerm_vpn_gateway_connection` ([#9160](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9160))
+
+IMPROVEMENTS:
+
+* storage: foundational improvements to support toggling between the Data Plane and Resource Manager Storage API's in the future 
([#9314](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9314)) +* Data Source: `azurerm_firewall`- exposing `dns_servers`, `firewall_policy_id`, `sku_name`, `sku_tier`, `threat_intel_mode`, `virtual_hub` and `zones` ([#8879](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8879)) +* Data Source: `azurerm_firewall`- exposing `public_ip_address_id` and `private_ip_address_id` within the `ip_configuration` block ([#8879](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8879)) +* Data Source: `azurerm_firewall`- exposing `name` within the `management_ip_configuration` block ([#8879](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8879)) +* Data Source: `azurerm_kubernetes_node_pool` - exposing `os_disk_type` ([#9166](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9166)) +* `azurerm_api_management_api_diagnostic` - support for the `always_log_errors`, `http_correlation_protocol`, `log_client_ip` and `verbosity` attributes ([#9172](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9172)) +* `azurerm_api_management_api_diagnostic` - support the `frontend_request`, `frontend_response`, `backend_request` and `backend_response` blocks ([#9172](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9172)) +* `azurerm_container_group` - support for secret container volumes with the `container.#.volume.#.secret` attribute ([#9117](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9117)) +* `azurerm_cosmosdb_account` - support for the `public_network_access_enabled` property ([#9236](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9236)) +* `azurerm_cosmosdb_cassandra_keyspace` - `throughput` can now be set to higher than `1000000` if enabled by Azure Support ([#9050](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9050)) +* `azurerm_cosmosdb_gremlin_database` - `throughput` can now be set to higher than `1000000` if enabled by Azure Support ([#9050](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9050)) +* `azurerm_cosmosdb_mongo_database` - `throughput` can now be set to higher than `1000000` if enabled by Azure Support ([#9050](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9050)) +* `azurerm_cosmosdb_sql_container` - `max_throughput` within the `autoscale_settings` block can now be set to higher than `1000000` if enabled by Azure Support ([#9050](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9050)) +* `azurerm_cosmosdb_sql_database` - `throughput` can now be set to higher than `1000000` if enabled by Azure Support ([#9050](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9050)) +* `azurerm_cosmosdb_table` - `throughput` can now be set to higher than `1000000` if enabled by Azure Support ([#9050](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9050)) +* `azurerm_dns_zone` - support for the `soa_record` block ([#9319](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9319)) +* `azurerm_firewall` - support for `firewall_policy_id`, `sku_name`, `sku_tier` and `virtual_hub` ([#8879](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8879)) +* `azurerm_kubernetes_cluster` - support for configuring `os_disk_type` within the `default_node_pool` block 
([#9166](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9166))
+* `azurerm_kubernetes_cluster` - `max_count` within the `default_node_pool` block can now be set to a maximum value of `1000` ([#9227](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9227))
+* `azurerm_kubernetes_cluster` - `min_count` within the `default_node_pool` block can now be set to a maximum value of `1000` ([#9227](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9227))
+* `azurerm_kubernetes_cluster` - `node_count` within the `default_node_pool` block can now be set to a maximum value of `1000` ([#9227](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9227))
+* `azurerm_kubernetes_cluster` - the block `http_application_routing` within the `addon_profile` block can now be updated/removed ([#9358](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9358))
+* `azurerm_kubernetes_node_pool` - support for configuring `os_disk_type` ([#9166](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9166))
+* `azurerm_kubernetes_node_pool` - `max_count` can now be set to a maximum value of `1000` ([#9227](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9227))
+* `azurerm_kubernetes_node_pool` - `min_count` can now be set to a maximum value of `1000` ([#9227](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9227))
+* `azurerm_kubernetes_node_pool` - `node_count` can now be set to a maximum value of `1000` ([#9227](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9227))
+* `azurerm_linux_virtual_machine` - support for the `extensions_time_budget` property ([#9257](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9257))
+* `azurerm_linux_virtual_machine` - updating the `dedicated_host_id` no longer forces a new resource ([#9264](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9264))
+* `azurerm_linux_virtual_machine` - support for graceful shutdowns (via the features block) ([#8470](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8470))
+* `azurerm_linux_virtual_machine_scale_set` - support for the `platform_fault_domain_count`, `disk_iops_read_write`, and `disk_mbps_read_write` properties ([#9262](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9262))
+* `azurerm_mssql_database` - `sku_name` supports more `DWxxxc` options ([#9370](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9370))
+* `azurerm_policy_set_definition` - support for the `policy_definition_group` block ([#9259](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9259))
+* `azurerm_postgresql_server` - increase max storage to 16TiB ([#9373](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9373))
+* `azurerm_private_dns_zone` - support for the `soa_record` block ([#9319](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9319))
+* `azurerm_storage_blob` - support for `content_md5` ([#7786](https://github.com/terraform-providers/terraform-provider-azurerm/issues/7786))
+* `azurerm_windows_virtual_machine` - support for the `extensions_time_budget` property ([#9257](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9257))
+* `azurerm_windows_virtual_machine` - updating the `dedicated_host_id` no longer forces a new resource 
([#9264](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9264))
+* `azurerm_windows_virtual_machine` - support for graceful shutdowns (via the features block) ([#8470](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8470))
+* `azurerm_windows_virtual_machine` - support for the `patch_mode` property ([#9258](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9258))
+* `azurerm_windows_virtual_machine_scale_set` - support for the `platform_fault_domain_count`, `disk_iops_read_write`, and `disk_mbps_read_write` properties ([#9262](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9262))
+
+BUG FIXES:
+
+* Data Source: `azurerm_key_vault_certificate` - fixing a crash when serializing the certificate policy block ([#9355](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9355))
+* `azurerm_api_management` - the field `xml_content` within the `policy` block now supports C#/.net interpolations ([#9296](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9296))
+* `azurerm_cosmosdb_sql_container` - no longer attempts to get throughput settings when cosmos account is serverless ([#9311](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9311))
+* `azurerm_firewall_policy` - deprecate the `dns.network_rule_fqdn_enabled` property as the API no longer allows it to be set ([#9332](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9332))
+* `azurerm_key_vault_certificate` - fixing a crash when serializing the certificate policy block ([#9355](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9355))
+* `azurerm_mssql_virtual_machine` - fixing a crash when serializing `auto_patching` ([#9388](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9388))
+* `azurerm_resource_group_template_deployment` - fixing an issue during deletion where the API version of nested resources couldn't be determined ([#9364](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9364))
+
+## 2.36.0 (November 12, 2020)
+
+UPGRADE NOTES:
+
+* `azurerm_network_connection_monitor` - has been updated to work with v2 of the resource as the service team is deprecating v1 - all v1 properties have been deprecated and will be removed in version `3.0` of the provider, and v2 properties added. 
([#8640](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8640)) + +FEATURES: + +* **New Data Source:** `azurerm_data_share_dataset_kusto_database` ([#8544](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8544)) +* **New Data Source:** `azurerm_traffic_manager_profile` ([#9229](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9229)) +* **New Resource:** `azurerm_api_management_custom_domain` ([#8228](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8228)) +* **New Resource:** `azurerm_data_share_dataset_kusto_database` ([#8544](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8544)) +* **New Resource:** `azurerm_log_analytics_storage_insights` ([#9014](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9014)) +* **New Resource:** `azurerm_monitor_smart_detector_alert_rule` ([#9032](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9032)) +* **New Resource:** `azurerm_virtual_hub_security_partner_provider` ([#8978](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8978)) +* **New Resource:** `azurerm_virtual_hub_bgp_connection` ([#8959](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8959)) + +IMPROVEMENTS: + +* dependencies: upgrading to `v0.4.2` of `github.com/Azure/go-autorest/autorest/azure/cli` ([#9168](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9168)) +* dependencies: upgrading to `v48.1.0` of `github.com/Azure/azure-sdk-for-go` ([#9213](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9213)) +* dependencies: upgrading to `v0.13.0` of `github.com/hashicorp/go-azure-helpers` ([#9191](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9191)) +* dependencies: upgrading to `v0.14.0` of `github.com/tombuildsstuff/giovanni` ([#9189](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9189)) +* storage: upgrading the Data Plane API's to API Version `2019-12-12` ([#9192](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9192)) +* Data Source `azurerm_kubernetes_node_pool` - exporting `proximity_placement_group_id` ([#9195](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9195)) +* `azurerm_app_service` support `v5.0` for the `dotnet_framework_version` ([#9251](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9251)) +* `azurerm_availability_set` - adding validation to the `name` field ([#9279](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9279)) +* `azurerm_cosmosdb_account` - support for the `key_vault_key_id` property allowing use of Customer Managed Keys ([#8919](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8919)) +* `azurerm_eventgrid_domain` - adding validation to the `name` field ([#9281](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9281)) +* `azurerm_eventgrid_domain_topic` - adding validation to the `name` field ([#9281](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9281)) +* `azurerm_eventgrid_domain_topic` - adding validation to the `domain_name` field ([#9281](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9281)) +* `azurerm_eventgrid_event_subscription` - adding validation to the `name` field 
([#9281](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9281)) +* `azurerm_eventgrid_topic` - adding validation to the `name` field ([#9281](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9281)) +* `azurerm_eventgrid_system_topic` - adding validation to the `name` field ([#9281](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9281)) +* `azurerm_function_app` - support for the `health_check_path` property under site_config ([#9233](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9233)) +* `azurerm_linux_virtual_machine` - support for managed boot diagnostics by leaving the `storage_account_uri` property empty ([#8917](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8917)) +* `azurerm_linux_virtual_machine_scale_set` - support for managed boot diagnostics by leaving the `storage_account_uri` property empty ([#8917](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8917)) +* `azurerm_log_analytics_workspace` - support for the `internet_ingestion_enabled` and `internet_query_enabled` properties ([#9033](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9033)) +* `azurerm_logic_app_workflow` added logicapp name validation ([#9282](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9282)) +* `azurerm_kubernetes_cluster` - support for `proximity_placement_group_id` within the `default_node_pool` block ([#9195](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9195)) +* `azurerm_kubernetes_node_pool` - support for `proximity_placement_group_id` ([#9195](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9195)) +* `azurerm_policy_remediation` - support for the `resource_discovery_mode` property ([#9210](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9210)) +* `azurerm_point_to_site_vpn_gateway` - support for the `route` block ([#9158](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9158)) +* `azurerm_virtual_network` - support for the `bgp_community` and `vnet_protection_enabled` ([#8979](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8979)) +* `azurerm_vpn_gateway` - support for the `instance_0_bgp_peering_addresses` and `instance_1_bgp_peering_addresses` blocks ([#9035](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9035)) +* `azurerm_windows_virtual_machine` - support for managed boot diagnostics by leaving the `storage_account_uri` property empty ([#8917](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8917)) +* `azurerm_windows_virtual_machine_scale_set` - support for managed boot diagnostics by leaving the `storage_account_uri` property empty ([#8917](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8917)) + +BUG FIXES: + +* `azurerm_cosmosdb_sql_database` no longer attempts to get throughput settings when cosmos account is serverless ([#9187](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9187)) +* `azurerm_kubernetes_cluster` - changing the field `availability_zones` within the `default_node_pool` block now requires recreating the resource to match the behaviour of the Azure API ([#8814](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8814)) +* `azurerm_kubernetes_cluster_node_pool` - changing the field `availability_zones` now requires recreating the 
resource to match the behaviour of the Azure API ([#8814](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8814)) +* `azurerm_log_analytics_workspace` - fix the `Free` tier from setting the `daily_quota_gb` property ([#9228](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9228)) +* `azurerm_linux_virtual_machine` - the field `disk_size_gb` within the `os_disk` block can now be configured up to `4095` ([#9202](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9202)) +* `azurerm_linux_virtual_machine_scale_set` - the field `disk_size_gb` within the `os_disk` block can now be configured up to `4095` ([#9202](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9202)) +* `azurerm_linux_virtual_machine_scale_set` - the field `computer_name_prefix` can now end with a dash ([#9182](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9182)) +* `azurerm_windows_virtual_machine` - the field `disk_size_gb` within the `os_disk` block can now be configured up to `4095` ([#9202](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9202)) +* `azurerm_windows_virtual_machine_scale_set` - the field `disk_size_gb` within the `os_disk` block can now be configured up to `4095` ([#9202](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9202)) + +## 2.35.0 (November 05, 2020) + +UPGRADE NOTES: + +* `azurerm_kubernetes_cluster` - the field `enable_pod_security_policy` and `node_taints` (within the `default_node_pool` block) can no longer be configured - see below for more details ([#8982](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8982)) + +FEATURES: + +* **New Data Source:** `azurerm_images` ([#8629](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8629)) +* **New Resource:** `azurerm_firewall_policy_rule_collection_group` ([#8603](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8603)) +* **New Resource:** `azurerm_virtual_hub_ip_configuration` ([#8912](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8912)) +* **New Resource:** `azurerm_virtual_hub_route_table` ([#8939](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8939)) + +IMPROVEMENTS: + +* dependencies: updating `containerservice` to API version `2020-09-01` ([#8982](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8982)) +* dependencies: updating `iottimeseriesinsights` to API Version `2020-05-15` ([#9129](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9129)) +* `azurerm_data_factory_linked_service_data_lake_storage_gen2` - Supports managed identity auth through `use_managed_identity ` ([#8938](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8938)) +* `azurerm_firewall` - support the `dns_servers` property ([#8878](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8878)) +* `azurerm_firewall_network_rule_collection` - support the `destination_fqdns` property in the `rule` block ([#8878](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8878)) +* `azurerm_virtual_hub_connection` - support for the `routing` block ([#8950](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8950)) + +BUG FIXES: + +* Fixed regression that prevented Synapse client registering in all Azure environments 
([#9100](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9100)) +* `azurerm_cosmosdb_mongo_database` no longer attempts to get throughput settings when cosmos account is serverless ([#8673](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8673)) +* `azurerm_key_vault_access_policy` - check access policy consistency before committing to state ([#9125](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9125)) +* `azurerm_kubernetes_cluster` - the field `enable_pod_security_policy` can no longer be set, due to this functionality being removed from AKS as of `2020-10-15` ([#8982](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8982)) +* `azurerm_kubernetes_cluster` - the field `node_taints` can no longer be set on the `default_node_pool` block, to match the behaviour of AKS ([#8982](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8982)) +* `azurerm_virtual_desktop_application_group` - adding validation to the `host_pool_id` field ([#9057](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9057)) +* `azurerm_virtual_desktop_workspace_application_group_association` - adding validation to the `application_group_id` field ([#9057](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9057)) +* `azurerm_virtual_desktop_workspace_application_group_association` - adding validation to the `workspace_id` field ([#9057](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9057)) +* `azurerm_virtual_desktop_workspace_application_group_association` - validating the ID during import is a Workspace Application Group Association ID ([#9057](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9057)) +* `azurerm_postgresql_firewall_rule` - add validation for `start_ip_address` and `end_ip_address` properties ([#8963](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8963)) + + +## 2.34.0 (October 29, 2020) + +UPGRADE NOTES + +* `azurerm_api_management_api` - fixing a regression introduced in v2.16 where this value for `subscription_required` was defaulted to `false` instead of `true` ([#7963](https://github.com/terraform-providers/terraform-provider-azurerm/issues/7963)) + +FEATURES: + +* **New Data Source:** `azurerm_cognitive_account` ([#8773](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8773)) +* **New Resource:** `azurerm_log_analytics_data_export_rule` ([#8995](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8995)) +* **New Resource:** `azurerm_log_analytics_linked_storage_account` ([#9002](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9002)) +* **New Resource:** `azurerm_security_center_auto_provisioning` ([#8595](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8595)) +* **New Resource:** `azurerm_synapse_role_assignment` ([#8863](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8863)) +* **New Resource:** `azurerm_vpn_site` ([#8896](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8896)) + +IMPROVEMENTS: + +* Data Source: `azurerm_policy_definition` - can now look up built-in policy by name ([#9078](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9078)) +* `azurerm_backup_policy_vm` - support for the property `instant_restore_retention_days` 
([#8822](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8822)) +* `azurerm_container_group` - support for the property `git_repo` within the `volume` block ([#7924](https://github.com/terraform-providers/terraform-provider-azurerm/issues/7924)) +* `azurerm_iothub` - support for the `resource_group` property within the `endpoint` block ([#8032](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8032)) +* `azurerm_key_vault` - support for the `contact` block ([#8937](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8937)) +* `azurerm_log_analytics_saved_search` - support for `tags` ([#9034](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9034)) +* `azurerm_log_analytics_solution` - support for `tags` ([#9048](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9048)) +* `azurerm_logic_app_trigger_recurrence` - support for `time_zone` [[#8829](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8829)] +* `azurerm_policy_definition` - can now look up builtin policy by name ([#9078](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9078)) + +BUG FIXES: + +* `azurerm_automation_module` - raising the full error from the Azure API during creation ([#8498](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8498)) +* `azurerm_api_management_api` - fixing a regression introduced in v2.16 where the value for `subscription_required` was defaulted to `false` instead of `true` ([#7963](https://github.com/terraform-providers/terraform-provider-azurerm/issues/7963)) +* `azurerm_app_service` - fixing a crash when provisioning an app service inside an App Service Environment which doesn't exist ([#8993](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8993)) +* `azurerm_cdn_endpoint` - disable persisting default value for `is_compression_enabled` to state file ([#8610](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8610)) +* `azurerm_databricks_workspace` correctly validate the `name` property ([#8997](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8997)) +* `azurerm_dev_test_policy` - now correctly deletes ([#9077](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9077)) +* `azurerm_log_analytics_workspace` - support for the `daily_quota_gb` property ([#8861](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8861)) +* `azurerm_local_network_gateway` - support for the `gateway_fqdn` property ([#8998](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8998)) +* `azurerm_key_vault` - prevent unwanted diff due to inconsistent casing for the `sku_name` property ([#8983](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8983)) +* `azurerm_kubernetes_cluster` - fix issue where `min_count` and `max_count` couldn't be equal ([#8957](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8957)) +* `azurerm_kubernetes_cluster` - `min_count` can be updated when `enable_auto_scaling` is set to true ([#8619](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8619)) +* `azurerm_private_dns_zone_virtual_network_link` - fixes case issue in `name` ([#8617](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8617)) +* `azurerm_private_endpoint` - fix crash when deleting private endpoint 
([#9068](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9068)) +* `azurerm_signalr_service` - switching the `features` block to a set so order is irrelevant ([#8815](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8815)) +* `azurerm_virtual_desktop_application_group` - correctly validate the `name` property ([#9030](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9030)) + +## 2.33.0 (October 22, 2020) + +UPGRADE NOTES + +* This release includes a workaround for [a breaking change in Azure’s API related to the Extended Auditing Policy](https://github.com/Azure/azure-rest-api-specs/issues/11271) of the SQL and MSSQL resources. The Service Team have confirmed that this regression will first roll out to all regions before the bug fix is deployed - as such this workaround will be removed in a future release once the fix for the Azure API has been rolled out to all regions. + +FEATURES: + +* **New Resource:** `azurerm_service_fabric_mesh_secret` ([#8933](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8933)) +* **New Resource:** `azurerm_service_fabric_mesh_secret_value` ([#8933](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8933)) + +IMPROVEMENTS: + +* Data Source: `azurerm_shared_image_version` - exposing `os_disk_image_size_gb` ([#8904](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8904)) +* `azurerm_app_configuration` - support for the `identity` block ([#8875](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8875)) +* `azurerm_cosmosdb_sql_container` - support for composite indexes ([#8792](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8792)) +* `azurerm_mssql_database` - do not set longterm and shortterm retention policies when using the `DW` SKUs ([#8899](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8899)) +* `azurerm_mysql_firewall_rule` - validating the `start_ip_address` and `end_ip_address` fields are IP Addresses ([#8948](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8948)) +* `azurerm_redis_firewall_rule` - validating the `start_ip` and `end_ip` fields are IP Addresses ([#8948](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8948)) +* `azurerm_search_service` - support for the `identity` block ([#8907](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8907)) +* `azurerm_sql_firewall_rule` - adding validation for the `start_ip_address` and `end_ip_address` fields ([#8935](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8935)) + +BUG FIXES: + +* `azurerm_application_gateway` - now supports `ignore_changes` for `ssl_certificate` when using pre-existing certificates ([#8761](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8761)) +* `azurerm_mssql_database` - working around a breaking change/regression in the Azure API ([#8975](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8975)) +* `azurerm_mssql_database_extended_auditing_policy` - working around a breaking change/regression in the Azure API ([#8975](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8975)) +* `azurerm_mssql_server` - working around a breaking change/regression in the Azure API ([#8975](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8975)) +* `azurerm_mssql_server_extended_auditing_policy` -
working around a breaking change/regression in the Azure API ([#8975](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8975)) +* `azurerm_sql_database` - working around a breaking change/regression in the Azure API ([#8975](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8975)) +* `azurerm_sql_server` - working around a breaking change/regression in the Azure API ([#8975](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8975)) +* `azurerm_policy_set_definition` - fix updates for `parameters` and `parameter_values` in `policy_definition_reference` blocks ([#8882](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8882)) + +## 2.32.0 (October 15, 2020) + +FEATURES: + +* **New Data Source:** `azurerm_mysql_server` ([#8787](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8787)) +* **New Resource:** `azurerm_security_center_setting` ([#8783](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8783)) +* **New Resource:** `azurerm_service_fabric_mesh_local_network` ([#8838](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8838)) +* **New Resource:** `azurerm_eventgrid_system_topic` ([#8735](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8735)) + +IMPROVEMENTS: + +* `azurerm_container_registry` - support for the `trust_policy` and `retention_policy` blocks ([#8698](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8698)) +* `azurerm_security_center_contact` - override the SDK create function to handle the `201` response code ([#8774](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8774)) + +## 2.31.1 (October 08, 2020) + +IMPROVEMENTS: + +* `azurerm_cognitive_account` - `kind` now supports `Personalizer` ([#8860](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8860)) +* `azurerm_search_service` - `sku` now supports `storage_optimized_l1` and `storage_optimized_l2` ([#8859](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8859)) +* `azurerm_storage_share` - set `metadata` to `Computed` and set `acl` `start` and `expiry` to `Optional` ([#8811](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8811)) + +BUG FIXES: + +* `azurerm_dedicated_hardware_security_module` - `stamp_id` now optional to allow use in Locations which use `zones` ([#8826](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8826)) +* `azurerm_storage_account` - `large_file_share_enabled` marked as computed to prevent existing storage shares from attempting to disable the default ([#8807](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8807)) + +## 2.31.0 (October 08, 2020) + +UPGRADE NOTES + +* This release updates the `azurerm_security_center_subscription_pricing` resource to use the latest version of the Security API which now allows configuring multiple Resource Types - as such a new field `resource_type` is now available. Configurations default the `resource_type` to `VirtualMachines` which matches the behaviour of the previous release - but your Terraform Configuration may need updating.
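For illustration, a configuration using the new `resource_type` field might look roughly like the sketch below; the `tier` argument and its value are assumptions and not part of the original note:

```hcl
# Hypothetical example of the per-resource-type pricing described above.
resource "azurerm_security_center_subscription_pricing" "example" {
  tier          = "Standard"
  resource_type = "VirtualMachines" # matches the previous default behaviour
}
```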
+ +FEATURES: + +* **New Resource:** `azurerm_service_fabric_mesh_application` ([#6761](https://github.com/terraform-providers/terraform-provider-azurerm/issues/6761)) +* **New Resource:** `azurerm_virtual_desktop_application_group` ([#8605](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8605)) +* **New Resource:** `azurerm_virtual_desktop_workspace_application_group_association` ([#8605](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8605)) +* **New Resource:** `azurerm_virtual_desktop_host_pool` ([#8605](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8605)) +* **New Resource:** `azurerm_virtual_desktop_workspace` ([#8605](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8605)) + +IMPROVEMENTS: + +* dependencies: updating `github.com/Azure/azure-sdk-for-go` to `v46.4.0` ([#8642](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8642)) +* `data.azurerm_application_insights` - support for the `connection_string` property ([#8699](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8699)) +* `azurerm_app_service` - support for IPV6 addresses in the `ip_restriction` property ([#8599](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8599)) +* `azurerm_application_insights` - support for the `connection_string` property ([#8699](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8699)) +* `azurerm_backup_policy_vm` - validate daily backups is > `7` ([#7898](https://github.com/terraform-providers/terraform-provider-azurerm/issues/7898)) +* `azurerm_dedicated_host` - add support for the `DSv4-Type1` and `sku_name` properties ([#8718](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8718)) +* `azurerm_iothub` - Support for the `public_network_access_enabled` property ([#8586](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8586)) +* `azurerm_key_vault_certificate_issuer` - the `org_id` property is now optional ([#8687](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8687)) +* `azurerm_kubernetes_cluster_node_pool` - the `max_node`, `min_node`, and `node_count` properties can now be set to `0` ([#8300](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8300)) +* `azurerm_mssql_database` - the `min_capacity` property can now be set to `0` ([#8308](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8308)) +* `azurerm_mssql_database` - support for `long_term_retention_policy` and `short_term_retention_policy` blocks [[#8765](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8765)] +* `azurerm_mssql_server` - support the `minimum_tls_version` property ([#8361](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8361)) +* `azurerm_mssql_virtual_machine` - support for `storage_configuration_settings` ([#8623](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8623)) +* `azurerm_security_center_subscription_pricing` - now supports per `resource_type` pricing ([#8549](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8549)) +* `azurerm_storage_account` - support for the `large_file_share_enabled` property ([#8789](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8789)) +* `azurerm_storage_share` - support for large quotas (up to `102400` GB) 
([#8666](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8666)) + +BUG FIXES: + +* `azurerm_function_app` - mark the `app_settings` block as computed ([#8682](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8682)) +* `azurerm_function_app_slot` - mark the `app_settings` block as computed ([#8682](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8682)) +* `azurerm_policy_set_definition` - corrects issue with empty `parameter_values` attribute ([#8668](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8668)) +* `azurerm_policy_definition` - `mode` property now enforces correct case ([#8795](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8795)) + ## 2.30.0 (October 01, 2020) UPGRADE NOTES diff --git a/CHANGELOG.md b/CHANGELOG.md index c3f0b9187a0d..a5286b24b1dc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,138 +1,94 @@ -## 2.34.0 (Unreleased) - -UPGRADE NOTES - -* `azurerm_api_management_api` - fixing a regression introduced in v2.16 where this value for `subscription_required` was defaulted to `false` instead of `true` [GH-7963] - -FEATURES: - -* **New Resource:** `azurerm_log_analytics_data_export_rule` [GH-8995] -* **New Resource:** `azurerm_log_analytics_linked_storage_account` [GH-9002] -* **New Resource:** `azurerm_security_center_auto_provisioning` [GH-8595] -* **New Resource:** `azurerm_synapse_role_assignment` [GH-8863] -* **New Resource:** `azurerm_vpn_site` [GH-8896] - -IMPROVEMENTS: - -* `azurerm_container_group` - add support for `volume.x.git_repo` [GH-7924] -* `azurerm_iothub` - support for the `endpoint.resource_group` property [GH-8032] -* `azurerm_key_vault` - support for the `contact` block [GH-8937] - -BUG FIXES: - -* `azurerm_automation_module` - correctly show creation error messages [GH-8498] -* `azurerm_api_management_api` - fixing a regression introduced in v2.16 where the value for `subscription_required` was defaulted to `false` instead of `true` [GH-7963] -* `azurerm_app_service` - fixing a crash when provisioning an app service inside an App Service Environment which doesn't exist [GH-8993] -* `azurerm_cdn_endpoint` - disable persisting default value for `is_compression_enabled` to state file [GH-8610] -* `azurerm_databricks_workspace` correctly validate the `name` property [GH-8997] -* `azurerm_log_analytics_workspace` - support for the `daily_quota_gb` property [GH-8861] -* `azurerm_key_vault` - prevent unwanted diff due to inconsistent casing for the `sku_name` property [GH-8983] -* `azurerm_kubernetes_cluster` - Fix issue where `min_count` and `max_count` couldn't be the equal [GH-8957] -* `azurerm_kubernetes_cluster` - `min_count` can be updated when `enable_auto_scaling` is set to true [GH-8619] -* `azurerm_signalr_service` - switching the`features` block to a set so order is irrelevant [GH-8815] - -## 2.33.0 (October 22, 2020) - -UPGRADE NOTES - -* This release includes a workaround for [a breaking change in Azure’s API related to the Extended Auditing Policy](https://github.com/Azure/azure-rest-api-specs/issues/11271) of the SQL and MSSQL resources. The Service Team have confirmed that this Regression will first roll out to all regions before the bug fix is deployed - as such this workaround will be removed in a future release once the fix for the Azure API has been rolled out to all regions. 
- -FEATURES: - -* **New Resource:** `azurerm_service_fabric_mesh_secret` ([#8933](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8933)) -* **New Resource:** `azurerm_service_fabric_mesh_secret_value` ([#8933](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8933)) - -IMPROVEMENTS: - -* Data Source: `azurerm_shared_image_version` - exposing `os_disk_image_size_gb` ([#8904](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8904)) -* `azurerm_app_configuration` - support for the `identity` block ([#8875](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8875)) -* `azurerm_cosmosdb_sql_container` - support for composite indexes ([#8792](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8792)) -* `azurerm_mssql_database` - do not set longterm and shortterm retention policies when using the `DW` SKUs ([#8899](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8899)) -* `azurerm_mysql_firewall_rule` - validating the `start_ip_address` and `end_ip_address` fields are IP Addresses ([#8948](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8948)) -* `azurerm_redis_firewall_rule` - validating the `start_ip` and `end_ip` fields are IP Addresses ([#8948](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8948)) -* `azurerm_search_service` - support for the `identity` block ([#8907](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8907)) -* `azurerm_sql_firewall_rule` - adding validation for the `start_ip_address` and `end_ip_address` fields ([#8935](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8935)) - -BUG FIXES: - -* `azurerm_application_gateway` - now supports `ignore_changes` for `ssl_certificate` when using pre-existing certificates ([#8761](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8761)) -* `azurerm_mssql_database` - working around a breaking change/regression in the Azure API ([#8975](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8975)) -* `azurerm_mssql_database_extended_auditing_policy` - working around a breaking change/regression in the Azure API ([#8975](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8975)) -* `azurerm_mssql_server` - working around a breaking change/regression in the Azure API ([#8975](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8975)) -* `azurerm_mssql_server_extended_auditing_policy` - working around a breaking change/regression in the Azure API ([#8975](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8975)) -* `azurerm_sql_database` - working around a breaking change/regression in the Azure API ([#8975](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8975)) -* `azurerm_sql_server` - working around a breaking change/regression in the Azure API ([#8975](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8975)) -* `azurerm_policy_set_definition` - Fix updates for `parameters` and `parameter_values` in `policy_definition_reference` blocks ([#8882](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8882)) - -## 2.32.0 (October 15, 2020) +## 2.42.0 (Unreleased) FEATURES: -* **New data source:** `azurerm_mysql_server` ([#8787](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8787)) -* **New resource:** 
`azurerm_security_center_setting` ([#8783](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8783)) -* **New Resource:** `azurerm_service_fabric_mesh_local_network` ([#8838](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8838)) -* **New resource:** `azurerm_eventgrid_system_topic` ([#8735](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8735)) - -IMPROVEMENTS: - -* `azurerm_container_registry` - support for the `trust_policy` and `retention_policy` blocks ([#8698](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8698)) -* `azurerm_security_center_contact` - override SDK creat function to handle `201` response code ([#8774](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8774)) - -## 2.31.1 (October 08, 2020) +* **New Data Source:** `azurerm_eventgrid_domain_topic` [GH-10050] +* **New Resource:** `azurerm_data_factory_linked_service_synapse` [GH-9928] +* **New Resource:** `azurerm_disk_access` [GH-9889] +* **New Resource:** `azurerm_media_streaming_locator` [GH-9992] +* **New Resource:** `azurerm_sentinel_alert_rule_fusion` [GH-9829] IMPROVEMENTS: -* `azurerm_cognitive_account` - `kind` now supports `Personalizer` ([#8860](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8860)) -* `azurerm_search_service` - `sku` now supports `storage_optimized_l1` and `storage_optimized_l2` ([#8859](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8859)) -* `azurerm_storage_share` - set `metadata` to `Computed` and set `acl` `start` and `expiry` to `Optional` ([#8811](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8811)) +* dependencies: upgrading to `v0.15.1` of `github.com/tombuildsstuff/giovanni` [GH-10035] +* Data Source: `azurerm_hdinsight_cluster` - support for the `kafka_rest_proxy_endpoint` property [GH-8064] +* Data Source: `azurerm_databricks_workspace` - support for the `tags` property [GH-9933] +* Data Source: `azurerm_subscription` - support for the `tags` property [GH-8064] +* `azurerm_batch_pool` - support for the `public_address_provisioning_type` property [GH-10036] +* `azurerm_api_management` - support `Consumption_0` for the `sku_name` property [GH-6868] +* `azurerm_cdn_endpoint` - only send `content_types_to_compress` and `geo_filter` to the API when actually set [GH-9902] +* `azurerm_cosmosdb_mongo_collection` - correctly read back the `_id` index when using MongoDB 3.6 [GH-8690] +* `azurerm_container_group` - support for the `volume.empty_dir` property [GH-9836] +* `azurerm_data_factory_linked_service_azure_file_storage` - support for the `file_share` property [GH-9934] +* `azurerm_dedicated_host` - support for additional `sku_name` values [GH-9951] +* `azurerm_devspace_controller` - deprecating since new DevSpace Controllers can no longer be provisioned, this will be removed in version 3.0 of the Azure Provider [GH-10049] +* `azurerm_function_app` - make `pre_warmed_instance_count` computed to use Azure's default [GH-9069] +* `azurerm_hdinsight_hadoop_cluster` - allow the value `Standard_D4a_V4` for the `vm_type` property [GH-10000] +* `azurerm_hdinsight_kafka_cluster` - support for the `rest_proxy` and `kafka_management_node` blocks [GH-8064] +* `azurerm_log_analytics_linked_service` - add validation for resource ID type [GH-9932] +* `azurerm_log_analytics_linked_service` - update validation to use generated validate functions [GH-9950] +* `azurerm_monitor_diagnostic_setting` - validation that
`eventhub_authorization_rule_id` is an EventHub Namespace Authorization Rule ID [GH-9914] +* `azurerm_monitor_diagnostic_setting` - validation that `log_analytics_workspace_id` is a Log Analytics Workspace ID [GH-9914] +* `azurerm_monitor_diagnostic_setting` - validation that `storage_account_id` is a Storage Account ID [GH-9914] +* `azurerm_network_security_rule` - increase the allowed number of `application_security_group` blocks [GH-9884] +* `azurerm_sentinel_alert_rule_ms_security_incident` - support the `alert_rule_template_guid` and `display_name_exclude_filter` properties [GH-9797] +* `azurerm_sentinel_alert_rule_scheduled` - support for the `alert_rule_template_guid` property [GH-9712] +* `azurerm_sentinel_alert_rule_scheduled` - support for creating incidents [GH-8564] +* `azurerm_synapse_workspace` - support for the `managed_resource_group_name` property [GH-10017] +* `azurerm_traffic_manager_profile` - support for the `traffic_view_enabled` property [GH-10005] BUG FIXES: -* `azurerm_dedicated_hardware_security_module` - `stamp_id` now optional to allow use in Locations which use `zones` ([#8826](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8826)) -* `azurerm_storage_account`-`large_file_share_enabled` marked as computed to prevent existing storage shares from attempting to disable the default ([#8807](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8807)) +* `azurerm_application_gateway` - allow `750` for `file_upload_limit_mb` when the SKU is `WAF_v2` [GH-8753] +* `azurerm_firewall_policy_rule_collection_group` - correctly validate the `network_rule_collection.destination_ports` property [GH-9490] +* `azurerm_cdn_endpoint` - changing many `delivery_rule` condition `match_values` to optional [GH-8850] +* `azurerm_cosmosdb_account` - always include `key_vault_id` in update requests for Azure Policy engine compatibility [GH-9966] +* `azurerm_cosmosdb_table` - do not call the throughput API when serverless [GH-9749] +* `azurerm_kubernetes_cluster` - parse the OMS `log_analytics_workspace_id` to ensure correct casing [GH-9976] +* `azurerm_role_assignment` - fix crash in retry logic [GH-10051] +* `azurerm_storage_account` - allow HNS (hierarchical namespace) when `account_tier` is `Premium` [GH-9548] +* `azurerm_storage_share_file` - allowing files smaller than 4KB to be uploaded [GH-10035] -## 2.31.0 (October 08, 2020) +## 2.41.0 (December 17, 2020) -UPGRADE NOTES +UPGRADE NOTES: -* This release updates the `azurerm_security_center_subscription_pricing` resource to use the latest version of the Security API which now allows configuring multiple Resource Types - as such a new field `resource_type` is now available. Configurations default the `resource_type` to `VirtualMachines` which matches the behaviour of the previous release - but your Terraform Configuration may need updating. +* `azurerm_key_vault` - Azure will be introducing a breaking change on December 31st, 2020 by force-enabling Soft Delete on all new and existing Key Vaults. To work around this, this release of the Azure Provider still allows you to configure Soft Delete before this date (but once this is enabled it cannot be disabled). Since new Key Vaults will automatically be provisioned using Soft Delete in the future, and existing Key Vaults will be upgraded - a future release will deprecate the `soft_delete_enabled` field and default this to true early in 2021.
([#9911](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9911)) +* `azurerm_key_vault_certificate` - Terraform will now attempt to `purge` Certificates during deletion due to the upcoming breaking change in the Azure API where Key Vaults will have soft-delete force-enabled. This can be disabled by setting the `purge_soft_delete_on_destroy` field within the `features -> keyvault` block to `false`. ([#9911](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9911)) +* `azurerm_key_vault_key` - Terraform will now attempt to `purge` Keys during deletion due to the upcoming breaking change in the Azure API where Key Vaults will have soft-delete force-enabled. This can be disabled by setting the `purge_soft_delete_on_destroy` field within the `features -> keyvault` block to `false`. ([#9911](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9911)) +* `azurerm_key_vault_secret` - Terraform will now attempt to `purge` Secrets during deletion due to the upcoming breaking change in the Azure API where Key Vaults will have soft-delete force-enabled. This can be disabled by setting the `purge_soft_delete_on_destroy` field within the `features -> keyvault` block to `false`. ([#9911](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9911)) FEATURES: -* **New Resource:** `azurerm_service_fabric_mesh_application` ([#6761](https://github.com/terraform-providers/terraform-provider-azurerm/issues/6761)) -* **New Resource:** `azurerm_virtual_desktop_application_group` ([#8605](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8605)) -* **New Resource:** `azurerm_virtual_desktop_workspace_application_group_association` ([#8605](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8605)) -* **New Resource:** `azurerm_virtual_desktop_host_pool` ([#8605](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8605)) -* **New Resource:** `azurerm_virtual_desktop_workspace` ([#8605](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8605)) +* **New Resource:** `azurerm_eventgrid_system_topic_event_subscription` ([#9852](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9852)) +* **New Resource:** `azurerm_media_job` ([#9859](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9859)) +* **New Resource:** `azurerm_media_streaming_endpoint` ([#9537](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9537)) +* **New Resource:** `azurerm_subnet_service_endpoint_storage_policy` ([#8966](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8966)) +* **New Resource:** `azurerm_synapse_managed_private_endpoint` ([#9260](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9260)) IMPROVEMENTS: -* dependencies: updating `github.com/Azure/azure-sdk-for-go` to `v46.4.0` ([#8642](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8642)) -* `data.azurerm_application_insights` - support for the `connection_string` property ([#8699](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8699)) -* `azurerm_app_service` - support for IPV6 addresses in the `ip_restriction` property ([#8599](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8599)) -* `azurerm_application_insights` - support for the `connection_string` property 
([#8699](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8699)) -* `azurerm_backup_policy_vm` - validate daily backups is > `7` ([#7898](https://github.com/terraform-providers/terraform-provider-azurerm/issues/7898)) -* `azurerm_dedicated_host` - add support for the `DSv4-Type1` and `sku_name` properties ([#8718](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8718)) -* `azurerm_iothub` - Support for the `public_network_access_enabled` property ([#8586](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8586)) -* `azurerm_key_vault_certificate_issuer` - the `org_id` property is now optional ([#8687](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8687)) -* `azurerm_kubernetes_cluster_node_pool` - the `max_node`, `min_node`, and `node_count` properties can now be set to `0` ([#8300](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8300)) -* `azurerm_mssql_database` - the `min_capacity` property can now be set to `0` ([#8308](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8308)) -* `azurerm_mssql_database` - support for `long_term_retention_policy` and `short_term_retention_policy` blocks [[#8765](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8765)] -* `azurerm_mssql_server` - support the `minimum_tls_version` property ([#8361](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8361)) -* `azurerm_mssql_virtual_machine` - support for `storage_configuration_settings` ([#8623](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8623)) -* `azurerm_security_center_subscription_pricing` - now supports per `resource_type` pricing ([#8549](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8549)) -* `azurerm_storage_account` - support for the `large_file_share_enabled` property ([#8789](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8789)) -* `azurerm_storage_share` - support for large quotas (up to `102400` GB) ([#8666](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8666)) +* `azurerm_app_service` - Add support for `outbound_ip_address_list` and `possible_outbound_ip_address_list` ([#9871](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9871)) +* `azurerm_disk_encryption_set` - support for updating `key_vault_key_id` ([#7913](https://github.com/terraform-providers/terraform-provider-azurerm/issues/7913)) +* `azurerm_iot_time_series_insights_gen2_environment` - exposing `data_access_fqdn` ([#9848](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9848)) +* `azurerm_key_vault_certificate` - performing a "purge" of the Certificate during deletion if the feature is opted-in within the `features` block, see the "Upgrade Notes" for more information ([#9911](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9911)) +* `azurerm_key_vault_key` - performing a "purge" of the Key during deletion if the feature is opted-in within the `features` block, see the "Upgrade Notes" for more information ([#9911](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9911)) +* `azurerm_key_vault_secret` - performing a "purge" of the Secret during deletion if the feature is opted-in within the `features` block, see the "Upgrade Notes" for more information ([#9911](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9911)) +* 
`azurerm_log_analytics_linked_service` - Add new fields `workspace_id`, `read_access_id`, and `write_access_id` ([#9410](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9410)) +* `azurerm_linux_virtual_machine` - Normalise SSH keys to cover VM import cases ([#9897](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9897)) +* `azurerm_subnet` - support for the `service_endpoint_policy` block ([#8966](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8966)) +* `azurerm_traffic_manager_profile` - support for new field `max_return` and support for `traffic_routing_method` to be `MultiValue` ([#9487](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9487)) BUG FIXES: -* `azurerm_function_app` - mark the `app_settings` block as computed ([#8682](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8682)) -* `azurerm_function_app_slot` - mark the `app_settings` block as computed ([#8682](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8682)) -* `azurerm_policy_set_definition` - corrects issue with empty `parameter_values` attribute ([#8668](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8668)) -* `azurerm_policy_definition` - `mode` property now enforces correct case ([#8795](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8795)) +* `azurerm_key_vault_certificate` - reading `dns_names` and `emails` within the `subject_alternative_names` block from the Certificate if not returned from the API ([#8631](https://github.com/terraform-providers/terraform-provider-azurerm/issues/8631)) +* `azurerm_key_vault_certificate` - polling until the Certificate is fully deleted during deletion ([#9911](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9911)) +* `azurerm_key_vault_key` - polling until the Key is fully deleted during deletion ([#9911](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9911)) +* `azurerm_key_vault_secret` - polling until the Secret is fully deleted during deletion ([#9911](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9911)) +* `azurerm_log_analytics_workspace` - adding a state migration to correctly update the Resource ID ([#9853](https://github.com/terraform-providers/terraform-provider-azurerm/issues/9853)) --- -For information on changes between the v2.30.0 and v2.0.0 releases, please see [the previous v2.x changelog entries](https://github.com/terraform-providers/terraform-provider-azurerm/blob/master/CHANGELOG-v2.md). +For information on changes between the v2.40.0 and v2.0.0 releases, please see [the previous v2.x changelog entries](https://github.com/terraform-providers/terraform-provider-azurerm/blob/master/CHANGELOG-v2.md). -For information on changes in version v1.44.0 and prior releases, please see [the v1.44.0 changelog](https://github.com/terraform-providers/terraform-provider-azurerm/blob/master/CHANGELOG-v1.md). +For information on changes in version v1.44.0 and prior releases, please see [the v1.x changelog](https://github.com/terraform-providers/terraform-provider-azurerm/blob/master/CHANGELOG-v1.md). 
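As a rough illustration of the Key Vault purge behaviour described in the 2.41.0 upgrade notes above, opting out might look like the provider configuration sketched below; the note refers to a `features -> keyvault` block, and the nested block is written here as `key_vault`, which is an assumption:

```hcl
provider "azurerm" {
  features {
    key_vault {
      # Assumed nested block/argument naming - opts out of purging
      # soft-deleted Certificates, Keys and Secrets on destroy.
      purge_soft_delete_on_destroy = false
    }
  }
}
```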
diff --git a/GNUmakefile b/GNUmakefile index 0edd6e9646a3..b36b1bfb409a 100644 --- a/GNUmakefile +++ b/GNUmakefile @@ -17,7 +17,9 @@ tools: GO111MODULE=off go get -u github.com/bflad/tfproviderlint/cmd/tfproviderlint GO111MODULE=off go get -u github.com/bflad/tfproviderdocs GO111MODULE=off go get -u github.com/katbyte/terrafmt - curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $$(go env GOPATH || $$GOPATH)/bin v1.24.0 + GO111MODULE=off go get -u golang.org/x/tools/cmd/goimports + GO111MODULE=off go get -u mvdan.cc/gofumpt + curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $$(go env GOPATH || $$GOPATH)/bin v1.32.0 build: fmtcheck generate go install @@ -31,6 +33,11 @@ fmt: # This logic should match the search logic in scripts/gofmtcheck.sh find . -name '*.go' | grep -v vendor | xargs gofmt -s -w +fumpt: + @echo "==> Fixing source code with gofumpt..." + # This logic should match the search logic in scripts/gofmtcheck.sh + find . -name '*.go' | grep -v vendor | xargs gofumpt -s -w + # Currently required by tf-deploy compile, duplicated by linters fmtcheck: @sh "$(CURDIR)/scripts/gofmtcheck.sh" @@ -44,6 +51,7 @@ terrafmt: @find . | egrep html.markdown | sort | while read f; do terrafmt fmt $$f; done generate: + go generate ./azurerm/internal/services/... go generate ./azurerm/internal/provider/ goimports: @@ -53,15 +61,6 @@ goimports: lint: ./scripts/run-lint.sh -# we have split off static check because it causes travis to fail with an OOM error -lintunused: - @echo "==> Checking source code against static check linters..." - (while true; do sleep 300; echo "(I'm still alive and linting!)"; done) & PID=$$!; echo $$PID; \ - golangci-lint run ./... -v --no-config --concurrency 1 --deadline=30m10s --disable-all --enable=unused; ES=$$?; kill -9 $$PID; exit $$ES - -lintrest: - ./scripts/run-lint-rest.sh - depscheck: @echo "==> Checking source code with go mod tidy..." @go mod tidy @@ -72,6 +71,13 @@ depscheck: @git diff --compact-summary --exit-code -- vendor || \ (echo; echo "Unexpected difference in vendor/ directory. Run 'go mod vendor' command or revert any go.mod/go.sum/vendor changes and commit."; exit 1) +gencheck: + @echo "==> Generating..." + @make generate + @echo "==> Comparing generated code to committed code..." + @git diff --compact-summary --exit-code -- ./ || \ + (echo; echo "Unexpected difference in generated code. Run 'go generate' to update the generated code and commit."; exit 1) + tflint: ./scripts/run-tflint.sh @@ -128,4 +134,9 @@ ifeq (,$(wildcard $(GOPATH)/src/$(WEBSITE_REPO))) endif @$(MAKE) -C $$(go env GOPATH || $$GOPATH)/src/$(WEBSITE_REPO) website-provider-test PROVIDER_PATH=$(shell pwd) PROVIDER_NAME=$(PKG_NAME) +teamcity-test: + @$(MAKE) -C .teamcity tools + @$(MAKE) -C .teamcity test + + .PHONY: build build-docker test test-docker testacc vet fmt fmtcheck errcheck scaffold-website test-compile website website-test diff --git a/README.md b/README.md index 2da20ac25646..06244604b75f 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # Terraform Provider for Azure (Resource Manager) -Version 2.0 of the AzureRM Provider requires Terraform 0.12.x and later. +Version 2.x of the AzureRM Provider requires Terraform 0.12.x and later. * [Terraform Website](https://www.terraform.io) * [AzureRM Provider Documentation](https://www.terraform.io/docs/providers/azurerm/index.html) @@ -14,13 +14,13 @@
provider "azurerm" { # We recommend pinning to the specific version of the Azure Provider you're using # since new versions are released frequently - version = "=2.20.0" + version = "=2.40.0" features {} # More information on the authentication methods supported by # the AzureRM Provider can be found here: - # http://terraform.io/docs/providers/azurerm/index.html + # https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs # subscription_id = "..." # client_id = "..." @@ -37,8 +37,8 @@ resource "azurerm_resource_group" "example" { # Create a virtual network in the production-resources resource group resource "azurerm_virtual_network" "test" { name = "production-network" - resource_group_name = "${azurerm_resource_group.example.name}" - location = "${azurerm_resource_group.example.location}" + resource_group_name = azurerm_resource_group.example.name + location = azurerm_resource_group.example.location address_space = ["10.0.0.0/16"] } ``` @@ -48,9 +48,9 @@ Further [usage documentation is available on the Terraform website](https://www. ## Developer Requirements * [Terraform](https://www.terraform.io/downloads.html) version 0.12.x + -* [Go](https://golang.org/doc/install) version 1.14.x (to build the provider plugin) +* [Go](https://golang.org/doc/install) version 1.15.x (to build the provider plugin) -### On Windows +### On Windows If you're on Windows you'll also need: * [Git Bash for Windows](https://git-scm.com/download/win) @@ -70,7 +70,7 @@ You must run `Developing the Provider` commands in `bash` because `sh` scrips a ## Developing the Provider -If you wish to work on the provider, you'll first need [Go](http://www.golang.org) installed on your machine (version 1.13+ is **required**). You'll also need to correctly setup a [GOPATH](http://golang.org/doc/code.html#GOPATH), as well as adding `$GOPATH/bin` to your `$PATH`. +If you wish to work on the provider, you'll first need [Go](http://www.golang.org) installed on your machine (version 1.15+ is **required**). You'll also need to correctly setup a [GOPATH](http://golang.org/doc/code.html#GOPATH), as well as adding `$GOPATH/bin` to your `$PATH`. First clone the repository to: `$GOPATH/src/github.com/terraform-providers/terraform-provider-azurerm` @@ -125,6 +125,24 @@ The following Environment Variables must be set in your shell prior to running a --- +## Developer: Generating Resource ID Formatters, Parsers and Validators + +You can generate a Resource ID Formatter, Parser and Validator by adding the following line to a `resourceids.go` within each Service Package (for example `./azurerm/internal/services/someservice/resourceids.go`): + +```go +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Server -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.AnalysisServices/servers/Server1 +``` + +Where `name` is the name of the Resource ID Type - and `id` is an example Resource ID with placeholder data. + +When `make generate` is run, this will then generate the following for this Resource ID: + +* Resource ID Struct, containing the fields and a Formatter to convert this into a string - and the associated Unit Tests. +* Resource ID Parser (`./parse/{name}.go`) - to be able to parse a Resource ID into said struct - and the associated Unit Tests. +* Resource ID Validator (`./validate/{name}_id.go`) - to validate the Resource ID is what's expected (and not for a different resource) - and the associated Unit Tests. 
+ +--- + ## Developer: Scaffolding the Website Documentation You can scaffold the documentation for a Data Source by running: diff --git a/azurerm/helpers/azure/api_management.go b/azurerm/helpers/azure/api_management.go index ce28eb212c40..61bacd508834 100644 --- a/azurerm/helpers/azure/api_management.go +++ b/azurerm/helpers/azure/api_management.go @@ -309,3 +309,20 @@ func FlattenApiManagementOperationParameterContract(input *[]apimanagement.Param return outputs } + +// CopyCertificateAndPassword copies any certificate and password attributes +// from the old config to the current to avoid state diffs. +// Iterate through old state to find sensitive props not returned by API. +// This must be done in order to avoid state diffs. +// NOTE: this information won't be available during times like Import, so this is a best-effort. +func CopyCertificateAndPassword(vals []interface{}, hostName string, output map[string]interface{}) { + for _, val := range vals { + oldConfig := val.(map[string]interface{}) + + if oldConfig["host_name"] == hostName { + output["certificate_password"] = oldConfig["certificate_password"] + output["certificate"] = oldConfig["certificate"] + break + } + } +} diff --git a/azurerm/helpers/azure/deprecated.go b/azurerm/helpers/azure/deprecated.go new file mode 100644 index 000000000000..547ca44738f7 --- /dev/null +++ b/azurerm/helpers/azure/deprecated.go @@ -0,0 +1,22 @@ +package azure + +import ( + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" +) + +// shared schema +func MergeSchema(a map[string]*schema.Schema, b map[string]*schema.Schema) map[string]*schema.Schema { + // TODO: Deprecate and remove this + + s := map[string]*schema.Schema{} + + for k, v := range a { + s[k] = v + } + + for k, v := range b { + s[k] = v + } + + return s +} diff --git a/azurerm/helpers/azure/eventhub_test.go b/azurerm/helpers/azure/eventhub_test.go index 11cbf19d7f4e..8a3dadfb109c 100644 --- a/azurerm/helpers/azure/eventhub_test.go +++ b/azurerm/helpers/azure/eventhub_test.go @@ -52,7 +52,7 @@ func TestValidateEventHubName(t *testing.T) { valid: false, }, } - var validationFunction = ValidateEventHubName() + validationFunction := ValidateEventHubName() for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { _, err := validationFunction(tt.input, "") diff --git a/azurerm/helpers/azure/key_vault_child.go b/azurerm/helpers/azure/key_vault_child.go index d352eda26aae..59911399c18c 100644 --- a/azurerm/helpers/azure/key_vault_child.go +++ b/azurerm/helpers/azure/key_vault_child.go @@ -15,6 +15,20 @@ type KeyVaultChildID struct { Version string } +func NewKeyVaultChildResourceID(keyVaultBaseUrl, childType, name, version string) (string, error) { + fmtString := "%s/%s/%s/%s" + keyVaultUrl, err := url.Parse(keyVaultBaseUrl) + if err != nil || keyVaultBaseUrl == "" { + return "", fmt.Errorf("failed to parse Key Vault Base URL %q: %+v", keyVaultBaseUrl, err) + } + // (@jackofallops) - Log Analytics service adds the port number to the API returns, so we strip it here + if hostParts := strings.Split(keyVaultUrl.Host, ":"); len(hostParts) > 1 { + keyVaultUrl.Host = hostParts[0] + } + + return fmt.Sprintf(fmtString, keyVaultUrl.String(), childType, name, version), nil +} + func ParseKeyVaultChildID(id string) (*KeyVaultChildID, error) { // example: https://tharvey-keyvault.vault.azure.net/type/bird/fdf067c93bbb4b22bff4d8b7a9a56217 idURL, err := url.ParseRequestURI(id) diff --git a/azurerm/helpers/azure/key_vault_child_test.go b/azurerm/helpers/azure/key_vault_child_test.go index 
2e5951cc7659..0fdbce778ad0 100644 --- a/azurerm/helpers/azure/key_vault_child_test.go +++ b/azurerm/helpers/azure/key_vault_child_test.go @@ -1,6 +1,8 @@ package azure -import "testing" +import ( + "testing" +) func TestAccAzureRMValidateKeyVaultChildID(t *testing.T) { cases := []struct { @@ -320,3 +322,46 @@ func TestAccAzureRMKeyVaultChild_validateName(t *testing.T) { } } } + +func TestNewKeyVaultChildResourceID(t *testing.T) { + childType := "keys" + childName := "test" + childVersion := "testVersionString" + cases := []struct { + Scenario string + keyVaultBaseUrl string + Expected string + ExpectError bool + }{ + { + Scenario: "empty values", + keyVaultBaseUrl: "", + Expected: "", + ExpectError: true, + }, + { + Scenario: "valid, no port", + keyVaultBaseUrl: "https://test.vault.azure.net", + Expected: "https://test.vault.azure.net/keys/test/testVersionString", + ExpectError: false, + }, + { + Scenario: "valid, with port", + keyVaultBaseUrl: "https://test.vault.azure.net:443", + Expected: "https://test.vault.azure.net/keys/test/testVersionString", + ExpectError: false, + }, + } + for _, tc := range cases { + id, err := NewKeyVaultChildResourceID(tc.keyVaultBaseUrl, childType, childName, childVersion) + if err != nil { + if !tc.ExpectError { + t.Fatalf("Got error for New Resource ID '%s': %+v", tc.keyVaultBaseUrl, err) + return + } + } + if id != tc.Expected { + t.Fatalf("Expected id for %q to be %q, got %q", tc.keyVaultBaseUrl, tc.Expected, id) + } + } +} diff --git a/azurerm/helpers/azure/servicebus.go b/azurerm/helpers/azure/servicebus.go deleted file mode 100644 index 4ffd552e2434..000000000000 --- a/azurerm/helpers/azure/servicebus.go +++ /dev/null @@ -1,158 +0,0 @@ -package azure - -import ( - "fmt" - "log" - "regexp" - - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - - "github.com/Azure/azure-sdk-for-go/services/servicebus/mgmt/2017-04-01/servicebus" -) - -// validation -func ValidateServiceBusQueueName() schema.SchemaValidateFunc { - return validation.StringMatch( - regexp.MustCompile(`^[a-zA-Z0-9][\w-./~]{0,258}([a-zA-Z0-9])?$`), - "The topic name can contain only letters, numbers, periods, hyphens, tildas, forward slashes and underscores. The namespace must start and end with a letter or number and be less then 260 characters long.", - ) -} - -func ValidateServiceBusSubscriptionName() schema.SchemaValidateFunc { - return validation.StringMatch( - regexp.MustCompile("^[a-zA-Z0-9][-._a-zA-Z0-9]{0,48}([a-zA-Z0-9])?$"), - "The name can contain only letters, numbers, periods, hyphens and underscores. The name must start and end with a letter or number and be less then 50 characters long.", - ) -} - -func ValidateServiceBusTopicName() schema.SchemaValidateFunc { - return validation.StringMatch( - regexp.MustCompile("^[a-zA-Z0-9]([-._~a-zA-Z0-9]{0,258}[a-zA-Z0-9])?$"), - "The topic name can contain only letters, numbers, periods, hyphens, tildas and underscores. The namespace must start with a letter or number, and it must end with a letter or number and be less then 260 characters long.", - ) -} - -func ValidateServiceBusAuthorizationRuleName() schema.SchemaValidateFunc { - return validation.StringMatch( - regexp.MustCompile("^[a-zA-Z0-9][-._a-zA-Z0-9]{0,48}([a-zA-Z0-9])?$"), - "The name can contain only letters, numbers, periods, hyphens and underscores. 
The name must start and end with a letter or number and be less the 50 characters long.", - ) -} - -func ExpandServiceBusAuthorizationRuleRights(d *schema.ResourceData) *[]servicebus.AccessRights { - rights := make([]servicebus.AccessRights, 0) - - if d.Get("listen").(bool) { - rights = append(rights, servicebus.Listen) - } - - if d.Get("send").(bool) { - rights = append(rights, servicebus.SendEnumValue) - } - - if d.Get("manage").(bool) { - rights = append(rights, servicebus.Manage) - } - - return &rights -} - -func FlattenServiceBusAuthorizationRuleRights(rights *[]servicebus.AccessRights) (listen, send, manage bool) { - // zero (initial) value for a bool in go is false - - if rights != nil { - for _, right := range *rights { - switch right { - case servicebus.Listen: - listen = true - case servicebus.SendEnumValue: - send = true - case servicebus.Manage: - manage = true - default: - log.Printf("[DEBUG] Unknown Authorization Rule Right '%s'", right) - } - } - } - - return listen, send, manage -} - -// shared schema -func MergeSchema(a map[string]*schema.Schema, b map[string]*schema.Schema) map[string]*schema.Schema { - s := map[string]*schema.Schema{} - - for k, v := range a { - s[k] = v - } - - for k, v := range b { - s[k] = v - } - - return s -} - -func ServiceBusAuthorizationRuleSchemaFrom(s map[string]*schema.Schema) map[string]*schema.Schema { - authSchema := map[string]*schema.Schema{ - "listen": { - Type: schema.TypeBool, - Optional: true, - Default: false, - }, - - "send": { - Type: schema.TypeBool, - Optional: true, - Default: false, - }, - - "manage": { - Type: schema.TypeBool, - Optional: true, - Default: false, - }, - - "primary_key": { - Type: schema.TypeString, - Computed: true, - Sensitive: true, - }, - - "primary_connection_string": { - Type: schema.TypeString, - Computed: true, - Sensitive: true, - }, - - "secondary_key": { - Type: schema.TypeString, - Computed: true, - Sensitive: true, - }, - - "secondary_connection_string": { - Type: schema.TypeString, - Computed: true, - Sensitive: true, - }, - } - return MergeSchema(s, authSchema) -} - -func ServiceBusAuthorizationRuleCustomizeDiff(d *schema.ResourceDiff, _ interface{}) error { - listen, hasListen := d.GetOk("listen") - send, hasSend := d.GetOk("send") - manage, hasManage := d.GetOk("manage") - - if !hasListen && !hasSend && !hasManage { - return fmt.Errorf("One of the `listen`, `send` or `manage` properties needs to be set") - } - - if manage.(bool) && !listen.(bool) && !send.(bool) { - return fmt.Errorf("if `manage` is set both `listen` and `send` must be set to true too") - } - - return nil -} diff --git a/azurerm/helpers/azure/servicebus_test.go b/azurerm/helpers/azure/servicebus_test.go deleted file mode 100644 index 266c7445dae7..000000000000 --- a/azurerm/helpers/azure/servicebus_test.go +++ /dev/null @@ -1,76 +0,0 @@ -package azure - -import ( - "strings" - "testing" -) - -func TestValidateServiceBusTopicName(t *testing.T) { - tests := []struct { - name string - input string - valid bool - }{ - { - name: "Empty value", - input: "", - valid: false, - }, - { - name: "Invalid name with only 1 letter", - input: "a", - valid: true, - }, - { - name: "Invalid name starts with underscore", - input: "_a", - valid: false, - }, - { - name: "Invalid name ends with period", - input: "a.", - valid: false, - }, - { - name: "Valid name with numbers", - input: "12345", - valid: true, - }, - { - name: "Valid name with only 1 number", - input: "1", - valid: true, - }, - { - name: "Valid name with hyphens", - input: 
"malcolm-in-the-middle", - valid: true, - }, - { - name: "Valid name with 259 characters", - input: strings.Repeat("w", 259), - valid: true, - }, - { - name: "Valid name with 260 characters", - input: strings.Repeat("w", 260), - valid: true, - }, - { - name: "Invalid name with 261 characters", - input: strings.Repeat("w", 261), - valid: false, - }, - } - - var validationFunction = ValidateServiceBusTopicName() - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - _, err := validationFunction(tt.input, "name") - valid := err == nil - if valid != tt.valid { - t.Errorf("Expected valid status %t but got %t for input %s", tt.valid, valid, tt.input) - } - }) - } -} diff --git a/azurerm/helpers/azure/sku.go b/azurerm/helpers/azure/sku.go index 8550a624bd09..09c3fb1625f4 100644 --- a/azurerm/helpers/azure/sku.go +++ b/azurerm/helpers/azure/sku.go @@ -4,8 +4,6 @@ import ( "fmt" "strconv" "strings" - - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" ) func SplitSku(sku string) (string, int32, error) { @@ -16,43 +14,9 @@ func SplitSku(sku string) (string, int32, error) { } capacity, err := strconv.Atoi(skuParts[1]) - if err != nil { return "", -1, fmt.Errorf("%s in sku_name is not a valid value.", skuParts[1]) } return skuParts[0], int32(capacity), nil } - -// MinCapacitySkuNameInSlice returns a SchemaValidateFunc which tests if the provided value -// is of type string and matches the value of an element in the valid slice -// will test with in lower case if ignoreCase is true will also validate if the -// capacity if above passed minCapacity value -func MinCapacitySkuNameInSlice(valid []string, minCapacity int32, ignoreCase bool) schema.SchemaValidateFunc { - return func(i interface{}, k string) (s []string, es []error) { - v, ok := i.(string) - if !ok { - es = append(es, fmt.Errorf("expected type of %s to be string", k)) - return - } - - name, capacity, err := SplitSku(v) - - if err != nil { - es = append(es, err) - return - } - - for _, str := range valid { - if name == str || (ignoreCase && strings.EqualFold(name, str)) { - if capacity < minCapacity { - es = append(es, fmt.Errorf("expected %s capacity value to be greater that %d, got %d", k, minCapacity, capacity)) - } - return - } - } - - es = append(es, fmt.Errorf("expected %s to be one of %v, got %s", k, valid, name)) - return - } -} diff --git a/azurerm/helpers/azure/ssh_key.go b/azurerm/helpers/azure/ssh_key.go new file mode 100644 index 000000000000..d9baac9bfee5 --- /dev/null +++ b/azurerm/helpers/azure/ssh_key.go @@ -0,0 +1,30 @@ +package azure + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +// NormaliseSSHKey attempts to remove invalid formatting and line breaks that can be present in some cases +// when querying the Azure APIs +func NormaliseSSHKey(input string) (*string, error) { + if input == "" { + return nil, fmt.Errorf("empty string supplied") + } + + output := input + output = strings.ReplaceAll(output, "<<~EOT", "") + output = strings.ReplaceAll(output, "EOT", "") + output = strings.ReplaceAll(output, "\r", "") + + lines := make([]string, 0) + for _, line := range strings.Split(output, "\n") { + lines = append(lines, strings.TrimSpace(line)) + } + + normalised := strings.Join(lines, "") + + return utils.String(normalised), nil +} diff --git a/azurerm/helpers/azure/ssk_key_test.go b/azurerm/helpers/azure/ssk_key_test.go new file mode 100644 index 000000000000..355610617e1c --- /dev/null +++ b/azurerm/helpers/azure/ssk_key_test.go @@ -0,0 
+1,118 @@ +package azure + +import "testing" + +func TestNormaliseSSHKey(t *testing.T) { + cases := []struct { + Input string + Error bool + Expected string + }{ + { + Input: "", + Error: true, + }, + { + // Valid 2048 - no modification needed + Input: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC0pA4vzGH+cmR+blZnoxO5HorOP1ubD4SxuOiW2DSNTSptlj+mPmFIL6sZeYMvSqAjXK368qL3DKHLpp2+1ws1XnYn/Zx/O4WBQAY7VbtzwFc7w7uirQaK6lVqXn8q4CnO0+5IYHgKLrNMEipwLKo+R3E3e1KrH5Xbyhj5yJzrMe3lWOAPzS27DJvjpN5SGWo65X6qFJRh3q95xOQhSOaEqZ/A2ZtfOuagq3FmASzoo/pbq7ianvnxzAYsb2Hg/9uAvypj4Beli6BP7419aP14XS0yyiW4XTKY/9XZiR/3VIKBN/stGN5NFLw82/j12E1GznbDG9PL7PQhijP7QgJh generated-by-azure", + Expected: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC0pA4vzGH+cmR+blZnoxO5HorOP1ubD4SxuOiW2DSNTSptlj+mPmFIL6sZeYMvSqAjXK368qL3DKHLpp2+1ws1XnYn/Zx/O4WBQAY7VbtzwFc7w7uirQaK6lVqXn8q4CnO0+5IYHgKLrNMEipwLKo+R3E3e1KrH5Xbyhj5yJzrMe3lWOAPzS27DJvjpN5SGWo65X6qFJRh3q95xOQhSOaEqZ/A2ZtfOuagq3FmASzoo/pbq7ianvnxzAYsb2Hg/9uAvypj4Beli6BP7419aP14XS0yyiW4XTKY/9XZiR/3VIKBN/stGN5NFLw82/j12E1GznbDG9PL7PQhijP7QgJh generated-by-azure", + }, + { + // Valid 2048 - multiline, as per ARM Template Cache + Input: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC0pA4vzGH+cmR+blZnoxO5HorOP1ubD4SxuOiW2DSN\r\n" + + "TSptlj+mPmFIL6sZeYMvSqAjXK368qL3DKHLpp2+1ws1XnYn/Zx/O4WBQAY7VbtzwFc7w7uirQaK6lVq\r\n" + + "Xn8q4CnO0+5IYHgKLrNMEipwLKo+R3E3e1KrH5Xbyhj5yJzrMe3lWOAPzS27DJvjpN5SGWo65X6qFJRh\r\n" + + "3q95xOQhSOaEqZ/A2ZtfOuagq3FmASzoo/pbq7ianvnxzAYsb2Hg/9uAvypj4Beli6BP7419aP14XS0y\r\n" + + "yiW4XTKY/9XZiR/3VIKBN/stGN5NFLw82/j12E1GznbDG9PL7PQhijP7QgJh generated-by-azure", + Expected: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC0pA4vzGH+cmR+blZnoxO5HorOP1ubD4SxuOiW2DSNTSptlj+mPmFIL6sZeYMvSqAjXK368qL3DKHLpp2+1ws1XnYn/Zx/O4WBQAY7VbtzwFc7w7uirQaK6lVqXn8q4CnO0+5IYHgKLrNMEipwLKo+R3E3e1KrH5Xbyhj5yJzrMe3lWOAPzS27DJvjpN5SGWo65X6qFJRh3q95xOQhSOaEqZ/A2ZtfOuagq3FmASzoo/pbq7ianvnxzAYsb2Hg/9uAvypj4Beli6BP7419aP14XS0yyiW4XTKY/9XZiR/3VIKBN/stGN5NFLw82/j12E1GznbDG9PL7PQhijP7QgJh generated-by-azure", + }, + { + // Valid 2048 - multiline, as per ARM Template Cache Linux newlines + Input: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC0pA4vzGH+cmR+blZnoxO5HorOP1ubD4SxuOiW2DSN\n" + + "TSptlj+mPmFIL6sZeYMvSqAjXK368qL3DKHLpp2+1ws1XnYn/Zx/O4WBQAY7VbtzwFc7w7uirQaK6lVq\n" + + "Xn8q4CnO0+5IYHgKLrNMEipwLKo+R3E3e1KrH5Xbyhj5yJzrMe3lWOAPzS27DJvjpN5SGWo65X6qFJRh\n" + + "3q95xOQhSOaEqZ/A2ZtfOuagq3FmASzoo/pbq7ianvnxzAYsb2Hg/9uAvypj4Beli6BP7419aP14XS0y\n" + + "yiW4XTKY/9XZiR/3VIKBN/stGN5NFLw82/j12E1GznbDG9PL7PQhijP7QgJh generated-by-azure", + Expected: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC0pA4vzGH+cmR+blZnoxO5HorOP1ubD4SxuOiW2DSNTSptlj+mPmFIL6sZeYMvSqAjXK368qL3DKHLpp2+1ws1XnYn/Zx/O4WBQAY7VbtzwFc7w7uirQaK6lVqXn8q4CnO0+5IYHgKLrNMEipwLKo+R3E3e1KrH5Xbyhj5yJzrMe3lWOAPzS27DJvjpN5SGWo65X6qFJRh3q95xOQhSOaEqZ/A2ZtfOuagq3FmASzoo/pbq7ianvnxzAYsb2Hg/9uAvypj4Beli6BP7419aP14XS0yyiW4XTKY/9XZiR/3VIKBN/stGN5NFLw82/j12E1GznbDG9PL7PQhijP7QgJh generated-by-azure", + }, + { + // Valid 4096 - not modification required + Input: "ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAACAQDFP6r3wb/79MqRYI4dpgMwmjlrDDrk3A/pehysk1wzQn3lSEUtrNeQsHI6o/8au8Un1ndaZXZl/yWQQDDW4kqGw5ty8xPUZ+DB1ZVWkFOVNAgARl0bMNCgm2kB85l66g0zHWDCKLt+xi8xQiL7tGvdq3SWpogY3pWF2AABXoNDloHEN0mzzjJ09hdAHbygaDDr/9k3uyGKH3x0qo7fx5g8GqTtM3YWRxqUqdtkjsNomq94c/PMybCGR6qRoGI0Cdr/OP6/kszDHwf87B9hpTDMNa6x6FVJSDHc9v0CWePJZpjEOAFN3GCyPFFQTA9jvy026jt43wzyeH0kPe/T0ZZdr9YzQETN1b/oAKWKoayIoiLyJtFqUKcFFJSPcMz9ISgCD5Q/jRxQwMuMHpQ8TslxZ38l+41/0V1LWwKj0IkyJVFVWzu4zhgAZXr5y9Qbsis9sStRc+LU9/FQJ/VzNQfL83l86rH/u3NiPFfqisXILSybtMCD0OoRRHfQvWFsSwgt9JCIqLpmrJXRYs679aHzTHDgitlovJyprwqrbjg5N3XNSB5FohAUJUnVMF8z+qzvb4pPhly6mj6tiSJGYbXPngN6Iv8t3mRko3PbYLrWuxMb345BxcD+j9XteUgm1j/10qrSvqq+1R+/FAFPYwLXCflZgKst2g8/rEiVQz+a3w== generated-by-azure", + Expected: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDFP6r3wb/79MqRYI4dpgMwmjlrDDrk3A/pehysk1wzQn3lSEUtrNeQsHI6o/8au8Un1ndaZXZl/yWQQDDW4kqGw5ty8xPUZ+DB1ZVWkFOVNAgARl0bMNCgm2kB85l66g0zHWDCKLt+xi8xQiL7tGvdq3SWpogY3pWF2AABXoNDloHEN0mzzjJ09hdAHbygaDDr/9k3uyGKH3x0qo7fx5g8GqTtM3YWRxqUqdtkjsNomq94c/PMybCGR6qRoGI0Cdr/OP6/kszDHwf87B9hpTDMNa6x6FVJSDHc9v0CWePJZpjEOAFN3GCyPFFQTA9jvy026jt43wzyeH0kPe/T0ZZdr9YzQETN1b/oAKWKoayIoiLyJtFqUKcFFJSPcMz9ISgCD5Q/jRxQwMuMHpQ8TslxZ38l+41/0V1LWwKj0IkyJVFVWzu4zhgAZXr5y9Qbsis9sStRc+LU9/FQJ/VzNQfL83l86rH/u3NiPFfqisXILSybtMCD0OoRRHfQvWFsSwgt9JCIqLpmrJXRYs679aHzTHDgitlovJyprwqrbjg5N3XNSB5FohAUJUnVMF8z+qzvb4pPhly6mj6tiSJGYbXPngN6Iv8t3mRko3PbYLrWuxMb345BxcD+j9XteUgm1j/10qrSvqq+1R+/FAFPYwLXCflZgKst2g8/rEiVQz+a3w== generated-by-azure", + }, + { + // Valid 4096 - multiline Windows + Input: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDFP6r3wb/79MqRYI4dpgMwmjlrDDrk3A/pehysk1wz\r\n" + + "Qn3lSEUtrNeQsHI6o/8au8Un1ndaZXZl/yWQQDDW4kqGw5ty8xPUZ+DB1ZVWkFOVNAgARl0bMNCgm2kB\r\n" + + "85l66g0zHWDCKLt+xi8xQiL7tGvdq3SWpogY3pWF2AABXoNDloHEN0mzzjJ09hdAHbygaDDr/9k3uyGK\r\n" + + "H3x0qo7fx5g8GqTtM3YWRxqUqdtkjsNomq94c/PMybCGR6qRoGI0Cdr/OP6/kszDHwf87B9hpTDMNa6x\r\n" + + "6FVJSDHc9v0CWePJZpjEOAFN3GCyPFFQTA9jvy026jt43wzyeH0kPe/T0ZZdr9YzQETN1b/oAKWKoayI\r\n" + + "oiLyJtFqUKcFFJSPcMz9ISgCD5Q/jRxQwMuMHpQ8TslxZ38l+41/0V1LWwKj0IkyJVFVWzu4zhgAZXr5\r\n" + + "y9Qbsis9sStRc+LU9/FQJ/VzNQfL83l86rH/u3NiPFfqisXILSybtMCD0OoRRHfQvWFsSwgt9JCIqLpm\r\n" + + "rJXRYs679aHzTHDgitlovJyprwqrbjg5N3XNSB5FohAUJUnVMF8z+qzvb4pPhly6mj6tiSJGYbXPngN6\r\n" + + "Iv8t3mRko3PbYLrWuxMb345BxcD+j9XteUgm1j/10qrSvqq+1R+/FAFPYwLXCflZgKst2g8/rEiVQz+a\r\n" + + "3w== generated-by-azure", + Expected: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDFP6r3wb/79MqRYI4dpgMwmjlrDDrk3A/pehysk1wzQn3lSEUtrNeQsHI6o/8au8Un1ndaZXZl/yWQQDDW4kqGw5ty8xPUZ+DB1ZVWkFOVNAgARl0bMNCgm2kB85l66g0zHWDCKLt+xi8xQiL7tGvdq3SWpogY3pWF2AABXoNDloHEN0mzzjJ09hdAHbygaDDr/9k3uyGKH3x0qo7fx5g8GqTtM3YWRxqUqdtkjsNomq94c/PMybCGR6qRoGI0Cdr/OP6/kszDHwf87B9hpTDMNa6x6FVJSDHc9v0CWePJZpjEOAFN3GCyPFFQTA9jvy026jt43wzyeH0kPe/T0ZZdr9YzQETN1b/oAKWKoayIoiLyJtFqUKcFFJSPcMz9ISgCD5Q/jRxQwMuMHpQ8TslxZ38l+41/0V1LWwKj0IkyJVFVWzu4zhgAZXr5y9Qbsis9sStRc+LU9/FQJ/VzNQfL83l86rH/u3NiPFfqisXILSybtMCD0OoRRHfQvWFsSwgt9JCIqLpmrJXRYs679aHzTHDgitlovJyprwqrbjg5N3XNSB5FohAUJUnVMF8z+qzvb4pPhly6mj6tiSJGYbXPngN6Iv8t3mRko3PbYLrWuxMb345BxcD+j9XteUgm1j/10qrSvqq+1R+/FAFPYwLXCflZgKst2g8/rEiVQz+a3w== generated-by-azure", + }, + { + // Valid 4096 - multiline Linux newlines + Input: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDFP6r3wb/79MqRYI4dpgMwmjlrDDrk3A/pehysk1wz\n" + + "Qn3lSEUtrNeQsHI6o/8au8Un1ndaZXZl/yWQQDDW4kqGw5ty8xPUZ+DB1ZVWkFOVNAgARl0bMNCgm2kB\n" + + "85l66g0zHWDCKLt+xi8xQiL7tGvdq3SWpogY3pWF2AABXoNDloHEN0mzzjJ09hdAHbygaDDr/9k3uyGK\n" + + 
"H3x0qo7fx5g8GqTtM3YWRxqUqdtkjsNomq94c/PMybCGR6qRoGI0Cdr/OP6/kszDHwf87B9hpTDMNa6x\n" + + "6FVJSDHc9v0CWePJZpjEOAFN3GCyPFFQTA9jvy026jt43wzyeH0kPe/T0ZZdr9YzQETN1b/oAKWKoayI\n" + + "oiLyJtFqUKcFFJSPcMz9ISgCD5Q/jRxQwMuMHpQ8TslxZ38l+41/0V1LWwKj0IkyJVFVWzu4zhgAZXr5\n" + + "y9Qbsis9sStRc+LU9/FQJ/VzNQfL83l86rH/u3NiPFfqisXILSybtMCD0OoRRHfQvWFsSwgt9JCIqLpm\n" + + "rJXRYs679aHzTHDgitlovJyprwqrbjg5N3XNSB5FohAUJUnVMF8z+qzvb4pPhly6mj6tiSJGYbXPngN6\n" + + "Iv8t3mRko3PbYLrWuxMb345BxcD+j9XteUgm1j/10qrSvqq+1R+/FAFPYwLXCflZgKst2g8/rEiVQz+a\n" + + "3w== generated-by-azure", + Expected: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDFP6r3wb/79MqRYI4dpgMwmjlrDDrk3A/pehysk1wzQn3lSEUtrNeQsHI6o/8au8Un1ndaZXZl/yWQQDDW4kqGw5ty8xPUZ+DB1ZVWkFOVNAgARl0bMNCgm2kB85l66g0zHWDCKLt+xi8xQiL7tGvdq3SWpogY3pWF2AABXoNDloHEN0mzzjJ09hdAHbygaDDr/9k3uyGKH3x0qo7fx5g8GqTtM3YWRxqUqdtkjsNomq94c/PMybCGR6qRoGI0Cdr/OP6/kszDHwf87B9hpTDMNa6x6FVJSDHc9v0CWePJZpjEOAFN3GCyPFFQTA9jvy026jt43wzyeH0kPe/T0ZZdr9YzQETN1b/oAKWKoayIoiLyJtFqUKcFFJSPcMz9ISgCD5Q/jRxQwMuMHpQ8TslxZ38l+41/0V1LWwKj0IkyJVFVWzu4zhgAZXr5y9Qbsis9sStRc+LU9/FQJ/VzNQfL83l86rH/u3NiPFfqisXILSybtMCD0OoRRHfQvWFsSwgt9JCIqLpmrJXRYs679aHzTHDgitlovJyprwqrbjg5N3XNSB5FohAUJUnVMF8z+qzvb4pPhly6mj6tiSJGYbXPngN6Iv8t3mRko3PbYLrWuxMb345BxcD+j9XteUgm1j/10qrSvqq+1R+/FAFPYwLXCflZgKst2g8/rEiVQz+a3w== generated-by-azure", + }, + { + // Valid 4096 - multiline Windows Wrapped + Input: "<<~EOT\r\n" + + "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDFP6r3wb/79MqRYI4dpgMwmjlrDDrk3A/pehysk1wz\r\n" + + "Qn3lSEUtrNeQsHI6o/8au8Un1ndaZXZl/yWQQDDW4kqGw5ty8xPUZ+DB1ZVWkFOVNAgARl0bMNCgm2kB\r\n" + + "85l66g0zHWDCKLt+xi8xQiL7tGvdq3SWpogY3pWF2AABXoNDloHEN0mzzjJ09hdAHbygaDDr/9k3uyGK\r\n" + + "H3x0qo7fx5g8GqTtM3YWRxqUqdtkjsNomq94c/PMybCGR6qRoGI0Cdr/OP6/kszDHwf87B9hpTDMNa6x\r\n" + + "6FVJSDHc9v0CWePJZpjEOAFN3GCyPFFQTA9jvy026jt43wzyeH0kPe/T0ZZdr9YzQETN1b/oAKWKoayI\r\n" + + "oiLyJtFqUKcFFJSPcMz9ISgCD5Q/jRxQwMuMHpQ8TslxZ38l+41/0V1LWwKj0IkyJVFVWzu4zhgAZXr5\r\n" + + "y9Qbsis9sStRc+LU9/FQJ/VzNQfL83l86rH/u3NiPFfqisXILSybtMCD0OoRRHfQvWFsSwgt9JCIqLpm\r\n" + + "rJXRYs679aHzTHDgitlovJyprwqrbjg5N3XNSB5FohAUJUnVMF8z+qzvb4pPhly6mj6tiSJGYbXPngN6\r\n" + + "Iv8t3mRko3PbYLrWuxMb345BxcD+j9XteUgm1j/10qrSvqq+1R+/FAFPYwLXCflZgKst2g8/rEiVQz+a\r\n" + + "3w== generated-by-azure" + + "EOT", + Expected: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDFP6r3wb/79MqRYI4dpgMwmjlrDDrk3A/pehysk1wzQn3lSEUtrNeQsHI6o/8au8Un1ndaZXZl/yWQQDDW4kqGw5ty8xPUZ+DB1ZVWkFOVNAgARl0bMNCgm2kB85l66g0zHWDCKLt+xi8xQiL7tGvdq3SWpogY3pWF2AABXoNDloHEN0mzzjJ09hdAHbygaDDr/9k3uyGKH3x0qo7fx5g8GqTtM3YWRxqUqdtkjsNomq94c/PMybCGR6qRoGI0Cdr/OP6/kszDHwf87B9hpTDMNa6x6FVJSDHc9v0CWePJZpjEOAFN3GCyPFFQTA9jvy026jt43wzyeH0kPe/T0ZZdr9YzQETN1b/oAKWKoayIoiLyJtFqUKcFFJSPcMz9ISgCD5Q/jRxQwMuMHpQ8TslxZ38l+41/0V1LWwKj0IkyJVFVWzu4zhgAZXr5y9Qbsis9sStRc+LU9/FQJ/VzNQfL83l86rH/u3NiPFfqisXILSybtMCD0OoRRHfQvWFsSwgt9JCIqLpmrJXRYs679aHzTHDgitlovJyprwqrbjg5N3XNSB5FohAUJUnVMF8z+qzvb4pPhly6mj6tiSJGYbXPngN6Iv8t3mRko3PbYLrWuxMb345BxcD+j9XteUgm1j/10qrSvqq+1R+/FAFPYwLXCflZgKst2g8/rEiVQz+a3w== generated-by-azure", + }, + { + // Valid 4096 - multiline Windows Wrapped with whitespace + Input: "<<~EOT\r\n" + + "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDFP6r3wb/79MqRYI4dpgMwmjlrDDrk3A/pehysk1wz\r\n " + + "Qn3lSEUtrNeQsHI6o/8au8Un1ndaZXZl/yWQQDDW4kqGw5ty8xPUZ+DB1ZVWkFOVNAgARl0bMNCgm2kB\r\n " + + "85l66g0zHWDCKLt+xi8xQiL7tGvdq3SWpogY3pWF2AABXoNDloHEN0mzzjJ09hdAHbygaDDr/9k3uyGK\r\n" + + " H3x0qo7fx5g8GqTtM3YWRxqUqdtkjsNomq94c/PMybCGR6qRoGI0Cdr/OP6/kszDHwf87B9hpTDMNa6x\r\n" + + 
"6FVJSDHc9v0CWePJZpjEOAFN3GCyPFFQTA9jvy026jt43wzyeH0kPe/T0ZZdr9YzQETN1b/oAKWKoayI\r\n" + + "oiLyJtFqUKcFFJSPcMz9ISgCD5Q/jRxQwMuMHpQ8TslxZ38l+41/0V1LWwKj0IkyJVFVWzu4zhgAZXr5\r\n" + + "y9Qbsis9sStRc+LU9/FQJ/VzNQfL83l86rH/u3NiPFfqisXILSybtMCD0OoRRHfQvWFsSwgt9JCIqLpm\r\n" + + " rJXRYs679aHzTHDgitlovJyprwqrbjg5N3XNSB5FohAUJUnVMF8z+qzvb4pPhly6mj6tiSJGYbXPngN6\r\n" + + " Iv8t3mRko3PbYLrWuxMb345BxcD+j9XteUgm1j/10qrSvqq+1R+/FAFPYwLXCflZgKst2g8/rEiVQz+a\r\n " + + "3w== generated-by-azure" + + "EOT", + Expected: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDFP6r3wb/79MqRYI4dpgMwmjlrDDrk3A/pehysk1wzQn3lSEUtrNeQsHI6o/8au8Un1ndaZXZl/yWQQDDW4kqGw5ty8xPUZ+DB1ZVWkFOVNAgARl0bMNCgm2kB85l66g0zHWDCKLt+xi8xQiL7tGvdq3SWpogY3pWF2AABXoNDloHEN0mzzjJ09hdAHbygaDDr/9k3uyGKH3x0qo7fx5g8GqTtM3YWRxqUqdtkjsNomq94c/PMybCGR6qRoGI0Cdr/OP6/kszDHwf87B9hpTDMNa6x6FVJSDHc9v0CWePJZpjEOAFN3GCyPFFQTA9jvy026jt43wzyeH0kPe/T0ZZdr9YzQETN1b/oAKWKoayIoiLyJtFqUKcFFJSPcMz9ISgCD5Q/jRxQwMuMHpQ8TslxZ38l+41/0V1LWwKj0IkyJVFVWzu4zhgAZXr5y9Qbsis9sStRc+LU9/FQJ/VzNQfL83l86rH/u3NiPFfqisXILSybtMCD0OoRRHfQvWFsSwgt9JCIqLpmrJXRYs679aHzTHDgitlovJyprwqrbjg5N3XNSB5FohAUJUnVMF8z+qzvb4pPhly6mj6tiSJGYbXPngN6Iv8t3mRko3PbYLrWuxMb345BxcD+j9XteUgm1j/10qrSvqq+1R+/FAFPYwLXCflZgKst2g8/rEiVQz+a3w== generated-by-azure", + }, + } + + for _, tc := range cases { + t.Run(tc.Input, func(t *testing.T) { + output, err := NormaliseSSHKey(tc.Input) + if err != nil { + if !tc.Error { + t.Fatalf("expected NormaliseSSHKey to error") + } + } + if output != nil && *output != tc.Expected { + t.Fatalf("Expected %q, got %q", tc.Expected, *output) + } + }) + } +} diff --git a/azurerm/helpers/azure/storage_account.go b/azurerm/helpers/azure/storage_account.go index 590f304a6347..d98364ad0bec 100644 --- a/azurerm/helpers/azure/storage_account.go +++ b/azurerm/helpers/azure/storage_account.go @@ -14,7 +14,8 @@ func SchemaStorageAccountCorsRule(patchEnabled bool) *schema.Schema { "MERGE", "POST", "OPTIONS", - "PUT"} + "PUT", + } if patchEnabled { allowedMethods = append(allowedMethods, "PATCH") diff --git a/azurerm/helpers/validate/cosmos.go b/azurerm/helpers/validate/cosmos.go index 537269071d21..1b27a5551d87 100644 --- a/azurerm/helpers/validate/cosmos.go +++ b/azurerm/helpers/validate/cosmos.go @@ -55,11 +55,6 @@ func CosmosMaxThroughput(i interface{}, k string) (warnings []string, errors []e "%s must be a minimum of 4000", k)) } - if v > 1000000 { - errors = append(errors, fmt.Errorf( - "%s must be a maximum of 1000000", k)) - } - if v%1000 != 0 { errors = append(errors, fmt.Errorf( "%q must be set in increments of 1000", k)) diff --git a/azurerm/helpers/validate/cosmos_test.go b/azurerm/helpers/validate/cosmos_test.go index b064a09990a5..2e613d8ff994 100644 --- a/azurerm/helpers/validate/cosmos_test.go +++ b/azurerm/helpers/validate/cosmos_test.go @@ -134,7 +134,7 @@ func TestCosmosMaxThroughput(t *testing.T) { }, { Value: 1100000, - Errors: 1, + Errors: 0, }, { Value: "400", diff --git a/azurerm/helpers/validate/domain_name.go b/azurerm/helpers/validate/domain_name.go new file mode 100644 index 000000000000..7b8452f766a7 --- /dev/null +++ b/azurerm/helpers/validate/domain_name.go @@ -0,0 +1,23 @@ +package validate + +import ( + "fmt" + "regexp" +) + +// Fqdn validates that a domain name, including the host portion, is valid to RFC requirements +// e.g. 
portal.azure.com +func DomainName(i interface{}, k string) (warnings []string, errors []error) { + // + v, ok := i.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be string", k)) + return + } + // The following regexp is a good example of why pre-processing support would be nice in GoLang + if matched := regexp.MustCompile(`^(?:[_a-z0-9](?:[_a-z0-9-]{0,62}\.)|(?:[0-9]+/[0-9]{2})\.)+(?:[a-z](?:[a-z0-9-]{0,61}[a-z0-9])?)?$`).Match([]byte(v)); !matched || len(v) > 253 { + errors = append(errors, fmt.Errorf("%q must be a valid CNAME", k)) + } + + return +}
diff --git a/azurerm/helpers/validate/domain_name_test.go b/azurerm/helpers/validate/domain_name_test.go new file mode 100644 index 000000000000..d3b4ecfde40f --- /dev/null +++ b/azurerm/helpers/validate/domain_name_test.go @@ -0,0 +1,45 @@ +package validate + +import "testing" + +func TestDomainName(t *testing.T) { + cases := []struct { + Name string + Input string + Valid bool + }{ + { + Name: "random string", + Input: "dfdsdfds", + Valid: false, + }, + { + Name: "contains protocol scheme", + Input: "https://contoso.com", + Valid: false, + }, + { + Name: "too long", + Input: "this.hostname.is.definitely.going.to.be.altogether.far.too.long.for.a.valid.rfc.compatible.hostname.even.if.i.have.to.add.a.ludicrous.number.of.parts.to.this.test.case.input.string.including.some.random.character.strings.no.really.i.will.do.it.contoso.com", + Valid: false, + }, + { + Name: "valid", + Input: "mydomain.contoso.com", + Valid: true, + }, + { + Name: "subdomain valid", + Input: "subdomain.mydomain.contoso.com", + Valid: true, + }, + } + for _, tc := range cases { + _, err := DomainName(tc.Input, "") + + valid := err == nil + if valid != tc.Valid { + t.Errorf("Expected valid status %t but got %t for input %s: %+v", tc.Valid, valid, tc.Input, err) + } + } +}
diff --git a/azurerm/helpers/validate/monitor_diagnostic_setting.go b/azurerm/helpers/validate/monitor_diagnostic_setting.go new file mode 100644 index 000000000000..0be43b145161 --- /dev/null +++ b/azurerm/helpers/validate/monitor_diagnostic_setting.go @@ -0,0 +1,21 @@ +package validate + +import ( + "fmt" + "regexp" +) + +func MonitorDiagnosticSettingName(v interface{}, k string) (warnings []string, errors []error) { + value := v.(string) + if regexp.MustCompile(`[<>*%&:\\?+\/]+`).MatchString(value) { + errors = append(errors, fmt.Errorf( + "characters <, >, *, %%, &, :, \\, ?, +, / are not allowed in %q: %q", k, value)) + } + + if len(value) < 1 || len(value) > 260 { + errors = append(errors, fmt.Errorf( + "%q must be between 1 and 260 characters: %q", k, value)) + } + + return warnings, errors +}
diff --git a/azurerm/helpers/validate/monitor_diagnostic_setting_test.go b/azurerm/helpers/validate/monitor_diagnostic_setting_test.go new file mode 100644 index 000000000000..72ac2bfb79cb --- /dev/null +++ b/azurerm/helpers/validate/monitor_diagnostic_setting_test.go @@ -0,0 +1,77 @@ +package validate + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/acctest" +) + +func TestMonitorDiagnosticSettingName(t *testing.T) { + cases := []struct { + Name string + Errors int + }{ + { + Name: "somename", + Errors: 0, + }, + { + Name: "", + Errors: 1, + }, + { + Name: acctest.RandString(261), + Errors: 1, + }, + { + Name: "some<name", + Errors: 1, + }, + { + Name: "some>name", + Errors: 1, + }, + { + Name: "some*name", + Errors: 1, + }, + { + Name: "some%name", + Errors: 1, + }, + { + Name: "some&name", + Errors: 1, + }, + { + Name: "some:name", + Errors: 1, + }, + { + Name: "some\\name", + Errors: 1, + }, +
{ + Name: "some?name", + Errors: 1, + }, + { + Name: "some+name", + Errors: 1, + }, + { + Name: "some/name", + Errors: 1, + }, + } + + for _, tc := range cases { + t.Run(tc.Name, func(t *testing.T) { + _, errors := MonitorDiagnosticSettingName(tc.Name, "test") + + if len(errors) != tc.Errors { + t.Fatalf("Expected Name to return %d error(s) not %d", tc.Errors, len(errors)) + } + }) + } +} diff --git a/azurerm/helpers/validate/port_or_port_range.go b/azurerm/helpers/validate/port_or_port_range.go new file mode 100644 index 000000000000..5f0df421691a --- /dev/null +++ b/azurerm/helpers/validate/port_or_port_range.go @@ -0,0 +1,63 @@ +package validate + +import ( + "fmt" + "regexp" + "strconv" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" +) + +func PortOrPortRangeWithin(min int, max int) schema.SchemaValidateFunc { + return func(i interface{}, k string) (warnings []string, errors []error) { + v, ok := i.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) + return + } + + assertWithinRange := func(n int) error { + if n < min || n > max { + return fmt.Errorf("port %d is out of range (%d-%d)", n, min, max) + } + + return nil + } + + // Allowed format including: `num` or `num1-num2` (num1 < num2). + groups := regexp.MustCompile(`^(\d+)((-)(\d+))?$`).FindStringSubmatch(v) + if len(groups) != 5 { + errors = append(errors, fmt.Errorf("invalid format of %q", k)) + return + } + + if groups[2] == "" { + p1, _ := strconv.Atoi(groups[1]) + + if err := assertWithinRange(p1); err != nil { + errors = append(errors, err) + return + } + } else { + p1, _ := strconv.Atoi(groups[1]) + p2, _ := strconv.Atoi(groups[4]) + + if p1 >= p2 { + errors = append(errors, fmt.Errorf("beginning port (%d) should be less than ending port (%d)", p1, p2)) + return + } + + if err := assertWithinRange(p1); err != nil { + errors = append(errors, err) + return + } + + if err := assertWithinRange(p2); err != nil { + errors = append(errors, err) + return + } + } + + return nil, nil + } +} diff --git a/azurerm/helpers/validate/port_or_port_range_test.go b/azurerm/helpers/validate/port_or_port_range_test.go new file mode 100644 index 000000000000..e704e12c1545 --- /dev/null +++ b/azurerm/helpers/validate/port_or_port_range_test.go @@ -0,0 +1,51 @@ +package validate + +import ( + "testing" +) + +func TestPortOrPortRangeWithin(t *testing.T) { + testData := []struct { + input string + expected bool + }{ + { + input: "0", + expected: false, + }, + { + input: "65536", + expected: false, + }, + { + input: "1", + expected: true, + }, + { + input: "65535", + expected: true, + }, + { + input: "634", + expected: true, + }, + { + input: "1000-50000", + expected: true, + }, + { + input: "1-65535", + expected: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q..", v.input) + + _, errors := PortOrPortRangeWithin(1, 65535)(v.input, "port_or_port_range") + actual := len(errors) == 0 + if v.expected != actual { + t.Fatalf("Expected %t but got %t", v.expected, actual) + } + } +} diff --git a/azurerm/helpers/validate/time_test.go b/azurerm/helpers/validate/time_test.go index aa106d7cca18..f7b6f9ddae56 100644 --- a/azurerm/helpers/validate/time_test.go +++ b/azurerm/helpers/validate/time_test.go @@ -114,6 +114,7 @@ func TestRFC3339DateInFutureBy(t *testing.T) { }) } } + func TestISO8601Duration(t *testing.T) { cases := []struct { Value string diff --git a/azurerm/helpers/validate/web_application_firewall_policy.go 
b/azurerm/helpers/validate/web_application_firewall_policy.go index a7945b91dcdc..5dcbf69af587 100644 --- a/azurerm/helpers/validate/web_application_firewall_policy.go +++ b/azurerm/helpers/validate/web_application_firewall_policy.go @@ -25,6 +25,7 @@ var ValidateWebApplicationFirewallPolicyRuleGroupName = validation.StringInSlice "REQUEST-941-APPLICATION-ATTACK-XSS", "REQUEST-942-APPLICATION-ATTACK-SQLI", "REQUEST-943-APPLICATION-ATTACK-SESSION-FIXATION", + "REQUEST-944-APPLICATION-ATTACK-JAVA", }, false) var ValidateWebApplicationFirewallPolicyRuleSetVersion = validation.StringInSlice([]string{ diff --git a/azurerm/internal/acceptance/check/that.go b/azurerm/internal/acceptance/check/that.go new file mode 100644 index 000000000000..02f28a5cf87b --- /dev/null +++ b/azurerm/internal/acceptance/check/that.go @@ -0,0 +1,82 @@ +package check + +import ( + "regexp" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/helpers" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/types" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" +) + +type thatType struct { + // resourceName being the full resource name e.g. azurerm_foo.bar + resourceName string +} + +// Key returns a type which can be used for more fluent assertions for a given Resource +func That(resourceName string) thatType { + return thatType{ + resourceName: resourceName, + } +} + +// ExistsInAzure validates that the specified resource exists within Azure +func (t thatType) ExistsInAzure(testResource types.TestResource) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client) + return helpers.ExistsInAzure(client, testResource, t.resourceName)(s) + } +} + +// Key returns a type which can be used for more fluent assertions for a given Resource & Key combination +func (t thatType) Key(key string) thatWithKeyType { + return thatWithKeyType{ + resourceName: t.resourceName, + key: key, + } +} + +type thatWithKeyType struct { + // resourceName being the full resource name e.g. azurerm_foo.bar + resourceName string + + // key being the specific field we're querying e.g. 
bar or a nested object ala foo.0.bar + key string +} + +// DoesNotExist returns a TestCheckFunc which validates that the specific key +// does not exist on the resource +func (t thatWithKeyType) DoesNotExist() resource.TestCheckFunc { + return resource.TestCheckNoResourceAttr(t.resourceName, t.key) +} + +// Exists returns a TestCheckFunc which validates that the specific key exists on the resource +func (t thatWithKeyType) Exists() resource.TestCheckFunc { + return resource.TestCheckResourceAttrSet(t.resourceName, t.key) +} + +// IsEmpty returns a TestCheckFunc which validates that the specific key is empty on the resource +func (t thatWithKeyType) IsEmpty() resource.TestCheckFunc { + return resource.TestCheckResourceAttr(t.resourceName, t.key, "") +} + +// HasValue returns a TestCheckFunc which validates that the specific key has the +// specified value on the resource +func (t thatWithKeyType) HasValue(value string) resource.TestCheckFunc { + return resource.TestCheckResourceAttr(t.resourceName, t.key, value) +} + +// MatchesOtherKey returns a TestCheckFunc which validates that the key on this resource +// matches another other key on another resource +func (t thatWithKeyType) MatchesOtherKey(other thatWithKeyType) resource.TestCheckFunc { + return resource.TestCheckResourceAttrPair(t.resourceName, t.key, other.resourceName, other.key) +} + +// MatchesRegex returns a TestCheckFunc which validates that the key on this resource matches +// the given regular expression +func (t thatWithKeyType) MatchesRegex(r *regexp.Regexp) resource.TestCheckFunc { + return resource.TestMatchResourceAttr(t.resourceName, t.key, r) +} diff --git a/azurerm/internal/acceptance/helpers/check_destroyed.go b/azurerm/internal/acceptance/helpers/check_destroyed.go new file mode 100644 index 000000000000..7bd6aeb2563e --- /dev/null +++ b/azurerm/internal/acceptance/helpers/check_destroyed.go @@ -0,0 +1,36 @@ +package helpers + +import ( + "fmt" + + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/types" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" +) + +// CheckDestroyedFunc returns a TestCheckFunc which validates the resource no longer exists +func CheckDestroyedFunc(client *clients.Client, testResource types.TestResource, resourceType, resourceName string) func(state *terraform.State) error { + return func(state *terraform.State) error { + ctx := client.StopContext + + for label, resourceState := range state.RootModule().Resources { + if resourceState.Type != resourceType { + continue + } + if label != resourceName { + continue + } + + // Destroy is unconcerned with an error checking the status, since this is going to be "not found" + result, err := testResource.Exists(ctx, client, resourceState.Primary) + if result == nil && err == nil { + return fmt.Errorf("should have either an error or a result when checking if %q has been destroyed", resourceName) + } + if result != nil && *result { + return fmt.Errorf("%q still exists", resourceName) + } + } + + return nil + } +} diff --git a/azurerm/internal/acceptance/helpers/delete.go b/azurerm/internal/acceptance/helpers/delete.go new file mode 100644 index 000000000000..7d9ea74e703b --- /dev/null +++ b/azurerm/internal/acceptance/helpers/delete.go @@ -0,0 +1,36 @@ +package helpers + +import ( + "fmt" + + "github.com/hashicorp/terraform-plugin-sdk/terraform" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/types" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" +) + +// DeleteResourceFunc returns a TestCheckFunc which deletes the resource within Azure +// this is only used within the Internal +func DeleteResourceFunc(client *clients.Client, testResource types.TestResourceVerifyingRemoved, resourceName string) func(state *terraform.State) error { + return func(state *terraform.State) error { + ctx := client.StopContext + + rs, ok := state.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("%q was not found in the state", resourceName) + } + + result, err := testResource.Destroy(ctx, client, rs.Primary) + if err != nil { + return fmt.Errorf("running destroy func for %q: %+v", resourceName, err) + } + if result == nil { + return fmt.Errorf("received nil for destroy result for %q", resourceName) + } + + if !*result { + return fmt.Errorf("error deleting %q but no error", resourceName) + } + + return nil + } +} diff --git a/azurerm/internal/acceptance/helpers/exists.go b/azurerm/internal/acceptance/helpers/exists.go new file mode 100644 index 000000000000..0d21e97d823f --- /dev/null +++ b/azurerm/internal/acceptance/helpers/exists.go @@ -0,0 +1,35 @@ +package helpers + +import ( + "fmt" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/types" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" +) + +func ExistsInAzure(client *clients.Client, testResource types.TestResource, resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + ctx := client.StopContext + + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("%q was not found in the state", resourceName) + } + + result, err := testResource.Exists(ctx, client, rs.Primary) + if err != nil { + return fmt.Errorf("running exists func for %q: %+v", resourceName, err) + } + if result == nil { + return fmt.Errorf("received nil for exists for %q", resourceName) + } + + if !*result { + return fmt.Errorf("%q did not exist", resourceName) + } + + return nil + } +} diff --git a/azurerm/internal/acceptance/providers.go b/azurerm/internal/acceptance/providers.go index 984d49690436..17201359a1d5 100644 --- a/azurerm/internal/acceptance/providers.go +++ b/azurerm/internal/acceptance/providers.go @@ -13,8 +13,10 @@ import ( var once sync.Once func EnsureProvidersAreInitialised() { - // NOTE: (@tombuildsstuff) - opting-out of Binary Testing for the moment - os.Setenv("TF_DISABLE_BINARY_TESTING", "true") + if !enableBinaryTesting { + // NOTE: (@tombuildsstuff) - opting-out of Binary Testing for the moment + os.Setenv("TF_DISABLE_BINARY_TESTING", "true") + } once.Do(func() { azureProvider := provider.TestAzureProvider().(*schema.Provider) @@ -27,7 +29,7 @@ func EnsureProvidersAreInitialised() { // NOTE: (@tombuildsstuff) - intentionally not calling these as Binary Testing // is Disabled - //binarytestfuntime.UseBinaryDriver("azurerm", provider.TestAzureProvider) - //binarytestfuntime.UseBinaryDriver("azuread", azuread.Provider) + // binarytestfuntime.UseBinaryDriver("azurerm", provider.TestAzureProvider) + // binarytestfuntime.UseBinaryDriver("azuread", azuread.Provider) }) } diff --git a/azurerm/internal/acceptance/steps.go b/azurerm/internal/acceptance/steps.go index 
e6621f46b1f2..814963d86b0e 100644 --- a/azurerm/internal/acceptance/steps.go +++ b/azurerm/internal/acceptance/steps.go @@ -1,18 +1,23 @@ package acceptance -import "github.com/hashicorp/terraform-plugin-sdk/helper/resource" +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/helpers" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/types" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" +)
type DisappearsStepData struct { // Config is a function which returns the Terraform Configuration which should be used for this step Config func(data TestData) string - // CheckExists is a function which confirms that the given Resource in - // the state exists - CheckExists func(resourceName string) resource.TestCheckFunc - - // Destroy is a function which looks up the given Resource in the State - // and then ensures that it's deleted - Destroy func(resourceName string) resource.TestCheckFunc + // TestResource is a reference to a TestResource which can destroy the resource + // to enable a Disappears step + TestResource types.TestResourceVerifyingRemoved }
// DisappearsStep returns a Test Step which first confirms the resource exists @@ -23,19 +28,50 @@ func (td TestData) DisappearsStep(data DisappearsStepData) resource.TestStep { return resource.TestStep{ Config: config, Check: resource.ComposeTestCheckFunc( - data.CheckExists(td.ResourceName), - data.Destroy(td.ResourceName), + func(state *terraform.State) error { + client := buildClient() + return helpers.ExistsInAzure(client, data.TestResource, td.ResourceName)(state) + }, + func(state *terraform.State) error { + client := buildClient() + return helpers.DeleteResourceFunc(client, data.TestResource, td.ResourceName)(state) + }, ), ExpectNonEmptyPlan: true, } }
+type ClientCheckFunc func(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) error + +// CheckWithClient returns a TestCheckFunc which will call a ClientCheckFunc +// with the provider context and clients +func (td TestData) CheckWithClient(check ClientCheckFunc) resource.TestCheckFunc { + return resource.ComposeTestCheckFunc( + func(state *terraform.State) error { + rs, ok := state.RootModule().Resources[td.ResourceName] + if !ok { + return fmt.Errorf("Resource not found: %s", td.ResourceName) + } + + clients := buildClient() + return check(clients.StopContext, clients, rs.Primary) + }, + ) +} + // ImportStep returns a Test Step which Imports the Resource, optionally // ignoring any fields which may not be imported (for example, as they're // not returned from the API) func (td TestData) ImportStep(ignore ...string) resource.TestStep { + return td.ImportStepFor(td.ResourceName, ignore...)
+} + +// ImportStepFor returns a Test Step which Imports a given resource by name, +// optionally ignoring any fields which may not be imported (for example, as they're +// not returned from the API) +func (td TestData) ImportStepFor(resourceName string, ignore ...string) resource.TestStep { step := resource.TestStep{ - ResourceName: td.ResourceName, + ResourceName: resourceName, ImportState: true, ImportStateVerify: true, } diff --git a/azurerm/internal/acceptance/testcase.go b/azurerm/internal/acceptance/testcase.go new file mode 100644 index 000000000000..98702b5175f4 --- /dev/null +++ b/azurerm/internal/acceptance/testcase.go @@ -0,0 +1,77 @@ +package acceptance + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/helpers" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/types" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" +) + +// NOTE: when Binary Testing is enabled the Check functions will need to build a client rather than relying on the +// shared one. For the moment we init the shared client when Binary Testing is Enabled & Disabled - but this needs +// fixing when we move to Binary Testing so that we can test across provider instances +var enableBinaryTesting = false + +// lintignore:AT001 +func (td TestData) DataSourceTest(t *testing.T, steps []resource.TestStep) { + // DataSources don't need a check destroy - however since this is a wrapper function + // and not matching the ignore pattern `XXX_data_source_test.go`, this needs to be explicitly opted out + testCase := resource.TestCase{ + PreCheck: func() { PreCheck(t) }, + Steps: steps, + } + + td.runAcceptanceTest(t, testCase) +} + +func (td TestData) ResourceTest(t *testing.T, testResource types.TestResource, steps []resource.TestStep) { + testCase := resource.TestCase{ + PreCheck: func() { PreCheck(t) }, + CheckDestroy: func(s *terraform.State) error { + client := buildClient() + return helpers.CheckDestroyedFunc(client, testResource, td.ResourceType, td.ResourceName)(s) + }, + Steps: steps, + } + + td.runAcceptanceTest(t, testCase) +} + +func RunTestsInSequence(t *testing.T, tests map[string]map[string]func(t *testing.T)) { + for group, m := range tests { + m := m + t.Run(group, func(t *testing.T) { + for name, tc := range m { + tc := tc + t.Run(name, func(t *testing.T) { + tc(t) + }) + } + }) + } +} + +func buildClient() *clients.Client { + // if enableBinaryTesting { + // TODO: build up a client on demand + // NOTE: this'll want caching/a singleton, and likely RP registration etc disabled, since otherwise this'll become + // extremely expensive - and this doesn't need access to the provider feature toggles + // } + + return AzureProvider.Meta().(*clients.Client) +} + +func (td TestData) runAcceptanceTest(t *testing.T, testCase resource.TestCase) { + if enableBinaryTesting { + testCase.ProviderFactories = map[string]terraform.ResourceProviderFactory{ + // TODO: switch this out for dynamic initialization? 
+ "azurerm": terraform.ResourceProviderFactoryFixed(AzureProvider), + } + } + testCase.Providers = SupportedProviders + + resource.ParallelTest(t, testCase) +} diff --git a/azurerm/internal/acceptance/testing.go b/azurerm/internal/acceptance/testing.go index 05c7c22a6490..44a35de5a08a 100644 --- a/azurerm/internal/acceptance/testing.go +++ b/azurerm/internal/acceptance/testing.go @@ -14,8 +14,10 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/terraform" ) -var AzureProvider *schema.Provider -var SupportedProviders map[string]terraform.ResourceProvider +var ( + AzureProvider *schema.Provider + SupportedProviders map[string]terraform.ResourceProvider +) func PreCheck(t *testing.T) { variables := []string{ @@ -65,7 +67,7 @@ func GetAuthConfig(t *testing.T) *authentication.Config { TenantID: os.Getenv("ARM_TENANT_ID"), ClientSecret: os.Getenv("ARM_CLIENT_SECRET"), Environment: environment, - MetadataURL: os.Getenv("ARM_METADATA_URL"), + MetadataHost: os.Getenv("ARM_METADATA_HOST"), // we intentionally only support Client Secret auth for tests (since those variables are used all over) SupportsClientSecretAuth: true, diff --git a/azurerm/internal/acceptance/types/resource.go b/azurerm/internal/acceptance/types/resource.go new file mode 100644 index 000000000000..9005d03add6b --- /dev/null +++ b/azurerm/internal/acceptance/types/resource.go @@ -0,0 +1,17 @@ +package types + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" +) + +type TestResource interface { + Exists(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error) +} + +type TestResourceVerifyingRemoved interface { + TestResource + Destroy(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error) +} diff --git a/azurerm/internal/clients/auth.go b/azurerm/internal/clients/auth.go index 7575c27493d1..abc16f8e2000 100644 --- a/azurerm/internal/clients/auth.go +++ b/azurerm/internal/clients/auth.go @@ -13,11 +13,12 @@ type ResourceManagerAccount struct { ClientId string Environment azure.Environment ObjectId string + SkipResourceProviderRegistration bool SubscriptionId string TenantId string } -func NewResourceManagerAccount(ctx context.Context, config authentication.Config, env azure.Environment) (*ResourceManagerAccount, error) { +func NewResourceManagerAccount(ctx context.Context, config authentication.Config, env azure.Environment, skipResourceProviderRegistration bool) (*ResourceManagerAccount, error) { objectId := "" // TODO remove this when we confirm that MSI no longer returns nil with getAuthenticatedObjectID @@ -35,6 +36,7 @@ func NewResourceManagerAccount(ctx context.Context, config authentication.Config Environment: env, ObjectId: objectId, TenantId: config.TenantID, + SkipResourceProviderRegistration: skipResourceProviderRegistration, SubscriptionId: config.SubscriptionID, } return &account, nil diff --git a/azurerm/internal/clients/azuread/aad_client.go b/azurerm/internal/clients/azuread/aad_client.go new file mode 100644 index 000000000000..90dcec4bcb30 --- /dev/null +++ b/azurerm/internal/clients/azuread/aad_client.go @@ -0,0 +1,18 @@ +package azuread + +import ( + "github.com/Azure/azure-sdk-for-go/services/graphrbac/1.6/graphrbac" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/common" +) + +type Client struct { + GroupsClient *graphrbac.GroupsClient +} + +func NewClient(o *common.ClientOptions) *Client { 
+ aadGroupsClient := graphrbac.NewGroupsClientWithBaseURI(o.GraphEndpoint, o.TenantID) + o.ConfigureClient(&aadGroupsClient.Client, o.GraphAuthorizer) + return &Client{ + GroupsClient: &aadGroupsClient, + } +} diff --git a/azurerm/internal/clients/builder.go b/azurerm/internal/clients/builder.go index 0d102beda525..5de50cd61212 100644 --- a/azurerm/internal/clients/builder.go +++ b/azurerm/internal/clients/builder.go @@ -3,13 +3,17 @@ package clients import ( "context" "fmt" + "log" "strings" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" "github.com/hashicorp/go-azure-helpers/authentication" "github.com/hashicorp/go-azure-helpers/sender" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/common" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/features" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceproviders" ) type ClientBuilder struct { @@ -38,7 +42,7 @@ func Build(ctx context.Context, builder ClientBuilder) (*Client, error) { return nil, fmt.Errorf(azureStackEnvironmentError) } - isAzureStack, err := authentication.IsEnvironmentAzureStack(ctx, builder.AuthConfig.MetadataURL, builder.AuthConfig.Environment) + isAzureStack, err := authentication.IsEnvironmentAzureStack(ctx, builder.AuthConfig.MetadataHost, builder.AuthConfig.Environment) if err != nil { return nil, err } @@ -46,17 +50,13 @@ func Build(ctx context.Context, builder ClientBuilder) (*Client, error) { return nil, fmt.Errorf(azureStackEnvironmentError) } - env, err := authentication.AzureEnvironmentByNameFromEndpoint(ctx, builder.AuthConfig.MetadataURL, builder.AuthConfig.Environment) + env, err := authentication.AzureEnvironmentByNameFromEndpoint(ctx, builder.AuthConfig.MetadataHost, builder.AuthConfig.Environment) if err != nil { return nil, err } - if features.EnhancedValidationEnabled() { - location.CacheSupportedLocations(ctx, env) - } - // client declarations: - account, err := NewResourceManagerAccount(ctx, *builder.AuthConfig, *env) + account, err := NewResourceManagerAccount(ctx, *builder.AuthConfig, *env, builder.SkipProviderRegistration) if err != nil { return nil, fmt.Errorf("Error building account: %+v", err) } @@ -98,9 +98,14 @@ func Build(ctx context.Context, builder ClientBuilder) (*Client, error) { } // Synapse Endpoints - synapseAuth, err := builder.AuthConfig.GetAuthorizationToken(sender, oauthConfig, env.ResourceIdentifiers.Synapse) - if err != nil { - return nil, err + var synapseAuth autorest.Authorizer = nil + if env.ResourceIdentifiers.Synapse != azure.NotAvailable { + synapseAuth, err = builder.AuthConfig.GetAuthorizationToken(sender, oauthConfig, env.ResourceIdentifiers.Synapse) + if err != nil { + return nil, err + } + } else { + log.Printf("[DEBUG] Skipping building the Synapse Authorizer since this is not supported in the current Azure Environment") } // Key Vault Endpoints @@ -130,5 +135,10 @@ func Build(ctx context.Context, builder ClientBuilder) (*Client, error) { return nil, fmt.Errorf("Error building Client: %+v", err) } + if features.EnhancedValidationEnabled() { + location.CacheSupportedLocations(ctx, env) + resourceproviders.CacheSupportedProviders(ctx, client.Resource.ProvidersClient) + } + return &client, nil } diff --git a/azurerm/internal/clients/client.go b/azurerm/internal/clients/client.go index 53b09ad4c031..9cbde8eafded 100644 --- 
a/azurerm/internal/clients/client.go +++ b/azurerm/internal/clients/client.go @@ -3,7 +3,10 @@ package clients import ( "context" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients/azuread" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/common" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/features" advisor "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/advisor/client" @@ -11,10 +14,10 @@ import ( apiManagement "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/apimanagement/client" appConfiguration "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/appconfiguration/client" applicationInsights "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/applicationinsights/client" - appPlatform "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/appplatform/client" attestation "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/attestation/client" authorization "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/authorization/client" automation "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/automation/client" + azureStackHCI "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/azurestackhci/client" batch "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/batch/client" blueprints "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/blueprints/client" bot "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/bot/client" @@ -33,9 +36,11 @@ import ( desktopvirtualization "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/desktopvirtualization/client" devspace "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/devspace/client" devtestlabs "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/devtestlabs/client" + digitaltwins "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/digitaltwins/client" dns "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/client" eventgrid "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventgrid/client" eventhub "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventhub/client" + firewall "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/firewall/client" frontdoor "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/frontdoor/client" hdinsight "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hdinsight/client" healthcare "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/healthcare/client" @@ -47,6 +52,7 @@ import ( keyvault "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/keyvault/client" kusto "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/kusto/client" lighthouse 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/lighthouse/client" + loadbalancers "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/client" loganalytics "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/client" logic "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/logic/client" machinelearning "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/machinelearning/client" @@ -80,6 +86,7 @@ import ( serviceFabric "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/servicefabric/client" serviceFabricMesh "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/servicefabricmesh/client" signalr "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/signalr/client" + appPlatform "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/springcloud/client" sql "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/sql/client" storage "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/storage/client" streamAnalytics "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/streamanalytics/client" @@ -105,6 +112,8 @@ type Client struct { Attestation *attestation.Client Authorization *authorization.Client Automation *automation.Client + AzureAD *azuread.Client + AzureStackHCI *azureStackHCI.Client Batch *batch.Client Blueprints *blueprints.Client Bot *bot.Client @@ -123,9 +132,11 @@ type Client struct { DesktopVirtualization *desktopvirtualization.Client DevSpace *devspace.Client DevTestLabs *devtestlabs.Client + DigitalTwins *digitaltwins.Client Dns *dns.Client EventGrid *eventgrid.Client Eventhub *eventhub.Client + Firewall *firewall.Client Frontdoor *frontdoor.Client HPCCache *hpccache.Client HSM *hsm.Client @@ -137,6 +148,7 @@ type Client struct { KeyVault *keyvault.Client Kusto *kusto.Client Lighthouse *lighthouse.Client + LoadBalancers *loadbalancers.Client LogAnalytics *loganalytics.Client Logic *logic.Client MachineLearning *machinelearning.Client @@ -183,6 +195,8 @@ type Client struct { func (client *Client) Build(ctx context.Context, o *common.ClientOptions) error { autorest.Count429AsRetry = false + // Disable the Azure SDK for Go's validation since it's unhelpful for our use-case + validation.Disabled = true client.Features = o.Features client.StopContext = ctx @@ -196,6 +210,8 @@ func (client *Client) Build(ctx context.Context, o *common.ClientOptions) error client.Attestation = attestation.NewClient(o) client.Authorization = authorization.NewClient(o) client.Automation = automation.NewClient(o) + client.AzureAD = azuread.NewClient(o) + client.AzureStackHCI = azureStackHCI.NewClient(o) client.Batch = batch.NewClient(o) client.Blueprints = blueprints.NewClient(o) client.Bot = bot.NewClient(o) @@ -214,9 +230,11 @@ func (client *Client) Build(ctx context.Context, o *common.ClientOptions) error client.DesktopVirtualization = desktopvirtualization.NewClient(o) client.DevSpace = devspace.NewClient(o) client.DevTestLabs = devtestlabs.NewClient(o) + client.DigitalTwins = digitaltwins.NewClient(o) client.Dns = dns.NewClient(o) client.EventGrid = eventgrid.NewClient(o) client.Eventhub = eventhub.NewClient(o) + client.Firewall = firewall.NewClient(o) client.Frontdoor = frontdoor.NewClient(o) 
client.HPCCache = hpccache.NewClient(o) client.HSM = hsm.NewClient(o) @@ -229,6 +247,7 @@ func (client *Client) Build(ctx context.Context, o *common.ClientOptions) error client.Kusto = kusto.NewClient(o) client.Lighthouse = lighthouse.NewClient(o) client.LogAnalytics = loganalytics.NewClient(o) + client.LoadBalancers = loadbalancers.NewClient(o) client.Logic = logic.NewClient(o) client.MachineLearning = machinelearning.NewClient(o) client.Maintenance = maintenance.NewClient(o) diff --git a/azurerm/internal/features/three_point_oh.go b/azurerm/internal/features/three_point_oh.go index 9b5099609cd1..2e51b4c69e72 100644 --- a/azurerm/internal/features/three_point_oh.go +++ b/azurerm/internal/features/three_point_oh.go @@ -1,5 +1,6 @@ package features +// nolint gocritic // DeprecatedInThreePointOh returns the deprecation message if the provider // is running in 3.0 mode - otherwise is returns an empty string (such that // this deprecation should be ignored). diff --git a/azurerm/internal/features/user_flags.go b/azurerm/internal/features/user_flags.go index cccbff633907..009a73fe2dc5 100644 --- a/azurerm/internal/features/user_flags.go +++ b/azurerm/internal/features/user_flags.go @@ -10,6 +10,7 @@ type UserFeatures struct { type VirtualMachineFeatures struct { DeleteOSDiskOnDeletion bool + GracefulShutdown bool } type VirtualMachineScaleSetFeatures struct { diff --git a/azurerm/internal/location/normalize.go b/azurerm/internal/location/normalize.go index a85f3d4139f4..dd420edfb2fb 100644 --- a/azurerm/internal/location/normalize.go +++ b/azurerm/internal/location/normalize.go @@ -5,7 +5,7 @@ import "strings" // Normalize transforms the human readable Azure Region/Location names (e.g. `West US`) // into the canonical value to allow comparisons between user-code and API Responses func Normalize(input string) string { - return strings.Replace(strings.ToLower(input), " ", "", -1) + return strings.ReplaceAll(strings.ToLower(input), " ", "") } // NormalizeNilable normalizes the Location field even if it's nil to ensure this field diff --git a/azurerm/internal/location/supported.go b/azurerm/internal/location/supported.go index fed426e03d83..46ba0a4ce7f8 100644 --- a/azurerm/internal/location/supported.go +++ b/azurerm/internal/location/supported.go @@ -5,7 +5,6 @@ import ( "log" "github.com/Azure/go-autorest/autorest/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/sdk" ) // supportedLocations can be (validly) nil - as such this shouldn't be relied on @@ -14,7 +13,7 @@ var supportedLocations *[]string // CacheSupportedLocations attempts to retrieve the supported locations from the Azure MetaData Service // and caches them, for used in enhanced validation func CacheSupportedLocations(ctx context.Context, env *azure.Environment) { - locs, err := sdk.AvailableAzureLocations(ctx, env) + locs, err := availableAzureLocations(ctx, env) if err != nil { log.Printf("[DEBUG] error retrieving locations: %s. Enhanced validation will be unavailable", err) return diff --git a/azurerm/internal/location/supported_azure.go b/azurerm/internal/location/supported_azure.go new file mode 100644 index 000000000000..fe5953b51cfb --- /dev/null +++ b/azurerm/internal/location/supported_azure.go @@ -0,0 +1,94 @@ +package location + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "strings" + + "github.com/Azure/go-autorest/autorest/azure" +) + +type SupportedLocations struct { + // Locations is a list of Locations which are supported on this Azure Endpoint. 
+ // This could be nil when the user is offline, or the Azure MetaData Service does not have this + // information and as such this should be used as best-effort, rather than guaranteed + Locations *[]string +} + +type cloudEndpoint struct { + Endpoint string `json:"endpoint"` + Locations *[]string `json:"locations"` +} + +type metaDataResponse struct { + CloudEndpoint map[string]cloudEndpoint `json:"cloudEndpoint"` +} + +// availableAzureLocations returns a list of the Azure Locations which are available on the specified endpoint +func availableAzureLocations(ctx context.Context, env *azure.Environment) (*SupportedLocations, error) { + // e.g. https://management.azure.com/ but we need management.azure.com + endpoint := strings.TrimPrefix(env.ResourceManagerEndpoint, "https://") + endpoint = strings.TrimSuffix(endpoint, "/") + + uri := fmt.Sprintf("https://%s//metadata/endpoints?api-version=2018-01-01", endpoint) + client := http.Client{ + Transport: &http.Transport{ + Proxy: http.ProxyFromEnvironment, + }, + } + req, err := http.NewRequestWithContext(ctx, "GET", uri, nil) + if err != nil { + return nil, err + } + resp, err := client.Do(req) + if err != nil { + return nil, fmt.Errorf("retrieving supported locations from Azure MetaData service: %+v", err) + } + var out metaDataResponse + if err := json.NewDecoder(resp.Body).Decode(&out); err != nil { + return nil, fmt.Errorf("deserializing JSON from Azure MetaData service: %+v", err) + } + + var locations *[]string + for _, v := range out.CloudEndpoint { + // one of the endpoints on this endpoint should reference itself + // however this is best-effort, so if it doesn't, it's not the end of the world + if strings.EqualFold(v.Endpoint, endpoint) { + locations = v.Locations + } + } + + // TODO: remove this once Microsoft fixes the API + // the Azure API returns the india locations the wrong way around + // e.g. 
'southindia' is returned as 'indiasouth' + // so we need to conditionally switch these out until Microsoft fixes the API + // $ az account list-locations -o table | grep india + // Central India centralindia (Asia Pacific) Central India + // South India southindia (Asia Pacific) South India + // West India westindia (Asia Pacific) West India + if env.Name == azure.PublicCloud.Name && locations != nil { + out := *locations + out = switchLocationIfExists("indiacentral", "centralindia", out) + out = switchLocationIfExists("indiasouth", "southindia", out) + out = switchLocationIfExists("indiawest", "westindia", out) + locations = &out + } + + return &SupportedLocations{ + Locations: locations, + }, nil +} + +func switchLocationIfExists(find, replace string, locations []string) []string { + out := locations + + for i, v := range out { + if v == find { + out[i] = replace + } + } + + return locations +} diff --git a/azurerm/internal/location/validation_test.go b/azurerm/internal/location/validation_test.go index f9fb1e36fab1..ab3e07267234 100644 --- a/azurerm/internal/location/validation_test.go +++ b/azurerm/internal/location/validation_test.go @@ -176,49 +176,51 @@ func TestEnhancedValidationEnabled(t *testing.T) { } } -var chinaLocations = []string{"chinaeast", "chinanorth", "chinanorth2", "chinaeast2"} -var publicLocations = []string{ - "westus", - "westus2", - "eastus", - "centralus", - "southcentralus", - "northcentralus", - "westcentralus", - "eastus2", - "brazilsouth", - "brazilus", - "northeurope", - "westeurope", - "eastasia", - "southeastasia", - "japanwest", - "japaneast", - "koreacentral", - "koreasouth", - "indiasouth", - "indiawest", - "indiacentral", - "australiaeast", - "australiasoutheast", - "canadacentral", - "canadaeast", - "uknorth", - "uksouth2", - "uksouth", - "ukwest", - "francecentral", - "francesouth", - "australiacentral", - "australiacentral2", - "uaecentral", - "uaenorth", - "southafricanorth", - "southafricawest", - "switzerlandnorth", - "switzerlandwest", - "germanynorth", - "germanywestcentral", - "norwayeast", - "norwaywest", -} +var ( + chinaLocations = []string{"chinaeast", "chinanorth", "chinanorth2", "chinaeast2"} + publicLocations = []string{ + "westus", + "westus2", + "eastus", + "centralus", + "southcentralus", + "northcentralus", + "westcentralus", + "eastus2", + "brazilsouth", + "brazilus", + "northeurope", + "westeurope", + "eastasia", + "southeastasia", + "japanwest", + "japaneast", + "koreacentral", + "koreasouth", + "indiasouth", + "indiawest", + "indiacentral", + "australiaeast", + "australiasoutheast", + "canadacentral", + "canadaeast", + "uknorth", + "uksouth2", + "uksouth", + "ukwest", + "francecentral", + "francesouth", + "australiacentral", + "australiacentral2", + "uaecentral", + "uaenorth", + "southafricanorth", + "southafricawest", + "switzerlandnorth", + "switzerlandwest", + "germanynorth", + "germanywestcentral", + "norwayeast", + "norwaywest", + } +) diff --git a/azurerm/internal/provider/features.go b/azurerm/internal/provider/features.go index 7987aae295c3..04d7a4ee1f1e 100644 --- a/azurerm/internal/provider/features.go +++ b/azurerm/internal/provider/features.go @@ -19,7 +19,6 @@ func schemaFeatures(supportLegacyTestSuite bool) *schema.Schema { Type: schema.TypeBool, Optional: true, }, - "purge_soft_delete_on_destroy": { Type: schema.TypeBool, Optional: true, @@ -64,7 +63,11 @@ func schemaFeatures(supportLegacyTestSuite bool) *schema.Schema { Schema: map[string]*schema.Schema{ "delete_os_disk_on_deletion": { Type: schema.TypeBool, - 
Required: true, + Optional: true, + }, + "graceful_shutdown": { + Type: schema.TypeBool, + Optional: true, }, }, }, @@ -124,6 +127,7 @@ func expandFeatures(input []interface{}) features.UserFeatures { }, VirtualMachine: features.VirtualMachineFeatures{ DeleteOSDiskOnDeletion: true, + GracefulShutdown: false, }, VirtualMachineScaleSet: features.VirtualMachineScaleSetFeatures{ RollInstancesWhenRequired: true, @@ -176,6 +180,9 @@ func expandFeatures(input []interface{}) features.UserFeatures { if v, ok := virtualMachinesRaw["delete_os_disk_on_deletion"]; ok { features.VirtualMachine.DeleteOSDiskOnDeletion = v.(bool) } + if v, ok := virtualMachinesRaw["graceful_shutdown"]; ok { + features.VirtualMachine.GracefulShutdown = v.(bool) + } } } diff --git a/azurerm/internal/provider/features_test.go b/azurerm/internal/provider/features_test.go index b5b8d320eb69..0cd7ee892d82 100644 --- a/azurerm/internal/provider/features_test.go +++ b/azurerm/internal/provider/features_test.go @@ -59,6 +59,7 @@ func TestExpandFeatures(t *testing.T) { "virtual_machine": []interface{}{ map[string]interface{}{ "delete_os_disk_on_deletion": true, + "graceful_shutdown": true, }, }, "virtual_machine_scale_set": []interface{}{ @@ -81,6 +82,7 @@ func TestExpandFeatures(t *testing.T) { }, VirtualMachine: features.VirtualMachineFeatures{ DeleteOSDiskOnDeletion: true, + GracefulShutdown: true, }, VirtualMachineScaleSet: features.VirtualMachineScaleSetFeatures{ RollInstancesWhenRequired: true, @@ -94,6 +96,7 @@ func TestExpandFeatures(t *testing.T) { "virtual_machine": []interface{}{ map[string]interface{}{ "delete_os_disk_on_deletion": false, + "graceful_shutdown": false, }, }, "network_locking": []interface{}{ @@ -132,6 +135,7 @@ func TestExpandFeatures(t *testing.T) { }, VirtualMachine: features.VirtualMachineFeatures{ DeleteOSDiskOnDeletion: false, + GracefulShutdown: false, }, VirtualMachineScaleSet: features.VirtualMachineScaleSetFeatures{ RollInstancesWhenRequired: false, @@ -366,16 +370,18 @@ func TestExpandFeaturesVirtualMachine(t *testing.T) { Expected: features.UserFeatures{ VirtualMachine: features.VirtualMachineFeatures{ DeleteOSDiskOnDeletion: true, + GracefulShutdown: false, }, }, }, { - Name: "Delete OS Disk Enabled", + Name: "Delete OS Disk and Graceful Shutdown Enabled", Input: []interface{}{ map[string]interface{}{ "virtual_machine": []interface{}{ map[string]interface{}{ "delete_os_disk_on_deletion": true, + "graceful_shutdown": true, }, }, }, @@ -383,16 +389,18 @@ func TestExpandFeaturesVirtualMachine(t *testing.T) { Expected: features.UserFeatures{ VirtualMachine: features.VirtualMachineFeatures{ DeleteOSDiskOnDeletion: true, + GracefulShutdown: true, }, }, }, { - Name: "Delete OS Disk Disabled", + Name: "Delete OS Disk and Graceful Shutdown Disabled", Input: []interface{}{ map[string]interface{}{ "virtual_machine": []interface{}{ map[string]interface{}{ "delete_os_disk_on_deletion": false, + "graceful_shutdown": false, }, }, }, @@ -400,6 +408,7 @@ func TestExpandFeaturesVirtualMachine(t *testing.T) { Expected: features.UserFeatures{ VirtualMachine: features.VirtualMachineFeatures{ DeleteOSDiskOnDeletion: false, + GracefulShutdown: false, }, }, }, diff --git a/azurerm/internal/provider/provider.go b/azurerm/internal/provider/provider.go index aeb45c8a7932..923196d5bdea 100644 --- a/azurerm/internal/provider/provider.go +++ b/azurerm/internal/provider/provider.go @@ -12,6 +12,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/terraform" 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceproviders" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/sdk" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) @@ -25,7 +26,7 @@ func TestAzureProvider() terraform.ResourceProvider { func azureProvider(supportLegacyTestSuite bool) terraform.ResourceProvider { // avoids this showing up in test output - var debugLog = func(f string, v ...interface{}) { + debugLog := func(f string, v ...interface{}) { if os.Getenv("TF_LOG") == "" { return } @@ -39,7 +40,43 @@ func azureProvider(supportLegacyTestSuite bool) terraform.ResourceProvider { dataSources := make(map[string]*schema.Resource) resources := make(map[string]*schema.Resource) - for _, service := range SupportedServices() { + + // first handle the typed services + for _, service := range SupportedTypedServices() { + debugLog("[DEBUG] Registering Data Sources for %q..", service.Name()) + for _, ds := range service.DataSources() { + key := ds.ResourceType() + if existing := dataSources[key]; existing != nil { + panic(fmt.Sprintf("An existing Data Source exists for %q", key)) + } + + wrapper := sdk.NewDataSourceWrapper(ds) + dataSource, err := wrapper.DataSource() + if err != nil { + panic(fmt.Errorf("creating Wrapper for Data Source %q: %+v", key, err)) + } + + dataSources[key] = dataSource + } + + debugLog("[DEBUG] Registering Resources for %q..", service.Name()) + for _, r := range service.Resources() { + key := r.ResourceType() + if existing := resources[key]; existing != nil { + panic(fmt.Sprintf("An existing Resource exists for %q", key)) + } + + wrapper := sdk.NewResourceWrapper(r) + resource, err := wrapper.Resource() + if err != nil { + panic(fmt.Errorf("creating Wrapper for Resource %q: %+v", key, err)) + } + resources[key] = resource + } + } + + // then handle the untyped services + for _, service := range SupportedUntypedServices() { debugLog("[DEBUG] Registering Data Sources for %q..", service.Name()) for k, v := range service.SupportedDataSources() { if existing := dataSources[k]; existing != nil { @@ -236,7 +273,7 @@ func providerConfigure(p *schema.Provider) schema.ConfigureFunc { TenantID: d.Get("tenant_id").(string), AuxiliaryTenantIDs: auxTenants, Environment: d.Get("environment").(string), - MetadataURL: metadataHost, // TODO: rename this in Helpers too + MetadataHost: metadataHost, MsiEndpoint: d.Get("msi_endpoint").(string), ClientCertPassword: d.Get("client_certificate_password").(string), ClientCertPath: d.Get("client_certificate_path").(string), @@ -249,7 +286,7 @@ func providerConfigure(p *schema.Provider) schema.ConfigureFunc { SupportsAuxiliaryTenants: len(auxTenants) > 0, // Doc Links - ClientSecretDocsLink: "https://www.terraform.io/docs/providers/azurerm/guides/service_principal_client_secret.html", + ClientSecretDocsLink: "https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/guides/service_principal_client_secret", } config, err := builder.Build() diff --git a/azurerm/internal/provider/provider_test.go b/azurerm/internal/provider/provider_test.go index 6342a3be21fb..966777128da0 100644 --- a/azurerm/internal/provider/provider_test.go +++ b/azurerm/internal/provider/provider_test.go @@ -87,5 +87,5 @@ func TestResourcesSupportCustomTimeouts(t *testing.T) { } func TestProvider_impl(t *testing.T) { - var _ = AzureProvider() + _ = AzureProvider() } diff --git 
a/azurerm/internal/provider/services.go b/azurerm/internal/provider/services.go index 5d7d816f8075..e6163f338390 100644 --- a/azurerm/internal/provider/services.go +++ b/azurerm/internal/provider/services.go @@ -1,21 +1,21 @@ package provider import ( + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/sdk" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/advisor" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/analysisservices" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/apimanagement" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/appconfiguration" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/applicationinsights" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/appplatform" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/attestation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/authorization" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/automation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/azurestackhci" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/batch" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/blueprints" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/bot" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cdn" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cognitive" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/common" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/containers" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos" @@ -29,9 +29,11 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/desktopvirtualization" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/devspace" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/devtestlabs" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/digitaltwins" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventgrid" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventhub" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/firewall" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/frontdoor" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hdinsight" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/healthcare" @@ -43,6 +45,7 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/keyvault" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/kusto" 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/lighthouse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/logic" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/machinelearning" @@ -76,6 +79,7 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/servicefabric" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/servicefabricmesh" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/signalr" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/springcloud" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/sql" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/storage" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/streamanalytics" @@ -87,17 +91,25 @@ import ( //go:generate go run ../tools/generator-services/main.go -path=../../../ -func SupportedServices() []common.ServiceRegistration { - return []common.ServiceRegistration{ +func SupportedTypedServices() []sdk.TypedServiceRegistration { + return []sdk.TypedServiceRegistration{ + eventhub.Registration{}, + resource.Registration{}, + } +} + +func SupportedUntypedServices() []sdk.UntypedServiceRegistration { + return []sdk.UntypedServiceRegistration{ advisor.Registration{}, analysisservices.Registration{}, apimanagement.Registration{}, appconfiguration.Registration{}, - appplatform.Registration{}, + springcloud.Registration{}, applicationinsights.Registration{}, attestation.Registration{}, authorization.Registration{}, automation.Registration{}, + azurestackhci.Registration{}, batch.Registration{}, blueprints.Registration{}, bot.Registration{}, @@ -116,9 +128,11 @@ func SupportedServices() []common.ServiceRegistration { desktopvirtualization.Registration{}, devspace.Registration{}, devtestlabs.Registration{}, + digitaltwins.Registration{}, dns.Registration{}, eventgrid.Registration{}, eventhub.Registration{}, + firewall.Registration{}, frontdoor.Registration{}, hpccache.Registration{}, hsm.Registration{}, @@ -128,6 +142,7 @@ func SupportedServices() []common.ServiceRegistration { iotcentral.Registration{}, keyvault.Registration{}, kusto.Registration{}, + loadbalancer.Registration{}, loganalytics.Registration{}, logic.Registration{}, machinelearning.Registration{}, diff --git a/azurerm/internal/provider/services_test.go b/azurerm/internal/provider/services_test.go new file mode 100644 index 000000000000..3ff4d8e8be55 --- /dev/null +++ b/azurerm/internal/provider/services_test.go @@ -0,0 +1,33 @@ +package provider + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/sdk" +) + +func TestTypedDataSourcesContainValidModelObjects(t *testing.T) { + for _, service := range SupportedTypedServices() { + t.Logf("Service %q..", service.Name()) + for _, resource := range service.DataSources() { + t.Logf("- DataSources %q..", resource.ResourceType()) + obj := resource.ModelObject() + if err := sdk.ValidateModelObject(&obj); err != nil { + t.Fatalf("validating model: %+v", err) + } + } + } +} + +func TestTypedResourcesContainValidModelObjects(t 
*testing.T) { + for _, service := range SupportedTypedServices() { + t.Logf("Service %q..", service.Name()) + for _, resource := range service.Resources() { + t.Logf("- Resource %q..", resource.ResourceType()) + obj := resource.ModelObject() + if err := sdk.ValidateModelObject(&obj); err != nil { + t.Fatalf("validating model: %+v", err) + } + } + } +} diff --git a/azurerm/internal/resourceid/interface.go b/azurerm/internal/resourceid/interface.go index 4f6a793f6a5e..4bde7d7863b8 100644 --- a/azurerm/internal/resourceid/interface.go +++ b/azurerm/internal/resourceid/interface.go @@ -1,5 +1,5 @@ package resourceid type Formatter interface { - ID(subscriptionId string) string + ID() string } diff --git a/azurerm/internal/resourceproviders/azure.go b/azurerm/internal/resourceproviders/azure.go new file mode 100644 index 000000000000..9f4584901001 --- /dev/null +++ b/azurerm/internal/resourceproviders/azure.go @@ -0,0 +1,28 @@ +package resourceproviders + +import ( + "context" + "fmt" + + "github.com/Azure/azure-sdk-for-go/profiles/2017-03-09/resources/mgmt/resources" +) + +func availableResourceProviders(ctx context.Context, client *resources.ProvidersClient) (*[]string, error) { + providerNames := make([]string, 0) + providers, err := client.ListComplete(ctx, nil, "") + if err != nil { + return nil, fmt.Errorf("listing Resource Providers: %+v", err) + } + for providers.NotDone() { + provider := providers.Value() + if provider.Namespace != nil { + providerNames = append(providerNames, *provider.Namespace) + } + + if err := providers.NextWithContext(ctx); err != nil { + return nil, err + } + } + + return &providerNames, nil +} diff --git a/azurerm/internal/resourceproviders/cache.go b/azurerm/internal/resourceproviders/cache.go new file mode 100644 index 000000000000..37088100d1a8 --- /dev/null +++ b/azurerm/internal/resourceproviders/cache.go @@ -0,0 +1,23 @@ +package resourceproviders + +import ( + "context" + "log" + + "github.com/Azure/azure-sdk-for-go/profiles/2017-03-09/resources/mgmt/resources" +) + +// cachedResourceProviders can be (validly) nil - as such this shouldn't be relied on +var cachedResourceProviders *[]string + +// CacheSupportedProviders attempts to retrieve the supported Resource Providers from the Resource Manager API +// and caches them, for use in enhanced validation +func CacheSupportedProviders(ctx context.Context, client *resources.ProvidersClient) { + providers, err := availableResourceProviders(ctx, client) + if err != nil { + log.Printf("[DEBUG] error retrieving providers: %s. Enhanced validation will be unavailable", err) + return + } + + cachedResourceProviders = providers +} diff --git a/azurerm/internal/resourceproviders/validation.go b/azurerm/internal/resourceproviders/validation.go new file mode 100644 index 000000000000..822761636cbb --- /dev/null +++ b/azurerm/internal/resourceproviders/validation.go @@ -0,0 +1,57 @@ +package resourceproviders + +import ( + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/features" +) + +// this is only here to aid testing +var enhancedEnabled = features.EnhancedValidationEnabled() + +// EnhancedValidate returns a validation function which attempts to validate the Resource Provider +// against the list of Resource Providers supported by this Azure Environment.
+ +// +// NOTE: this is best-effort - if the user is offline, or the API doesn't return it, we'll +// fall back to the original approach +func EnhancedValidate(i interface{}, k string) ([]string, []error) { + if !enhancedEnabled || cachedResourceProviders == nil { + return validation.StringIsNotEmpty(i, k) + } + + return enhancedValidation(i, k) +} + +func enhancedValidation(i interface{}, k string) ([]string, []error) { + v, ok := i.(string) + if !ok { + return nil, []error{fmt.Errorf("expected type of %q to be string", k)} + } + + if v == "" { + return nil, []error{fmt.Errorf("%q must not be empty", k)} + } + + // enhanced validation is unavailable, but we're in this method.. + if cachedResourceProviders == nil { + return nil, nil + } + + found := false + for _, provider := range *cachedResourceProviders { + if provider == v { + found = true + } + } + + if !found { + providersJoined := strings.Join(*cachedResourceProviders, ", ") + return nil, []error{ + fmt.Errorf("%q was not found in the list of supported Resource Providers: %q", v, providersJoined), + } + } + + return nil, nil +} diff --git a/azurerm/internal/resourceproviders/validation_test.go b/azurerm/internal/resourceproviders/validation_test.go new file mode 100644 index 000000000000..de1e5cc10e09 --- /dev/null +++ b/azurerm/internal/resourceproviders/validation_test.go @@ -0,0 +1,86 @@ +package resourceproviders + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/features" +) + +func TestEnhancedValidationDisabled(t *testing.T) { + testCases := []struct { + input string + valid bool + }{ + { + input: "", + valid: false, + }, + { + input: "micr0soft", + valid: true, + }, + { + input: "microsoft.compute", + valid: true, + }, + { + input: "Microsoft.Compute", + valid: true, + }, + } + enhancedEnabled = false + defer func() { + enhancedEnabled = features.EnhancedValidationEnabled() + cachedResourceProviders = nil + }() + + for _, testCase := range testCases { + t.Logf("Testing %q..", testCase.input) + + warnings, errors := EnhancedValidate(testCase.input, "name") + valid := len(warnings) == 0 && len(errors) == 0 + if testCase.valid != valid { + t.Errorf("Expected %t but got %t", testCase.valid, valid) + } + } +} + +func TestEnhancedValidationEnabled(t *testing.T) { + testCases := []struct { + input string + valid bool + }{ + { + input: "", + valid: false, + }, + { + input: "micr0soft", + valid: false, + }, + { + input: "microsoft.compute", + valid: false, + }, + { + input: "Microsoft.Compute", + valid: true, + }, + } + enhancedEnabled = true + cachedResourceProviders = &[]string{"Microsoft.Compute"} + defer func() { + enhancedEnabled = features.EnhancedValidationEnabled() + cachedResourceProviders = nil + }() + + for _, testCase := range testCases { + t.Logf("Testing %q..", testCase.input) + + warnings, errors := EnhancedValidate(testCase.input, "name") + valid := len(warnings) == 0 && len(errors) == 0 + if testCase.valid != valid { + t.Errorf("Expected %t but got %t", testCase.valid, valid) + } + } +} diff --git a/azurerm/internal/sdk/README.md b/azurerm/internal/sdk/README.md new file mode 100644 index 000000000000..5ba45011c402 --- /dev/null +++ b/azurerm/internal/sdk/README.md @@ -0,0 +1,216 @@ +## SDK for Strongly-Typed Resources + +This package is a prototype for creating strongly-typed Data Sources and Resources - and in future will likely form the foundation for Terraform Data Sources and Resources in this Provider going forward.
+ +## Should I use this package to build resources? + +Not at this time - please use Terraform's Plugin SDK instead - reference examples can be found in `./azurerm/internal/services/notificationhub`. + +More documentation for this package will ship in the future when this is ready for general use. + +--- + +## What's the long-term intention for this package? + +Each Service Package contains the following: + +* Client - giving reference to the SDK Client which should be used to interact with Azure +* ID Parsers, Formatters and a Validator - giving a canonical ID for each Resource +* Validation functions specific to this service package, for example for the Name + +This package can be used to tie these together in a more strongly typed fashion, for example: + +``` +package example + +import ( + "context" + "fmt" + "time" + + "github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2020-06-01/resources" + "github.com/hashicorp/go-azure-helpers/response" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/sdk" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/resource/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/resource/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ResourceGroup struct { + Name string `tfschema:"name"` + Location string `tfschema:"location"` + Tags map[string]string `tfschema:"tags"` +} + +type ResourceGroupResource struct { +} + +func (r ResourceGroupResource) Arguments() map[string]*schema.Schema { + return map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + }, + + "location": location.Schema(), + + "tags": tags.Schema(), + } +} + +func (r ResourceGroupResource) Attributes() map[string]*schema.Schema { + return map[string]*schema.Schema{} +} + +func (r ResourceGroupResource) ResourceType() string { + return "azurerm_example" +} + +func (r ResourceGroupResource) Create() sdk.ResourceFunc { + return sdk.ResourceFunc{ + Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { + metadata.Logger.Info("Decoding state..") + var state ResourceGroup + if err := metadata.Decode(&state); err != nil { + return err + } + + metadata.Logger.Infof("creating Resource Group %q..", state.Name) + client := metadata.Client.Resource.GroupsClient + subscriptionId := metadata.Client.Account.SubscriptionId + + id := parse.NewResourceGroupID(subscriptionId, state.Name) + existing, err := client.Get(ctx, state.Name) + if err != nil && !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for the presence of an existing Resource Group %q: %+v", state.Name, err) + } + if !utils.ResponseWasNotFound(existing.Response) { + return metadata.ResourceRequiresImport(r.ResourceType(), id) + } + + input := resources.Group{ + Location: utils.String(state.Location), + Tags: tags.FromTypedObject(state.Tags), + } + if _, err := client.CreateOrUpdate(ctx, state.Name, input); err != nil { + return fmt.Errorf("creating Resource Group %q: %+v", state.Name, err) + } + + metadata.SetID(id) + return nil + }, + Timeout: 30 * time.Minute, + } +} + +func (r ResourceGroupResource) Read() sdk.ResourceFunc { + return sdk.ResourceFunc{ + Func: func(ctx context.Context, 
metadata sdk.ResourceMetaData) error { + client := metadata.Client.Resource.GroupsClient + id, err := parse.ResourceGroupID(metadata.ResourceData.Id()) + if err != nil { + return err + } + + metadata.Logger.Infof("retrieving Resource Group %q..", id.Name) + group, err := client.Get(ctx, id.Name) + if err != nil { + if utils.ResponseWasNotFound(group.Response) { + metadata.Logger.Infof("Resource Group %q was not found - removing from state!", id.Name) + return metadata.MarkAsGone() + } + + return fmt.Errorf("retrieving Resource Group %q: %+v", id.Name, err) + } + + return metadata.Encode(&ResourceGroup{ + Name: id.Name, + Location: location.NormalizeNilable(group.Location), + Tags: tags.ToTypedObject(group.Tags), + }) + }, + Timeout: 5 * time.Minute, + } +} + +func (r ResourceGroupResource) Update() sdk.ResourceFunc { + return sdk.ResourceFunc{ + Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { + id, err := parse.ResourceGroupID(metadata.ResourceData.Id()) + if err != nil { + return err + } + + metadata.Logger.Info("Decoding state..") + var state ResourceGroup + if err := metadata.Decode(&state); err != nil { + return err + } + + metadata.Logger.Infof("updating Resource Group %q..", id.Name) + client := metadata.Client.Resource.GroupsClient + + input := resources.GroupPatchable{ + Tags: tags.FromTypedObject(state.Tags), + } + + if _, err := client.Update(ctx, id.Name, input); err != nil { + return fmt.Errorf("updating Resource Group %q: %+v", id.Name, err) + } + + return nil + }, + Timeout: 30 * time.Minute, + } +} + +func (r ResourceGroupResource) Delete() sdk.ResourceFunc { + return sdk.ResourceFunc{ + Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { + client := metadata.Client.Resource.GroupsClient + id, err := parse.ResourceGroupID(metadata.ResourceData.Id()) + if err != nil { + return err + } + + metadata.Logger.Infof("deleting Resource Group %q..", id.Name) + future, err := client.Delete(ctx, id.Name) + if err != nil { + if response.WasNotFound(future.Response()) { + return metadata.MarkAsGone() + } + + return fmt.Errorf("deleting Resource Group %q: %+v", id.Name, err) + } + + metadata.Logger.Infof("waiting for the deletion of Resource Group %q..", id.Name) + if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for deletion of Resource Group %q: %+v", id.Name, err) + } + + return nil + }, + Timeout: 30 * time.Minute, + } +} + +func (r ResourceGroupResource) IDValidationFunc() schema.SchemaValidateFunc { + return validate.ResourceGroupID +} + +func (r ResourceGroupResource) ModelObject() interface{} { + return ResourceGroup{} +} +``` + +The end result being the removal of a lot of common bugs by moving to a convention - for example: + +* The Context object passed into each method _always_ has a deadline/timeout attached to it +* The Read function is automatically called at the end of a Create and Update function - meaning users don't have to do this +* Each Resource has to have an ID Formatter and Validation Function +* The Model Object is validated via unit tests to ensure it contains the relevant struct tags (TODO: also confirming these exist in the state and are of the correct type, so no Set errors occur) + +Ultimately this allows bugs to be caught by the Compiler (for example if a Read function is unimplemented) - or Unit Tests (for example should the `tfschema` struct tags be missing) - rather than during Provider Initialization, which reduces the feedback loop. 
diff --git a/azurerm/internal/sdk/locations.go b/azurerm/internal/sdk/locations.go deleted file mode 100644 index 6e173e9d3c9e..000000000000 --- a/azurerm/internal/sdk/locations.go +++ /dev/null @@ -1,94 +0,0 @@ -package sdk - -import ( - "context" - "encoding/json" - "fmt" - "net/http" - "strings" - - "github.com/Azure/go-autorest/autorest/azure" -) - -type SupportedLocations struct { - // Locations is a list of Locations which are supported on this Azure Endpoint. - // This could be nil when the user is offline, or the Azure MetaData Service does not have this - // information and as such this should be used as best-effort, rather than guaranteed - Locations *[]string -} - -type cloudEndpoint struct { - Endpoint string `json:"endpoint"` - Locations *[]string `json:"locations"` -} - -type metaDataResponse struct { - CloudEndpoint map[string]cloudEndpoint `json:"cloudEndpoint"` -} - -// AvailableAzureLocations returns a list of the Azure Locations which are available on the specified endpoint -func AvailableAzureLocations(ctx context.Context, env *azure.Environment) (*SupportedLocations, error) { - // e.g. https://management.azure.com/ but we need management.azure.com - endpoint := strings.TrimPrefix(env.ResourceManagerEndpoint, "https://") - endpoint = strings.TrimSuffix(endpoint, "/") - - uri := fmt.Sprintf("https://%s//metadata/endpoints?api-version=2018-01-01", endpoint) - client := http.Client{ - Transport: &http.Transport{ - Proxy: http.ProxyFromEnvironment, - }, - } - req, err := http.NewRequestWithContext(ctx, "GET", uri, nil) - if err != nil { - return nil, err - } - resp, err := client.Do(req) - if err != nil { - return nil, fmt.Errorf("retrieving supported locations from Azure MetaData service: %+v", err) - } - var out metaDataResponse - if err := json.NewDecoder(resp.Body).Decode(&out); err != nil { - return nil, fmt.Errorf("deserializing JSON from Azure MetaData service: %+v", err) - } - - var locations *[]string - for _, v := range out.CloudEndpoint { - // one of the endpoints on this endpoint should reference itself - // however this is best-effort, so if it doesn't, it's not the end of the world - if strings.EqualFold(v.Endpoint, endpoint) { - locations = v.Locations - } - } - - // TODO: remove this once Microsoft fixes the API - // the Azure API returns the india locations the wrong way around - // e.g. 
'southindia' is returned as 'indiasouth' - // so we need to conditionally switch these out until Microsoft fixes the API - // $ az account list-locations -o table | grep india - // Central India centralindia (Asia Pacific) Central India - // South India southindia (Asia Pacific) South India - // West India westindia (Asia Pacific) West India - if env.Name == azure.PublicCloud.Name && locations != nil { - out := *locations - out = switchLocationIfExists("indiacentral", "centralindia", out) - out = switchLocationIfExists("indiasouth", "southindia", out) - out = switchLocationIfExists("indiawest", "westindia", out) - locations = &out - } - - return &SupportedLocations{ - Locations: locations, - }, nil -} - -func switchLocationIfExists(find, replace string, locations []string) []string { - out := locations - - for i, v := range out { - if v == find { - out[i] = replace - } - } - - return locations -} diff --git a/azurerm/internal/sdk/logger.go b/azurerm/internal/sdk/logger.go new file mode 100644 index 000000000000..40221ac10f3b --- /dev/null +++ b/azurerm/internal/sdk/logger.go @@ -0,0 +1,18 @@ +package sdk + +// Logger is an interface for switching out the Logger implementation +type Logger interface { + // Info prints out a message prefixed with `[INFO]` verbatim + Info(message string) + + // Infof prints out a message prefixed with `[INFO]` formatted + // with the specified arguments + Infof(format string, args ...interface{}) + + // Warn prints out a message prefixed with `[WARN]` formatted verbatim + Warn(message string) + + // Warnf prints out a message prefixed with `[WARN]` formatted + // with the specified arguments + Warnf(format string, args ...interface{}) +} diff --git a/azurerm/internal/sdk/logger_console.go b/azurerm/internal/sdk/logger_console.go new file mode 100644 index 000000000000..001665131493 --- /dev/null +++ b/azurerm/internal/sdk/logger_console.go @@ -0,0 +1,33 @@ +package sdk + +import ( + "fmt" + "log" +) + +// ConsoleLogger provides a Logger implementation which writes the log messages +// to StdOut - in Terraform's perspective that's proxied via the Plugin SDK +type ConsoleLogger struct { +} + +// Info prints out a message prefixed with `[INFO]` verbatim +func (ConsoleLogger) Info(message string) { + log.Print(fmt.Sprintf("[INFO] %s", message)) +} + +// Infof prints out a message prefixed with `[INFO]` formatted +// with the specified arguments +func (l ConsoleLogger) Infof(format string, args ...interface{}) { + l.Info(fmt.Sprintf(format, args...)) +} + +// Warn prints out a message prefixed with `[WARN]` formatted verbatim +func (l ConsoleLogger) Warn(message string) { + log.Print(fmt.Sprintf("[WARN] %s", message)) +} + +// Warnf prints out a message prefixed with `[WARN]` formatted +// with the specified arguments +func (l ConsoleLogger) Warnf(format string, args ...interface{}) { + l.Warn(fmt.Sprintf(format, args...)) +} diff --git a/azurerm/internal/sdk/logger_null.go b/azurerm/internal/sdk/logger_null.go new file mode 100644 index 000000000000..edb8dcb3ba5c --- /dev/null +++ b/azurerm/internal/sdk/logger_null.go @@ -0,0 +1,25 @@ +package sdk + +// NullLogger disregards the log output - and is intended to be used +// when the contents of the debug logger aren't interesting +// to reduce console output +type NullLogger struct { +} + +// Info prints out a message prefixed with `[INFO]` verbatim +func (NullLogger) Info(_ string) { +} + +// Infof prints out a message prefixed with `[INFO]` formatted +// with the specified arguments +func (NullLogger) Infof(_ 
string, _ ...interface{}) { +} + +// Warn prints out a message prefixed with `[WARN]` formatted verbatim +func (NullLogger) Warn(_ string) { +} + +// Warnf prints out a message prefixed with `[WARN]` formatted +// with the specified arguments +func (NullLogger) Warnf(_ string, _ ...interface{}) { +} diff --git a/azurerm/internal/sdk/plugin_sdk_test.go b/azurerm/internal/sdk/plugin_sdk_test.go new file mode 100644 index 000000000000..7627b2b7ee02 --- /dev/null +++ b/azurerm/internal/sdk/plugin_sdk_test.go @@ -0,0 +1,1013 @@ +package sdk + +import ( + "fmt" + "os" + "reflect" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/terraform" +) + +func TestAccPluginSDKAndDecoder(t *testing.T) { + os.Setenv("TF_ACC", "1") + + type NestedType struct { + Key string `tfschema:"key"` + } + type MyType struct { + Hello string `tfschema:"hello"` + RandomNumber int `tfschema:"random_number"` + Enabled bool `tfschema:"enabled"` + ListOfStrings []string `tfschema:"list_of_strings"` + ListOfNumbers []int `tfschema:"list_of_numbers"` + ListOfBools []bool `tfschema:"list_of_bools"` + ListOfFloats []float64 `tfschema:"list_of_floats"` + NestedObject []NestedType `tfschema:"nested_object"` + MapOfStrings map[string]string `tfschema:"map_of_strings"` + MapOfNumbers map[string]int `tfschema:"map_of_numbers"` + MapOfBools map[string]bool `tfschema:"map_of_bools"` + MapOfFloats map[string]float64 `tfschema:"map_of_floats"` + // Sets are handled in a separate test, since the orders can be different + } + + expected := MyType{ + Hello: "world", + RandomNumber: 42, + Enabled: true, + ListOfStrings: []string{"hello", "there"}, + ListOfNumbers: []int{1, 2, 4}, + ListOfBools: []bool{true, false}, + ListOfFloats: []float64{-1.234567894321, 2.3456789}, + NestedObject: []NestedType{ + { + Key: "value", + }, + }, + MapOfStrings: map[string]string{ + "bingo": "bango", + }, + MapOfNumbers: map[string]int{ + "lucky": 21, + }, + MapOfBools: map[string]bool{ + "friday": true, + }, + MapOfFloats: map[string]float64{ + "pi": 3.14159, + }, + } + + // lintignore:AT001 + resource.ParallelTest(t, resource.TestCase{ + ProviderFactories: map[string]terraform.ResourceProviderFactory{ + "validator": func() (terraform.ResourceProvider, error) { + return &schema.Provider{ + DataSourcesMap: map[string]*schema.Resource{}, + ResourcesMap: map[string]*schema.Resource{ + "validator_decoder": { + Schema: map[string]*schema.Schema{ + "hello": { + Type: schema.TypeString, + Computed: true, + }, + "random_number": { + Type: schema.TypeInt, + Computed: true, + }, + "enabled": { + Type: schema.TypeBool, + Computed: true, + }, + "list_of_strings": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "list_of_numbers": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeInt, + }, + }, + "list_of_bools": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeBool, + }, + }, + "list_of_floats": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeFloat, + }, + }, + "nested_object": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "key": { + Type: schema.TypeString, + Computed: true, + }, + }, + }, + }, + "map_of_strings": { + Type: schema.TypeMap, + Computed: true, + Elem: &schema.Schema{ + Type: 
schema.TypeString, + }, + }, + "map_of_numbers": { + Type: schema.TypeMap, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeInt, + }, + }, + "map_of_bools": { + Type: schema.TypeMap, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeBool, + }, + }, + "map_of_floats": { + Type: schema.TypeMap, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeFloat, + }, + }, + }, + Create: func(d *schema.ResourceData, i interface{}) error { + d.SetId("some-id") + d.Set("hello", "world") + d.Set("random_number", 42) + d.Set("enabled", true) + d.Set("list_of_strings", []string{"hello", "there"}) + d.Set("list_of_numbers", []int{1, 2, 4}) + d.Set("list_of_bools", []bool{true, false}) + d.Set("list_of_floats", []float64{-1.234567894321, 2.3456789}) + d.Set("nested_object", []interface{}{ + map[string]interface{}{ + "key": "value", + }, + }) + d.Set("map_of_strings", map[string]string{ + "bingo": "bango", + }) + d.Set("map_of_numbers", map[string]int{ + "lucky": 21, + }) + d.Set("map_of_bools", map[string]bool{ + "friday": true, + }) + d.Set("map_of_floats", map[string]float64{ + "pi": 3.14159, + }) + return nil + }, + Read: func(d *schema.ResourceData, _ interface{}) error { + wrapper := ResourceMetaData{ + ResourceData: d, + Logger: ConsoleLogger{}, + serializationDebugLogger: ConsoleLogger{}, + } + + var actual MyType + if err := wrapper.Decode(&actual); err != nil { + return fmt.Errorf("decoding: %+v", err) + } + + if !reflect.DeepEqual(actual, expected) { + return fmt.Errorf("Values did not match - Expected:\n%+v\n\nActual:\n%+v", expected, actual) + } + + return nil + }, + Delete: func(_ *schema.ResourceData, _ interface{}) error { + return nil + }, + }, + }, + }, nil + }, + }, + Steps: []resource.TestStep{ + { + Config: `resource "validator_decoder" "test" {}`, + }, + }, + }) +} + +func TestAccPluginSDKAndDecoderOptionalComputed(t *testing.T) { + os.Setenv("TF_ACC", "1") + + type MyType struct { + Hello string `tfschema:"hello"` + Number int `tfschema:"number"` + Enabled bool `tfschema:"enabled"` + // TODO: do we need other field types, or is this sufficient? 
+ } + + var commonSchema = map[string]*schema.Schema{ + "hello": { + Type: schema.TypeString, + Optional: true, + Computed: true, + }, + "number": { + Type: schema.TypeInt, + Optional: true, + Computed: true, + }, + "enabled": { + Type: schema.TypeBool, + Optional: true, + Computed: true, + }, + } + var readFunc = func(expected MyType) func(*schema.ResourceData, interface{}) error { + return func(d *schema.ResourceData, _ interface{}) error { + wrapper := ResourceMetaData{ + ResourceData: d, + Logger: ConsoleLogger{}, + serializationDebugLogger: ConsoleLogger{}, + } + + var actual MyType + if err := wrapper.Decode(&actual); err != nil { + return fmt.Errorf("decoding: %+v", err) + } + + if !reflect.DeepEqual(actual, expected) { + return fmt.Errorf("Values did not match - Expected:\n%+v\n\nActual:\n%+v", expected, actual) + } + + return nil + } + } + + // lintignore:AT001 + resource.ParallelTest(t, resource.TestCase{ + ProviderFactories: map[string]terraform.ResourceProviderFactory{ + "validator": func() (terraform.ResourceProvider, error) { + return &schema.Provider{ + DataSourcesMap: map[string]*schema.Resource{}, + ResourcesMap: map[string]*schema.Resource{ + "validator_decoder_specified": { + Schema: commonSchema, + Create: func(d *schema.ResourceData, i interface{}) error { + d.SetId("some-id") + return nil + }, + Read: readFunc(MyType{ // expected + Hello: "value-from-config", + Number: 21, + Enabled: true, + }), + Delete: func(_ *schema.ResourceData, _ interface{}) error { + return nil + }, + }, + + "validator_decoder_unspecified": { + Schema: commonSchema, + Create: func(d *schema.ResourceData, i interface{}) error { + d.SetId("some-id") + d.Set("hello", "value-from-create") + d.Set("number", 42) + d.Set("enabled", false) + return nil + }, + Read: readFunc(MyType{ // expected + Hello: "value-from-create", + Number: 42, + Enabled: false, + }), + Delete: func(_ *schema.ResourceData, _ interface{}) error { + return nil + }, + }, + }, + }, nil + }, + }, + Steps: []resource.TestStep{ + { + Config: ` +resource "validator_decoder_specified" "test" { + hello = "value-from-config" + number = 21 + enabled = true +} +resource "validator_decoder_unspecified" "test" {} +`, + Check: resource.ComposeTestCheckFunc( + testCheckResourceStateMatches("validator_decoder_specified.test", map[string]interface{}{ + "id": "some-id", + "enabled": "true", + "hello": "value-from-config", + "number": "21", + }), + testCheckResourceStateMatches("validator_decoder_unspecified.test", map[string]interface{}{ + "id": "some-id", + "enabled": "false", + "hello": "value-from-create", + "number": "42", + }), + ), + }, + }, + }) +} + +func TestAccPluginSDKAndDecoderOptionalComputedOverride(t *testing.T) { + os.Setenv("TF_ACC", "1") + + type MyType struct { + Hello string `tfschema:"hello"` + Number int `tfschema:"number"` + Enabled bool `tfschema:"enabled"` + // TODO: do we need other field types, or is this sufficient? 
+ } + + // lintignore:AT001 + resource.ParallelTest(t, resource.TestCase{ + ProviderFactories: map[string]terraform.ResourceProviderFactory{ + "validator": func() (terraform.ResourceProvider, error) { + return &schema.Provider{ + DataSourcesMap: map[string]*schema.Resource{}, + ResourcesMap: map[string]*schema.Resource{ + "validator_decoder_override": { + Schema: map[string]*schema.Schema{ + "hello": { + Type: schema.TypeString, + Optional: true, + Computed: true, + }, + "number": { + Type: schema.TypeInt, + Optional: true, + Computed: true, + }, + "enabled": { + Type: schema.TypeBool, + Optional: true, + Computed: true, + }, + }, + Create: func(d *schema.ResourceData, i interface{}) error { + d.SetId("some-id") + d.Set("hello", "value-from-create") + d.Set("number", 42) + d.Set("enabled", false) + return nil + }, + Read: func(d *schema.ResourceData, _ interface{}) error { + wrapper := ResourceMetaData{ + ResourceData: d, + Logger: ConsoleLogger{}, + serializationDebugLogger: ConsoleLogger{}, + } + + var actual MyType + if err := wrapper.Decode(&actual); err != nil { + return fmt.Errorf("decoding: %+v", err) + } + + expected := MyType{ + Hello: "value-from-create", + Number: 42, + Enabled: false, + } + + if !reflect.DeepEqual(actual, expected) { + return fmt.Errorf("Values did not match - Expected:\n%+v\n\nActual:\n%+v", expected, actual) + } + + return nil + }, + Delete: func(_ *schema.ResourceData, _ interface{}) error { + return nil + }, + }, + }, + }, nil + }, + }, + Steps: []resource.TestStep{ + { + // Apply it + Config: ` +resource "validator_decoder_override" "test" { + hello = "" +} +`, + }, + { + // Then run a plan, to detect that the default value of an empty string is picked up during the Decode + Config: ` +resource "validator_decoder_override" "test" { + hello = "" +} +`, + Check: resource.ComposeTestCheckFunc( + testCheckResourceStateMatches("validator_decoder_override.test", map[string]interface{}{ + "id": "some-id", + "enabled": "false", + "hello": "", + "number": "42", + }), + ), + PlanOnly: true, + }, + }, + }) +} + +func TestAccPluginSDKAndDecoderSets(t *testing.T) { + os.Setenv("TF_ACC", "1") + + type MyType struct { + SetOfStrings []string `tfschema:"set_of_strings"` + SetOfNumbers []int `tfschema:"set_of_numbers"` + SetOfBools []bool `tfschema:"set_of_bools"` + SetOfFloats []float64 `tfschema:"set_of_floats"` + // we could arguably extend this with nested Sets, but they're tested in the Decode function + // so we should be covered via this test alone + } + + // lintignore:AT001 + resource.ParallelTest(t, resource.TestCase{ + ProviderFactories: map[string]terraform.ResourceProviderFactory{ + "validator": func() (terraform.ResourceProvider, error) { + return &schema.Provider{ + DataSourcesMap: map[string]*schema.Resource{}, + ResourcesMap: map[string]*schema.Resource{ + "validator_decoder": { + Schema: map[string]*schema.Schema{ + "set_of_strings": { + Type: schema.TypeSet, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "set_of_numbers": { + Type: schema.TypeSet, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeInt, + }, + }, + "set_of_bools": { + Type: schema.TypeSet, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeBool, + }, + }, + "set_of_floats": { + Type: schema.TypeSet, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeFloat, + }, + }, + }, + Create: func(d *schema.ResourceData, i interface{}) error { + d.SetId("some-id") + d.Set("set_of_strings", []string{ + "some", + "value", + }) + 
d.Set("set_of_numbers", []int{ + 1, + 2, + }) + d.Set("set_of_bools", []bool{ + true, + false, + }) + d.Set("set_of_floats", []float64{ + 1.1, + 2.2, + }) + return nil + }, + Read: func(d *schema.ResourceData, _ interface{}) error { + wrapper := ResourceMetaData{ + ResourceData: d, + Logger: ConsoleLogger{}, + serializationDebugLogger: ConsoleLogger{}, + } + + var actual MyType + if err := wrapper.Decode(&actual); err != nil { + return fmt.Errorf("decoding: %+v", err) + } + + expectedStrings := []string{ + "some", + "value", + } + if len(actual.SetOfStrings) != len(expectedStrings) { + return fmt.Errorf("expected %d strings but got %d", len(expectedStrings), len(actual.SetOfStrings)) + } + for _, v := range expectedStrings { + exists := false + for _, a := range actual.SetOfStrings { + if v == a { + exists = true + break + } + } + if !exists { + return fmt.Errorf("expected the string %q to exist but it didn't", v) + } + } + + expectedNumbers := []int{ + 1, + 2, + } + if len(actual.SetOfNumbers) != len(expectedNumbers) { + return fmt.Errorf("expected %d ints but got %d", len(expectedNumbers), len(actual.SetOfNumbers)) + } + for _, v := range expectedNumbers { + exists := false + for _, a := range actual.SetOfNumbers { + if v == a { + exists = true + break + } + } + if !exists { + return fmt.Errorf("expected the number %d to exist but it didn't", v) + } + } + + expectedBools := []bool{ + true, + false, + } + if len(actual.SetOfBools) != len(expectedBools) { + return fmt.Errorf("expected %d bools but got %d", len(expectedBools), len(actual.SetOfBools)) + } + for _, v := range expectedBools { + exists := false + for _, a := range actual.SetOfBools { + if v == a { + exists = true + break + } + } + if !exists { + return fmt.Errorf("expected the bool %t to exist but it didn't", v) + } + } + + expectedFloats := []float64{ + 1.1, + 2.2, + } + if len(actual.SetOfFloats) != len(expectedFloats) { + return fmt.Errorf("expected %d floats but got %d", len(expectedFloats), len(actual.SetOfFloats)) + } + for _, v := range expectedFloats { + exists := false + for _, a := range actual.SetOfFloats { + if v == a { + exists = true + break + } + } + if !exists { + return fmt.Errorf("expected the float %f to exist but it didn't", v) + } + } + + return nil + }, + Delete: func(_ *schema.ResourceData, _ interface{}) error { + return nil + }, + }, + }, + }, nil + }, + }, + Steps: []resource.TestStep{ + { + Config: `resource "validator_decoder" "test" {}`, + }, + }, + }) +} + +func TestAccPluginSDKAndEncoder(t *testing.T) { + os.Setenv("TF_ACC", "1") + + type NestedType struct { + Key string `tfschema:"key"` + } + type MyType struct { + Hello string `tfschema:"hello"` + RandomNumber int `tfschema:"random_number"` + Enabled bool `tfschema:"enabled"` + ListOfStrings []string `tfschema:"list_of_strings"` + ListOfNumbers []int `tfschema:"list_of_numbers"` + ListOfBools []bool `tfschema:"list_of_bools"` + ListOfFloats []float64 `tfschema:"list_of_floats"` + NestedObject []NestedType `tfschema:"nested_object"` + MapOfStrings map[string]string `tfschema:"map_of_strings"` + MapOfNumbers map[string]int `tfschema:"map_of_numbers"` + MapOfBools map[string]bool `tfschema:"map_of_bools"` + MapOfFloats map[string]float64 `tfschema:"map_of_floats"` + SetOfStrings []string `tfschema:"set_of_strings"` + SetOfNumbers []int `tfschema:"set_of_numbers"` + SetOfBools []bool `tfschema:"set_of_bools"` + SetOfFloats []float64 `tfschema:"set_of_floats"` + } + + // lintignore:AT001 + resource.ParallelTest(t, resource.TestCase{ + 
ProviderFactories: map[string]terraform.ResourceProviderFactory{ + "validator": func() (terraform.ResourceProvider, error) { + return &schema.Provider{ + DataSourcesMap: map[string]*schema.Resource{}, + ResourcesMap: map[string]*schema.Resource{ + "validator_encoder": { + Schema: map[string]*schema.Schema{ + "hello": { + Type: schema.TypeString, + Computed: true, + }, + "random_number": { + Type: schema.TypeInt, + Computed: true, + }, + "enabled": { + Type: schema.TypeBool, + Computed: true, + }, + "list_of_strings": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "list_of_numbers": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeInt, + }, + }, + "list_of_bools": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeBool, + }, + }, + "list_of_floats": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeFloat, + }, + }, + "nested_object": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "key": { + Type: schema.TypeString, + Computed: true, + }, + }, + }, + }, + "map_of_strings": { + Type: schema.TypeMap, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "map_of_numbers": { + Type: schema.TypeMap, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeInt, + }, + }, + "map_of_bools": { + Type: schema.TypeMap, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeBool, + }, + }, + "map_of_floats": { + Type: schema.TypeMap, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeFloat, + }, + }, + "set_of_strings": { + Type: schema.TypeSet, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "set_of_numbers": { + Type: schema.TypeSet, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeInt, + }, + }, + "set_of_bools": { + Type: schema.TypeSet, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeBool, + }, + }, + "set_of_floats": { + Type: schema.TypeSet, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeFloat, + }, + }, + }, + Create: func(d *schema.ResourceData, i interface{}) error { + wrapper := ResourceMetaData{ + ResourceData: d, + Logger: ConsoleLogger{}, + serializationDebugLogger: ConsoleLogger{}, + } + + input := MyType{ + Hello: "world", + RandomNumber: 42, + Enabled: true, + ListOfStrings: []string{"hello", "there"}, + ListOfNumbers: []int{1, 2, 4}, + ListOfBools: []bool{true, false}, + ListOfFloats: []float64{-1.234567894321, 2.3456789}, + NestedObject: []NestedType{ + { + Key: "value", + }, + }, + MapOfStrings: map[string]string{ + "bingo": "bango", + }, + MapOfNumbers: map[string]int{ + "lucky": 21, + }, + MapOfBools: map[string]bool{ + "friday": true, + }, + MapOfFloats: map[string]float64{ + "pi": 3.14159, + }, + } + + d.SetId("some-id") + if err := wrapper.Encode(&input); err != nil { + return fmt.Errorf("encoding: %+v", err) + } + return nil + }, + Read: func(d *schema.ResourceData, _ interface{}) error { + return nil + }, + Delete: func(_ *schema.ResourceData, _ interface{}) error { + return nil + }, + }, + }, + }, nil + }, + }, + Steps: []resource.TestStep{ + { + Config: `resource "validator_encoder" "test" {}`, + Check: resource.ComposeTestCheckFunc( + testCheckResourceStateMatches("validator_encoder.test", map[string]interface{}{ + "id": "some-id", + "hello": "world", + "random_number": "42", + "enabled": "true", + "list_of_strings.#": "2", 
+ "list_of_strings.0": "hello", + "list_of_strings.1": "there", + "list_of_numbers.#": "3", + "list_of_numbers.0": "1", + "list_of_numbers.1": "2", + "list_of_numbers.2": "4", + "list_of_bools.#": "2", + "list_of_bools.0": "true", + "list_of_bools.1": "false", + "list_of_floats.#": "2", + "list_of_floats.0": "-1.234567894321", + "list_of_floats.1": "2.3456789", + "nested_object.#": "1", + "nested_object.0.key": "value", + "map_of_strings.%": "1", + "map_of_strings.bingo": "bango", + "map_of_numbers.%": "1", + "map_of_numbers.lucky": "21", + "map_of_bools.%": "1", + "map_of_bools.friday": "true", + "map_of_floats.%": "1", + "map_of_floats.pi": "3.14159", + "set_of_bools.#": "0", + "set_of_floats.#": "0", + "set_of_numbers.#": "0", + "set_of_strings.#": "0", + }), + ), + }, + }, + }) +} + +func TestAccPluginSDKReturnsComputedFields(t *testing.T) { + os.Setenv("TF_ACC", "1") + + resourceName := "validator_computed.test" + // lintignore:AT001 + resource.ParallelTest(t, resource.TestCase{ + ProviderFactories: map[string]terraform.ResourceProviderFactory{ + "validator": func() (terraform.ResourceProvider, error) { + return &schema.Provider{ + DataSourcesMap: map[string]*schema.Resource{}, + ResourcesMap: map[string]*schema.Resource{ + "validator_computed": computedFieldsResource(), + }, + }, nil + }, + }, + Steps: []resource.TestStep{ + { + Config: `resource "validator_computed" "test" {}`, + Check: resource.ComposeTestCheckFunc( + testCheckResourceStateMatches(resourceName, map[string]interface{}{ + "id": "does-not-matter", + "hello": "world", + "random_number": "42", + "enabled": "true", + "list_of_strings.#": "2", + "list_of_strings.0": "hello", + "list_of_strings.1": "there", + "list_of_numbers.#": "3", + "list_of_numbers.0": "1", + "list_of_numbers.1": "2", + "list_of_numbers.2": "4", + "list_of_bools.#": "2", + "list_of_bools.0": "true", + "list_of_bools.1": "false", + "list_of_floats.#": "2", + "list_of_floats.0": "-1.234567894321", + "list_of_floats.1": "2.3456789", + "nested_object.#": "1", + "nested_object.0.key": "value", + // Sets can't really be computed, so this isn't that big a deal + }), + ), + }, + }, + }) +} + +func computedFieldsResource() *schema.Resource { + var readFunc = func(d *schema.ResourceData, _ interface{}) error { + d.Set("hello", "world") + d.Set("random_number", 42) + d.Set("enabled", true) + d.Set("list_of_strings", []string{"hello", "there"}) + d.Set("list_of_numbers", []int{1, 2, 4}) + d.Set("list_of_bools", []bool{true, false}) + d.Set("list_of_floats", []float64{-1.234567894321, 2.3456789}) + d.Set("nested_object", []interface{}{ + map[string]interface{}{ + "key": "value", + }, + }) + return nil + } + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + "hello": { + Type: schema.TypeString, + Computed: true, + }, + "random_number": { + Type: schema.TypeInt, + Computed: true, + }, + "enabled": { + Type: schema.TypeBool, + Computed: true, + }, + "list_of_strings": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "list_of_numbers": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeInt, + }, + }, + "list_of_bools": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeBool, + }, + }, + "list_of_floats": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeFloat, + }, + }, + "nested_object": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: 
map[string]*schema.Schema{ + "key": { + Type: schema.TypeString, + Computed: true, + }, + }, + }, + }, + }, + Create: func(d *schema.ResourceData, meta interface{}) error { + d.SetId("does-not-matter") + return readFunc(d, meta) + }, + Read: readFunc, + Delete: func(_ *schema.ResourceData, _ interface{}) error { + return nil + }, + } +} + +func testCheckResourceStateMatches(resourceName string, values map[string]interface{}) resource.TestCheckFunc { + return func(s *terraform.State) error { + resources, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("resource %q was not found in the state", resourceName) + } + + state := resources.Primary + if len(state.Attributes) != len(values) { + return fmt.Errorf("expected %d values but got %d.\n\nExpected: %+v\n\nActual: %+v", len(values), len(state.Attributes), values, state.Attributes) + } + + for key, expectedValue := range values { + actualValue, exists := state.Attributes[key] + if !exists { + return fmt.Errorf("key %q was not found", key) + } + + if !reflect.DeepEqual(expectedValue, actualValue) { + return fmt.Errorf("values didn't match for %q.\n\nExpected: %+v\n\nActual: %+v", key, expectedValue, actualValue) + } + } + + return nil + } +} diff --git a/azurerm/internal/sdk/resource.go b/azurerm/internal/sdk/resource.go new file mode 100644 index 000000000000..d156e78427ab --- /dev/null +++ b/azurerm/internal/sdk/resource.go @@ -0,0 +1,144 @@ +package sdk + +import ( + "context" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +type resourceBase interface { + // Arguments is a list of user-configurable (that is: Required, Optional, or Optional and Computed) + // arguments for this Resource + Arguments() map[string]*schema.Schema + + // Attributes is a list of read-only (e.g. Computed-only) attributes + Attributes() map[string]*schema.Schema + + // ModelObject is an instance of the object the Schema is decoded/encoded into + ModelObject() interface{} + + // ResourceType is the exposed name of this resource (e.g. `azurerm_example`) + ResourceType() string +} + +// A Data Source is an object which looks up information about an existing resource and returns +// this information for use elsewhere +// +// Notably not all Terraform Resources/Azure APIs make sense as a Data Source - this information +// has to be available consistently since these are queried on-demand +type DataSource interface { + resourceBase + + // Read is a ResourceFunc which looks up and sets field values into the Terraform State + Read() ResourceFunc +} + +// A Resource is an object which can be provisioned and managed by Terraform +// that is, Created, Retrieved, Deleted, Imported (and optionally, Updated, by implementing +// the 'ResourceWithUpdate' interface) + +// +// It's worth calling out that not all Azure APIs make sense as Terraform Resources - as a +// general rule, though, if an API supports CR(U)D it can be.
+type Resource interface { + resourceBase + + // Create will provision this resource using the information from the Terraform Configuration + // NOTE: the shim layer will automatically call the Read function once this has been created + // so it's no longer necessary to call this explicitly + Create() ResourceFunc + + // Read retrieves the latest values for this object and saves them into Terraform's State + Read() ResourceFunc + + // Delete will remove an existing resource using the information available in Terraform's State + Delete() ResourceFunc + + // IDValidationFunc returns the SchemaValidateFunc used to validate the ID is valid during + // `terraform import` - ensuring users don't inadvertently specify the incorrect Resource ID + IDValidationFunc() schema.SchemaValidateFunc +} + +// TODO: ResourceWithCustomizeDiff +// TODO: ResourceWithStateMigration +// TODO: a generic state migration for updating IDs + +type ResourceWithCustomImporter interface { + Resource + + // CustomImporter returns a ResourceRunFunc which allows overriding the import + CustomImporter() ResourceRunFunc +} + +// ResourceWithUpdate is an optional interface +// +// Notably the Arguments for Resources implementing this interface +// cannot be entirely ForceNew - else this interface implementation +// is superfluous. +type ResourceWithUpdate interface { + Resource + + // Update will make changes to this resource using the information from the Terraform Configuration/Plan + // NOTE: the shim layer will automatically call the Read function once this has been updated + // so it's no longer necessary to call this explicitly + Update() ResourceFunc +} + +// ResourceWithDeprecation is an optional interface +// +// Resources implementing this interface will be marked as Deprecated +// and output the DeprecationMessage during Terraform operations.
+type ResourceWithDeprecation interface { + Resource + + // DeprecationMessage returns the Deprecation message for this resource + // NOTE: this must return a non-empty string + DeprecationMessage() string +} + +// ResourceRunFunc is the function which can be run +// ctx provides a Context instance with the user-provided timeout +// metadata is a reference to an object containing the Client, ResourceData and a Logger +type ResourceRunFunc func(ctx context.Context, metadata ResourceMetaData) error + +type ResourceFunc struct { + // Func is the function which should be called for this Resource Func + // for example, during Read this is the Read function, during Update this is the Update function + Func ResourceRunFunc + + // Timeout is the default timeout, which can be overridden by users + // for this method - in-turn used for the Azure API + Timeout time.Duration +} + +type ResourceMetaData struct { + // Client is a reference to the Azure Providers Client - providing a typed reference to this object + Client *clients.Client + + // Logger provides a logger for debug purposes + Logger Logger + + // ResourceData is a reference to the ResourceData object from Terraform's Plugin SDK + // This is used to be able to call operations directly should Encode/Decode be insufficient + // for example, to determine if a field has changes + ResourceData *schema.ResourceData + + // serializationDebugLogger is used for testing purposes + serializationDebugLogger Logger +} + +// MarkAsGone marks this resource as removed in the Remote API, so this is no longer available +func (rmd ResourceMetaData) MarkAsGone() error { + rmd.ResourceData.SetId("") + return nil +} + +// ResourceRequiresImport returns an error saying that this resource must be imported with instructions +// on how to do this (namely, using `terraform import` +func (rmd ResourceMetaData) ResourceRequiresImport(resourceName string, idFormatter resourceid.Formatter) error { + resourceId := idFormatter.ID() + return tf.ImportAsExistsError(resourceName, resourceId) +} diff --git a/azurerm/internal/sdk/resource_decode.go b/azurerm/internal/sdk/resource_decode.go new file mode 100644 index 000000000000..cf9fac874616 --- /dev/null +++ b/azurerm/internal/sdk/resource_decode.go @@ -0,0 +1,205 @@ +package sdk + +import ( + "fmt" + "reflect" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" +) + +// Decode will decode the Terraform Schema into the specified object +// NOTE: this object must be passed by value - and must contain `tfschema` +// struct tags for all fields +// +// Example Usage: +// +// type Person struct { +// Name string `tfschema:"name" +// } +// var person Person +// if err := metadata.Decode(&person); err != nil { .. 
} +func (rmd ResourceMetaData) Decode(input interface{}) error { + return decodeReflectedType(input, rmd.ResourceData, rmd.serializationDebugLogger) +} + +// stateRetriever is a convenience wrapper around the Plugin SDK to be able to test it more accurately +type stateRetriever interface { + Get(key string) interface{} + GetOk(key string) (interface{}, bool) + GetOkExists(key string) (interface{}, bool) +} + +func decodeReflectedType(input interface{}, stateRetriever stateRetriever, debugLogger Logger) error { + if reflect.TypeOf(input).Kind() != reflect.Ptr { + return fmt.Errorf("need a pointer") + } + + objType := reflect.TypeOf(input).Elem() + for i := 0; i < objType.NumField(); i++ { + field := objType.Field(i) + debugLogger.Infof("Field", field) + + if val, exists := field.Tag.Lookup("tfschema"); exists { + tfschemaValue, valExists := stateRetriever.GetOkExists(val) + if !valExists { + continue + } + + debugLogger.Infof("TFSchemaValue: ", tfschemaValue) + debugLogger.Infof("Input Type: ", reflect.ValueOf(input).Elem().Field(i).Type()) + + fieldName := reflect.ValueOf(input).Elem().Field(i).String() + if err := setValue(input, tfschemaValue, i, fieldName, debugLogger); err != nil { + return err + } + } + } + return nil +} + +func setValue(input, tfschemaValue interface{}, index int, fieldName string, debugLogger Logger) (errOut error) { + debugLogger.Infof("setting list value for %q..", fieldName) + defer func() { + if r := recover(); r != nil { + debugLogger.Warnf("error setting value for %q: %+v", fieldName, r) + out, ok := r.(error) + if !ok { + return + } + + errOut = out + } + }() + + if v, ok := tfschemaValue.(string); ok { + debugLogger.Infof("[String] Decode %+v", v) + debugLogger.Infof("Input %+v", reflect.ValueOf(input)) + debugLogger.Infof("Input Elem %+v", reflect.ValueOf(input).Elem()) + reflect.ValueOf(input).Elem().Field(index).SetString(v) + return nil + } + + if v, ok := tfschemaValue.(int); ok { + debugLogger.Infof("[INT] Decode %+v", v) + reflect.ValueOf(input).Elem().Field(index).SetInt(int64(v)) + return nil + } + + if v, ok := tfschemaValue.(int32); ok { + debugLogger.Infof("[INT] Decode %+v", v) + reflect.ValueOf(input).Elem().Field(index).SetInt(int64(v)) + return nil + } + + if v, ok := tfschemaValue.(int64); ok { + debugLogger.Infof("[INT] Decode %+v", v) + reflect.ValueOf(input).Elem().Field(index).SetInt(v) + return nil + } + + if v, ok := tfschemaValue.(float64); ok { + debugLogger.Infof("[Float] Decode %+v", v) + reflect.ValueOf(input).Elem().Field(index).SetFloat(v) + return nil + } + + // Doesn't work for empty bools? 
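+	// (most likely because GetOkExists in decodeReflectedType can't distinguish
+	// an explicit `false` from the zero value, so a false/unset bool may never
+	// reach this branch)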
+ if v, ok := tfschemaValue.(bool); ok { + debugLogger.Infof("[BOOL] Decode %+v", v) + + reflect.ValueOf(input).Elem().Field(index).SetBool(v) + return nil + } + + if v, ok := tfschemaValue.(*schema.Set); ok { + return setListValue(input, index, fieldName, v.List(), debugLogger) + } + + if mapConfig, ok := tfschemaValue.(map[string]interface{}); ok { + mapOutput := reflect.MakeMap(reflect.ValueOf(input).Elem().Field(index).Type()) + for key, val := range mapConfig { + mapOutput.SetMapIndex(reflect.ValueOf(key), reflect.ValueOf(val)) + } + + reflect.ValueOf(input).Elem().Field(index).Set(mapOutput) + return nil + } + + if v, ok := tfschemaValue.([]interface{}); ok { + return setListValue(input, index, fieldName, v, debugLogger) + } + + return nil +} + +func setListValue(input interface{}, index int, fieldName string, v []interface{}, debugLogger Logger) error { + switch fieldType := reflect.ValueOf(input).Elem().Field(index).Type(); fieldType { + case reflect.TypeOf([]string{}): + stringSlice := reflect.MakeSlice(reflect.TypeOf([]string{}), len(v), len(v)) + for i, stringVal := range v { + stringSlice.Index(i).SetString(stringVal.(string)) + } + reflect.ValueOf(input).Elem().Field(index).Set(stringSlice) + + case reflect.TypeOf([]int{}): + iSlice := reflect.MakeSlice(reflect.TypeOf([]int{}), len(v), len(v)) + for i, iVal := range v { + iSlice.Index(i).SetInt(int64(iVal.(int))) + } + reflect.ValueOf(input).Elem().Field(index).Set(iSlice) + + case reflect.TypeOf([]float64{}): + fSlice := reflect.MakeSlice(reflect.TypeOf([]float64{}), len(v), len(v)) + for i, fVal := range v { + fSlice.Index(i).SetFloat(fVal.(float64)) + } + reflect.ValueOf(input).Elem().Field(index).Set(fSlice) + + case reflect.TypeOf([]bool{}): + bSlice := reflect.MakeSlice(reflect.TypeOf([]bool{}), len(v), len(v)) + for i, bVal := range v { + bSlice.Index(i).SetBool(bVal.(bool)) + } + reflect.ValueOf(input).Elem().Field(index).Set(bSlice) + + default: + valueToSet := reflect.MakeSlice(reflect.ValueOf(input).Elem().Field(index).Type(), 0, 0) + debugLogger.Infof("List Type", valueToSet.Type()) + + for _, mapVal := range v { + if test, ok := mapVal.(map[string]interface{}); ok && test != nil { + elem := reflect.New(fieldType.Elem()) + debugLogger.Infof("element ", elem) + for j := 0; j < elem.Type().Elem().NumField(); j++ { + nestedField := elem.Type().Elem().Field(j) + debugLogger.Infof("nestedField ", nestedField) + + if val, exists := nestedField.Tag.Lookup("tfschema"); exists { + nestedTFSchemaValue := test[val] + if err := setValue(elem.Interface(), nestedTFSchemaValue, j, fieldName, debugLogger); err != nil { + return err + } + } + } + + if !elem.CanSet() { + elem = elem.Elem() + } + + if valueToSet.Kind() == reflect.Ptr { + valueToSet.Elem().Set(reflect.Append(valueToSet.Elem(), elem)) + } else { + valueToSet = reflect.Append(valueToSet, elem) + } + + debugLogger.Infof("value to set type after changes", valueToSet.Type()) + } + } + + valueToSet = reflect.Indirect(valueToSet) + fieldToSet := reflect.ValueOf(input).Elem().Field(index) + fieldToSet.Set(reflect.Indirect(valueToSet)) + } + + return nil +} diff --git a/azurerm/internal/sdk/resource_decode_test.go b/azurerm/internal/sdk/resource_decode_test.go new file mode 100644 index 000000000000..a87e1f9ad99c --- /dev/null +++ b/azurerm/internal/sdk/resource_decode_test.go @@ -0,0 +1,742 @@ +package sdk + +import ( + "reflect" + "testing" +) + +type decodeTestData struct { + State map[string]interface{} + Input interface{} + Expected interface{} + ExpectError bool +} + 
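+// The tests below are table-driven: each builds a decodeTestData with a faked
+// Terraform state, decodes it into Input via decodeReflectedType and then
+// compares the result against Expected (see decodeTestData.test below).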
+func TestDecode_TopLevelFieldsRequired(t *testing.T) { + type SimpleType struct { + String string `tfschema:"string"` + Number int `tfschema:"number"` + Price float64 `tfschema:"price"` + Enabled bool `tfschema:"enabled"` + ListOfFloats []float64 `tfschema:"list_of_floats"` + ListOfNumbers []int `tfschema:"list_of_numbers"` + ListOfStrings []string `tfschema:"list_of_strings"` + MapOfBools map[string]bool `tfschema:"map_of_bools"` + MapOfNumbers map[string]int `tfschema:"map_of_numbers"` + MapOfStrings map[string]string `tfschema:"map_of_strings"` + } + decodeTestData{ + State: map[string]interface{}{ + "number": int64(42), + "price": float64(129.99), + "string": "world", + "enabled": true, + "list_of_floats": []interface{}{ + 1.0, + 2.0, + 3.0, + 1.234567890, + }, + "list_of_numbers": []interface{}{1, 2, 3}, + "list_of_strings": []interface{}{ + "have", + "you", + "heard", + }, + "map_of_bools": map[string]interface{}{ + "awesome_feature": true, + }, + "map_of_numbers": map[string]interface{}{ + "hello": 1, + "there": 3, + }, + "map_of_strings": map[string]interface{}{ + "hello": "there", + "salut": "tous les monde", + "guten": "tag", + "morning": "alvaro", + }, + }, + Input: &SimpleType{}, + Expected: &SimpleType{ + String: "world", + Price: 129.99, + Enabled: true, + Number: 42, + ListOfFloats: []float64{ + 1.0, + 2.0, + 3.0, + 1.234567890}, + ListOfNumbers: []int{1, 2, 3}, + ListOfStrings: []string{ + "have", + "you", + "heard", + }, + MapOfBools: map[string]bool{ + "awesome_feature": true, + }, + MapOfNumbers: map[string]int{ + "hello": 1, + "there": 3, + }, + MapOfStrings: map[string]string{ + "hello": "there", + "salut": "tous les monde", + "guten": "tag", + "morning": "alvaro", + }, + }, + ExpectError: false, + }.test(t) +} + +func TestDecode_TopLevelFieldsComputedNoValues(t *testing.T) { + // NOTE: this scenario covers Create without any existing Computed values + type SimpleType struct { + ComputedMapOfBools map[string]bool `tfschema:"computed_map_of_bools"` + ComputedMapOfFloats map[string]float64 `tfschema:"computed_map_of_floats"` + ComputedMapOfInts map[string]int `tfschema:"computed_map_of_ints"` + ComputedMapOfStrings map[string]string `tfschema:"computed_map_of_strings"` + } + decodeTestData{ + State: map[string]interface{}{ + "computed_map_of_bools": map[string]interface{}{}, + "computed_map_of_floats": map[string]interface{}{}, + "computed_map_of_ints": map[string]interface{}{}, + "computed_map_of_strings": map[string]interface{}{}, + }, + Input: &SimpleType{}, + Expected: &SimpleType{ + ComputedMapOfBools: map[string]bool{}, + ComputedMapOfFloats: map[string]float64{}, + ComputedMapOfInts: map[string]int{}, + ComputedMapOfStrings: map[string]string{}, + }, + ExpectError: false, + }.test(t) +} + +func TestDecode_TopLevelFieldsComputedWithValues(t *testing.T) { + // NOTE: this scenario covers Update/Read with existing Computed values or Computed/Optional + type SimpleType struct { + ComputedMapOfBools map[string]bool `tfschema:"computed_map_of_bools"` + ComputedMapOfFloats map[string]float64 `tfschema:"computed_map_of_floats"` + ComputedMapOfInts map[string]int `tfschema:"computed_map_of_ints"` + ComputedMapOfStrings map[string]string `tfschema:"computed_map_of_strings"` + } + decodeTestData{ + State: map[string]interface{}{ + "computed_map_of_bools": map[string]interface{}{ + "bingo": true, + "bango": false, + }, + "computed_map_of_floats": map[string]interface{}{ + "bingo": -2.197234, + "bango": 3.123456789, + }, + "computed_map_of_ints": map[string]interface{}{ + 
"bingo": 2197234, + "bango": 3123456789, + }, + "computed_map_of_strings": map[string]interface{}{ + "matthew": "brisket", + "tom": "coffee", + }, + }, + Input: &SimpleType{}, + Expected: &SimpleType{ + ComputedMapOfBools: map[string]bool{ + "bingo": true, + "bango": false, + }, + ComputedMapOfFloats: map[string]float64{ + "bingo": -2.197234, + "bango": 3.123456789, + }, + ComputedMapOfInts: map[string]int{ + "bingo": 2197234, + "bango": 3123456789, + }, + ComputedMapOfStrings: map[string]string{ + "matthew": "brisket", + "tom": "coffee", + }, + }, + ExpectError: false, + }.test(t) +} + +func TestDecode_TopLevelFieldsOptional(t *testing.T) { + type SimpleType struct { + String string `tfschema:"string"` + Number int `tfschema:"number"` + Price float64 `tfschema:"price"` + Enabled bool `tfschema:"enabled"` + ListOfFloats []float64 `tfschema:"list_of_floats"` + ListOfNumbers []int `tfschema:"list_of_numbers"` + ListOfStrings []string `tfschema:"list_of_strings"` + MapOfBools map[string]bool `tfschema:"map_of_bools"` + MapOfNumbers map[string]int `tfschema:"map_of_numbers"` + MapOfStrings map[string]string `tfschema:"map_of_strings"` + } + decodeTestData{ + State: map[string]interface{}{ + "number": int64(0), + "price": float64(0), + "string": "", + "enabled": false, + "list_of_floats": []float64{}, + "list_of_numbers": []int{}, + "list_of_strings": []string{}, + "map_of_bools": map[string]interface{}{}, + "map_of_numbers": map[string]interface{}{}, + "map_of_strings": map[string]interface{}{}, + }, + Input: &SimpleType{}, + Expected: &SimpleType{ + MapOfBools: map[string]bool{}, + MapOfNumbers: map[string]int{}, + MapOfStrings: map[string]string{}, + }, + ExpectError: false, + }.test(t) +} + +func TestDecode_TopLevelFieldsComputed(t *testing.T) { + type SimpleType struct { + ComputedString string `tfschema:"computed_string"` + ComputedNumber int `tfschema:"computed_number"` + ComputedBool bool `tfschema:"computed_bool"` + ComputedListOfNumbers []int `tfschema:"computed_list_of_numbers"` + ComputedListOfStrings []string `tfschema:"computed_list_of_strings"` + } + decodeTestData{ + State: map[string]interface{}{ + "computed_string": "je suis computed", + "computed_number": int64(732), + "computed_bool": true, + "computed_list_of_numbers": []interface{}{1, 2, 3}, + "computed_list_of_strings": []interface{}{ + "have", + "you", + "heard", + }, + }, + Input: &SimpleType{}, + Expected: &SimpleType{ + ComputedString: "je suis computed", + ComputedNumber: 732, + ComputedBool: true, + ComputedListOfNumbers: []int{1, 2, 3}, + ComputedListOfStrings: []string{ + "have", + "you", + "heard", + }, + }, + ExpectError: false, + }.test(t) +} + +func TestResourceDecode_NestedOneLevelDeepEmpty(t *testing.T) { + type Inner struct { + Value string `tfschema:"value"` + } + type Type struct { + NestedObject []Inner `tfschema:"inner"` + } + decodeTestData{ + State: map[string]interface{}{ + "inner": []interface{}{}, + }, + Input: &Type{}, + Expected: &Type{ + NestedObject: []Inner{}, + }, + }.test(t) +} + +func TestResourceDecode_NestedOneLevelDeepSingle(t *testing.T) { + type Inner struct { + String string `tfschema:"string"` + Number int `tfschema:"number"` + Price float64 `tfschema:"price"` + Enabled bool `tfschema:"enabled"` + ListOfFloats []float64 `tfschema:"list_of_floats"` + ListOfNumbers []int `tfschema:"list_of_numbers"` + ListOfStrings []string `tfschema:"list_of_strings"` + MapOfBools map[string]bool `tfschema:"map_of_bools"` + MapOfNumbers map[string]int `tfschema:"map_of_numbers"` + MapOfStrings 
map[string]string `tfschema:"map_of_strings"` + } + type Type struct { + NestedObject []Inner `tfschema:"inner"` + } + decodeTestData{ + State: map[string]interface{}{ + "inner": []interface{}{ + map[string]interface{}{ + "number": int64(42), + "price": float64(129.99), + "string": "world", + "enabled": true, + "list_of_floats": []interface{}{ + 1.0, + 2.0, + 3.0, + 1.234567890, + }, + "list_of_numbers": []interface{}{1, 2, 3}, + "list_of_strings": []interface{}{ + "have", + "you", + "heard", + }, + "map_of_bools": map[string]interface{}{ + "awesome_feature": true, + }, + "map_of_numbers": map[string]interface{}{ + "hello": 1, + "there": 3, + }, + "map_of_strings": map[string]interface{}{ + "hello": "there", + "salut": "tous les monde", + "guten": "tag", + "morning": "alvaro", + }, + }, + }, + }, + Input: &Type{}, + Expected: &Type{ + NestedObject: []Inner{ + { + String: "world", + Number: 42, + Price: 129.99, + Enabled: true, + ListOfFloats: []float64{ + 1.0, + 2.0, + 3.0, + 1.234567890}, + ListOfNumbers: []int{1, 2, 3}, + ListOfStrings: []string{ + "have", + "you", + "heard", + }, + MapOfBools: map[string]bool{ + "awesome_feature": true, + }, + MapOfNumbers: map[string]int{ + "hello": 1, + "there": 3, + }, + MapOfStrings: map[string]string{ + "hello": "there", + "salut": "tous les monde", + "guten": "tag", + "morning": "alvaro", + }, + }, + }, + }, + }.test(t) +} + +func TestResourceDecode_NestedOneLevelDeepSingleOmittedValues(t *testing.T) { + type Inner struct { + String string `tfschema:"string"` + Number int `tfschema:"number"` + Price float64 `tfschema:"price"` + Enabled bool `tfschema:"enabled"` + ListOfFloats []float64 `tfschema:"list_of_floats"` + ListOfNumbers []int `tfschema:"list_of_numbers"` + ListOfStrings []string `tfschema:"list_of_strings"` + MapOfBools map[string]bool `tfschema:"map_of_bools"` + MapOfNumbers map[string]int `tfschema:"map_of_numbers"` + MapOfStrings map[string]string `tfschema:"map_of_strings"` + } + type Type struct { + NestedObject []Inner `tfschema:"inner"` + } + decodeTestData{ + State: map[string]interface{}{ + "inner": []interface{}{ + map[string]interface{}{ + "number": int64(0), + "price": float64(0), + "string": "", + "enabled": false, + "list_of_floats": []float64{}, + "list_of_numbers": []int{}, + "list_of_strings": []string{}, + "map_of_bools": map[string]interface{}{}, + "map_of_numbers": map[string]interface{}{}, + "map_of_strings": map[string]interface{}{}, + }, + }, + }, + Input: &Type{}, + Expected: &Type{ + NestedObject: []Inner{ + { + MapOfBools: map[string]bool{}, + MapOfNumbers: map[string]int{}, + MapOfStrings: map[string]string{}, + }, + }, + }, + }.test(t) +} + +func TestResourceDecode_NestedOneLevelDeepSingleMultiple(t *testing.T) { + type Inner struct { + Value string `tfschema:"value"` + } + type Type struct { + NestedObject []Inner `tfschema:"inner"` + } + decodeTestData{ + State: map[string]interface{}{ + "inner": []interface{}{ + map[string]interface{}{ + "value": "first", + }, + map[string]interface{}{ + "value": "second", + }, + map[string]interface{}{ + "value": "third", + }, + }, + }, + Input: &Type{}, + Expected: &Type{ + NestedObject: []Inner{ + { + Value: "first", + }, + { + Value: "second", + }, + { + Value: "third", + }, + }, + }, + }.test(t) +} + +func TestResourceDecode_NestedThreeLevelsDeepEmpty(t *testing.T) { + type ThirdInner struct { + Value string `tfschema:"value"` + } + type SecondInner struct { + Third []ThirdInner `tfschema:"third"` + } + type FirstInner struct { + Second []SecondInner `tfschema:"second"` 
+ } + type Type struct { + First []FirstInner `tfschema:"first"` + } + + t.Log("Top Level Empty") + decodeTestData{ + State: map[string]interface{}{ + "first": []interface{}{}, + }, + Input: &Type{}, + Expected: &Type{ + First: []FirstInner{}, + }, + }.test(t) + + t.Log("Second Level Empty") + decodeTestData{ + State: map[string]interface{}{ + "first": []interface{}{ + map[string]interface{}{ + "second": []interface{}{}, + }, + }, + }, + Input: &Type{}, + Expected: &Type{ + First: []FirstInner{ + { + Second: []SecondInner{}, + }, + }, + }, + }.test(t) + + t.Log("Third Level Empty") + decodeTestData{ + State: map[string]interface{}{ + "first": []interface{}{ + map[string]interface{}{ + "second": []interface{}{ + map[string]interface{}{ + "third": []interface{}{}, + }, + }, + }, + }, + }, + Input: &Type{}, + Expected: &Type{ + First: []FirstInner{ + { + Second: []SecondInner{ + { + Third: []ThirdInner{}, + }, + }, + }, + }, + }, + }.test(t) +} + +func TestResourceDecode_NestedThreeLevelsDeepSingleItem(t *testing.T) { + type ThirdInner struct { + Value string `tfschema:"value"` + } + type SecondInner struct { + Third []ThirdInner `tfschema:"third"` + } + type FirstInner struct { + Second []SecondInner `tfschema:"second"` + } + type Type struct { + First []FirstInner `tfschema:"first"` + } + + decodeTestData{ + State: map[string]interface{}{ + "first": []interface{}{ + map[string]interface{}{ + "second": []interface{}{ + map[string]interface{}{ + "third": []interface{}{ + map[string]interface{}{ + "value": "salut", + }, + }, + }, + }, + }, + }, + }, + Input: &Type{}, + Expected: &Type{ + First: []FirstInner{ + { + Second: []SecondInner{ + { + Third: []ThirdInner{ + { + Value: "salut", + }, + }, + }, + }, + }, + }, + }, + }.test(t) +} + +func TestResourceDecode_NestedThreeLevelsDeepMultipleItems(t *testing.T) { + type ThirdInner struct { + Value string `tfschema:"value"` + } + type SecondInner struct { + Value string `tfschema:"value"` + Third []ThirdInner `tfschema:"third"` + } + type FirstInner struct { + Value string `tfschema:"value"` + Second []SecondInner `tfschema:"second"` + } + type Type struct { + First []FirstInner `tfschema:"first"` + } + + decodeTestData{ + State: map[string]interface{}{ + "first": []interface{}{ + map[string]interface{}{ + "value": "first - 1", + "second": []interface{}{ + map[string]interface{}{ + "value": "second - 1", + "third": []interface{}{ + map[string]interface{}{ + "value": "third - 1", + }, + map[string]interface{}{ + "value": "third - 2", + }, + map[string]interface{}{ + "value": "third - 3", + }, + }, + }, + map[string]interface{}{ + "value": "second - 2", + "third": []interface{}{ + map[string]interface{}{ + "value": "third - 4", + }, + map[string]interface{}{ + "value": "third - 5", + }, + map[string]interface{}{ + "value": "third - 6", + }, + }, + }, + }, + }, + map[string]interface{}{ + "value": "first - 2", + "second": []interface{}{ + map[string]interface{}{ + "value": "second - 3", + "third": []interface{}{ + map[string]interface{}{ + "value": "third - 7", + }, + map[string]interface{}{ + "value": "third - 8", + }, + }, + }, + map[string]interface{}{ + "value": "second - 4", + "third": []interface{}{ + map[string]interface{}{ + "value": "third - 9", + }, + }, + }, + }, + }, + }, + }, + Input: &Type{}, + Expected: &Type{ + First: []FirstInner{ + { + Value: "first - 1", + Second: []SecondInner{ + { + Value: "second - 1", + Third: []ThirdInner{ + { + Value: "third - 1", + }, + { + Value: "third - 2", + }, + { + Value: "third - 3", + }, + }, + }, + 
{ + Value: "second - 2", + Third: []ThirdInner{ + { + Value: "third - 4", + }, + { + Value: "third - 5", + }, + { + Value: "third - 6", + }, + }, + }, + }, + }, + { + Value: "first - 2", + Second: []SecondInner{ + { + Value: "second - 3", + Third: []ThirdInner{ + { + Value: "third - 7", + }, + { + Value: "third - 8", + }, + }, + }, + { + Value: "second - 4", + Third: []ThirdInner{ + { + Value: "third - 9", + }, + }, + }, + }, + }, + }, + }, + }.test(t) +} + +func (testData decodeTestData) test(t *testing.T) { + debugLogger := ConsoleLogger{} + state := testData.stateWrapper() + if err := decodeReflectedType(testData.Input, state, debugLogger); err != nil { + if testData.ExpectError { + // we're good + return + } + + t.Fatalf("unexpected error: %+v", err) + } + if testData.ExpectError { + t.Fatalf("expected an error but didn't get one!") + } + + if !reflect.DeepEqual(testData.Input, testData.Expected) { + t.Fatalf("\nExpected: %+v\n\n Received %+v\n\n", testData.Expected, testData.Input) + } +} + +func (testData decodeTestData) stateWrapper() testDataGetter { + return testDataGetter{ + values: testData.State, + } +} + +type testDataGetter struct { + values map[string]interface{} +} + +func (td testDataGetter) Get(key string) interface{} { + return td.values[key] +} + +func (td testDataGetter) GetOk(key string) (interface{}, bool) { + val, ok := td.values[key] + return val, ok +} + +func (td testDataGetter) GetOkExists(key string) (interface{}, bool) { + // for the purposes of this test this should be sufficient, maybe? + val, ok := td.values[key] + return val, ok +} diff --git a/azurerm/internal/sdk/resource_encode.go b/azurerm/internal/sdk/resource_encode.go new file mode 100644 index 000000000000..12f15d46129c --- /dev/null +++ b/azurerm/internal/sdk/resource_encode.go @@ -0,0 +1,141 @@ +package sdk + +import ( + "fmt" + "reflect" +) + +// Encode will encode the specified object into the Terraform State +// NOTE: this requires that the object passed in is a pointer and +// all fields contain `tfschema` struct tags +func (rmd ResourceMetaData) Encode(input interface{}) error { + if reflect.TypeOf(input).Kind() != reflect.Ptr { + return fmt.Errorf("need a pointer") + } + + objType := reflect.TypeOf(input).Elem() + objVal := reflect.ValueOf(input).Elem() + + fieldName := reflect.ValueOf(input).Elem().String() + serialized, err := recurse(objType, objVal, fieldName, rmd.serializationDebugLogger) + if err != nil { + return err + } + + for k, v := range serialized { + // lintignore:R001 + if err := rmd.ResourceData.Set(k, v); err != nil { + return fmt.Errorf("setting %q: %+v", k, err) + } + } + return nil +} + +func recurse(objType reflect.Type, objVal reflect.Value, fieldName string, debugLogger Logger) (output map[string]interface{}, errOut error) { + defer func() { + if r := recover(); r != nil { + debugLogger.Warnf("error setting value for %q: %+v", fieldName, r) + out, ok := r.(error) + if !ok { + return + } + + errOut = out + } + }() + + output = make(map[string]interface{}) + for i := 0; i < objType.NumField(); i++ { + field := objType.Field(i) + fieldVal := objVal.Field(i) + if tfschemaTag, exists := field.Tag.Lookup("tfschema"); exists { + switch field.Type.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + iv := fieldVal.Int() + debugLogger.Infof("Setting %q to %d", tfschemaTag, iv) + output[tfschemaTag] = iv + + case reflect.Float32, reflect.Float64: + fv := fieldVal.Float() + debugLogger.Infof("Setting %q to %f", tfschemaTag, fv) + 
output[tfschemaTag] = fv + + case reflect.String: + sv := fieldVal.String() + debugLogger.Infof("Setting %q to %q", tfschemaTag, sv) + output[tfschemaTag] = sv + + case reflect.Bool: + bv := fieldVal.Bool() + debugLogger.Infof("Setting %q to %t", tfschemaTag, bv) + output[tfschemaTag] = bv + + case reflect.Map: + iter := fieldVal.MapRange() + attr := make(map[string]interface{}) + for iter.Next() { + attr[iter.Key().String()] = iter.Value().Interface() + } + output[tfschemaTag] = attr + + case reflect.Slice: + sv := fieldVal.Slice(0, fieldVal.Len()) + attr := make([]interface{}, sv.Len()) + switch sv.Type() { + case reflect.TypeOf([]string{}): + debugLogger.Infof("Setting %q to []string", tfschemaTag) + if sv.Len() > 0 { + output[tfschemaTag] = sv.Interface() + } else { + output[tfschemaTag] = make([]string, 0) + } + + case reflect.TypeOf([]int{}): + debugLogger.Infof("Setting %q to []int", tfschemaTag) + if sv.Len() > 0 { + output[tfschemaTag] = sv.Interface() + } else { + output[tfschemaTag] = make([]int, 0) + } + + case reflect.TypeOf([]float64{}): + debugLogger.Infof("Setting %q to []float64", tfschemaTag) + if sv.Len() > 0 { + output[tfschemaTag] = sv.Interface() + } else { + output[tfschemaTag] = make([]float64, 0) + } + + case reflect.TypeOf([]bool{}): + debugLogger.Infof("Setting %q to []bool", tfschemaTag) + if sv.Len() > 0 { + output[tfschemaTag] = sv.Interface() + } else { + output[tfschemaTag] = make([]bool, 0) + } + + default: + for i := 0; i < sv.Len(); i++ { + debugLogger.Infof("[SLICE] Index %d is %q", i, sv.Index(i).Interface()) + debugLogger.Infof("[SLICE] Type %+v", sv.Type()) + nestedType := sv.Index(i).Type() + nestedValue := sv.Index(i) + + fieldName := field.Name + serialized, err := recurse(nestedType, nestedValue, fieldName, debugLogger) + if err != nil { + return nil, fmt.Errorf("serializing nested object %q: %+v", sv.Type(), exists) + } + attr[i] = serialized + } + debugLogger.Infof("[SLICE] Setting %q to %+v", tfschemaTag, attr) + output[tfschemaTag] = attr + } + default: + return output, fmt.Errorf("unknown type %+v for key %q", field.Type.Kind(), tfschemaTag) + } + } + } + + return output, nil +} diff --git a/azurerm/internal/sdk/resource_encode_test.go b/azurerm/internal/sdk/resource_encode_test.go new file mode 100644 index 000000000000..15273128c6d3 --- /dev/null +++ b/azurerm/internal/sdk/resource_encode_test.go @@ -0,0 +1,670 @@ +package sdk + +import ( + "reflect" + "testing" + + "github.com/google/go-cmp/cmp" +) + +type encodeTestData struct { + Input interface{} + Expected map[string]interface{} + ExpectError bool +} + +func TestResourceEncode_TopLevel(t *testing.T) { + type SimpleType struct { + String string `tfschema:"string"` + Number int `tfschema:"number"` + Price float64 `tfschema:"price"` + Enabled bool `tfschema:"enabled"` + ListOfFloats []float64 `tfschema:"list_of_floats"` + ListOfNumbers []int `tfschema:"list_of_numbers"` + ListOfStrings []string `tfschema:"list_of_strings"` + MapOfBools map[string]bool `tfschema:"map_of_bools"` + MapOfNumbers map[string]int `tfschema:"map_of_numbers"` + MapOfStrings map[string]string `tfschema:"map_of_strings"` + } + + encodeTestData{ + Input: &SimpleType{ + String: "world", + Number: 42, + Price: 129.99, + Enabled: true, + ListOfFloats: []float64{ + 1.0, + 2.0, + 3.0, + 1.234567890}, + ListOfNumbers: []int{1, 2, 3}, + ListOfStrings: []string{ + "have", + "you", + "heard", + }, + MapOfBools: map[string]bool{ + "awesome_feature": true, + }, + MapOfNumbers: map[string]int{ + "hello": 1, + "there": 3, + }, + 
MapOfStrings: map[string]string{ + "hello": "there", + "salut": "tous les monde", + "guten": "tag", + "morning": "alvaro", + }, + }, + Expected: map[string]interface{}{ + "number": int64(42), + "price": float64(129.99), + "string": "world", + "enabled": true, + "list_of_floats": []float64{ + 1.0, + 2.0, + 3.0, + 1.234567890, + }, + "list_of_numbers": []int{1, 2, 3}, + "list_of_strings": []string{ + "have", + "you", + "heard", + }, + "map_of_bools": map[string]interface{}{ + "awesome_feature": true, + }, + "map_of_numbers": map[string]interface{}{ + "hello": 1, + "there": 3, + }, + "map_of_strings": map[string]interface{}{ + "hello": "there", + "salut": "tous les monde", + "guten": "tag", + "morning": "alvaro", + }, + }, + }.test(t) +} + +func TestResourceEncode_TopLevelOmitted(t *testing.T) { + type SimpleType struct { + String string `tfschema:"string"` + Number int `tfschema:"number"` + Price float64 `tfschema:"price"` + Enabled bool `tfschema:"enabled"` + ListOfFloats []float64 `tfschema:"list_of_floats"` + ListOfNumbers []int `tfschema:"list_of_numbers"` + ListOfStrings []string `tfschema:"list_of_strings"` + MapOfBools map[string]bool `tfschema:"map_of_bools"` + MapOfNumbers map[string]int `tfschema:"map_of_numbers"` + MapOfStrings map[string]string `tfschema:"map_of_strings"` + } + encodeTestData{ + Input: &SimpleType{}, + Expected: map[string]interface{}{ + "number": int64(0), + "price": float64(0), + "string": "", + "enabled": false, + "list_of_floats": []float64{}, + "list_of_numbers": []int{}, + "list_of_strings": []string{}, + "map_of_bools": map[string]interface{}{}, + "map_of_numbers": map[string]interface{}{}, + "map_of_strings": map[string]interface{}{}, + }, + }.test(t) +} + +func TestResourceEncode_TopLevelComputed(t *testing.T) { + type SimpleType struct { + ComputedString string `tfschema:"computed_string" computed:"true"` + ComputedNumber int `tfschema:"computed_number" computed:"true"` + ComputedBool bool `tfschema:"computed_bool" computed:"true"` + ComputedListOfNumbers []int `tfschema:"computed_list_of_numbers" computed:"true"` + ComputedListOfStrings []string `tfschema:"computed_list_of_strings" computed:"true"` + ComputedMapOfBools map[string]bool `tfschema:"computed_map_of_bools" computed:"true"` + ComputedMapOfFloats map[string]float64 `tfschema:"computed_map_of_floats" computed:"true"` + ComputedMapOfInts map[string]int `tfschema:"computed_map_of_ints" computed:"true"` + ComputedMapOfStrings map[string]string `tfschema:"computed_map_of_strings" computed:"true"` + } + encodeTestData{ + Input: &SimpleType{ + ComputedString: "je suis computed", + ComputedNumber: 732, + ComputedBool: true, + ComputedListOfNumbers: []int{1, 2, 3}, + ComputedListOfStrings: []string{ + "have", + "you", + "heard", + }, + ComputedMapOfBools: map[string]bool{ + "hello": true, + "world": false, + }, + ComputedMapOfFloats: map[string]float64{ + "hello": 1.8965345678, + "world": 2.0, + }, + ComputedMapOfInts: map[string]int{ + "first": 1, + "second": 2, + "third": 3, + }, + ComputedMapOfStrings: map[string]string{ + "hello": "world", + "bingo": "bango", + }, + }, + Expected: map[string]interface{}{ + "computed_string": "je suis computed", + "computed_number": int64(732), + "computed_bool": true, + "computed_list_of_numbers": []int{1, 2, 3}, + "computed_list_of_strings": []string{ + "have", + "you", + "heard", + }, + "computed_map_of_bools": map[string]interface{}{ + "hello": true, + "world": false, + }, + "computed_map_of_floats": map[string]interface{}{ + "hello": 1.8965345678, + "world": 
2.0, + }, + "computed_map_of_ints": map[string]interface{}{ + "first": 1, + "second": 2, + "third": 3, + }, + "computed_map_of_strings": map[string]interface{}{ + "hello": "world", + "bingo": "bango", + }, + }, + }.test(t) +} + +func TestResourceEncode_NestedOneLevelDeepEmpty(t *testing.T) { + type Inner struct { + Value string `tfschema:"value"` + } + type Type struct { + NestedObject []Inner `tfschema:"inner"` + } + encodeTestData{ + Input: &Type{ + NestedObject: []Inner{}, + }, + Expected: map[string]interface{}{ + "inner": []interface{}{}, + }, + }.test(t) +} + +func TestResourceEncode_NestedOneLevelDeepSingle(t *testing.T) { + type Inner struct { + String string `tfschema:"string"` + Number int `tfschema:"number"` + Price float64 `tfschema:"price"` + Enabled bool `tfschema:"enabled"` + ListOfFloats []float64 `tfschema:"list_of_floats"` + ListOfNumbers []int `tfschema:"list_of_numbers"` + ListOfStrings []string `tfschema:"list_of_strings"` + MapOfBools map[string]bool `tfschema:"map_of_bools"` + MapOfNumbers map[string]int `tfschema:"map_of_numbers"` + MapOfStrings map[string]string `tfschema:"map_of_strings"` + } + type Type struct { + NestedObject []Inner `tfschema:"inner"` + } + encodeTestData{ + Input: &Type{ + NestedObject: []Inner{ + { + String: "world", + Number: 42, + Price: 129.99, + Enabled: true, + ListOfFloats: []float64{ + 1.0, + 2.0, + 3.0, + 1.234567890}, + ListOfNumbers: []int{1, 2, 3}, + ListOfStrings: []string{ + "have", + "you", + "heard", + }, + MapOfBools: map[string]bool{ + "awesome_feature": true, + }, + MapOfNumbers: map[string]int{ + "hello": 1, + "there": 3, + }, + MapOfStrings: map[string]string{ + "hello": "there", + "salut": "tous les monde", + "guten": "tag", + "morning": "alvaro", + }, + }, + }, + }, + Expected: map[string]interface{}{ + "inner": []interface{}{ + map[string]interface{}{ + "number": int64(42), + "price": float64(129.99), + "string": "world", + "enabled": true, + "list_of_floats": []float64{ + 1.0, + 2.0, + 3.0, + 1.234567890, + }, + "list_of_numbers": []int{1, 2, 3}, + "list_of_strings": []string{ + "have", + "you", + "heard", + }, + "map_of_bools": map[string]interface{}{ + "awesome_feature": true, + }, + "map_of_numbers": map[string]interface{}{ + "hello": 1, + "there": 3, + }, + "map_of_strings": map[string]interface{}{ + "hello": "there", + "salut": "tous les monde", + "guten": "tag", + "morning": "alvaro", + }, + }, + }, + }, + }.test(t) +} + +func TestResourceEncode_NestedOneLevelDeepSingleOmittedValues(t *testing.T) { + type Inner struct { + String string `tfschema:"string"` + Number int `tfschema:"number"` + Price float64 `tfschema:"price"` + Enabled bool `tfschema:"enabled"` + ListOfFloats []float64 `tfschema:"list_of_floats"` + ListOfNumbers []int `tfschema:"list_of_numbers"` + ListOfStrings []string `tfschema:"list_of_strings"` + MapOfBools map[string]bool `tfschema:"map_of_bools"` + MapOfNumbers map[string]int `tfschema:"map_of_numbers"` + MapOfStrings map[string]string `tfschema:"map_of_strings"` + ComputedMapOfBools map[string]bool `tfschema:"computed_map_of_bools" computed:"true"` + ComputedMapOfFloats map[string]float64 `tfschema:"computed_map_of_floats" computed:"true"` + ComputedMapOfInts map[string]int `tfschema:"computed_map_of_ints" computed:"true"` + ComputedMapOfStrings map[string]string `tfschema:"computed_map_of_strings" computed:"true"` + } + type Type struct { + NestedObject []Inner `tfschema:"inner"` + } + encodeTestData{ + Input: &Type{ + NestedObject: []Inner{ + {}, + }, + }, + Expected: 
map[string]interface{}{ + "inner": []interface{}{ + map[string]interface{}{ + "number": int64(0), + "price": float64(0), + "string": "", + "enabled": false, + "list_of_floats": []float64{}, + "list_of_numbers": []int{}, + "list_of_strings": []string{}, + "map_of_bools": map[string]interface{}{}, + "map_of_numbers": map[string]interface{}{}, + "map_of_strings": map[string]interface{}{}, + "computed_map_of_bools": map[string]interface{}{}, + "computed_map_of_floats": map[string]interface{}{}, + "computed_map_of_ints": map[string]interface{}{}, + "computed_map_of_strings": map[string]interface{}{}, + }, + }, + }, + }.test(t) +} + +func TestResourceEncode_NestedOneLevelDeepSingleMultiple(t *testing.T) { + type Inner struct { + Value string `tfschema:"value"` + } + type Type struct { + NestedObject []Inner `tfschema:"inner"` + } + encodeTestData{ + Input: &Type{ + NestedObject: []Inner{ + { + Value: "first", + }, + { + Value: "second", + }, + { + Value: "third", + }, + }, + }, + Expected: map[string]interface{}{ + "inner": []interface{}{ + map[string]interface{}{ + "value": "first", + }, + map[string]interface{}{ + "value": "second", + }, + map[string]interface{}{ + "value": "third", + }, + }, + }, + }.test(t) +} + +func TestResourceEncode_NestedThreeLevelsDeepEmpty(t *testing.T) { + type ThirdInner struct { + Value string `tfschema:"value"` + } + type SecondInner struct { + Third []ThirdInner `tfschema:"third"` + } + type FirstInner struct { + Second []SecondInner `tfschema:"second"` + } + type Type struct { + First []FirstInner `tfschema:"first"` + } + + t.Log("Top Level Empty") + encodeTestData{ + Input: &Type{ + First: []FirstInner{}, + }, + Expected: map[string]interface{}{ + "first": []interface{}{}, + }, + }.test(t) + + t.Log("Second Level Empty") + encodeTestData{ + Input: &Type{ + First: []FirstInner{ + { + Second: []SecondInner{}, + }, + }, + }, + Expected: map[string]interface{}{ + "first": []interface{}{ + map[string]interface{}{ + "second": []interface{}{}, + }, + }, + }, + }.test(t) + + t.Log("Third Level Empty") + encodeTestData{ + Input: &Type{ + First: []FirstInner{ + { + Second: []SecondInner{ + { + Third: []ThirdInner{}, + }, + }, + }, + }, + }, + Expected: map[string]interface{}{ + "first": []interface{}{ + map[string]interface{}{ + "second": []interface{}{ + map[string]interface{}{ + "third": []interface{}{}, + }, + }, + }, + }, + }, + }.test(t) +} + +func TestResourceEncode_NestedThreeLevelsDeepSingleItem(t *testing.T) { + type ThirdInner struct { + Value string `tfschema:"value"` + } + type SecondInner struct { + Third []ThirdInner `tfschema:"third"` + } + type FirstInner struct { + Second []SecondInner `tfschema:"second"` + } + type Type struct { + First []FirstInner `tfschema:"first"` + } + + encodeTestData{ + Input: &Type{ + First: []FirstInner{ + { + Second: []SecondInner{ + { + Third: []ThirdInner{ + { + Value: "salut", + }, + }, + }, + }, + }, + }, + }, + Expected: map[string]interface{}{ + "first": []interface{}{ + map[string]interface{}{ + "second": []interface{}{ + map[string]interface{}{ + "third": []interface{}{ + map[string]interface{}{ + "value": "salut", + }, + }, + }, + }, + }, + }, + }, + }.test(t) +} + +func TestResourceEncode_NestedThreeLevelsDeepMultipleItems(t *testing.T) { + type ThirdInner struct { + Value string `tfschema:"value"` + } + type SecondInner struct { + Value string `tfschema:"value"` + Third []ThirdInner `tfschema:"third"` + } + type FirstInner struct { + Value string `tfschema:"value"` + Second []SecondInner `tfschema:"second"` + } + 
type Type struct { + First []FirstInner `tfschema:"first"` + } + + encodeTestData{ + Input: &Type{ + First: []FirstInner{ + { + Value: "first - 1", + Second: []SecondInner{ + { + Value: "second - 1", + Third: []ThirdInner{ + { + Value: "third - 1", + }, + { + Value: "third - 2", + }, + { + Value: "third - 3", + }, + }, + }, + { + Value: "second - 2", + Third: []ThirdInner{ + { + Value: "third - 4", + }, + { + Value: "third - 5", + }, + { + Value: "third - 6", + }, + }, + }, + }, + }, + { + Value: "first - 2", + Second: []SecondInner{ + { + Value: "second - 3", + Third: []ThirdInner{ + { + Value: "third - 7", + }, + { + Value: "third - 8", + }, + }, + }, + { + Value: "second - 4", + Third: []ThirdInner{ + { + Value: "third - 9", + }, + }, + }, + }, + }, + }, + }, + Expected: map[string]interface{}{ + "first": []interface{}{ + map[string]interface{}{ + "value": "first - 1", + "second": []interface{}{ + map[string]interface{}{ + "value": "second - 1", + "third": []interface{}{ + map[string]interface{}{ + "value": "third - 1", + }, + map[string]interface{}{ + "value": "third - 2", + }, + map[string]interface{}{ + "value": "third - 3", + }, + }, + }, + map[string]interface{}{ + "value": "second - 2", + "third": []interface{}{ + map[string]interface{}{ + "value": "third - 4", + }, + map[string]interface{}{ + "value": "third - 5", + }, + map[string]interface{}{ + "value": "third - 6", + }, + }, + }, + }, + }, + map[string]interface{}{ + "value": "first - 2", + "second": []interface{}{ + map[string]interface{}{ + "value": "second - 3", + "third": []interface{}{ + map[string]interface{}{ + "value": "third - 7", + }, + map[string]interface{}{ + "value": "third - 8", + }, + }, + }, + map[string]interface{}{ + "value": "second - 4", + "third": []interface{}{ + map[string]interface{}{ + "value": "third - 9", + }, + }, + }, + }, + }, + }, + }, + }.test(t) +} + +func (testData encodeTestData) test(t *testing.T) { + objType := reflect.TypeOf(testData.Input).Elem() + objVal := reflect.ValueOf(testData.Input).Elem() + fieldName := reflect.ValueOf(testData.Input).Elem().String() + debugLogger := ConsoleLogger{} + + output, err := recurse(objType, objVal, fieldName, debugLogger) + if err != nil { + if testData.ExpectError { + // we're good + return + } + + t.Fatalf("encoding error: %+v", err) + } + if testData.ExpectError { + t.Fatalf("expected an error but didn't get one!") + } + + if !cmp.Equal(output, testData.Expected) { + t.Fatalf("Output mismatch:\n\n Expected: %+v\n\n Received: %+v\n\n", testData.Expected, output) + } +} diff --git a/azurerm/internal/sdk/resource_id.go b/azurerm/internal/sdk/resource_id.go new file mode 100644 index 000000000000..101da17c5ee6 --- /dev/null +++ b/azurerm/internal/sdk/resource_id.go @@ -0,0 +1,8 @@ +package sdk + +import "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" + +// SetID uses the specified ID Formatter to set the Resource ID +func (rmd ResourceMetaData) SetID(formatter resourceid.Formatter) { + rmd.ResourceData.SetId(formatter.ID()) +} diff --git a/azurerm/internal/sdk/resource_test.go b/azurerm/internal/sdk/resource_test.go new file mode 100644 index 000000000000..22f116fac667 --- /dev/null +++ b/azurerm/internal/sdk/resource_test.go @@ -0,0 +1,47 @@ +package sdk + +// TODO: make these more granular for the tests + +type ExampleObj struct { + Name string `tfschema:"name"` + Number int `tfschema:"number"` + Output string `tfschema:"output" computed:"true"` + Enabled bool `tfschema:"enabled"` + Networks []string 
`tfschema:"networks"` + NetworksSet []string `tfschema:"networks_set"` + IntList []int `tfschema:"int_list"` + IntSet []int `tfschema:"int_set"` + FloatList []float64 `tfschema:"float_list"` + FloatSet []float64 `tfschema:"float_set"` + BoolList []bool `tfschema:"bool_list"` + BoolSet []bool `tfschema:"bool_set"` + List []NetworkList `tfschema:"list"` + Set []NetworkSet `tfschema:"set"` + Float float64 `tfschema:"float"` + Map map[string]string `tfschema:"map"` +} + +type NetworkList struct { + Name string `tfschema:"name"` + Inner []NetworkInner `tfschema:"inner"` +} + +type NetworkListSet struct { + Name string `tfschema:"name"` +} + +type NetworkSet struct { + Name string `tfschema:"name"` + Inner []InnerInner `tfschema:"inner"` +} + +type NetworkInner struct { + Name string `tfschema:"name"` + Inner []InnerInner `tfschema:"inner"` + Set []NetworkListSet `tfschema:"set"` +} + +type InnerInner struct { + Name string `tfschema:"name"` + ShouldBeFine bool `tfschema:"should_be_fine"` +} diff --git a/azurerm/internal/sdk/service_registration.go b/azurerm/internal/sdk/service_registration.go new file mode 100644 index 000000000000..008bb05105fe --- /dev/null +++ b/azurerm/internal/sdk/service_registration.go @@ -0,0 +1,40 @@ +package sdk + +import "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + +// TypedServiceRegistration is a Service Registration using Types +// meaning that we can abstract on top of the Plugin SDK and use +// Native Types where possible +type TypedServiceRegistration interface { + // Name is the name of this Service + Name() string + + // PackagePath is the relative path to this package + PackagePath() string + + // DataSources returns a list of Data Sources supported by this Service + DataSources() []DataSource + + // Resources returns a list of Resources supported by this Service + Resources() []Resource + + // WebsiteCategories returns a list of categories which can be used for the sidebar + WebsiteCategories() []string +} + +// UntypedServiceRegistration is the interface used for untyped/raw Plugin SDK resources +// in the future this'll be superseded by the TypedServiceRegistration which allows for +// stronger Typed resources to be used. 
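+//
+// For example (illustrative only), an existing Service Registration satisfies
+// this by returning the raw Plugin SDK resources it already exposes:
+//
+//	func (r Registration) SupportedResources() map[string]*schema.Resource {
+//		return map[string]*schema.Resource{
+//			"azurerm_example": resourceExample(),
+//		}
+//	}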
+type UntypedServiceRegistration interface { + // Name is the name of this Service + Name() string + + // WebsiteCategories returns a list of categories which can be used for the sidebar + WebsiteCategories() []string + + // SupportedDataSources returns the supported Data Sources supported by this Service + SupportedDataSources() map[string]*schema.Resource + + // SupportedResources returns the supported Resources supported by this Service + SupportedResources() map[string]*schema.Resource +} diff --git a/azurerm/internal/sdk/wrapper_data_source.go b/azurerm/internal/sdk/wrapper_data_source.go new file mode 100644 index 000000000000..26dbcc8aa968 --- /dev/null +++ b/azurerm/internal/sdk/wrapper_data_source.go @@ -0,0 +1,56 @@ +package sdk + +import ( + "fmt" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" +) + +// DataSourceWrapper is a wrapper for converting a DataSource implementation +// into the object used by the Terraform Plugin SDK +type DataSourceWrapper struct { + dataSource DataSource + logger Logger +} + +// NewDataSourceWrapper returns a DataSourceWrapper for this Data Source implementation +func NewDataSourceWrapper(dataSource DataSource) DataSourceWrapper { + return DataSourceWrapper{ + dataSource: dataSource, + logger: ConsoleLogger{}, + } +} + +// DataSource returns the Terraform Plugin SDK type for this DataSource implementation +func (rw *DataSourceWrapper) DataSource() (*schema.Resource, error) { + resourceSchema, err := combineSchema(rw.dataSource.Arguments(), rw.dataSource.Attributes()) + if err != nil { + return nil, fmt.Errorf("building Schema: %+v", err) + } + + modelObj := rw.dataSource.ModelObject() + if err := ValidateModelObject(&modelObj); err != nil { + return nil, fmt.Errorf("validating model for %q: %+v", rw.dataSource.ResourceType(), err) + } + + var d = func(duration time.Duration) *time.Duration { + return &duration + } + + resource := schema.Resource{ + Schema: *resourceSchema, + Read: func(d *schema.ResourceData, meta interface{}) error { + ctx, metaData := runArgs(d, meta, rw.logger) + wrappedCtx, cancel := timeouts.ForRead(ctx, d) + defer cancel() + return rw.dataSource.Read().Func(wrappedCtx, metaData) + }, + Timeouts: &schema.ResourceTimeout{ + Read: d(rw.dataSource.Read().Timeout), + }, + } + + return &resource, nil +} diff --git a/azurerm/internal/sdk/wrapper_helpers.go b/azurerm/internal/sdk/wrapper_helpers.go new file mode 100644 index 000000000000..155e884153b7 --- /dev/null +++ b/azurerm/internal/sdk/wrapper_helpers.go @@ -0,0 +1,60 @@ +package sdk + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" +) + +// combineSchema combines the arguments (user-configurable) and attributes (read-only) schema fields +// into a canonical object - ensuring that each contains the relevant information +// +// whilst this may look overkill, this allows for simpler implementations in other tooling, for example +// when generating documentation +func combineSchema(arguments map[string]*schema.Schema, attributes map[string]*schema.Schema) (*map[string]*schema.Schema, error) { + out := make(map[string]*schema.Schema) + + for k, v := range arguments { + if _, alreadyExists := out[k]; alreadyExists { + return nil, fmt.Errorf("%q already exists in the schema", k) + } + + if v.Computed && !(v.Optional || v.Required) { + return nil, 
fmt.Errorf("%q is a Computed-only field - this should be specified as an Attribute", k) + } + + out[k] = v + } + + for k, v := range attributes { + if _, alreadyExists := out[k]; alreadyExists { + return nil, fmt.Errorf("%q already exists in the schema", k) + } + + if v.Optional || v.Required { + return nil, fmt.Errorf("%q is a user-specifyable field - this should be specified as an Argument", k) + } + + // every attribute has to be computed + v.Computed = true + out[k] = v + } + + return &out, nil +} + +func runArgs(d *schema.ResourceData, meta interface{}, logger Logger) (context.Context, ResourceMetaData) { + // NOTE: this is wrapped as a result of this function, so this is "fine" being unwrapped + stopContext := meta.(*clients.Client).StopContext + client := meta.(*clients.Client) + metaData := ResourceMetaData{ + Client: client, + Logger: logger, + ResourceData: d, + serializationDebugLogger: NullLogger{}, + } + + return stopContext, metaData +} diff --git a/azurerm/internal/sdk/wrapper_resource.go b/azurerm/internal/sdk/wrapper_resource.go new file mode 100644 index 000000000000..40e047a7b7fb --- /dev/null +++ b/azurerm/internal/sdk/wrapper_resource.go @@ -0,0 +1,147 @@ +package sdk + +import ( + "fmt" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" +) + +// ResourceWrapper is a wrapper for converting a Resource implementation +// into the object used by the Terraform Plugin SDK +type ResourceWrapper struct { + logger Logger + resource Resource +} + +// NewResourceWrapper returns a ResourceWrapper for this Resource implementation +func NewResourceWrapper(resource Resource) ResourceWrapper { + return ResourceWrapper{ + logger: ConsoleLogger{}, + resource: resource, + } +} + +// Resource returns the Terraform Plugin SDK type for this Resource implementation +func (rw *ResourceWrapper) Resource() (*schema.Resource, error) { + resourceSchema, err := combineSchema(rw.resource.Arguments(), rw.resource.Attributes()) + if err != nil { + return nil, fmt.Errorf("building Schema: %+v", err) + } + + modelObj := rw.resource.ModelObject() + if err := ValidateModelObject(&modelObj); err != nil { + return nil, fmt.Errorf("validating model for %q: %+v", rw.resource.ResourceType(), err) + } + + var d = func(duration time.Duration) *time.Duration { + return &duration + } + + resource := schema.Resource{ + Schema: *resourceSchema, + + Create: func(d *schema.ResourceData, meta interface{}) error { + ctx, metaData := runArgs(d, meta, rw.logger) + wrappedCtx, cancel := timeouts.ForCreate(ctx, d) + defer cancel() + err := rw.resource.Create().Func(wrappedCtx, metaData) + if err != nil { + return err + } + // NOTE: whilst this may look like we should use the Read + // functions timeout here, we're still /technically/ in the + // Create function so reusing that timeout should be sufficient + return rw.resource.Read().Func(wrappedCtx, metaData) + }, + + // looks like these could be reused, easiest if they're not + Read: func(d *schema.ResourceData, meta interface{}) error { + ctx, metaData := runArgs(d, meta, rw.logger) + wrappedCtx, cancel := timeouts.ForRead(ctx, d) + defer cancel() + return rw.resource.Read().Func(wrappedCtx, metaData) + }, + Delete: func(d *schema.ResourceData, meta interface{}) error { + ctx, metaData := runArgs(d, meta, rw.logger) + wrappedCtx, cancel := timeouts.ForDelete(ctx, d) + 
defer cancel() + return rw.resource.Delete().Func(wrappedCtx, metaData) + }, + + Timeouts: &schema.ResourceTimeout{ + Create: d(rw.resource.Create().Timeout), + Read: d(rw.resource.Read().Timeout), + Delete: d(rw.resource.Delete().Timeout), + }, + Importer: azSchema.ValidateResourceIDPriorToImportThen(func(id string) error { + fn := rw.resource.IDValidationFunc() + warnings, errors := fn(id, "id") + if len(warnings) > 0 { + for _, warning := range warnings { + rw.logger.Warn(warning) + } + } + if len(errors) > 0 { + out := "" + for _, error := range errors { + out += error.Error() + } + return fmt.Errorf(out) + } + + return err + }, func(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { + if v, ok := rw.resource.(ResourceWithCustomImporter); ok { + ctx, metaData := runArgs(d, meta, rw.logger) + wrappedCtx, cancel := timeouts.ForRead(ctx, d) + defer cancel() + + err := v.CustomImporter()(wrappedCtx, metaData) + if err != nil { + return nil, err + } + + return []*schema.ResourceData{metaData.ResourceData}, nil + } + + return schema.ImportStatePassthrough(d, meta) + }), + } + + // Not all resources support update - so this is an separate interface + // implementations can opt to interface + if v, ok := rw.resource.(ResourceWithUpdate); ok { + resource.Update = func(d *schema.ResourceData, meta interface{}) error { + ctx, metaData := runArgs(d, meta, rw.logger) + wrappedCtx, cancel := timeouts.ForUpdate(ctx, d) + defer cancel() + + err := v.Update().Func(wrappedCtx, metaData) + if err != nil { + return err + } + // whilst this may look like we should use the Update timeout here + // we're still "technically" in the update method, so reusing the + // Update's timeout should be fine + return rw.resource.Read().Func(wrappedCtx, metaData) + } + resource.Timeouts.Update = d(v.Update().Timeout) + } + + if v, ok := rw.resource.(ResourceWithDeprecation); ok { + message := v.DeprecationMessage() + if message == "" { + return nil, fmt.Errorf("Resource %q must return a non-empty DeprecationMessage if implementing ResourceWithDeprecation", rw.resource.ResourceType()) + } + + resource.DeprecationMessage = message + } + + // TODO: CustomizeDiff + // TODO: State Migrations + + return &resource, nil +} diff --git a/azurerm/internal/sdk/wrapper_validate.go b/azurerm/internal/sdk/wrapper_validate.go new file mode 100644 index 000000000000..4d53234e7e57 --- /dev/null +++ b/azurerm/internal/sdk/wrapper_validate.go @@ -0,0 +1,56 @@ +package sdk + +import ( + "fmt" + "reflect" + "strings" +) + +// ValidateModelObject validates that the object contains the specified `tfschema` tags +// required to be used with the Encode and Decode functions +func ValidateModelObject(input interface{}) error { + if reflect.TypeOf(input).Kind() != reflect.Ptr { + return fmt.Errorf("need a pointer") + } + + // TODO: could we also validate that each `tfschema` tag exists in the schema? 
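+	// e.g. a model such as `struct{ Name string }` (where Name has no `tfschema`
+	// tag) is rejected below - wrapper_validate_test.go covers these cases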
+ + objType := reflect.TypeOf(input).Elem() + objVal := reflect.ValueOf(input).Elem() + return validateModelObjectRecursively("", objType, objVal) +} + +func validateModelObjectRecursively(prefix string, objType reflect.Type, objVal reflect.Value) (errOut error) { + defer func() { + if r := recover(); r != nil { + out, ok := r.(error) + if !ok { + return + } + + errOut = out + } + }() + + for i := 0; i < objType.NumField(); i++ { + field := objType.Field(i) + fieldVal := objVal.Field(i) + + if field.Type.Kind() == reflect.Slice { + sv := fieldVal.Slice(0, fieldVal.Len()) + innerType := sv.Type().Elem() + innerVal := reflect.Indirect(reflect.New(innerType)) + fieldName := strings.TrimPrefix(fmt.Sprintf("%s.%s", prefix, field.Name), ".") + if err := validateModelObjectRecursively(fieldName, innerType, innerVal); err != nil { + return err + } + } + + if _, exists := field.Tag.Lookup("tfschema"); !exists { + fieldName := strings.TrimPrefix(fmt.Sprintf("%s.%s", prefix, field.Name), ".") + return fmt.Errorf("field %q is missing an `tfschema` label", fieldName) + } + } + + return nil +} diff --git a/azurerm/internal/sdk/wrapper_validate_test.go b/azurerm/internal/sdk/wrapper_validate_test.go new file mode 100644 index 000000000000..1d9d59276ca2 --- /dev/null +++ b/azurerm/internal/sdk/wrapper_validate_test.go @@ -0,0 +1,58 @@ +package sdk + +import "testing" + +func TestValidateTopLevelObjectValid(t *testing.T) { + type Person struct { + Name string `tfschema:"name"` + Age int `tfschema:"int"` + } + if err := ValidateModelObject(&Person{}); err != nil { + t.Fatalf("error: %+v", err) + } +} + +func TestValidateTopLevelObjectInvalid(t *testing.T) { + t.Log("Person1") + type Person1 struct { + Age int `json:"int"` + } + if err := ValidateModelObject(&Person1{}); err == nil { + t.Fatalf("expected an error but didn't get one") + } + + t.Log("Person2") + type Person2 struct { + Name string + } + if err := ValidateModelObject(&Person2{}); err == nil { + t.Fatalf("expected an error but didn't get one") + } +} + +func TestValidateNestedObjectValid(t *testing.T) { + type Pet struct { + Name string `tfschema:"name"` + } + type Person struct { + Name string `tfschema:"name"` + Pets []Pet `tfschema:"pets"` + } + if err := ValidateModelObject(&Person{}); err != nil { + t.Fatalf("error: %+v", err) + } +} + +func TestValidateNestedObjectInvalid(t *testing.T) { + type Pet struct { + Name string `tfschema:"name"` + Age int + } + type Person struct { + Name string `tfschema:"name"` + Pets []Pet `tfschema:"pets"` + } + if err := ValidateModelObject(&Person{}); err == nil { + t.Fatalf("expected an error but didn't get one") + } +} diff --git a/azurerm/internal/services/advisor/advisor_recommendations_data_source.go b/azurerm/internal/services/advisor/advisor_recommendations_data_source.go new file mode 100644 index 000000000000..c8466b79230e --- /dev/null +++ b/azurerm/internal/services/advisor/advisor_recommendations_data_source.go @@ -0,0 +1,213 @@ +package advisor + +import ( + "fmt" + "strings" + "time" + + "github.com/Azure/azure-sdk-for-go/services/advisor/mgmt/2020-01-01/advisor" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + uuid "github.com/satori/go.uuid" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" +) + +func 
dataSourceAdvisorRecommendations() *schema.Resource { + return &schema.Resource{ + Read: dataSourceAdvisorRecommendationsRead, + + Timeouts: &schema.ResourceTimeout{ + Read: schema.DefaultTimeout(10 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "filter_by_category": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringInSlice([]string{ + string(advisor.HighAvailability), + string(advisor.Security), + string(advisor.Performance), + string(advisor.Cost), + string(advisor.OperationalExcellence), + }, true), + }, + }, + + "filter_by_resource_groups": azure.SchemaResourceGroupNameSetOptional(), + + "recommendations": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "category": { + Type: schema.TypeString, + Computed: true, + }, + + "description": { + Type: schema.TypeString, + Computed: true, + }, + + "impact": { + Type: schema.TypeString, + Computed: true, + }, + + "recommendation_name": { + Type: schema.TypeString, + Computed: true, + }, + + "recommendation_type_id": { + Type: schema.TypeString, + Computed: true, + }, + + "resource_name": { + Type: schema.TypeString, + Computed: true, + }, + + "resource_type": { + Type: schema.TypeString, + Computed: true, + }, + + "suppression_names": { + Type: schema.TypeSet, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + + "updated_time": { + Type: schema.TypeString, + Computed: true, + }, + }, + }, + }, + }, + } +} + +func dataSourceAdvisorRecommendationsRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Advisor.RecommendationsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + filterList := make([]string, 0) + if categories := expandAzureRmAdvisorRecommendationsMapString("Category", d.Get("filter_by_category").(*schema.Set).List()); categories != "" { + filterList = append(filterList, categories) + } + if resGroups := expandAzureRmAdvisorRecommendationsMapString("ResourceGroup", d.Get("filter_by_resource_groups").(*schema.Set).List()); resGroups != "" { + filterList = append(filterList, resGroups) + } + + var recommends []advisor.ResourceRecommendationBase + for recommendationIterator, err := client.ListComplete(ctx, strings.Join(filterList, " and "), nil, ""); recommendationIterator.NotDone(); err = recommendationIterator.NextWithContext(ctx) { + if err != nil { + return fmt.Errorf("loading Advisor Recommendation List: %+v", err) + } + + if recommendationIterator.Value().Name == nil || *recommendationIterator.Value().Name == "" { + return fmt.Errorf("advisor Recommendation Name was nil or empty") + } + + recommends = append(recommends, recommendationIterator.Value()) + } + + if err := d.Set("recommendations", flattenAzureRmAdvisorRecommendations(recommends)); err != nil { + return fmt.Errorf("setting `recommendations`: %+v", err) + } + + d.SetId(fmt.Sprintf("avdisor/recommendations/%s", time.Now().UTC().String())) + + return nil +} + +func flattenAzureRmAdvisorRecommendations(recommends []advisor.ResourceRecommendationBase) []interface{} { + result := make([]interface{}, 0) + + if len(recommends) == 0 { + return result + } + + for _, v := range recommends { + var category, description, impact, recTypeId, resourceName, resourceType, updatedTime string + var suppressionIds []interface{} + if v.Category != "" { + category = string(v.Category) + } + + if v.ShortDescription != nil && 
v.ShortDescription.Problem != nil { + description = *v.ShortDescription.Problem + } + + if v.Impact != "" { + impact = string(v.Impact) + } + + if v.RecommendationTypeID != nil { + recTypeId = *v.RecommendationTypeID + } + + if v.ImpactedValue != nil { + resourceName = *v.ImpactedValue + } + + if v.ImpactedField != nil { + resourceType = *v.ImpactedField + } + + if v.SuppressionIds != nil { + suppressionIds = flattenSuppressionSlice(v.SuppressionIds) + } + if v.LastUpdated != nil && !v.LastUpdated.IsZero() { + updatedTime = v.LastUpdated.Format(time.RFC3339) + } + + result = append(result, map[string]interface{}{ + "category": category, + "description": description, + "impact": impact, + "recommendation_name": *v.Name, + "recommendation_type_id": recTypeId, + "resource_name": resourceName, + "resource_type": resourceType, + "suppression_names": suppressionIds, + "updated_time": updatedTime, + }) + } + + return result +} + +func expandAzureRmAdvisorRecommendationsMapString(t string, input []interface{}) string { + if len(input) == 0 { + return "" + } + result := make([]string, 0) + for _, v := range input { + result = append(result, fmt.Sprintf("%s eq '%s'", t, v.(string))) + } + return "(" + strings.Join(result, " or ") + ")" +} + +func flattenSuppressionSlice(input *[]uuid.UUID) []interface{} { + result := make([]interface{}, 0) + if input != nil { + for _, item := range *input { + result = append(result, item.String()) + } + } + return result +} diff --git a/azurerm/internal/services/advisor/advisor_recommendations_data_source_test.go b/azurerm/internal/services/advisor/advisor_recommendations_data_source_test.go new file mode 100644 index 000000000000..6eeb2d157751 --- /dev/null +++ b/azurerm/internal/services/advisor/advisor_recommendations_data_source_test.go @@ -0,0 +1,109 @@ +package advisor_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type AdvisorRecommendationsDataSourceTests struct{} + +func TestAccAdvisorRecommendationsDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_advisor_recommendations", "test") + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: AdvisorRecommendationsDataSourceTests{}.basicConfig(), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("recommendations.#").Exists(), + check.That(data.ResourceName).Key("recommendations.0.category").Exists(), + check.That(data.ResourceName).Key("recommendations.0.description").Exists(), + check.That(data.ResourceName).Key("recommendations.0.impact").Exists(), + check.That(data.ResourceName).Key("recommendations.0.recommendation_name").Exists(), + check.That(data.ResourceName).Key("recommendations.0.recommendation_type_id").Exists(), + check.That(data.ResourceName).Key("recommendations.0.resource_name").Exists(), + check.That(data.ResourceName).Key("recommendations.0.resource_type").Exists(), + check.That(data.ResourceName).Key("recommendations.0.updated_time").Exists(), + ), + }, + }) +} + +func TestAccAdvisorRecommendationsDataSource_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_advisor_recommendations", "test") + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: AdvisorRecommendationsDataSourceTests{}.completeConfig(data), + Check: resource.ComposeTestCheckFunc( + 
check.That(data.ResourceName).Key("recommendations.#").Exists(), + ), + }, + }) +} + +func TestAccAdvisorRecommendationsDataSource_categoriesFilter(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_advisor_recommendations", "test") + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: AdvisorRecommendationsDataSourceTests{}.categoriesFilterConfig(), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("recommendations.#").Exists(), + check.That(data.ResourceName).Key("recommendations.0.category").HasValue("Cost"), + ), + }, + }) +} + +func (AdvisorRecommendationsDataSourceTests) basicConfig() string { + return `provider "azurerm" { + features {} +} + +data "azurerm_advisor_recommendations" "test" {}` +} + +// Advisor generated recommendations needs long time to take effects, sometimes up to one day or more, +// Please refer to the issue https://github.com/Azure/azure-rest-api-specs/issues/9284 +// So here we get an empty list of recommendations +func (AdvisorRecommendationsDataSourceTests) completeConfig(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-advisor-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "accteststr%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + enable_https_traffic_only = false + account_tier = "Standard" + account_replication_type = "LRS" +} + +data "azurerm_advisor_recommendations" "test" { + filter_by_category = ["security"] + filter_by_resource_groups = [azurerm_resource_group.test.name] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (AdvisorRecommendationsDataSourceTests) categoriesFilterConfig() string { + return ` +provider "azurerm" { + features {} +} + +data "azurerm_advisor_recommendations" "test" { + filter_by_category = ["cost"] +}` +} diff --git a/azurerm/internal/services/advisor/data_source_advisor_recommendations.go b/azurerm/internal/services/advisor/data_source_advisor_recommendations.go deleted file mode 100644 index 8e8e3d2df081..000000000000 --- a/azurerm/internal/services/advisor/data_source_advisor_recommendations.go +++ /dev/null @@ -1,212 +0,0 @@ -package advisor - -import ( - "fmt" - "strings" - "time" - - "github.com/Azure/azure-sdk-for-go/services/advisor/mgmt/2020-01-01/advisor" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - uuid "github.com/satori/go.uuid" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" -) - -func dataSourceArmAdvisorRecommendations() *schema.Resource { - return &schema.Resource{ - Read: dataSourceArmAdvisorRecommendationsRead, - - Timeouts: &schema.ResourceTimeout{ - Read: schema.DefaultTimeout(10 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "filter_by_category": { - Type: schema.TypeSet, - Optional: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - ValidateFunc: validation.StringInSlice([]string{ - string(advisor.HighAvailability), - string(advisor.Security), - string(advisor.Performance), - string(advisor.Cost), - string(advisor.OperationalExcellence)}, true), - }, - }, - - "filter_by_resource_groups": 
azure.SchemaResourceGroupNameSetOptional(), - - "recommendations": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "category": { - Type: schema.TypeString, - Computed: true, - }, - - "description": { - Type: schema.TypeString, - Computed: true, - }, - - "impact": { - Type: schema.TypeString, - Computed: true, - }, - - "recommendation_name": { - Type: schema.TypeString, - Computed: true, - }, - - "recommendation_type_id": { - Type: schema.TypeString, - Computed: true, - }, - - "resource_name": { - Type: schema.TypeString, - Computed: true, - }, - - "resource_type": { - Type: schema.TypeString, - Computed: true, - }, - - "suppression_names": { - Type: schema.TypeSet, - Computed: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - }, - - "updated_time": { - Type: schema.TypeString, - Computed: true, - }, - }, - }, - }, - }, - } -} - -func dataSourceArmAdvisorRecommendationsRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Advisor.RecommendationsClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - filterList := make([]string, 0) - if categories := expandAzureRmAdvisorRecommendationsMapString("Category", d.Get("filter_by_category").(*schema.Set).List()); categories != "" { - filterList = append(filterList, categories) - } - if resGroups := expandAzureRmAdvisorRecommendationsMapString("ResourceGroup", d.Get("filter_by_resource_groups").(*schema.Set).List()); resGroups != "" { - filterList = append(filterList, resGroups) - } - - var recommends []advisor.ResourceRecommendationBase - for recommendationIterator, err := client.ListComplete(ctx, strings.Join(filterList, " and "), nil, ""); recommendationIterator.NotDone(); err = recommendationIterator.NextWithContext(ctx) { - if err != nil { - return fmt.Errorf("loading Advisor Recommendation List: %+v", err) - } - - if recommendationIterator.Value().Name == nil || *recommendationIterator.Value().Name == "" { - return fmt.Errorf("advisor Recommendation Name was nil or empty") - } - - recommends = append(recommends, recommendationIterator.Value()) - } - - if err := d.Set("recommendations", flattenAzureRmAdvisorRecommendations(recommends)); err != nil { - return fmt.Errorf("setting `recommendations`: %+v", err) - } - - d.SetId(fmt.Sprintf("avdisor/recommendations/%s", time.Now().UTC().String())) - - return nil -} - -func flattenAzureRmAdvisorRecommendations(recommends []advisor.ResourceRecommendationBase) []interface{} { - result := make([]interface{}, 0) - - if len(recommends) == 0 { - return result - } - - for _, v := range recommends { - var category, description, impact, recTypeId, resourceName, resourceType, updatedTime string - var suppressionIds []interface{} - if v.Category != "" { - category = string(v.Category) - } - - if v.ShortDescription != nil && v.ShortDescription.Problem != nil { - description = *v.ShortDescription.Problem - } - - if v.Impact != "" { - impact = string(v.Impact) - } - - if v.RecommendationTypeID != nil { - recTypeId = *v.RecommendationTypeID - } - - if v.ImpactedValue != nil { - resourceName = *v.ImpactedValue - } - - if v.ImpactedField != nil { - resourceType = *v.ImpactedField - } - - if v.SuppressionIds != nil { - suppressionIds = flattenSuppressionSlice(v.SuppressionIds) - } - if v.LastUpdated != nil && !v.LastUpdated.IsZero() { - updatedTime = v.LastUpdated.Format(time.RFC3339) - } - - result = append(result, map[string]interface{}{ - "category": category, - 
"description": description, - "impact": impact, - "recommendation_name": *v.Name, - "recommendation_type_id": recTypeId, - "resource_name": resourceName, - "resource_type": resourceType, - "suppression_names": suppressionIds, - "updated_time": updatedTime, - }) - } - - return result -} - -func expandAzureRmAdvisorRecommendationsMapString(t string, input []interface{}) string { - if len(input) == 0 { - return "" - } - result := make([]string, 0) - for _, v := range input { - result = append(result, fmt.Sprintf("%s eq '%s'", t, v.(string))) - } - return "(" + strings.Join(result, " or ") + ")" -} - -func flattenSuppressionSlice(input *[]uuid.UUID) []interface{} { - result := make([]interface{}, 0) - if input != nil { - for _, item := range *input { - result = append(result, item.String()) - } - } - return result -} diff --git a/azurerm/internal/services/advisor/registration.go b/azurerm/internal/services/advisor/registration.go index c342faee6b35..ba522a5415f2 100644 --- a/azurerm/internal/services/advisor/registration.go +++ b/azurerm/internal/services/advisor/registration.go @@ -21,7 +21,7 @@ func (r Registration) WebsiteCategories() []string { // SupportedDataSources returns the supported Data Sources supported by this Service func (r Registration) SupportedDataSources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_advisor_recommendations": dataSourceArmAdvisorRecommendations(), + "azurerm_advisor_recommendations": dataSourceAdvisorRecommendations(), } } diff --git a/azurerm/internal/services/advisor/tests/data_source_advisor_recommendations_test.go b/azurerm/internal/services/advisor/tests/data_source_advisor_recommendations_test.go deleted file mode 100644 index b0c34990b247..000000000000 --- a/azurerm/internal/services/advisor/tests/data_source_advisor_recommendations_test.go +++ /dev/null @@ -1,117 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMAdvisorRecommendations_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_advisor_recommendations", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccCheckArmAdvisorRecommendations_basic, - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "recommendations.#"), - resource.TestCheckResourceAttrSet(data.ResourceName, "recommendations.0.category"), - resource.TestCheckResourceAttrSet(data.ResourceName, "recommendations.0.description"), - resource.TestCheckResourceAttrSet(data.ResourceName, "recommendations.0.impact"), - resource.TestCheckResourceAttrSet(data.ResourceName, "recommendations.0.recommendation_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "recommendations.0.recommendation_type_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "recommendations.0.resource_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "recommendations.0.resource_type"), - resource.TestCheckResourceAttrSet(data.ResourceName, "recommendations.0.updated_time"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMAdvisorRecommendations_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_advisor_recommendations", "test") - - resource.ParallelTest(t, resource.TestCase{ - 
PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccCheckArmAdvisorRecommendations_complete(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "recommendations.#"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMAdvisorRecommendations_categoriesFilter(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_advisor_recommendations", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccCheckArmAdvisorRecommendations_categoriesFilter, - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "recommendations.#"), - resource.TestCheckResourceAttr(data.ResourceName, "recommendations.0.category", "Cost"), - ), - }, - }, - }) -} - -const testAccCheckArmAdvisorRecommendations_basic = ` -provider "azurerm" { - features {} -} - -data "azurerm_advisor_recommendations" "test" { } -` - -//Advisor genereate recommendations needs long time to take effects, sometimes up to one day or more, -//Please refer to the issue https://github.com/Azure/azure-rest-api-specs/issues/9284 -//So here we get an empty list of recommendations -func testAccCheckArmAdvisorRecommendations_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-advisor-%d" - location = "%s" -} - -resource "azurerm_storage_account" "test" { - name = "accteststr%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - enable_https_traffic_only = false - account_tier = "Standard" - account_replication_type = "LRS" -} - -data "azurerm_advisor_recommendations" "test" { - filter_by_category = ["security"] - filter_by_resource_groups = [azurerm_resource_group.test.name] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -const testAccCheckArmAdvisorRecommendations_categoriesFilter = ` -provider "azurerm" { - features {} -} - -data "azurerm_advisor_recommendations" "test" { - filter_by_category = ["cost"] -} -` diff --git a/azurerm/internal/services/analysisservices/analysis_services_server_resource.go b/azurerm/internal/services/analysisservices/analysis_services_server_resource.go index 898c28d37c94..93911d41f7d7 100644 --- a/azurerm/internal/services/analysisservices/analysis_services_server_resource.go +++ b/azurerm/internal/services/analysisservices/analysis_services_server_resource.go @@ -23,12 +23,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmAnalysisServicesServer() *schema.Resource { +func resourceAnalysisServicesServer() *schema.Resource { return &schema.Resource{ - Create: resourceArmAnalysisServicesServerCreate, - Read: resourceArmAnalysisServicesServerRead, - Update: resourceArmAnalysisServicesServerUpdate, - Delete: resourceArmAnalysisServicesServerDelete, + Create: resourceAnalysisServicesServerCreate, + Read: resourceAnalysisServicesServerRead, + Update: resourceAnalysisServicesServerUpdate, + Delete: resourceAnalysisServicesServerDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -38,7 +38,7 @@ func resourceArmAnalysisServicesServer() *schema.Resource { }, Importer: 
azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.AnalysisServicesServerID(id) + _, err := parse.ServerID(id) return err }), @@ -131,7 +131,7 @@ func resourceArmAnalysisServicesServer() *schema.Resource { } } -func resourceArmAnalysisServicesServerCreate(d *schema.ResourceData, meta interface{}) error { +func resourceAnalysisServicesServerCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).AnalysisServices.ServerClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -189,15 +189,15 @@ func resourceArmAnalysisServicesServerCreate(d *schema.ResourceData, meta interf d.SetId(*resp.ID) - return resourceArmAnalysisServicesServerRead(d, meta) + return resourceAnalysisServicesServerRead(d, meta) } -func resourceArmAnalysisServicesServerRead(d *schema.ResourceData, meta interface{}) error { +func resourceAnalysisServicesServerRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).AnalysisServices.ServerClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.AnalysisServicesServerID(d.Id()) + id, err := parse.ServerID(d.Id()) if err != nil { return err } @@ -246,14 +246,14 @@ func resourceArmAnalysisServicesServerRead(d *schema.ResourceData, meta interfac return tags.FlattenAndSet(d, server.Tags) } -func resourceArmAnalysisServicesServerUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceAnalysisServicesServerUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).AnalysisServices.ServerClient ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) defer cancel() log.Printf("[INFO] preparing arguments for Azure ARM Analysis Services Server update.") - id, err := parse.AnalysisServicesServerID(d.Id()) + id, err := parse.ServerID(d.Id()) if err != nil { return err } @@ -310,15 +310,15 @@ func resourceArmAnalysisServicesServerUpdate(d *schema.ResourceData, meta interf } } - return resourceArmAnalysisServicesServerRead(d, meta) + return resourceAnalysisServicesServerRead(d, meta) } -func resourceArmAnalysisServicesServerDelete(d *schema.ResourceData, meta interface{}) error { +func resourceAnalysisServicesServerDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).AnalysisServices.ServerClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.AnalysisServicesServerID(d.Id()) + id, err := parse.ServerID(d.Id()) if err != nil { return err } diff --git a/azurerm/internal/services/analysisservices/analysis_services_server_resource_test.go b/azurerm/internal/services/analysisservices/analysis_services_server_resource_test.go index 9b045f07d49d..f19b51b21b8f 100644 --- a/azurerm/internal/services/analysisservices/analysis_services_server_resource_test.go +++ b/azurerm/internal/services/analysisservices/analysis_services_server_resource_test.go @@ -1,6 +1,7 @@ package analysisservices_test import ( + "context" "fmt" "os" "strings" @@ -10,232 +11,208 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/terraform" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/analysisservices/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) +type AnalysisServicesServerResource struct { +} + func TestAccAnalysisServicesServer_basic(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_analysis_services_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAnalysisServicesServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMAnalysisServicesServer_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAnalysisServicesServerExists(data.ResourceName), - ), - }, - data.ImportStep(), + r := AnalysisServicesServerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), }, + data.ImportStep(), }) } func TestAccAnalysisServicesServer_withTags(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_analysis_services_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAnalysisServicesServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMAnalysisServicesServer_withTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAnalysisServicesServerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.label", "test"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMAnalysisServicesServer_withTagsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAnalysisServicesServerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.label", "test1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.ENV", "prod"), - ), - }, - data.ImportStep(), + r := AnalysisServicesServerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.label").HasValue("test"), + ), + }, + data.ImportStep(), + { + Config: r.withTagsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("2"), + check.That(data.ResourceName).Key("tags.label").HasValue("test1"), + check.That(data.ResourceName).Key("tags.ENV").HasValue("prod"), + ), }, + data.ImportStep(), }) } func TestAccAnalysisServicesServer_querypoolConnectionMode(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_analysis_services_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAnalysisServicesServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMAnalysisServicesServer_querypoolConnectionMode(data, "All"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAnalysisServicesServerExists(data.ResourceName), - 
resource.TestCheckResourceAttr(data.ResourceName, "querypool_connection_mode", "All"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMAnalysisServicesServer_querypoolConnectionMode(data, "ReadOnly"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAnalysisServicesServerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "querypool_connection_mode", "ReadOnly"), - ), - }, - data.ImportStep(), + r := AnalysisServicesServerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.querypoolConnectionMode(data, "All"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("querypool_connection_mode").HasValue("All"), + ), + }, + data.ImportStep(), + { + Config: r.querypoolConnectionMode(data, "ReadOnly"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("querypool_connection_mode").HasValue("ReadOnly"), + ), }, + data.ImportStep(), }) } func TestAccAnalysisServicesServer_firewallSettings(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_analysis_services_server", "test") - config1 := testAccAzureRMAnalysisServicesServer_firewallSettings1(data, true) - - config2 := testAccAzureRMAnalysisServicesServer_firewallSettings2(data, false) - - config3 := testAccAzureRMAnalysisServicesServer_firewallSettings3(data, true) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAnalysisServicesServerDestroy, - Steps: []resource.TestStep{ - { - Config: config1, - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAnalysisServicesServerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "enable_power_bi_service", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "ipv4_firewall_rule.#", "0"), - ), - }, - data.ImportStep(), - { - Config: config2, - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAnalysisServicesServerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "enable_power_bi_service", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "ipv4_firewall_rule.#", "1"), - ), - }, - data.ImportStep(), - { - Config: config3, - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAnalysisServicesServerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "enable_power_bi_service", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "ipv4_firewall_rule.#", "2"), - ), - }, - data.ImportStep(), + r := AnalysisServicesServerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.firewallSettings1(data, true), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("enable_power_bi_service").HasValue("true"), + check.That(data.ResourceName).Key("ipv4_firewall_rule.#").HasValue("0"), + ), + }, + data.ImportStep(), + { + Config: r.firewallSettings2(data, false), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("enable_power_bi_service").HasValue("false"), + check.That(data.ResourceName).Key("ipv4_firewall_rule.#").HasValue("1"), + ), + }, + data.ImportStep(), + { + Config: r.firewallSettings3(data, true), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), 
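+					// firewallSettings3 re-enables the Power BI service and defines two explicit
+					// IPv4 firewall rules; both are asserted via the state checks below.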
+ check.That(data.ResourceName).Key("enable_power_bi_service").HasValue("true"), + check.That(data.ResourceName).Key("ipv4_firewall_rule.#").HasValue("2"), + ), }, + data.ImportStep(), }) } // ARM_ACC_EMAIL1 and ARM_ACC_EMAIL2 must be set and existing emails in the tenant's AD to work properly func TestAccAzureRMAnalysisServicesServer_adminUsers(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_analysis_services_server", "test") + const ArmAccAdminEmail1 = "ARM_ACCTEST_ADMIN_EMAIL1" const ArmAccAdminEmail2 = "ARM_ACCTEST_ADMIN_EMAIL2" + if os.Getenv(ArmAccAdminEmail1) == "" || os.Getenv(ArmAccAdminEmail2) == "" { t.Skip(fmt.Sprintf("Acceptance test skipped unless env '%s' and '%s' set", ArmAccAdminEmail1, ArmAccAdminEmail2)) return } - data := acceptance.BuildTestData(t, "azurerm_analysis_services_server", "test") email1 := os.Getenv(ArmAccAdminEmail1) email2 := os.Getenv(ArmAccAdminEmail2) preAdminUsers := []string{email1} postAdminUsers := []string{email1, email2} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAnalysisServicesServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMAnalysisServicesServer_adminUsers(data, preAdminUsers), - }, - data.ImportStep(), - { - Config: testAccAzureRMAnalysisServicesServer_adminUsers(data, postAdminUsers), - }, - data.ImportStep(), + r := AnalysisServicesServerResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.adminUsers(data, preAdminUsers), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), }, + data.ImportStep(), + { + Config: r.adminUsers(data, postAdminUsers), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), }) } func TestAccAzureRMAnalysisServicesServer_serverFullName(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_analysis_services_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAnalysisServicesServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMAnalysisServicesServer_serverFullName(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAnalysisServicesServerExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "server_full_name"), - ), - }, - data.ImportStep(), + r := AnalysisServicesServerResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.serverFullName(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("server_full_name").Exists(), + ), }, + data.ImportStep(), }) } func TestAccAzureRMAnalysisServicesServer_backupBlobContainerUri(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_analysis_services_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAnalysisServicesServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMAnalysisServicesServer_backupBlobContainerUri(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAnalysisServicesServerExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "backup_blob_container_uri"), - ), - }, + r := 
AnalysisServicesServerResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.backupBlobContainerUri(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("backup_blob_container_uri").Exists(), + ), }, + data.ImportStep("backup_blob_container_uri"), }) } func TestAccAzureRMAnalysisServicesServer_suspended(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_analysis_services_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAnalysisServicesServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMAnalysisServicesServer_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAnalysisServicesServerExists(data.ResourceName), - testSuspendAzureRMAnalysisServicesServer(data.ResourceName), - testCheckAzureRMAnalysisServicesServerState(data.ResourceName, analysisservices.StatePaused), - ), - }, - { - Config: testAccAzureRMAnalysisServicesServer_scale(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "sku", "S1"), - testCheckAzureRMAnalysisServicesServerState(data.ResourceName, analysisservices.StatePaused), - ), - }, + r := AnalysisServicesServerResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + data.CheckWithClient(r.suspend), + data.CheckWithClient(r.checkState(analysisservices.StatePaused)), + ), }, + data.ImportStep(), + { + Config: r.scale(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("sku").HasValue("S1"), + data.CheckWithClient(r.checkState(analysisservices.StatePaused)), + ), + }, + data.ImportStep(), }) } -func testAccAzureRMAnalysisServicesServer_basic(data acceptance.TestData) string { +func (t AnalysisServicesServerResource) basic(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -255,7 +232,7 @@ resource "azurerm_analysis_services_server" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger) } -func testAccAzureRMAnalysisServicesServer_withTags(data acceptance.TestData) string { +func (t AnalysisServicesServerResource) withTags(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -279,7 +256,7 @@ resource "azurerm_analysis_services_server" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger) } -func testAccAzureRMAnalysisServicesServer_withTagsUpdate(data acceptance.TestData) string { +func (t AnalysisServicesServerResource) withTagsUpdate(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -304,7 +281,7 @@ resource "azurerm_analysis_services_server" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger) } -func testAccAzureRMAnalysisServicesServer_querypoolConnectionMode(data acceptance.TestData, connectionMode string) string { +func (t AnalysisServicesServerResource) querypoolConnectionMode(data acceptance.TestData, connectionMode string) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -325,7 +302,7 @@ resource "azurerm_analysis_services_server" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, connectionMode) } -func 
testAccAzureRMAnalysisServicesServer_firewallSettings1(data acceptance.TestData, enablePowerBIService bool) string { +func (t AnalysisServicesServerResource) firewallSettings1(data acceptance.TestData, enablePowerBIService bool) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -346,7 +323,7 @@ resource "azurerm_analysis_services_server" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, enablePowerBIService) } -func testAccAzureRMAnalysisServicesServer_firewallSettings2(data acceptance.TestData, enablePowerBIService bool) string { +func (t AnalysisServicesServerResource) firewallSettings2(data acceptance.TestData, enablePowerBIService bool) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -373,7 +350,7 @@ resource "azurerm_analysis_services_server" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, enablePowerBIService) } -func testAccAzureRMAnalysisServicesServer_firewallSettings3(data acceptance.TestData, enablePowerBIService bool) string { +func (t AnalysisServicesServerResource) firewallSettings3(data acceptance.TestData, enablePowerBIService bool) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -406,7 +383,7 @@ resource "azurerm_analysis_services_server" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, enablePowerBIService) } -func testAccAzureRMAnalysisServicesServer_adminUsers(data acceptance.TestData, adminUsers []string) string { +func (t AnalysisServicesServerResource) adminUsers(data acceptance.TestData, adminUsers []string) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -427,14 +404,14 @@ resource "azurerm_analysis_services_server" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, strings.Join(adminUsers, "\", \"")) } -func testAccAzureRMAnalysisServicesServer_serverFullName(data acceptance.TestData) string { +func (t AnalysisServicesServerResource) serverFullName(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} } resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" + name = "acctestRG-analysis-%d" location = "%s" } @@ -447,14 +424,14 @@ resource "azurerm_analysis_services_server" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger) } -func testAccAzureRMAnalysisServicesServer_backupBlobContainerUri(data acceptance.TestData) string { +func (t AnalysisServicesServerResource) backupBlobContainerUri(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} } resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" + name = "acctestRG-analysis-%d" location = "%s" } @@ -502,14 +479,14 @@ resource "azurerm_analysis_services_server" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) } -func testAccAzureRMAnalysisServicesServer_scale(data acceptance.TestData) string { +func (t AnalysisServicesServerResource) scale(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} } resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" + name = "acctestRG-analysis-%d" location = "%s" } @@ -522,103 +499,46 @@ resource "azurerm_analysis_services_server" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger) } -func testCheckAzureRMAnalysisServicesServerDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).AnalysisServices.ServerClient - ctx := 
acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_analysis_services_server" { - continue - } - - id, err := parse.AnalysisServicesServerID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := client.GetDetails(ctx, id.ResourceGroup, id.Name) - - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - return err - } +func (t AnalysisServicesServerResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ServerID(state.ID) + if err != nil { + return nil, err + } - return nil + resp, err := clients.AnalysisServices.ServerClient.GetDetails(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Analysis Services Server %q (resource group: %q): %+v", id.Name, id.ResourceGroup, err) } - return nil + return utils.Bool(resp.ServerProperties != nil), nil } -func testCheckAzureRMAnalysisServicesServerExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).AnalysisServices.ServerClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } +func (t AnalysisServicesServerResource) suspend(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) error { + client := clients.AnalysisServices.ServerClient - id, err := parse.AnalysisServicesServerID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := client.GetDetails(ctx, id.ResourceGroup, id.Name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Analysis Services Server %q (resource group: %q) does not exist", id.Name, id.ResourceGroup) - } - - return fmt.Errorf("Bad: Get on analysisServicesServerClient: %+v", err) - } - - return nil + id, err := parse.ServerID(state.ID) + if err != nil { + return err } -} -func testSuspendAzureRMAnalysisServicesServer(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).AnalysisServices.ServerClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.AnalysisServicesServerID(rs.Primary.ID) - if err != nil { - return err - } - - suspendFuture, err := client.Suspend(ctx, id.ResourceGroup, id.Name) - if err != nil { - return fmt.Errorf("Bad: Suspend on analysisServicesServerClient: %+v", err) - } - - err = suspendFuture.WaitForCompletionRef(ctx, client.Client) - if err != nil { - return fmt.Errorf("Bad: Wait for Suspend completion on analysisServicesServerClient: %+v", err) - } + suspendFuture, err := client.Suspend(ctx, id.ResourceGroup, id.Name) + if err != nil { + return fmt.Errorf("suspending Analysis Services Server %q (resource group: %q): %+v", id.Name, id.ResourceGroup, err) + } - return nil + err = suspendFuture.WaitForCompletionRef(ctx, client.Client) + if err != nil { + return fmt.Errorf("Wait for Suspend on Analysis Services Server %q (resource group: %q): %+v", id.Name, id.ResourceGroup, err) } -} -func testCheckAzureRMAnalysisServicesServerState(resourceName 
string, state analysisservices.State) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).AnalysisServices.ServerClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + return nil +} - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } +func (t AnalysisServicesServerResource) checkState(serverState analysisservices.State) acceptance.ClientCheckFunc { + return func(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) error { + client := clients.AnalysisServices.ServerClient - id, err := parse.AnalysisServicesServerID(rs.Primary.ID) + id, err := parse.ServerID(state.ID) if err != nil { return err } @@ -628,8 +548,8 @@ func testCheckAzureRMAnalysisServicesServerState(resourceName string, state anal return fmt.Errorf("Bad: Get on analysisServicesServerClient: %+v", err) } - if resp.State != state { - return fmt.Errorf("Unexpected state. Expected %s but is %s", state, resp.State) + if resp.State != serverState { + return fmt.Errorf("Unexpected state. Expected %s but is %s", serverState, resp.State) } return nil diff --git a/azurerm/internal/services/analysisservices/parse/analysis_services_server.go b/azurerm/internal/services/analysisservices/parse/analysis_services_server.go deleted file mode 100644 index 65ed5f5f5c22..000000000000 --- a/azurerm/internal/services/analysisservices/parse/analysis_services_server.go +++ /dev/null @@ -1,33 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type AnalysisServicesServerId struct { - ResourceGroup string - Name string -} - -func AnalysisServicesServerID(input string) (*AnalysisServicesServerId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Analysis Services Server ID %q: %+v", input, err) - } - - server := AnalysisServicesServerId{ - ResourceGroup: id.ResourceGroup, - } - - if server.Name, err = id.PopSegment("servers"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &server, nil -} diff --git a/azurerm/internal/services/analysisservices/parse/analysis_services_server_test.go b/azurerm/internal/services/analysisservices/parse/analysis_services_server_test.go deleted file mode 100644 index af118e67e61e..000000000000 --- a/azurerm/internal/services/analysisservices/parse/analysis_services_server_test.go +++ /dev/null @@ -1,73 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestAnalysisServicesServerId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *AnalysisServicesServerId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Servers Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Web/servers/", - Expected: nil, - }, - { - Name: "Analysis Service Server ID", - Input: 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Web/servers/Server1", - Expected: &AnalysisServicesServerId{ - Name: "Server1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Web/Servers/", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := AnalysisServicesServerID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/analysisservices/parse/server.go b/azurerm/internal/services/analysisservices/parse/server.go new file mode 100644 index 000000000000..9f8d42e6032f --- /dev/null +++ b/azurerm/internal/services/analysisservices/parse/server.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ServerId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewServerID(subscriptionId, resourceGroup, name string) ServerId { + return ServerId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id ServerId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Server", segmentsStr) +} + +func (id ServerId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.AnalysisServices/servers/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// ServerID parses a Server ID into an ServerId struct +func ServerID(input string) (*ServerId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ServerId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("servers"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/analysisservices/parse/server_test.go b/azurerm/internal/services/analysisservices/parse/server_test.go new file mode 100644 index 000000000000..15b9f974c3a6 --- /dev/null +++ b/azurerm/internal/services/analysisservices/parse/server_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ServerId{} + +func TestServerIDFormatter(t *testing.T) { + actual := 
NewServerID("12345678-1234-9876-4563-123456789012", "resGroup1", "Server1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.AnalysisServices/servers/Server1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestServerID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ServerId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.AnalysisServices/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.AnalysisServices/servers/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.AnalysisServices/servers/Server1", + Expected: &ServerId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "Server1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.ANALYSISSERVICES/SERVERS/SERVER1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ServerID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/analysisservices/registration.go b/azurerm/internal/services/analysisservices/registration.go index b37fe9fbba15..5e4166a8c286 100644 --- a/azurerm/internal/services/analysisservices/registration.go +++ b/azurerm/internal/services/analysisservices/registration.go @@ -26,6 +26,6 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource { // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_analysis_services_server": resourceArmAnalysisServicesServer(), + "azurerm_analysis_services_server": resourceAnalysisServicesServer(), } } diff --git a/azurerm/internal/services/analysisservices/resourceids.go b/azurerm/internal/services/analysisservices/resourceids.go new file mode 100644 index 000000000000..108cabdaf8e7 --- /dev/null +++ b/azurerm/internal/services/analysisservices/resourceids.go @@ -0,0 +1,3 @@ +package analysisservices + +//go:generate go 
run ../../tools/generator-resource-id/main.go -path=./ -name=Server -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.AnalysisServices/servers/Server1 diff --git a/azurerm/internal/services/analysisservices/validate/server_id.go b/azurerm/internal/services/analysisservices/validate/server_id.go new file mode 100644 index 000000000000..a6dd5df888ba --- /dev/null +++ b/azurerm/internal/services/analysisservices/validate/server_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/analysisservices/parse" +) + +func ServerID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ServerID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/analysisservices/validate/server_id_test.go b/azurerm/internal/services/analysisservices/validate/server_id_test.go new file mode 100644 index 000000000000..7a4051304f97 --- /dev/null +++ b/azurerm/internal/services/analysisservices/validate/server_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestServerID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.AnalysisServices/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.AnalysisServices/servers/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.AnalysisServices/servers/Server1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.ANALYSISSERVICES/SERVERS/SERVER1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ServerID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/apimanagement/api_management_api_data_source.go b/azurerm/internal/services/apimanagement/api_management_api_data_source.go index 90837a02907b..4a3128073229 100644 --- a/azurerm/internal/services/apimanagement/api_management_api_data_source.go +++ b/azurerm/internal/services/apimanagement/api_management_api_data_source.go @@ -180,6 +180,7 @@ func flattenApiManagementApiDataSourceProtocols(input *[]apimanagement.Protocol) return results } + func 
flattenApiManagementApiDataSourceSubscriptionKeyParamNames(paramNames *apimanagement.SubscriptionKeyParameterNamesContract) []interface{} { if paramNames == nil { return make([]interface{}, 0) diff --git a/azurerm/internal/services/apimanagement/api_management_api_data_source_test.go b/azurerm/internal/services/apimanagement/api_management_api_data_source_test.go new file mode 100644 index 000000000000..adc8e30041ad --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_api_data_source_test.go @@ -0,0 +1,83 @@ +package apimanagement_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type ApiManagementApiDataSourceResource struct { +} + +func TestAccDataSourceAzureRMApiManagementApi_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_api_management_api", "test") + r := ApiManagementApiDataSourceResource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("display_name").HasValue("api1"), + check.That(data.ResourceName).Key("path").HasValue("api1"), + check.That(data.ResourceName).Key("protocols.#").HasValue("1"), + check.That(data.ResourceName).Key("protocols.0").HasValue("https"), + check.That(data.ResourceName).Key("soap_pass_through").HasValue("false"), + check.That(data.ResourceName).Key("subscription_required").HasValue("false"), + check.That(data.ResourceName).Key("is_current").HasValue("true"), + check.That(data.ResourceName).Key("is_online").HasValue("false"), + ), + }, + }) +} + +func TestAccDataSourceAzureRMApiManagementApi_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_api_management_api", "test") + r := ApiManagementApiDataSourceResource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("display_name").HasValue("Butter Parser"), + check.That(data.ResourceName).Key("path").HasValue("butter-parser"), + check.That(data.ResourceName).Key("protocols.#").HasValue("2"), + check.That(data.ResourceName).Key("description").HasValue("What is my purpose? 
You parse butter."), + check.That(data.ResourceName).Key("service_url").HasValue("https://example.com/foo/bar"), + check.That(data.ResourceName).Key("soap_pass_through").HasValue("false"), + check.That(data.ResourceName).Key("subscription_key_parameter_names.0.header").HasValue("X-Butter-Robot-API-Key"), + check.That(data.ResourceName).Key("subscription_key_parameter_names.0.query").HasValue("location"), + check.That(data.ResourceName).Key("is_current").HasValue("true"), + check.That(data.ResourceName).Key("is_online").HasValue("false"), + ), + }, + }) +} + +func (r ApiManagementApiDataSourceResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_api_management_api" "test" { + name = azurerm_api_management_api.test.name + api_management_name = azurerm_api_management_api.test.api_management_name + resource_group_name = azurerm_api_management_api.test.resource_group_name + revision = azurerm_api_management_api.test.revision +} +`, ApiManagementApiResource{}.basic(data)) +} + +func (r ApiManagementApiDataSourceResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_api_management_api" "test" { + name = azurerm_api_management_api.test.name + api_management_name = azurerm_api_management_api.test.api_management_name + resource_group_name = azurerm_api_management_api.test.resource_group_name + revision = azurerm_api_management_api.test.revision +} +`, ApiManagementApiResource{}.complete(data)) +} diff --git a/azurerm/internal/services/apimanagement/api_management_api_diagnostic_resource.go b/azurerm/internal/services/apimanagement/api_management_api_diagnostic_resource.go index 0e7c65b48446..76b599662021 100644 --- a/azurerm/internal/services/apimanagement/api_management_api_diagnostic_resource.go +++ b/azurerm/internal/services/apimanagement/api_management_api_diagnostic_resource.go @@ -14,19 +14,20 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/apimanagement/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/apimanagement/validate" azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/set" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApiManagementApiDiagnostic() *schema.Resource { +func resourceApiManagementApiDiagnostic() *schema.Resource { return &schema.Resource{ - Create: resourceArmApiManagementApiDiagnosticCreateUpdate, - Read: resourceArmApiManagementApiDiagnosticRead, - Update: resourceArmApiManagementApiDiagnosticCreateUpdate, - Delete: resourceArmApiManagementApiDiagnosticDelete, + Create: resourceApiManagementApiDiagnosticCreateUpdate, + Read: resourceApiManagementApiDiagnosticRead, + Update: resourceApiManagementApiDiagnosticCreateUpdate, + Delete: resourceApiManagementApiDiagnosticDelete, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.ApiManagementApiDiagnosticID(id) + _, err := parse.ApiDiagnosticID(id) return err }), @@ -57,13 +58,81 @@ func resourceArmApiManagementApiDiagnostic() *schema.Resource { "api_management_logger_id": { Type: schema.TypeString, Required: true, - ValidateFunc: validate.ApiManagementLoggerID, + ValidateFunc: validate.LoggerID, + }, + + "always_log_errors": { + Type: schema.TypeBool, + 
Optional: true, + Computed: true, + }, + + "verbosity": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ValidateFunc: validation.StringInSlice([]string{ + string(apimanagement.Verbose), + string(apimanagement.Information), + string(apimanagement.Error), + }, false), + }, + + "log_client_ip": { + Type: schema.TypeBool, + Optional: true, + Computed: true, + }, + + "http_correlation_protocol": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ValidateFunc: validation.StringInSlice([]string{ + string(apimanagement.HTTPCorrelationProtocolNone), + string(apimanagement.HTTPCorrelationProtocolLegacy), + string(apimanagement.HTTPCorrelationProtocolW3C), + }, false), + }, + + "frontend_request": resourceApiManagementApiDiagnosticAdditionalContentSchema(), + + "frontend_response": resourceApiManagementApiDiagnosticAdditionalContentSchema(), + + "backend_request": resourceApiManagementApiDiagnosticAdditionalContentSchema(), + + "backend_response": resourceApiManagementApiDiagnosticAdditionalContentSchema(), + }, + } +} + +func resourceApiManagementApiDiagnosticAdditionalContentSchema() *schema.Schema { + return &schema.Schema{ + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "body_bytes": { + Type: schema.TypeInt, + Optional: true, + ValidateFunc: validation.IntBetween(0, 8192), + }, + "headers_to_log": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + Set: schema.HashString, + }, }, }, } } -func resourceArmApiManagementApiDiagnosticCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementApiDiagnosticCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ApiDiagnosticClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -92,6 +161,60 @@ func resourceArmApiManagementApiDiagnosticCreateUpdate(d *schema.ResourceData, m }, } + if alwaysLogErrors, ok := d.GetOk("always_log_errors"); ok && alwaysLogErrors.(bool) { + parameters.AlwaysLog = apimanagement.AllErrors + } + + if verbosity, ok := d.GetOk("verbosity"); ok { + switch verbosity.(string) { + case string(apimanagement.Verbose): + parameters.Verbosity = apimanagement.Verbose + case string(apimanagement.Information): + parameters.Verbosity = apimanagement.Information + case string(apimanagement.Error): + parameters.Verbosity = apimanagement.Error + } + } + + if logClientIP, ok := d.GetOk("log_client_ip"); ok { + parameters.LogClientIP = utils.Bool(logClientIP.(bool)) + } + + if httpCorrelationProtocol, ok := d.GetOk("http_correlation_protocol"); ok { + switch httpCorrelationProtocol.(string) { + case string(apimanagement.HTTPCorrelationProtocolNone): + parameters.HTTPCorrelationProtocol = apimanagement.HTTPCorrelationProtocolNone + case string(apimanagement.HTTPCorrelationProtocolLegacy): + parameters.HTTPCorrelationProtocol = apimanagement.HTTPCorrelationProtocolLegacy + case string(apimanagement.HTTPCorrelationProtocolW3C): + parameters.HTTPCorrelationProtocol = apimanagement.HTTPCorrelationProtocolW3C + } + } + + frontendRequest, frontendRequestSet := d.GetOk("frontend_request") + frontendResponse, frontendResponseSet := d.GetOk("frontend_response") + if frontendRequestSet || frontendResponseSet { + parameters.Frontend = &apimanagement.PipelineDiagnosticSettings{} + if frontendRequestSet { + parameters.Frontend.Request = 
expandApiManagementApiDiagnosticHTTPMessageDiagnostic(frontendRequest.([]interface{})) + } + if frontendResponseSet { + parameters.Frontend.Response = expandApiManagementApiDiagnosticHTTPMessageDiagnostic(frontendResponse.([]interface{})) + } + } + + backendRequest, backendRequestSet := d.GetOk("backend_request") + backendResponse, backendResponseSet := d.GetOk("backend_response") + if backendRequestSet || backendResponseSet { + parameters.Backend = &apimanagement.PipelineDiagnosticSettings{} + if backendRequestSet { + parameters.Backend.Request = expandApiManagementApiDiagnosticHTTPMessageDiagnostic(backendRequest.([]interface{})) + } + if backendResponseSet { + parameters.Backend.Response = expandApiManagementApiDiagnosticHTTPMessageDiagnostic(backendResponse.([]interface{})) + } + } + if _, err := client.CreateOrUpdate(ctx, resourceGroup, serviceName, apiName, diagnosticId, parameters, ""); err != nil { return fmt.Errorf("creating or updating Diagnostic %q (Resource Group %q / API Management Service %q / API %q): %+v", diagnosticId, resourceGroup, serviceName, apiName, err) } @@ -105,28 +228,28 @@ func resourceArmApiManagementApiDiagnosticCreateUpdate(d *schema.ResourceData, m } d.SetId(*resp.ID) - return resourceArmApiManagementApiDiagnosticRead(d, meta) + return resourceApiManagementApiDiagnosticRead(d, meta) } -func resourceArmApiManagementApiDiagnosticRead(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementApiDiagnosticRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ApiDiagnosticClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - diagnosticId, err := parse.ApiManagementApiDiagnosticID(d.Id()) + diagnosticId, err := parse.ApiDiagnosticID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, diagnosticId.ResourceGroup, diagnosticId.ServiceName, diagnosticId.ApiName, diagnosticId.Name) + resp, err := client.Get(ctx, diagnosticId.ResourceGroup, diagnosticId.ServiceName, diagnosticId.ApiName, diagnosticId.DiagnosticName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[DEBUG] Diagnostic %q (Resource Group %q / API Management Service %q / API %q) was not found - removing from state!", diagnosticId.Name, diagnosticId.ResourceGroup, diagnosticId.ServiceName, diagnosticId.ApiName) + log.Printf("[DEBUG] Diagnostic %q (Resource Group %q / API Management Service %q / API %q) was not found - removing from state!", diagnosticId.DiagnosticName, diagnosticId.ResourceGroup, diagnosticId.ServiceName, diagnosticId.ApiName) d.SetId("") return nil } - return fmt.Errorf("making Read request for Diagnostic %q (Resource Group %q / API Management Service %q / API %q): %+v", diagnosticId.Name, diagnosticId.ResourceGroup, diagnosticId.ServiceName, diagnosticId.ApiName, err) + return fmt.Errorf("making Read request for Diagnostic %q (Resource Group %q / API Management Service %q / API %q): %+v", diagnosticId.DiagnosticName, diagnosticId.ResourceGroup, diagnosticId.ServiceName, diagnosticId.ApiName, err) } d.Set("api_name", diagnosticId.ApiName) @@ -135,26 +258,91 @@ func resourceArmApiManagementApiDiagnosticRead(d *schema.ResourceData, meta inte d.Set("api_management_name", diagnosticId.ServiceName) if props := resp.DiagnosticContractProperties; props != nil { d.Set("api_management_logger_id", props.LoggerID) + d.Set("always_log_errors", props.AlwaysLog == apimanagement.AllErrors) + d.Set("verbosity", props.Verbosity) + d.Set("log_client_ip", 
props.LogClientIP) + d.Set("http_correlation_protocol", props.HTTPCorrelationProtocol) + if frontend := props.Frontend; frontend != nil { + d.Set("frontend_request", flattenApiManagementApiDiagnosticHTTPMessageDiagnostic(frontend.Request)) + d.Set("frontend_response", flattenApiManagementApiDiagnosticHTTPMessageDiagnostic(frontend.Response)) + } else { + d.Set("frontend_request", nil) + d.Set("frontend_response", nil) + } + if backend := props.Backend; backend != nil { + d.Set("backend_request", flattenApiManagementApiDiagnosticHTTPMessageDiagnostic(backend.Request)) + d.Set("backend_response", flattenApiManagementApiDiagnosticHTTPMessageDiagnostic(backend.Response)) + } else { + d.Set("backend_request", nil) + d.Set("backend_response", nil) + } } return nil } -func resourceArmApiManagementApiDiagnosticDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementApiDiagnosticDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ApiDiagnosticClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - diagnosticId, err := parse.ApiManagementApiDiagnosticID(d.Id()) + diagnosticId, err := parse.ApiDiagnosticID(d.Id()) if err != nil { return err } - if resp, err := client.Delete(ctx, diagnosticId.ResourceGroup, diagnosticId.ServiceName, diagnosticId.ApiName, diagnosticId.Name, ""); err != nil { + if resp, err := client.Delete(ctx, diagnosticId.ResourceGroup, diagnosticId.ServiceName, diagnosticId.ApiName, diagnosticId.DiagnosticName, ""); err != nil { if !utils.ResponseWasNotFound(resp) { - return fmt.Errorf("deleting Diagnostic %q (Resource Group %q / API Management Service %q / API %q): %+v", diagnosticId.Name, diagnosticId.ResourceGroup, diagnosticId.ServiceName, diagnosticId.ApiName, err) + return fmt.Errorf("deleting Diagnostic %q (Resource Group %q / API Management Service %q / API %q): %+v", diagnosticId.DiagnosticName, diagnosticId.ResourceGroup, diagnosticId.ServiceName, diagnosticId.ApiName, err) } } return nil } + +func expandApiManagementApiDiagnosticHTTPMessageDiagnostic(input []interface{}) *apimanagement.HTTPMessageDiagnostic { + if len(input) == 0 { + return nil + } + + v := input[0].(map[string]interface{}) + + result := &apimanagement.HTTPMessageDiagnostic{ + Body: &apimanagement.BodyDiagnosticSettings{}, + } + + if bodyBytes, ok := v["body_bytes"]; ok { + result.Body.Bytes = utils.Int32(int32(bodyBytes.(int))) + } + if headersSetRaw, ok := v["headers_to_log"]; ok { + headersSet := headersSetRaw.(*schema.Set).List() + headers := []string{} + for _, header := range headersSet { + headers = append(headers, header.(string)) + } + result.Headers = &headers + } + + return result +} + +func flattenApiManagementApiDiagnosticHTTPMessageDiagnostic(input *apimanagement.HTTPMessageDiagnostic) []interface{} { + result := make([]interface{}, 0) + + if input == nil { + return result + } + + diagnostic := map[string]interface{}{} + + if input.Body != nil && input.Body.Bytes != nil { + diagnostic["body_bytes"] = input.Body.Bytes + } + + if input.Headers != nil { + diagnostic["headers_to_log"] = set.FromStringSlice(*input.Headers) + } + result = append(result, diagnostic) + + return result +} diff --git a/azurerm/internal/services/apimanagement/api_management_api_diagnostic_resource_test.go b/azurerm/internal/services/apimanagement/api_management_api_diagnostic_resource_test.go new file mode 100644 index 000000000000..43ec28e95fd5 --- /dev/null +++ 
b/azurerm/internal/services/apimanagement/api_management_api_diagnostic_resource_test.go @@ -0,0 +1,250 @@ +package apimanagement_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/apimanagement/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementApiDiagnosticResource struct { +} + +func TestAccApiManagementApiDiagnostic_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api_diagnostic", "test") + r := ApiManagementApiDiagnosticResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementApiDiagnostic_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api_diagnostic", "test") + r := ApiManagementApiDiagnosticResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementApiDiagnostic_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api_diagnostic", "test") + r := ApiManagementApiDiagnosticResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccApiManagementApiDiagnostic_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api_diagnostic", "test") + r := ApiManagementApiDiagnosticResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t ApiManagementApiDiagnosticResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ApiDiagnosticID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.ApiManagement.ApiDiagnosticClient.Get(ctx, id.ResourceGroup, id.ServiceName, id.ApiName, id.DiagnosticName) + if err != nil { + return nil, fmt.Errorf("reading ApiManagementApiDiagnostic (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (ApiManagementApiDiagnosticResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%[1]d" + location = "%[2]s" +} + +resource "azurerm_application_insights" "test" { + name = "acctestappinsights-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = 
azurerm_resource_group.test.name + application_type = "web" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" +} + +resource "azurerm_api_management_logger" "test" { + name = "acctestapimnglogger-%[1]d" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + + application_insights { + instrumentation_key = azurerm_application_insights.test.instrumentation_key + } +} + +resource "azurerm_api_management_api" "test" { + name = "acctestAMA-%[1]d" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + revision = "1" + display_name = "Test API" + path = "test" + protocols = ["https"] + + import { + content_format = "swagger-link-json" + content_value = "http://conferenceapi.azurewebsites.net/?format=json" + } +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (r ApiManagementApiDiagnosticResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api_diagnostic" "test" { + identifier = "applicationinsights" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + api_name = azurerm_api_management_api.test.name + api_management_logger_id = azurerm_api_management_logger.test.id +} +`, r.template(data)) +} + +func (r ApiManagementApiDiagnosticResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_application_insights" "test2" { + name = "acctestappinsightsUpdate-%[2]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + application_type = "web" +} + +resource "azurerm_api_management_logger" "test2" { + name = "acctestapimngloggerUpdate-%[2]d" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + + application_insights { + instrumentation_key = azurerm_application_insights.test2.instrumentation_key + } +} + +resource "azurerm_api_management_api_diagnostic" "test" { + identifier = "applicationinsights" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + api_name = azurerm_api_management_api.test.name + api_management_logger_id = azurerm_api_management_logger.test2.id +} +`, r.template(data), data.RandomInteger) +} + +func (r ApiManagementApiDiagnosticResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api_diagnostic" "import" { + identifier = azurerm_api_management_api_diagnostic.test.identifier + resource_group_name = azurerm_api_management_api_diagnostic.test.resource_group_name + api_management_name = azurerm_api_management_api_diagnostic.test.api_management_name + api_name = azurerm_api_management_api.test.name + api_management_logger_id = azurerm_api_management_api_diagnostic.test.api_management_logger_id +} +`, r.basic(data)) +} + +func (r ApiManagementApiDiagnosticResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api_diagnostic" "test" { + identifier = "applicationinsights" + resource_group_name = azurerm_resource_group.test.name + api_management_name = 
azurerm_api_management.test.name + api_name = azurerm_api_management_api.test.name + api_management_logger_id = azurerm_api_management_logger.test.id + always_log_errors = true + log_client_ip = true + http_correlation_protocol = "W3C" + verbosity = "verbose" + + backend_request { + body_bytes = 1 + headers_to_log = ["Host"] + } + + backend_response { + body_bytes = 2 + headers_to_log = ["Content-Type"] + } + + frontend_request { + body_bytes = 3 + headers_to_log = ["Accept"] + } + + frontend_response { + body_bytes = 4 + headers_to_log = ["Content-Length"] + } +} +`, r.template(data)) +} diff --git a/azurerm/internal/services/apimanagement/api_management_api_operation_policy_resource.go b/azurerm/internal/services/apimanagement/api_management_api_operation_policy_resource.go index 151f5dca8154..3f13b0fcba06 100644 --- a/azurerm/internal/services/apimanagement/api_management_api_operation_policy_resource.go +++ b/azurerm/internal/services/apimanagement/api_management_api_operation_policy_resource.go @@ -15,12 +15,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApiManagementApiOperationPolicy() *schema.Resource { +func resourceApiManagementApiOperationPolicy() *schema.Resource { return &schema.Resource{ - Create: resourceArmApiManagementAPIOperationPolicyCreateUpdate, - Read: resourceArmApiManagementAPIOperationPolicyRead, - Update: resourceArmApiManagementAPIOperationPolicyCreateUpdate, - Delete: resourceArmApiManagementAPIOperationPolicyDelete, + Create: resourceApiManagementAPIOperationPolicyCreateUpdate, + Read: resourceApiManagementAPIOperationPolicyRead, + Update: resourceApiManagementAPIOperationPolicyCreateUpdate, + Delete: resourceApiManagementAPIOperationPolicyDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -58,7 +58,7 @@ func resourceArmApiManagementApiOperationPolicy() *schema.Resource { } } -func resourceArmApiManagementAPIOperationPolicyCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementAPIOperationPolicyCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ApiOperationPoliciesClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -117,10 +117,10 @@ func resourceArmApiManagementAPIOperationPolicyCreateUpdate(d *schema.ResourceDa } d.SetId(*resp.ID) - return resourceArmApiManagementAPIOperationPolicyRead(d, meta) + return resourceApiManagementAPIOperationPolicyRead(d, meta) } -func resourceArmApiManagementAPIOperationPolicyRead(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementAPIOperationPolicyRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ApiOperationPoliciesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -159,7 +159,7 @@ func resourceArmApiManagementAPIOperationPolicyRead(d *schema.ResourceData, meta return nil } -func resourceArmApiManagementAPIOperationPolicyDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementAPIOperationPolicyDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ApiOperationPoliciesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/apimanagement/api_management_api_operation_policy_resource_test.go 
b/azurerm/internal/services/apimanagement/api_management_api_operation_policy_resource_test.go new file mode 100644 index 000000000000..beb29bd97d6a --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_api_operation_policy_resource_test.go @@ -0,0 +1,179 @@ +package apimanagement_test + +import ( + "context" + "fmt" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/apimanagement/mgmt/2019-12-01/apimanagement" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementApiOperationPolicyResource struct { +} + +func TestAccApiManagementAPIOperationPolicy_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api_operation_policy", "test") + r := ApiManagementApiOperationPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + ResourceName: data.ResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"xml_link"}, + }, + }) +} + +func TestAccApiManagementAPIOperationPolicy_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api_operation_policy", "test") + r := ApiManagementApiOperationPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccApiManagementAPIOperationPolicy_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api_operation_policy", "test") + r := ApiManagementApiOperationPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.updated(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + ResourceName: data.ResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"xml_link"}, + }, + }) +} + +func TestAccApiManagementAPIOperationPolicy_rawXml(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api_operation_policy", "test") + r := ApiManagementApiOperationPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.rawXml(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t ApiManagementApiOperationPolicyResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + serviceName := id.Path["service"] + apiName := id.Path["apis"] + operationID := id.Path["operations"] + + resp, err := 
clients.ApiManagement.ApiOperationPoliciesClient.Get(ctx, resourceGroup, serviceName, apiName, operationID, apimanagement.PolicyExportFormatXML) + if err != nil { + return nil, fmt.Errorf("reading ApiManagementApi Operation Policy (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (r ApiManagementApiOperationPolicyResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api_operation_policy" "test" { + api_name = azurerm_api_management_api.test.name + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + operation_id = azurerm_api_management_api_operation.test.operation_id + xml_link = "https://gist.githubusercontent.com/riordanp/ca22f8113afae0eb38cc12d718fd048d/raw/d6ac89a2f35a6881a7729f8cb4883179dc88eea1/example.xml" +} +`, ApiManagementApiOperationResource{}.basic(data)) +} + +func (r ApiManagementApiOperationPolicyResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api_operation_policy" "import" { + api_name = azurerm_api_management_api_operation_policy.test.api_name + api_management_name = azurerm_api_management_api_operation_policy.test.api_management_name + resource_group_name = azurerm_api_management_api_operation_policy.test.resource_group_name + operation_id = azurerm_api_management_api_operation_policy.test.operation_id + xml_link = azurerm_api_management_api_operation_policy.test.xml_link +} +`, r.basic(data)) +} + +func (r ApiManagementApiOperationPolicyResource) updated(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api_operation_policy" "test" { + api_name = azurerm_api_management_api.test.name + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + operation_id = azurerm_api_management_api_operation.test.operation_id + + xml_content = < + + + + + +XML + +} +`, r.basic(data)) +} + +func (r ApiManagementApiOperationPolicyResource) rawXml(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api_operation_policy" "test" { + api_name = azurerm_api_management_api.test.name + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + operation_id = azurerm_api_management_api_operation.test.operation_id + + xml_content = file("testdata/api_management_api_operation_policy.xml") +} +`, r.basic(data)) +} diff --git a/azurerm/internal/services/apimanagement/api_management_api_operation_resource.go b/azurerm/internal/services/apimanagement/api_management_api_operation_resource.go index 29f8a9ad8f32..5b3efa046d96 100644 --- a/azurerm/internal/services/apimanagement/api_management_api_operation_resource.go +++ b/azurerm/internal/services/apimanagement/api_management_api_operation_resource.go @@ -14,12 +14,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApiManagementApiOperation() *schema.Resource { +func resourceApiManagementApiOperation() *schema.Resource { return &schema.Resource{ - Create: resourceArmApiManagementApiOperationCreateUpdate, - Read: resourceArmApiManagementApiOperationRead, - Update: resourceArmApiManagementApiOperationCreateUpdate, - Delete: resourceArmApiManagementApiOperationDelete, + Create: resourceApiManagementApiOperationCreateUpdate, + Read: resourceApiManagementApiOperationRead, + 
Update: resourceApiManagementApiOperationCreateUpdate, + Delete: resourceApiManagementApiOperationDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -108,7 +108,7 @@ func resourceArmApiManagementApiOperation() *schema.Resource { } } -func resourceArmApiManagementApiOperationCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementApiOperationCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ApiOperationsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -174,10 +174,10 @@ func resourceArmApiManagementApiOperationCreateUpdate(d *schema.ResourceData, me d.SetId(*resp.ID) - return resourceArmApiManagementApiOperationRead(d, meta) + return resourceApiManagementApiOperationRead(d, meta) } -func resourceArmApiManagementApiOperationRead(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementApiOperationRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ApiOperationsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -233,7 +233,7 @@ func resourceArmApiManagementApiOperationRead(d *schema.ResourceData, meta inter return nil } -func resourceArmApiManagementApiOperationDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementApiOperationDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ApiOperationsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/apimanagement/api_management_api_operation_resource_test.go b/azurerm/internal/services/apimanagement/api_management_api_operation_resource_test.go new file mode 100644 index 000000000000..12f73056809a --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_api_operation_resource_test.go @@ -0,0 +1,457 @@ +package apimanagement_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementApiOperationResource struct { +} + +func TestAccApiManagementApiOperation_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api_operation", "test") + r := ApiManagementApiOperationResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementApiOperation_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api_operation", "test") + r := ApiManagementApiOperationResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + 
data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccApiManagementApiOperation_customMethod(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api_operation", "test") + r := ApiManagementApiOperationResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.customMethod(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("method").HasValue("HAMMERTIME"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementApiOperation_headers(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api_operation", "test") + r := ApiManagementApiOperationResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.headers(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementApiOperation_requestRepresentations(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api_operation", "test") + r := ApiManagementApiOperationResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.requestRepresentation(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.requestRepresentationUpdated(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementApiOperation_representations(t *testing.T) { + // TODO: once `azurerm_api_management_schema` is supported add `request.0.representation.0.schema_id` + data := acceptance.BuildTestData(t, "azurerm_api_management_api_operation", "test") + r := ApiManagementApiOperationResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.representation(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.representationUpdated(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t ApiManagementApiOperationResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + resourceGroup := id.ResourceGroup + serviceName := id.Path["service"] + apiId := id.Path["apis"] + operationId := id.Path["operations"] + + resp, err := clients.ApiManagement.ApiOperationsClient.Get(ctx, resourceGroup, serviceName, apiId, operationId) + if err != nil { + return nil, fmt.Errorf("reading ApiManagementApi Operation (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (r ApiManagementApiOperationResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api_operation" "test" { + operation_id = "acctest-operation" + api_name = azurerm_api_management_api.test.name + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + display_name = "DELETE Resource" + method = "DELETE" + url_template = "/resource" +} +`, r.template(data)) +} + +func (r ApiManagementApiOperationResource) customMethod(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api_operation" "test" { + 
operation_id = "acctest-operation" + api_name = azurerm_api_management_api.test.name + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + display_name = "HAMMERTIME Resource" + method = "HAMMERTIME" + url_template = "/resource" +} +`, r.template(data)) +} + +func (r ApiManagementApiOperationResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api_operation" "import" { + operation_id = azurerm_api_management_api_operation.test.operation_id + api_name = azurerm_api_management_api_operation.test.api_name + api_management_name = azurerm_api_management_api_operation.test.api_management_name + resource_group_name = azurerm_api_management_api_operation.test.resource_group_name + display_name = azurerm_api_management_api_operation.test.display_name + method = azurerm_api_management_api_operation.test.method + url_template = azurerm_api_management_api_operation.test.url_template +} +`, r.basic(data)) +} + +func (r ApiManagementApiOperationResource) requestRepresentation(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api_operation" "test" { + operation_id = "acctest-operation" + api_name = azurerm_api_management_api.test.name + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + display_name = "Acceptance Test Operation" + method = "DELETE" + url_template = "/user1" + description = "This can only be done by the logged in user." + + request { + description = "Created user object" + + representation { + content_type = "application/json" + type_name = "User" + } + } +} +`, r.template(data)) +} + +func (r ApiManagementApiOperationResource) requestRepresentationUpdated(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api_operation" "test" { + operation_id = "acctest-operation" + api_name = azurerm_api_management_api.test.name + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + display_name = "Acceptance Test Operation" + method = "DELETE" + url_template = "/user1" + description = "This can only be done by the logged in user." + + request { + description = "Created user object" + + representation { + content_type = "application/json" + type_name = "User" + } + } +} +`, r.template(data)) +} + +func (r ApiManagementApiOperationResource) headers(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api_operation" "test" { + operation_id = "acctest-operation" + api_name = azurerm_api_management_api.test.name + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + display_name = "Acceptance Test Operation" + method = "DELETE" + url_template = "/user1" + description = "This can only be done by the logged in user." 
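Note on the generated ID helpers earlier in this diff: the parse.ServerID / validate.ServerID pair added for Analysis Services is only exercised by tests in this hunk and is not wired into a schema here. As a rough illustration of how such a generated validator is normally consumed, the sketch below shows a schema field using it; the field name "server_id" and both example functions are assumptions made for illustration and are not part of this change.

package example

import (
	"github.com/hashicorp/terraform-plugin-sdk/helper/schema"

	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/analysisservices/parse"
	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/analysisservices/validate"
)

// exampleSchema sketches how validate.ServerID (which has the SDK's
// SchemaValidateFunc shape: func(interface{}, string) ([]string, []error))
// would typically guard a string field.
func exampleSchema() map[string]*schema.Schema {
	return map[string]*schema.Schema{
		"server_id": { // hypothetical field name, used only for this sketch
			Type:         schema.TypeString,
			Required:     true,
			ValidateFunc: validate.ServerID,
		},
	}
}

// exampleRead sketches the matching read-side use of the generated parser,
// which exposes SubscriptionId, ResourceGroup and Name as typed fields.
func exampleRead(d *schema.ResourceData) error {
	id, err := parse.ServerID(d.Get("server_id").(string))
	if err != nil {
		return err
	}
	_ = id.Name
	return nil
}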
+ + request { + description = "Created user object" + + header { + name = "X-Test-Operation" + required = true + type = "string" + } + + representation { + content_type = "application/json" + type_name = "User" + } + } + + response { + status_code = 200 + description = "successful operation" + + header { + name = "X-Test-Operation" + required = true + type = "string" + } + + representation { + content_type = "application/xml" + + sample = < + + + + + + + +SAMPLE + + } + } +} +`, r.template(data)) +} + +func (r ApiManagementApiOperationResource) representation(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api_operation" "test" { + operation_id = "acctest-operation" + api_name = azurerm_api_management_api.test.name + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + display_name = "Acceptance Test Operation" + method = "DELETE" + url_template = "/user1" + description = "This can only be done by the logged in user." + + request { + description = "Created user object" + + representation { + content_type = "application/json" + type_name = "User" + } + } + + response { + status_code = 200 + description = "successful operation" + + representation { + content_type = "application/xml" + + sample = < + + + + + + + +SAMPLE + + } + } +} +`, r.template(data)) +} + +func (r ApiManagementApiOperationResource) representationUpdated(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api_operation" "test" { + operation_id = "acctest-operation" + api_name = azurerm_api_management_api.test.name + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + display_name = "Acceptance Test Operation" + method = "DELETE" + url_template = "/user1" + description = "This can only be done by the logged in user." 
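Aside on the diagnostic helpers added earlier in this diff: expandApiManagementApiDiagnosticHTTPMessageDiagnostic and its flatten counterpart are unexported and are only covered indirectly through the acceptance tests. If direct coverage were wanted, a minimal unit-test sketch could look like the following; it would have to live in package apimanagement (not apimanagement_test) to reach the unexported function, and the 512 / "Accept" values are arbitrary illustration data.

package apimanagement

import (
	"testing"

	"github.com/Azure/azure-sdk-for-go/services/apimanagement/mgmt/2019-12-01/apimanagement"

	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
)

// Sketch only: exercises the flatten helper introduced in this diff.
func TestFlattenApiManagementApiDiagnosticHTTPMessageDiagnostic(t *testing.T) {
	input := &apimanagement.HTTPMessageDiagnostic{
		Body:    &apimanagement.BodyDiagnosticSettings{Bytes: utils.Int32(512)},
		Headers: &[]string{"Accept"},
	}

	flattened := flattenApiManagementApiDiagnosticHTTPMessageDiagnostic(input)
	if len(flattened) != 1 {
		t.Fatalf("expected one flattened element, got %d", len(flattened))
	}

	// body_bytes is stored as the *int32 taken straight from the SDK struct.
	attrs := flattened[0].(map[string]interface{})
	if bytes := attrs["body_bytes"].(*int32); *bytes != 512 {
		t.Fatalf("expected body_bytes of 512, got %d", *bytes)
	}
}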
+ + request { + description = "Created user object" + + representation { + content_type = "application/json" + type_name = "User" + } + } + + response { + status_code = 200 + description = "successful operation" + + representation { + content_type = "application/xml" + + sample = < + + + + + + + +SAMPLE + + } + + representation { + content_type = "application/json" + + sample = < + + + + + +XML + +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/apimanagement/api_management_api_resource.go b/azurerm/internal/services/apimanagement/api_management_api_resource.go index 97daa651b73e..c27c61f6b61c 100644 --- a/azurerm/internal/services/apimanagement/api_management_api_resource.go +++ b/azurerm/internal/services/apimanagement/api_management_api_resource.go @@ -17,12 +17,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApiManagementApi() *schema.Resource { +func resourceApiManagementApi() *schema.Resource { return &schema.Resource{ - Create: resourceArmApiManagementApiCreateUpdate, - Read: resourceArmApiManagementApiRead, - Update: resourceArmApiManagementApiCreateUpdate, - Delete: resourceArmApiManagementApiDelete, + Create: resourceApiManagementApiCreateUpdate, + Read: resourceApiManagementApiRead, + Update: resourceApiManagementApiCreateUpdate, + Delete: resourceApiManagementApiDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -242,7 +242,7 @@ func resourceArmApiManagementApi() *schema.Resource { } } -func resourceArmApiManagementApiCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementApiCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ApiClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -306,7 +306,7 @@ func resourceArmApiManagementApiCreateUpdate(d *schema.ResourceData, meta interf } wsdlSelectorVs := importV["wsdl_selector"].([]interface{}) - //`wsdl_selector` is necessary under format `wsdl` + // `wsdl_selector` is necessary under format `wsdl` if len(wsdlSelectorVs) == 0 && contentFormat == string(apimanagement.Wsdl) { return fmt.Errorf("`wsdl_selector` is required when content format is `wsdl` in API Management API %q", name) } @@ -396,10 +396,10 @@ func resourceArmApiManagementApiCreateUpdate(d *schema.ResourceData, meta interf } d.SetId(*read.ID) - return resourceArmApiManagementApiRead(d, meta) + return resourceApiManagementApiRead(d, meta) } -func resourceArmApiManagementApiRead(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementApiRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ApiClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -468,7 +468,7 @@ func resourceArmApiManagementApiRead(d *schema.ResourceData, meta interface{}) e return nil } -func resourceArmApiManagementApiDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementApiDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ApiClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/apimanagement/api_management_api_resource_test.go b/azurerm/internal/services/apimanagement/api_management_api_resource_test.go new file mode 
100644 index 000000000000..ca0f782b70d6 --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_api_resource_test.go @@ -0,0 +1,569 @@ +package apimanagement_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementApiResource struct { +} + +func TestAccApiManagementApi_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api", "test") + r := ApiManagementApiResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("soap_pass_through").HasValue("false"), + check.That(data.ResourceName).Key("is_current").HasValue("true"), + check.That(data.ResourceName).Key("is_online").HasValue("false"), + check.That(data.ResourceName).Key("subscription_required").HasValue("false"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementApi_wordRevision(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api", "test") + r := ApiManagementApiResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.wordRevision(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("revision").HasValue("one-point-oh"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementApi_blankPath(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api", "test") + r := ApiManagementApiResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.blankPath(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("soap_pass_through").HasValue("false"), + check.That(data.ResourceName).Key("is_current").HasValue("true"), + check.That(data.ResourceName).Key("is_online").HasValue("false"), + check.That(data.ResourceName).Key("path").HasValue(""), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementApi_version(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api", "test") + r := ApiManagementApiResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.versionSet(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("version").HasValue("v1"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementApi_oauth2Authorization(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api", "test") + r := ApiManagementApiResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.oauth2Authorization(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementApi_openidAuthentication(t *testing.T) { + data := acceptance.BuildTestData(t, 
"azurerm_api_management_api", "test") + r := ApiManagementApiResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.openidAuthentication(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementApi_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api", "test") + r := ApiManagementApiResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccApiManagementApi_soapPassthrough(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api", "test") + r := ApiManagementApiResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.soapPassthrough(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementApi_subscriptionRequired(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api", "test") + r := ApiManagementApiResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.subscriptionRequired(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("subscription_required").HasValue("false"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementApi_importSwagger(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api", "test") + r := ApiManagementApiResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.importSwagger(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + ResourceName: data.ResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{ + // not returned from the API + "import", + }, + }, + }) +} + +func TestAccApiManagementApi_importWsdl(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api", "test") + r := ApiManagementApiResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.importWsdl(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + ResourceName: data.ResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{ + // not returned from the API + "import", + }, + }, + }) +} + +func TestAccApiManagementApi_importUpdate(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api", "test") + r := ApiManagementApiResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.importWsdl(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + ResourceName: data.ResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{ + // not returned from the API + "import", + }, + }, + { + Config: r.importSwagger(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + ResourceName: data.ResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{ + // not returned from the API + "import", + }, + }, + }) +} + +func TestAccApiManagementApi_complete(t 
*testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api", "test") + r := ApiManagementApiResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t ApiManagementApiResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + resourceGroup := id.ResourceGroup + serviceName := id.Path["service"] + apiid := id.Path["apis"] + + resp, err := clients.ApiManagement.ApiClient.Get(ctx, resourceGroup, serviceName, apiid) + if err != nil { + return nil, fmt.Errorf("reading ApiManagementApi (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (r ApiManagementApiResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api" "test" { + name = "acctestapi-%d" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + display_name = "api1" + path = "api1" + protocols = ["https"] + revision = "1" +} +`, r.template(data), data.RandomInteger) +} + +func (r ApiManagementApiResource) blankPath(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api" "test" { + name = "acctestapi-%d" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + display_name = "api1" + path = "" + protocols = ["https"] + revision = "1" +} +`, r.template(data), data.RandomInteger) +} + +func (r ApiManagementApiResource) wordRevision(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api" "test" { + name = "acctestapi-%d" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + display_name = "api1" + path = "api1" + protocols = ["https"] + revision = "one-point-oh" +} +`, r.template(data), data.RandomInteger) +} + +func (r ApiManagementApiResource) soapPassthrough(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api" "test" { + name = "acctestapi-%d" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + display_name = "api1" + path = "api1" + protocols = ["https"] + revision = "1" + soap_pass_through = true +} +`, r.template(data), data.RandomInteger) +} + +func (r ApiManagementApiResource) subscriptionRequired(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api" "test" { + name = "acctestapi-%d" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + display_name = "api1" + path = "api1" + protocols = ["https"] + revision = "1" + subscription_required = false +} +`, r.template(data), data.RandomInteger) +} + +func (r ApiManagementApiResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api" "import" { + name = azurerm_api_management_api.test.name + resource_group_name = azurerm_api_management_api.test.resource_group_name + api_management_name = azurerm_api_management_api.test.api_management_name + display_name = azurerm_api_management_api.test.display_name + path = 
azurerm_api_management_api.test.path + protocols = azurerm_api_management_api.test.protocols + revision = azurerm_api_management_api.test.revision +} +`, r.basic(data)) +} + +func (r ApiManagementApiResource) importSwagger(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api" "test" { + name = "acctestapi-%d" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + display_name = "api1" + path = "api1" + protocols = ["https"] + revision = "1" + + import { + content_value = file("testdata/api_management_api_swagger.json") + content_format = "swagger-json" + } +} +`, r.template(data), data.RandomInteger) +} + +func (r ApiManagementApiResource) importWsdl(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api" "test" { + name = "acctestapi-%d" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + display_name = "api1" + path = "api1" + protocols = ["https"] + revision = "1" + + import { + content_value = file("testdata/api_management_api_wsdl.xml") + content_format = "wsdl" + + wsdl_selector { + service_name = "Calculator" + endpoint_name = "CalculatorHttpsSoap11Endpoint" + } + } +} +`, r.template(data), data.RandomInteger) +} + +func (r ApiManagementApiResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api" "test" { + name = "acctestapi-%d" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + display_name = "Butter Parser" + path = "butter-parser" + protocols = ["https", "http"] + revision = "3" + description = "What is my purpose? You parse butter." 
+ service_url = "https://example.com/foo/bar" + + subscription_key_parameter_names { + header = "X-Butter-Robot-API-Key" + query = "location" + } +} +`, r.template(data), data.RandomInteger) +} + +func (r ApiManagementApiResource) versionSet(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api_version_set" "test" { + name = "acctestAMAVS-%d" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + display_name = "Butter Parser" + versioning_scheme = "Segment" +} + +resource "azurerm_api_management_api" "test" { + name = "acctestapi-%d" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + display_name = "api1" + path = "api1" + protocols = ["https"] + revision = "1" + version = "v1" + version_set_id = azurerm_api_management_api_version_set.test.id +} +`, r.template(data), data.RandomInteger, data.RandomInteger) +} + +func (r ApiManagementApiResource) oauth2Authorization(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_authorization_server" "test" { + name = "acctestauthsrv-%d" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + display_name = "Test Group" + authorization_endpoint = "https://azacctest.hashicorptest.com/client/authorize" + client_id = "42424242-4242-4242-4242-424242424242" + client_registration_endpoint = "https://azacctest.hashicorptest.com/client/register" + + grant_types = [ + "implicit", + ] + + authorization_methods = [ + "GET", + ] +} + +resource "azurerm_api_management_api" "test" { + name = "acctestapi-%d" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + display_name = "api1" + path = "api1" + protocols = ["https"] + revision = "1" + oauth2_authorization { + authorization_server_name = azurerm_api_management_authorization_server.test.name + scope = "acctest" + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger) +} + +func (r ApiManagementApiResource) openidAuthentication(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_openid_connect_provider" "test" { + name = "acctest-%d" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + client_id = "00001111-2222-3333-%d" + client_secret = "%d-cwdavsxbacsaxZX-%d" + display_name = "Initial Name" + metadata_endpoint = "https://azacctest.hashicorptest.com/example/foo" +} + +resource "azurerm_api_management_api" "test" { + name = "acctestapi-%d" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + display_name = "api1" + path = "api1" + protocols = ["https"] + revision = "1" + openid_authentication { + openid_provider_name = azurerm_api_management_openid_connect_provider.test.name + bearer_token_sending_methods = [ + "authorizationHeader", + "query", + ] + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (ApiManagementApiResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = 
azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Developer_1" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/apimanagement/api_management_api_schema_resource.go b/azurerm/internal/services/apimanagement/api_management_api_schema_resource.go index d709f1703f5b..3b5f836695d9 100644 --- a/azurerm/internal/services/apimanagement/api_management_api_schema_resource.go +++ b/azurerm/internal/services/apimanagement/api_management_api_schema_resource.go @@ -15,12 +15,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApiManagementApiSchema() *schema.Resource { +func resourceApiManagementApiSchema() *schema.Resource { return &schema.Resource{ - Create: resourceArmApiManagementApiSchemaCreateUpdate, - Read: resourceArmApiManagementApiSchemaRead, - Update: resourceArmApiManagementApiSchemaCreateUpdate, - Delete: resourceArmApiManagementApiSchemaDelete, + Create: resourceApiManagementApiSchemaCreateUpdate, + Read: resourceApiManagementApiSchemaRead, + Update: resourceApiManagementApiSchemaCreateUpdate, + Delete: resourceApiManagementApiSchemaDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -56,7 +56,7 @@ func resourceArmApiManagementApiSchema() *schema.Resource { } } -func resourceArmApiManagementApiSchemaCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementApiSchemaCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ApiSchemasClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -103,10 +103,10 @@ func resourceArmApiManagementApiSchemaCreateUpdate(d *schema.ResourceData, meta } d.SetId(*resp.ID) - return resourceArmApiManagementApiSchemaRead(d, meta) + return resourceApiManagementApiSchemaRead(d, meta) } -func resourceArmApiManagementApiSchemaRead(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementApiSchemaRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ApiSchemasClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -146,7 +146,7 @@ func resourceArmApiManagementApiSchemaRead(d *schema.ResourceData, meta interfac return nil } -func resourceArmApiManagementApiSchemaDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementApiSchemaDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ApiSchemasClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/apimanagement/api_management_api_schema_resource_test.go b/azurerm/internal/services/apimanagement/api_management_api_schema_resource_test.go new file mode 100644 index 000000000000..5c28a9e14142 --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_api_schema_resource_test.go @@ -0,0 +1,128 @@ +package apimanagement_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementApiSchemaResource struct { +} + +func TestAccApiManagementApiSchema_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api_schema", "test") + r := ApiManagementApiSchemaResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementApiSchema_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api_schema", "test") + r := ApiManagementApiSchemaResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (t ApiManagementApiSchemaResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + serviceName := id.Path["service"] + apiName := id.Path["apis"] + schemaID := id.Path["schemas"] + + resp, err := clients.ApiManagement.ApiSchemasClient.Get(ctx, resourceGroup, serviceName, apiName, schemaID) + if err != nil { + return nil, fmt.Errorf("reading ApiManagementApi Schema (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (r ApiManagementApiSchemaResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api_schema" "test" { + api_name = azurerm_api_management_api.test.name + api_management_name = azurerm_api_management_api.test.api_management_name + resource_group_name = azurerm_api_management_api.test.resource_group_name + schema_id = "acctestSchema%d" + content_type = "application/vnd.ms-azure-apim.xsd+xml" + value = file("testdata/api_management_api_schema.xml") +} +`, r.template(data), data.RandomInteger) +} + +func (r ApiManagementApiSchemaResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api_schema" "import" { + api_name = azurerm_api_management_api_schema.test.api_name + api_management_name = azurerm_api_management_api_schema.test.api_management_name + resource_group_name = azurerm_api_management_api_schema.test.resource_group_name + schema_id = azurerm_api_management_api_schema.test.schema_id + content_type = azurerm_api_management_api_schema.test.content_type + value = azurerm_api_management_api_schema.test.value +} +`, r.basic(data)) +} + +func (ApiManagementApiSchemaResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" +} + +resource 
"azurerm_api_management_api" "test" { + name = "acctestapi-%d" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + display_name = "api1" + path = "api1" + protocols = ["https"] + revision = "1" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/apimanagement/api_management_api_version_set_data_source_test.go b/azurerm/internal/services/apimanagement/api_management_api_version_set_data_source_test.go new file mode 100644 index 000000000000..0b9e47398b17 --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_api_version_set_data_source_test.go @@ -0,0 +1,41 @@ +package apimanagement_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type ApiManagementApiVersionSetDataSource struct { +} + +func TestAccDataSourceApiManagementApiVersionSet_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_api_management_api_version_set", "test") + r := ApiManagementApiVersionSetDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("name").Exists(), + check.That(data.ResourceName).Key("resource_group_name").Exists(), + check.That(data.ResourceName).Key("api_management_name").Exists(), + ), + }, + }) +} + +func (ApiManagementApiVersionSetDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_api_management_api_version_set" "test" { + name = azurerm_api_management_api_version_set.test.name + resource_group_name = azurerm_api_management_api_version_set.test.resource_group_name + api_management_name = azurerm_api_management_api_version_set.test.api_management_name +} +`, ApiManagementApiVersionSetResource{}.basic(data)) +} diff --git a/azurerm/internal/services/apimanagement/api_management_api_version_set_resource.go b/azurerm/internal/services/apimanagement/api_management_api_version_set_resource.go index eb954513f25b..289c4abacb78 100644 --- a/azurerm/internal/services/apimanagement/api_management_api_version_set_resource.go +++ b/azurerm/internal/services/apimanagement/api_management_api_version_set_resource.go @@ -17,12 +17,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApiManagementApiVersionSet() *schema.Resource { +func resourceApiManagementApiVersionSet() *schema.Resource { return &schema.Resource{ - Create: resourceArmApiManagementApiVersionSetCreateUpdate, - Read: resourceArmApiManagementApiVersionSetRead, - Update: resourceArmApiManagementApiVersionSetCreateUpdate, - Delete: resourceArmApiManagementApiVersionSetDelete, + Create: resourceApiManagementApiVersionSetCreateUpdate, + Read: resourceApiManagementApiVersionSetRead, + Update: resourceApiManagementApiVersionSetCreateUpdate, + Delete: resourceApiManagementApiVersionSetDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -89,7 +89,7 @@ func resourceArmApiManagementApiVersionSet() *schema.Resource { } } -func resourceArmApiManagementApiVersionSetCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementApiVersionSetCreateUpdate(d *schema.ResourceData, 
meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ApiVersionSetClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -169,15 +169,15 @@ func resourceArmApiManagementApiVersionSetCreateUpdate(d *schema.ResourceData, m } d.SetId(*resp.ID) - return resourceArmApiManagementApiVersionSetRead(d, meta) + return resourceApiManagementApiVersionSetRead(d, meta) } -func resourceArmApiManagementApiVersionSetRead(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementApiVersionSetRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ApiVersionSetClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.APIVersionSetID(d.Id()) + id, err := parse.ApiVersionSetID(d.Id()) if err != nil { return err } @@ -208,12 +208,12 @@ func resourceArmApiManagementApiVersionSetRead(d *schema.ResourceData, meta inte return nil } -func resourceArmApiManagementApiVersionSetDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementApiVersionSetDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ApiVersionSetClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.APIVersionSetID(d.Id()) + id, err := parse.ApiVersionSetID(d.Id()) if err != nil { return err } diff --git a/azurerm/internal/services/apimanagement/api_management_api_version_set_resource_test.go b/azurerm/internal/services/apimanagement/api_management_api_version_set_resource_test.go new file mode 100644 index 000000000000..1b6bb81b0c94 --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_api_version_set_resource_test.go @@ -0,0 +1,216 @@ +package apimanagement_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/apimanagement/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementApiVersionSetResource struct { +} + +func TestAccApiManagementApiVersionSet_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api_version_set", "test") + r := ApiManagementApiVersionSetResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementApiVersionSet_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api_version_set", "test") + r := ApiManagementApiVersionSetResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccApiManagementApiVersionSet_header(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api_version_set", "test") + r := 
ApiManagementApiVersionSetResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.header(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementApiVersionSet_query(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api_version_set", "test") + r := ApiManagementApiVersionSetResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.query(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementApiVersionSet_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_api_version_set", "test") + r := ApiManagementApiVersionSetResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("description").HasValue("TestDescription1"), + check.That(data.ResourceName).Key("display_name").HasValue(fmt.Sprintf("TestApiVersionSet1%d", data.RandomInteger)), + ), + }, + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("description").HasValue("TestDescription2"), + check.That(data.ResourceName).Key("display_name").HasValue(fmt.Sprintf("TestApiVersionSet2%d", data.RandomInteger)), + ), + }, + data.ImportStep(), + }) +} + +func (t ApiManagementApiVersionSetResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ApiVersionSetID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.ApiManagement.ApiVersionSetClient.Get(ctx, id.ResourceGroup, id.ServiceName, id.Name) + if err != nil { + return nil, fmt.Errorf("reading ApiManagementApi Version Set (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (r ApiManagementApiVersionSetResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api_version_set" "test" { + name = "acctestAMAVS-%d" + resource_group_name = azurerm_api_management.test.resource_group_name + api_management_name = azurerm_api_management.test.name + description = "TestDescription1" + display_name = "TestApiVersionSet1%d" + versioning_scheme = "Segment" +} +`, r.template(data), data.RandomInteger, data.RandomInteger) +} + +func (r ApiManagementApiVersionSetResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api_version_set" "import" { + name = azurerm_api_management_api_version_set.test.name + resource_group_name = azurerm_api_management_api_version_set.test.resource_group_name + api_management_name = azurerm_api_management_api_version_set.test.api_management_name + description = azurerm_api_management_api_version_set.test.description + display_name = azurerm_api_management_api_version_set.test.display_name + versioning_scheme = azurerm_api_management_api_version_set.test.versioning_scheme +} +`, r.basic(data)) +} + +func (r ApiManagementApiVersionSetResource) header(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api_version_set" "test" { + name = "acctestAMAVS-%d" + resource_group_name = azurerm_api_management.test.resource_group_name + 
api_management_name = azurerm_api_management.test.name + description = "TestDescription1" + display_name = "TestApiVersionSet1%d" + versioning_scheme = "Header" + version_header_name = "Header1" +} +`, r.template(data), data.RandomInteger, data.RandomInteger) +} + +func (r ApiManagementApiVersionSetResource) query(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api_version_set" "test" { + name = "acctestAMAVS-%d" + resource_group_name = azurerm_api_management.test.resource_group_name + api_management_name = azurerm_api_management.test.name + description = "TestDescription1" + display_name = "TestApiVersionSet1%d" + versioning_scheme = "Query" + version_query_name = "Query1" +} +`, r.template(data), data.RandomInteger, data.RandomInteger) +} + +func (r ApiManagementApiVersionSetResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_api_version_set" "test" { + name = "acctestAMAVS-%d" + resource_group_name = azurerm_api_management.test.resource_group_name + api_management_name = azurerm_api_management.test.name + description = "TestDescription2" + display_name = "TestApiVersionSet2%d" + versioning_scheme = "Segment" +} +`, r.template(data), data.RandomInteger, data.RandomInteger) +} + +func (ApiManagementApiVersionSetResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/apimanagement/api_management_authorization_server_resource.go b/azurerm/internal/services/apimanagement/api_management_authorization_server_resource.go index 616be1d55507..409f35fb9bde 100644 --- a/azurerm/internal/services/apimanagement/api_management_authorization_server_resource.go +++ b/azurerm/internal/services/apimanagement/api_management_authorization_server_resource.go @@ -15,12 +15,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApiManagementAuthorizationServer() *schema.Resource { +func resourceApiManagementAuthorizationServer() *schema.Resource { return &schema.Resource{ - Create: resourceArmApiManagementAuthorizationServerCreateUpdate, - Read: resourceArmApiManagementAuthorizationServerRead, - Update: resourceArmApiManagementAuthorizationServerCreateUpdate, - Delete: resourceArmApiManagementAuthorizationServerDelete, + Create: resourceApiManagementAuthorizationServerCreateUpdate, + Read: resourceApiManagementAuthorizationServerRead, + Update: resourceApiManagementAuthorizationServerCreateUpdate, + Delete: resourceApiManagementAuthorizationServerDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -183,7 +183,7 @@ func resourceArmApiManagementAuthorizationServer() *schema.Resource { } } -func resourceArmApiManagementAuthorizationServerCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementAuthorizationServerCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.AuthorizationServersClient ctx, cancel := 
timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -274,10 +274,10 @@ func resourceArmApiManagementAuthorizationServerCreateUpdate(d *schema.ResourceD } d.SetId(*read.ID) - return resourceArmApiManagementAuthorizationServerRead(d, meta) + return resourceApiManagementAuthorizationServerRead(d, meta) } -func resourceArmApiManagementAuthorizationServerRead(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementAuthorizationServerRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.AuthorizationServersClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -342,7 +342,7 @@ func resourceArmApiManagementAuthorizationServerRead(d *schema.ResourceData, met return nil } -func resourceArmApiManagementAuthorizationServerDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementAuthorizationServerDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.AuthorizationServersClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/apimanagement/api_management_authorization_server_resource_test.go b/azurerm/internal/services/apimanagement/api_management_authorization_server_resource_test.go new file mode 100644 index 000000000000..da2484c84336 --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_authorization_server_resource_test.go @@ -0,0 +1,184 @@ +package apimanagement_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementAuthorizationServerResource struct { +} + +func TestAccApiManagementAuthorizationServer_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_authorization_server", "test") + r := ApiManagementAuthorizationServerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementAuthorizationServer_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_authorization_server", "test") + r := ApiManagementAuthorizationServerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccApiManagementAuthorizationServer_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_authorization_server", "test") + r := ApiManagementAuthorizationServerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + 
data.ImportStep("client_secret"), + }) +} + +func (t ApiManagementAuthorizationServerResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + resourceGroup := id.ResourceGroup + serviceName := id.Path["service"] + name := id.Path["authorizationServers"] + + resp, err := clients.ApiManagement.AuthorizationServersClient.Get(ctx, resourceGroup, serviceName, name) + if err != nil { + return nil, fmt.Errorf("reading ApiManagement Authorization Server (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (r ApiManagementAuthorizationServerResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_authorization_server" "test" { + name = "acctestauthsrv-%d" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + display_name = "Test Group" + authorization_endpoint = "https://azacctest.hashicorptest.com/client/authorize" + client_id = "42424242-4242-4242-4242-424242424242" + client_registration_endpoint = "https://azacctest.hashicorptest.com/client/register" + + grant_types = [ + "implicit", + ] + + authorization_methods = [ + "GET", + ] +} +`, r.template(data), data.RandomInteger) +} + +func (r ApiManagementAuthorizationServerResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_authorization_server" "import" { + name = azurerm_api_management_authorization_server.test.name + resource_group_name = azurerm_api_management_authorization_server.test.resource_group_name + api_management_name = azurerm_api_management_authorization_server.test.api_management_name + display_name = azurerm_api_management_authorization_server.test.display_name + authorization_endpoint = azurerm_api_management_authorization_server.test.authorization_endpoint + client_id = azurerm_api_management_authorization_server.test.client_id + client_registration_endpoint = azurerm_api_management_authorization_server.test.client_registration_endpoint + grant_types = azurerm_api_management_authorization_server.test.grant_types + + authorization_methods = [ + "GET", + ] +} +`, r.basic(data)) +} + +func (r ApiManagementAuthorizationServerResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_authorization_server" "test" { + name = "acctestauthsrv-%d" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + display_name = "Test Group" + authorization_endpoint = "https://azacctest.hashicorptest.com/client/authorize" + client_id = "42424242-4242-4242-4242-424242424242" + client_registration_endpoint = "https://azacctest.hashicorptest.com/client/register" + + grant_types = [ + "authorizationCode", + ] + + authorization_methods = [ + "GET", + "POST", + ] + + bearer_token_sending_methods = [ + "authorizationHeader", + ] + + client_secret = "n1n3-m0re-s3a5on5-m0r1y" + default_scope = "read write" + token_endpoint = "https://azacctest.hashicorptest.com/client/token" + resource_owner_username = "rick" + resource_owner_password = "C-193P" + support_state = true +} +`, r.template(data), data.RandomInteger) +} + +func (ApiManagementAuthorizationServerResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource 
"azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/apimanagement/api_management_backend_resource.go b/azurerm/internal/services/apimanagement/api_management_backend_resource.go index bac8ed6743b7..c0ebeac10675 100644 --- a/azurerm/internal/services/apimanagement/api_management_backend_resource.go +++ b/azurerm/internal/services/apimanagement/api_management_backend_resource.go @@ -17,12 +17,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApiManagementBackend() *schema.Resource { +func resourceApiManagementBackend() *schema.Resource { return &schema.Resource{ - Create: resourceArmApiManagementBackendCreateUpdate, - Read: resourceArmApiManagementBackendRead, - Update: resourceArmApiManagementBackendCreateUpdate, - Delete: resourceArmApiManagementBackendDelete, + Create: resourceApiManagementBackendCreateUpdate, + Read: resourceApiManagementBackendRead, + Update: resourceApiManagementBackendCreateUpdate, + Delete: resourceApiManagementBackendDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -234,7 +234,7 @@ func resourceArmApiManagementBackend() *schema.Resource { } } -func resourceArmApiManagementBackendCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementBackendCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.BackendClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -308,10 +308,10 @@ func resourceArmApiManagementBackendCreateUpdate(d *schema.ResourceData, meta in } d.SetId(*read.ID) - return resourceArmApiManagementBackendRead(d, meta) + return resourceApiManagementBackendRead(d, meta) } -func resourceArmApiManagementBackendRead(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementBackendRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.BackendClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -364,7 +364,7 @@ func resourceArmApiManagementBackendRead(d *schema.ResourceData, meta interface{ return nil } -func resourceArmApiManagementBackendDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementBackendDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.BackendClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/apimanagement/api_management_backend_resource_test.go b/azurerm/internal/services/apimanagement/api_management_backend_resource_test.go new file mode 100644 index 000000000000..d89bfdc2289f --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_backend_resource_test.go @@ -0,0 +1,423 @@ +package apimanagement_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementAuthorizationBackendResource struct { +} + +func TestAccApiManagementBackend_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_backend", "test") + r := ApiManagementAuthorizationBackendResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "basic"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("protocol").HasValue("http"), + check.That(data.ResourceName).Key("url").HasValue("https://acctest"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementBackend_allProperties(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_backend", "test") + r := ApiManagementAuthorizationBackendResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.allProperties(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("protocol").HasValue("http"), + check.That(data.ResourceName).Key("url").HasValue("https://acctest"), + check.That(data.ResourceName).Key("description").HasValue("description"), + check.That(data.ResourceName).Key("resource_id").HasValue("https://resourceid"), + check.That(data.ResourceName).Key("title").HasValue("title"), + check.That(data.ResourceName).Key("credentials.#").HasValue("1"), + check.That(data.ResourceName).Key("credentials.0.authorization.0.parameter").HasValue("parameter"), + check.That(data.ResourceName).Key("credentials.0.authorization.0.scheme").HasValue("scheme"), + check.That(data.ResourceName).Key("credentials.0.certificate.0").Exists(), + check.That(data.ResourceName).Key("credentials.0.header.header1").HasValue("header1value1"), + check.That(data.ResourceName).Key("credentials.0.header.header2").HasValue("header2value1"), + check.That(data.ResourceName).Key("credentials.0.query.query1").HasValue("query1value1"), + check.That(data.ResourceName).Key("credentials.0.query.query2").HasValue("query2value1"), + check.That(data.ResourceName).Key("proxy.#").HasValue("1"), + check.That(data.ResourceName).Key("proxy.0.url").HasValue("http://192.168.1.1:8080"), + check.That(data.ResourceName).Key("proxy.0.username").HasValue("username"), + check.That(data.ResourceName).Key("proxy.0.password").HasValue("password"), + check.That(data.ResourceName).Key("tls.#").HasValue("1"), + check.That(data.ResourceName).Key("tls.0.validate_certificate_chain").HasValue("false"), + check.That(data.ResourceName).Key("tls.0.validate_certificate_name").HasValue("true"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementBackend_credentialsNoCertificate(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_backend", "test") + r := ApiManagementAuthorizationBackendResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.credentialsNoCertificate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementBackend_update(t 
*testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_backend", "test") + r := ApiManagementAuthorizationBackendResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "update"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("protocol").HasValue("http"), + check.That(data.ResourceName).Key("url").HasValue("https://acctest"), + ), + }, + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("protocol").HasValue("soap"), + check.That(data.ResourceName).Key("url").HasValue("https://updatedacctest"), + check.That(data.ResourceName).Key("description").HasValue("description"), + check.That(data.ResourceName).Key("resource_id").HasValue("https://resourceid"), + check.That(data.ResourceName).Key("proxy.#").HasValue("1"), + check.That(data.ResourceName).Key("proxy.0.url").HasValue("http://192.168.1.1:8080"), + check.That(data.ResourceName).Key("proxy.0.username").HasValue("username"), + check.That(data.ResourceName).Key("proxy.0.password").HasValue("password"), + check.That(data.ResourceName).Key("tls.#").HasValue("1"), + check.That(data.ResourceName).Key("tls.0.validate_certificate_chain").HasValue("false"), + check.That(data.ResourceName).Key("tls.0.validate_certificate_name").HasValue("true"), + ), + }, + { + Config: r.basic(data, "update"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("protocol").HasValue("http"), + check.That(data.ResourceName).Key("url").HasValue("https://acctest"), + check.That(data.ResourceName).Key("description").HasValue(""), + check.That(data.ResourceName).Key("resource_id").HasValue(""), + check.That(data.ResourceName).Key("proxy.#").HasValue("0"), + check.That(data.ResourceName).Key("tls.#").HasValue("0"), + ), + }, + }) +} + +func TestAccApiManagementBackend_serviceFabric(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_backend", "test") + r := ApiManagementAuthorizationBackendResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.serviceFabric(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("service_fabric_cluster.0.client_certificate_thumbprint").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementBackend_disappears(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_backend", "test") + r := ApiManagementAuthorizationBackendResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "disappears"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + testCheckApiManagementBackendDisappears(data.ResourceName), + ), + ExpectNonEmptyPlan: true, + }, + }) +} + +func TestAccApiManagementBackend_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_backend", "test") + r := ApiManagementAuthorizationBackendResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "import"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (t ApiManagementAuthorizationBackendResource) Exists(ctx context.Context, clients *clients.Client, state 
*terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + serviceName := id.Path["service"] + name := id.Path["backends"] + + resp, err := clients.ApiManagement.BackendClient.Get(ctx, resourceGroup, serviceName, name) + if err != nil { + return nil, fmt.Errorf("reading ApiManagement Authorization Backend (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func testCheckApiManagementBackendDisappears(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + conn := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.BackendClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + // Ensure we have enough information in state to look up in API + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + + name := rs.Primary.Attributes["name"] + serviceName := rs.Primary.Attributes["api_management_name"] + resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] + if !hasResourceGroup { + return fmt.Errorf("Bad: no resource group found in state for backend: %s", name) + } + + resp, err := conn.Delete(ctx, resourceGroup, serviceName, name, "") + if err != nil { + if utils.ResponseWasNotFound(resp) { + return nil + } + return fmt.Errorf("Bad: Delete on BackendClient: %+v", err) + } + + return nil + } +} + +func (r ApiManagementAuthorizationBackendResource) basic(data acceptance.TestData, testName string) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_backend" "test" { + name = "acctestapi-%d" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + protocol = "http" + url = "https://acctest" +} +`, r.template(data, testName), data.RandomInteger) +} + +func (r ApiManagementAuthorizationBackendResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_backend" "test" { + name = "acctestapi-%d" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + protocol = "soap" + url = "https://updatedacctest" + description = "description" + resource_id = "https://resourceid" + proxy { + url = "http://192.168.1.1:8080" + username = "username" + password = "password" + } + tls { + validate_certificate_chain = false + validate_certificate_name = true + } +} +`, r.template(data, "update"), data.RandomInteger) +} + +func (r ApiManagementAuthorizationBackendResource) allProperties(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_certificate" "test" { + name = "example-cert" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + data = filebase64("testdata/keyvaultcert.pfx") + password = "" +} + +resource "azurerm_api_management_backend" "test" { + name = "acctestapi-%d" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + protocol = "http" + url = "https://acctest" + description = "description" + resource_id = "https://resourceid" + title = "title" + credentials { + authorization { + parameter = "parameter" + scheme = "scheme" + } + certificate = [ + azurerm_api_management_certificate.test.thumbprint, + ] + header = { + header1 = "header1value1,header1value2" + header2 
= "header2value1,header2value2" + } + query = { + query1 = "query1value1,query1value2" + query2 = "query2value1,query2value2" + } + } + proxy { + url = "http://192.168.1.1:8080" + username = "username" + password = "password" + } + tls { + validate_certificate_chain = false + validate_certificate_name = true + } +} +`, r.template(data, "all"), data.RandomInteger) +} + +func (r ApiManagementAuthorizationBackendResource) serviceFabric(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_certificate" "test" { + name = "example-cert" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + data = filebase64("testdata/keyvaultcert.pfx") + password = "" +} + +resource "azurerm_api_management_backend" "test" { + name = "acctestapi-%d" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + protocol = "http" + url = "fabric:/mytestapp/acctest" + service_fabric_cluster { + client_certificate_thumbprint = azurerm_api_management_certificate.test.thumbprint + management_endpoints = [ + "https://acctestsf.com", + ] + max_partition_resolution_retries = 5 + server_certificate_thumbprints = [ + "thumb1", + "thumb2", + ] + } +} +`, r.template(data, "sf"), data.RandomInteger) +} + +func (r ApiManagementAuthorizationBackendResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_backend" "import" { + name = azurerm_api_management_backend.test.name + resource_group_name = azurerm_api_management_backend.test.resource_group_name + api_management_name = azurerm_api_management_backend.test.api_management_name + protocol = azurerm_api_management_backend.test.protocol + url = azurerm_api_management_backend.test.url +} +`, r.basic(data, "requiresimport")) +} + +func (ApiManagementAuthorizationBackendResource) template(data acceptance.TestData, testName string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d-%s" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d-%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" +} +`, data.RandomInteger, testName, data.Locations.Primary, data.RandomInteger, testName) +} + +func (r ApiManagementAuthorizationBackendResource) credentialsNoCertificate(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_backend" "test" { + name = "acctestapi-%d" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + protocol = "http" + url = "https://acctest" + description = "description" + resource_id = "https://resourceid" + title = "title" + credentials { + authorization { + parameter = "parameter" + scheme = "scheme" + } + header = { + header1 = "header1value1,header1value2" + header2 = "header2value1,header2value2" + } + query = { + query1 = "query1value1,query1value2" + query2 = "query2value1,query2value2" + } + } + proxy { + url = "http://192.168.1.1:8080" + username = "username" + password = "password" + } + tls { + validate_certificate_chain = false + validate_certificate_name = true + } +} +`, r.template(data, "all"), data.RandomInteger) +} diff --git 
a/azurerm/internal/services/apimanagement/api_management_certificate_resource.go b/azurerm/internal/services/apimanagement/api_management_certificate_resource.go index 7114d1b77220..d1360fce4de7 100644 --- a/azurerm/internal/services/apimanagement/api_management_certificate_resource.go +++ b/azurerm/internal/services/apimanagement/api_management_certificate_resource.go @@ -15,12 +15,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApiManagementCertificate() *schema.Resource { +func resourceApiManagementCertificate() *schema.Resource { return &schema.Resource{ - Create: resourceArmApiManagementCertificateCreateUpdate, - Read: resourceArmApiManagementCertificateRead, - Update: resourceArmApiManagementCertificateCreateUpdate, - Delete: resourceArmApiManagementCertificateDelete, + Create: resourceApiManagementCertificateCreateUpdate, + Read: resourceApiManagementCertificateRead, + Update: resourceApiManagementCertificateCreateUpdate, + Delete: resourceApiManagementCertificateDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -70,7 +70,7 @@ func resourceArmApiManagementCertificate() *schema.Resource { } } -func resourceArmApiManagementCertificateCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementCertificateCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.CertificatesClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -114,10 +114,10 @@ func resourceArmApiManagementCertificateCreateUpdate(d *schema.ResourceData, met } d.SetId(*resp.ID) - return resourceArmApiManagementCertificateRead(d, meta) + return resourceApiManagementCertificateRead(d, meta) } -func resourceArmApiManagementCertificateRead(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementCertificateRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.CertificatesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -158,7 +158,7 @@ func resourceArmApiManagementCertificateRead(d *schema.ResourceData, meta interf return nil } -func resourceArmApiManagementCertificateDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementCertificateDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.CertificatesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/apimanagement/api_management_certificate_resource_test.go b/azurerm/internal/services/apimanagement/api_management_certificate_resource_test.go new file mode 100644 index 000000000000..ee424bd3470d --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_certificate_resource_test.go @@ -0,0 +1,122 @@ +package apimanagement_test + +import ( + "context" + "fmt" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementCertificateResource struct { +} + +func TestAccApiManagementCertificate_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_certificate", "test") + r := ApiManagementCertificateResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("expiration").Exists(), + check.That(data.ResourceName).Key("subject").Exists(), + check.That(data.ResourceName).Key("thumbprint").Exists(), + ), + }, + { + ResourceName: data.ResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{ + // not returned from the API + "data", + "password", + }, + }, + }) +} + +func TestAccApiManagementCertificate_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_certificate", "test") + r := ApiManagementCertificateResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (t ApiManagementCertificateResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + serviceName := id.Path["service"] + name := id.Path["certificates"] + + resp, err := clients.ApiManagement.CertificatesClient.Get(ctx, resourceGroup, serviceName, name) + if err != nil { + return nil, fmt.Errorf("reading ApiManagement Certificate (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (ApiManagementCertificateResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" +} + +resource "azurerm_api_management_certificate" "test" { + name = "example-cert" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + data = filebase64("testdata/keyvaultcert.pfx") + password = "" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r ApiManagementCertificateResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_certificate" "import" { + name = azurerm_api_management_certificate.test.name + api_management_name = azurerm_api_management_certificate.test.api_management_name + resource_group_name = azurerm_api_management_certificate.test.resource_group_name + data = azurerm_api_management_certificate.test.data + password = azurerm_api_management_certificate.test.password +} +`, r.basic(data)) +} diff --git a/azurerm/internal/services/apimanagement/api_management_custom_domain_resource.go b/azurerm/internal/services/apimanagement/api_management_custom_domain_resource.go new file mode 100644 index 000000000000..559c26487905 --- /dev/null +++ 
b/azurerm/internal/services/apimanagement/api_management_custom_domain_resource.go @@ -0,0 +1,378 @@ +package apimanagement + +import ( + "fmt" + "log" + "strings" + "time" + + "github.com/Azure/azure-sdk-for-go/services/apimanagement/mgmt/2019-12-01/apimanagement" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/apimanagement/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +var apiManagementCustomDomainResourceName = "azurerm_api_management_custom_domain" + +func resourceApiManagementCustomDomain() *schema.Resource { + return &schema.Resource{ + Create: apiManagementCustomDomainCreateUpdate, + Read: apiManagementCustomDomainRead, + Update: apiManagementCustomDomainCreateUpdate, + Delete: apiManagementCustomDomainDelete, + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "api_management_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateResourceID, + }, + + "management": { + Type: schema.TypeList, + Optional: true, + AtLeastOneOf: []string{"management", "portal", "developer_portal", "proxy", "scm"}, + Elem: &schema.Resource{ + Schema: apiManagementResourceHostnameSchema(), + }, + }, + "portal": { + Type: schema.TypeList, + Optional: true, + AtLeastOneOf: []string{"management", "portal", "developer_portal", "proxy", "scm"}, + Elem: &schema.Resource{ + Schema: apiManagementResourceHostnameSchema(), + }, + }, + "developer_portal": { + Type: schema.TypeList, + Optional: true, + AtLeastOneOf: []string{"management", "portal", "developer_portal", "proxy", "scm"}, + Elem: &schema.Resource{ + Schema: apiManagementResourceHostnameSchema(), + }, + }, + "proxy": { + Type: schema.TypeList, + Optional: true, + AtLeastOneOf: []string{"management", "portal", "developer_portal", "proxy", "scm"}, + Elem: &schema.Resource{ + Schema: apiManagementResourceHostnameProxySchema(), + }, + }, + "scm": { + Type: schema.TypeList, + Optional: true, + AtLeastOneOf: []string{"management", "portal", "developer_portal", "proxy", "scm"}, + Elem: &schema.Resource{ + Schema: apiManagementResourceHostnameSchema(), + }, + }, + }, + } +} + +func apiManagementCustomDomainCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).ApiManagement.ServiceClient + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + log.Printf("[INFO] preparing arguments for API Management Custom domain creation.") + + apiManagementID := d.Get("api_management_id").(string) + id, err := parse.ApiManagementID(apiManagementID) + if err != nil { + return err + } + resourceGroup := id.ResourceGroup + serviceName := id.ServiceName + + existing, err := client.Get(ctx, resourceGroup, 
serviceName) + if err != nil { + return fmt.Errorf("finding API Management (API Management %q / Resource Group %q): %s", serviceName, resourceGroup, err) + } + + if d.IsNewResource() { + if existing.ServiceProperties != nil && existing.ServiceProperties.HostnameConfigurations != nil && len(*existing.ServiceProperties.HostnameConfigurations) > 1 { + return tf.ImportAsExistsError(apiManagementCustomDomainResourceName, *existing.ID) + } + } + + existing.ServiceProperties.HostnameConfigurations = expandApiManagementCustomDomains(d) + + // Wait for the ProvisioningState to become "Succeeded" before attempting to update + log.Printf("[DEBUG] Waiting for API Management Service %q (Resource Group: %q) to become ready", serviceName, resourceGroup) + stateConf := &resource.StateChangeConf{ + Pending: []string{"Updating", "Unknown"}, + Target: []string{"Succeeded", "Ready"}, + Refresh: apiManagementRefreshFunc(ctx, client, serviceName, resourceGroup), + MinTimeout: 1 * time.Minute, + ContinuousTargetOccurence: 6, + } + if d.IsNewResource() { + stateConf.Timeout = d.Timeout(schema.TimeoutCreate) + } else { + stateConf.Timeout = d.Timeout(schema.TimeoutUpdate) + } + + if _, err = stateConf.WaitForState(); err != nil { + return fmt.Errorf("waiting for API Management Service %q (Resource Group: %q) to become ready: %+v", serviceName, resourceGroup, err) + } + + if _, err := client.CreateOrUpdate(ctx, resourceGroup, serviceName, existing); err != nil { + return fmt.Errorf("creating/updating Custom Domain (API Management %q / Resource Group %q): %+v", serviceName, resourceGroup, err) + } + + read, err := client.Get(ctx, resourceGroup, serviceName) + if err != nil { + return fmt.Errorf("retrieving Custom Domain (API Management %q / Resource Group %q): %+v", serviceName, resourceGroup, err) + } + if read.ID == nil { + return fmt.Errorf("cannot read ID for Custom Domain (API Management %q / Resource Group %q)", serviceName, resourceGroup) + } + + // Wait for the ProvisioningState to become "Succeeded" before attempting to update + log.Printf("[DEBUG] Waiting for API Management Service %q (Resource Group: %q) to become ready", serviceName, resourceGroup) + if _, err = stateConf.WaitForState(); err != nil { + return fmt.Errorf("waiting for API Management Service %q (Resource Group: %q) to become ready: %+v", serviceName, resourceGroup, err) + } + + customDomainsID := fmt.Sprintf("%s/customDomains/default", *read.ID) + d.SetId(customDomainsID) + + return apiManagementCustomDomainRead(d, meta) +} + +func apiManagementCustomDomainRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).ApiManagement.ServiceClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.CustomDomainID(d.Id()) + if err != nil { + return err + } + resourceGroup := id.ResourceGroup + serviceName := id.ServiceName + + resp, err := client.Get(ctx, resourceGroup, serviceName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("API Management Service %q was not found in Resource Group %q - removing from state!", serviceName, resourceGroup) + d.SetId("") + return nil + } + + return fmt.Errorf("making Read request on API Management Service %q (Resource Group %q): %+v", serviceName, resourceGroup, err) + } + + d.Set("api_management_id", resp.ID) + + if resp.ServiceProperties != nil && resp.ServiceProperties.HostnameConfigurations != nil { + configs := 
flattenApiManagementHostnameConfiguration(resp.ServiceProperties.HostnameConfigurations, d) + for _, config := range configs { + for key, v := range config.(map[string]interface{}) { + // lintignore:R001 + if err := d.Set(key, v); err != nil { + return fmt.Errorf("setting `hostname_configuration` %q: %+v", key, err) + } + } + } + } + + return nil +} + +func apiManagementCustomDomainDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).ApiManagement.ServiceClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.CustomDomainID(d.Id()) + if err != nil { + return err + } + resourceGroup := id.ResourceGroup + serviceName := id.ServiceName + + resp, err := client.Get(ctx, resourceGroup, serviceName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("API Management Service %q was not found in Resource Group %q - removing from state!", serviceName, resourceGroup) + d.SetId("") + return nil + } + + return fmt.Errorf("making Read request on API Management Service %q (Resource Group %q): %+v", serviceName, resourceGroup, err) + } + + // Wait for the ProvisioningState to become "Succeeded" before attempting to update + log.Printf("[DEBUG] Waiting for API Management Service %q (Resource Group: %q) to become ready", serviceName, resourceGroup) + stateConf := &resource.StateChangeConf{ + Pending: []string{"Updating", "Unknown"}, + Target: []string{"Succeeded", "Ready"}, + Refresh: apiManagementRefreshFunc(ctx, client, serviceName, resourceGroup), + MinTimeout: 1 * time.Minute, + Timeout: d.Timeout(schema.TimeoutDelete), + ContinuousTargetOccurence: 6, + } + + if _, err = stateConf.WaitForState(); err != nil { + return fmt.Errorf("waiting for API Management Service %q (Resource Group: %q) to become ready: %+v", serviceName, resourceGroup, err) + } + + log.Printf("[DEBUG] Deleting API Management Custom Domain (API Management %q / Resource Group %q)", serviceName, resourceGroup) + + resp.ServiceProperties.HostnameConfigurations = nil + + if _, err := client.CreateOrUpdate(ctx, resourceGroup, serviceName, resp); err != nil { + return fmt.Errorf("deleting Custom Domain (API Management %q / Resource Group %q): %+v", serviceName, resourceGroup, err) + } + + // Wait for the ProvisioningState to become "Succeeded" before attempting to update + log.Printf("[DEBUG] Waiting for API Management Service %q (Resource Group: %q) to become ready", serviceName, resourceGroup) + if _, err = stateConf.WaitForState(); err != nil { + return fmt.Errorf("waiting for API Management Service %q (Resource Group: %q) to become ready: %+v", serviceName, resourceGroup, err) + } + + return nil +} + +func expandApiManagementCustomDomains(input *schema.ResourceData) *[]apimanagement.HostnameConfiguration { + results := make([]apimanagement.HostnameConfiguration, 0) + + if managementRawVal, ok := input.GetOk("management"); ok { + vs := managementRawVal.([]interface{}) + for _, rawVal := range vs { + v := rawVal.(map[string]interface{}) + output := expandApiManagementCommonHostnameConfiguration(v, apimanagement.HostnameTypeManagement) + results = append(results, output) + } + } + if portalRawVal, ok := input.GetOk("portal"); ok { + vs := portalRawVal.([]interface{}) + for _, rawVal := range vs { + v := rawVal.(map[string]interface{}) + output := expandApiManagementCommonHostnameConfiguration(v, apimanagement.HostnameTypePortal) + results = append(results, output) + } + } + if developerPortalRawVal, ok := 
input.GetOk("developer_portal"); ok { + vs := developerPortalRawVal.([]interface{}) + for _, rawVal := range vs { + v := rawVal.(map[string]interface{}) + output := expandApiManagementCommonHostnameConfiguration(v, apimanagement.HostnameTypeDeveloperPortal) + results = append(results, output) + } + } + if proxyRawVal, ok := input.GetOk("proxy"); ok { + vs := proxyRawVal.([]interface{}) + for _, rawVal := range vs { + v := rawVal.(map[string]interface{}) + output := expandApiManagementCommonHostnameConfiguration(v, apimanagement.HostnameTypeProxy) + if value, ok := v["default_ssl_binding"]; ok { + output.DefaultSslBinding = utils.Bool(value.(bool)) + } + results = append(results, output) + } + } + if scmRawVal, ok := input.GetOk("scm"); ok { + vs := scmRawVal.([]interface{}) + for _, rawVal := range vs { + v := rawVal.(map[string]interface{}) + output := expandApiManagementCommonHostnameConfiguration(v, apimanagement.HostnameTypeScm) + results = append(results, output) + } + } + return &results +} + +func flattenApiManagementHostnameConfiguration(input *[]apimanagement.HostnameConfiguration, d *schema.ResourceData) []interface{} { + results := make([]interface{}, 0) + if input == nil { + return results + } + + managementResults := make([]interface{}, 0) + portalResults := make([]interface{}, 0) + developerPortalResults := make([]interface{}, 0) + proxyResults := make([]interface{}, 0) + scmResults := make([]interface{}, 0) + + for _, config := range *input { + output := make(map[string]interface{}) + + if config.HostName != nil { + output["host_name"] = *config.HostName + } + + if config.NegotiateClientCertificate != nil { + output["negotiate_client_certificate"] = *config.NegotiateClientCertificate + } + + if config.KeyVaultID != nil { + output["key_vault_id"] = *config.KeyVaultID + } + + var configType string + switch strings.ToLower(string(config.Type)) { + case strings.ToLower(string(apimanagement.HostnameTypeProxy)): + // only set SSL binding for proxy types + if config.DefaultSslBinding != nil { + output["default_ssl_binding"] = *config.DefaultSslBinding + } + proxyResults = append(proxyResults, output) + configType = "proxy" + + case strings.ToLower(string(apimanagement.HostnameTypeManagement)): + managementResults = append(managementResults, output) + configType = "management" + + case strings.ToLower(string(apimanagement.HostnameTypePortal)): + portalResults = append(portalResults, output) + configType = "portal" + + case strings.ToLower(string(apimanagement.HostnameTypeDeveloperPortal)): + developerPortalResults = append(developerPortalResults, output) + configType = "developer_portal" + + case strings.ToLower(string(apimanagement.HostnameTypeScm)): + scmResults = append(scmResults, output) + configType = "scm" + } + + if configType != "" { + if valsRaw, ok := d.GetOk(configType); ok { + vals := valsRaw.([]interface{}) + azure.CopyCertificateAndPassword(vals, *config.HostName, output) + } + } + } + + return []interface{}{ + map[string]interface{}{ + "management": managementResults, + "portal": portalResults, + "developer_portal": developerPortalResults, + "proxy": proxyResults, + "scm": scmResults, + }, + } +} diff --git a/azurerm/internal/services/apimanagement/api_management_custom_domain_resource_test.go b/azurerm/internal/services/apimanagement/api_management_custom_domain_resource_test.go new file mode 100644 index 000000000000..f4490d532ccd --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_custom_domain_resource_test.go @@ -0,0 +1,325 @@ +package 
apimanagement_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/apimanagement/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementCustomDomainResource struct { +} + +func TestAccApiManagementCustomDomain_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_custom_domain", "test") + r := ApiManagementCustomDomainResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementCustomDomain_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_custom_domain", "test") + r := ApiManagementCustomDomainResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccApiManagementCustomDomain_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_custom_domain", "test") + r := ApiManagementCustomDomainResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.proxyOnly(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.developerPortalOnly(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.builtinProxyOnly(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t ApiManagementCustomDomainResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.CustomDomainID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + serviceName := id.ServiceName + + resp, err := clients.ApiManagement.ServiceClient.Get(ctx, resourceGroup, serviceName) + if err != nil { + return nil, fmt.Errorf("reading ApiManagement Custom Domain (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (r ApiManagementCustomDomainResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_custom_domain" "test" { + api_management_id = azurerm_api_management.test.id + + proxy { + host_name = "${azurerm_api_management.test.name}.azure-api.net" + } + + proxy { + host_name = "api.example.com" + key_vault_id = azurerm_key_vault_certificate.test.secret_id + } + + developer_portal { + host_name = "portal.example.com" + key_vault_id = azurerm_key_vault_certificate.test.secret_id + } +} +`, r.template(data)) +} + +func (r 
ApiManagementCustomDomainResource) proxyOnly(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_custom_domain" "test" { + api_management_id = azurerm_api_management.test.id + + proxy { + host_name = "${azurerm_api_management.test.name}.azure-api.net" + } + + proxy { + host_name = "api.example.com" + key_vault_id = azurerm_key_vault_certificate.test.secret_id + } +} +`, r.template(data)) +} + +func (r ApiManagementCustomDomainResource) developerPortalOnly(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_custom_domain" "test" { + api_management_id = azurerm_api_management.test.id + + proxy { + host_name = "${azurerm_api_management.test.name}.azure-api.net" + } + + developer_portal { + host_name = "portal.example.com" + key_vault_id = azurerm_key_vault_certificate.test.secret_id + } +} +`, r.template(data)) +} + +func (r ApiManagementCustomDomainResource) builtinProxyOnly(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_custom_domain" "test" { + api_management_id = azurerm_api_management.test.id + + proxy { + host_name = "${azurerm_api_management.test.name}.azure-api.net" + } +} +`, r.template(data)) +} + +func (r ApiManagementCustomDomainResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_custom_domain" "import" { + api_management_id = azurerm_api_management_custom_domain.test.api_management_id + + proxy { + host_name = "${azurerm_api_management.test.name}.azure-api.net" + } + + proxy { + host_name = "api.example.com" + key_vault_id = azurerm_key_vault_certificate.test.secret_id + } + + developer_portal { + host_name = "portal.example.com" + key_vault_id = azurerm_key_vault_certificate.test.secret_id + } +} +`, r.basic(data)) +} + +func (ApiManagementCustomDomainResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%[1]d" + location = "%[2]s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" + + identity { + type = "SystemAssigned" + } +} + +resource "azurerm_key_vault" "test" { + name = "apimkv%[3]s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + + sku_name = "standard" + + access_policy { + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + + certificate_permissions = [ + "create", + "delete", + "get", + "update", + ] + + key_permissions = [ + "create", + "get", + ] + + secret_permissions = [ + "get", + ] + } + + access_policy { + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = azurerm_api_management.test.identity.0.principal_id + + certificate_permissions = [ + "get", + ] + + secret_permissions = [ + "get", + ] + } +} + +resource "azurerm_key_vault_certificate" "test" { + name = "acctestcert%[3]s" + key_vault_id = azurerm_key_vault.test.id + + certificate_policy { + issuer_parameters { + name = "Self" + } + + key_properties { + exportable = true + key_size = 2048 + key_type = 
"RSA" + reuse_key = true + } + + lifetime_action { + action { + action_type = "AutoRenew" + } + + trigger { + days_before_expiry = 30 + } + } + + secret_properties { + content_type = "application/x-pkcs12" + } + + x509_certificate_properties { + key_usage = [ + "cRLSign", + "dataEncipherment", + "digitalSignature", + "keyAgreement", + "keyCertSign", + "keyEncipherment", + ] + + subject = "CN=api.example.com" + validity_in_months = 12 + + subject_alternative_names { + dns_names = [ + "api.example.com", + "portal.example.com", + ] + } + } + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} diff --git a/azurerm/internal/services/apimanagement/api_management_data_source.go b/azurerm/internal/services/apimanagement/api_management_data_source.go index 2bc43c2505b6..38af27e05295 100644 --- a/azurerm/internal/services/apimanagement/api_management_data_source.go +++ b/azurerm/internal/services/apimanagement/api_management_data_source.go @@ -229,6 +229,11 @@ func dataSourceApiManagementRead(d *schema.ResourceData, meta interface{}) error d.Set("location", azure.NormalizeLocation(*location)) } + identity := flattenAzureRmApiManagementMachineIdentity(resp.Identity) + if err := d.Set("identity", identity); err != nil { + return fmt.Errorf("setting `identity`: %+v", err) + } + if props := resp.ServiceProperties; props != nil { d.Set("publisher_email", props.PublisherEmail) d.Set("publisher_name", props.PublisherName) diff --git a/azurerm/internal/services/apimanagement/api_management_data_source_test.go b/azurerm/internal/services/apimanagement/api_management_data_source_test.go new file mode 100644 index 000000000000..a1c66b3c67de --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_data_source_test.go @@ -0,0 +1,203 @@ +package apimanagement_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type ApiManagementDataSource struct { +} + +func TestAccDataSourceApiManagement_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_api_management", "test") + r := ApiManagementDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("publisher_email").HasValue("pub1@email.com"), + check.That(data.ResourceName).Key("publisher_name").HasValue("pub1"), + check.That(data.ResourceName).Key("sku_name").HasValue("Developer_1"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + check.That(data.ResourceName).Key("public_ip_addresses.#").Exists(), + ), + }, + }) +} + +func TestAccDataSourceApiManagement_identitySystemAssigned(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_api_management", "test") + r := ApiManagementDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.identitySystemAssigned(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("publisher_email").HasValue("pub1@email.com"), + check.That(data.ResourceName).Key("publisher_name").HasValue("pub1"), + check.That(data.ResourceName).Key("sku_name").HasValue("Developer_1"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + check.That(data.ResourceName).Key("public_ip_addresses.#").Exists(), + check.That(data.ResourceName).Key("identity.#").HasValue("1"), 
+ check.That(data.ResourceName).Key("identity.0.type").HasValue("SystemAssigned"), + ), + }, + }) +} + +func TestAccDataSourceApiManagement_virtualNetwork(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_api_management", "test") + r := ApiManagementDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.virtualNetwork(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("publisher_email").HasValue("pub1@email.com"), + check.That(data.ResourceName).Key("publisher_name").HasValue("pub1"), + check.That(data.ResourceName).Key("sku_name").HasValue("Premium_1"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + check.That(data.ResourceName).Key("public_ip_addresses.#").Exists(), + check.That(data.ResourceName).Key("private_ip_addresses.#").Exists(), + check.That(data.ResourceName).Key("additional_location.0.public_ip_addresses.#").Exists(), + check.That(data.ResourceName).Key("additional_location.0.private_ip_addresses.#").Exists(), + ), + }, + }) +} + +func (ApiManagementDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "amtestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +data "azurerm_api_management" "test" { + name = azurerm_api_management.test.name + resource_group_name = azurerm_api_management.test.resource_group_name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (ApiManagementDataSource) identitySystemAssigned(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "amtestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + identity { + type = "SystemAssigned" + } +} + +data "azurerm_api_management" "test" { + name = azurerm_api_management.test.name + resource_group_name = azurerm_api_management.test.resource_group_name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (ApiManagementDataSource) virtualNetwork(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test1" { + name = "amestRG1-%d" + location = "%s" +} + +resource "azurerm_resource_group" "test2" { + name = "amestRG2-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test1" { + name = "amtestVNET1-%d" + location = azurerm_resource_group.test1.location + resource_group_name = azurerm_resource_group.test1.name + address_space = ["10.0.0.0/16"] +} + +resource "azurerm_subnet" "test1" { + name = "amtestSNET1-%d" + resource_group_name = azurerm_resource_group.test1.name + virtual_network_name = azurerm_virtual_network.test1.name + address_prefix = "10.0.1.0/24" +} + +resource "azurerm_virtual_network" "test2" { + name = "amtestVNET2-%d" + location = azurerm_resource_group.test2.location + resource_group_name = azurerm_resource_group.test2.name + address_space = ["10.1.0.0/16"] +} + 
+resource "azurerm_subnet" "test2" { + name = "amtestSNET2-%d" + resource_group_name = azurerm_resource_group.test2.name + virtual_network_name = azurerm_virtual_network.test2.name + address_prefix = "10.1.1.0/24" +} + +resource "azurerm_api_management" "test" { + name = "amtestAM-%d" + location = azurerm_resource_group.test1.location + resource_group_name = azurerm_resource_group.test1.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Premium_1" + + additional_location { + location = azurerm_resource_group.test2.location + virtual_network_configuration { + subnet_id = azurerm_subnet.test2.id + } + } + + virtual_network_type = "Internal" + virtual_network_configuration { + subnet_id = azurerm_subnet.test1.id + } +} + +data "azurerm_api_management" "test" { + name = azurerm_api_management.test.name + resource_group_name = azurerm_api_management.test.resource_group_name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.Locations.Secondary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/apimanagement/api_management_diagnostic_resource.go b/azurerm/internal/services/apimanagement/api_management_diagnostic_resource.go index 20b6a62a841b..e35fe2902739 100644 --- a/azurerm/internal/services/apimanagement/api_management_diagnostic_resource.go +++ b/azurerm/internal/services/apimanagement/api_management_diagnostic_resource.go @@ -18,15 +18,15 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApiManagementDiagnostic() *schema.Resource { +func resourceApiManagementDiagnostic() *schema.Resource { return &schema.Resource{ - Create: resourceArmApiManagementDiagnosticCreateUpdate, - Read: resourceArmApiManagementDiagnosticRead, - Update: resourceArmApiManagementDiagnosticCreateUpdate, - Delete: resourceArmApiManagementDiagnosticDelete, + Create: resourceApiManagementDiagnosticCreateUpdate, + Read: resourceApiManagementDiagnosticRead, + Update: resourceApiManagementDiagnosticCreateUpdate, + Delete: resourceApiManagementDiagnosticDelete, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.ApiManagementDiagnosticID(id) + _, err := parse.DiagnosticID(id) return err }), @@ -55,7 +55,7 @@ func resourceArmApiManagementDiagnostic() *schema.Resource { "api_management_logger_id": { Type: schema.TypeString, Required: true, - ValidateFunc: validate.ApiManagementLoggerID, + ValidateFunc: validate.LoggerID, }, "enabled": { @@ -67,7 +67,7 @@ func resourceArmApiManagementDiagnostic() *schema.Resource { } } -func resourceArmApiManagementDiagnosticCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementDiagnosticCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.DiagnosticClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -108,15 +108,15 @@ func resourceArmApiManagementDiagnosticCreateUpdate(d *schema.ResourceData, meta } d.SetId(*resp.ID) - return resourceArmApiManagementDiagnosticRead(d, meta) + return resourceApiManagementDiagnosticRead(d, meta) } -func resourceArmApiManagementDiagnosticRead(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementDiagnosticRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.DiagnosticClient ctx, cancel := 
timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - diagnosticId, err := parse.ApiManagementDiagnosticID(d.Id()) + diagnosticId, err := parse.DiagnosticID(d.Id()) if err != nil { return err } @@ -140,12 +140,12 @@ func resourceArmApiManagementDiagnosticRead(d *schema.ResourceData, meta interfa return nil } -func resourceArmApiManagementDiagnosticDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementDiagnosticDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.DiagnosticClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - diagnosticId, err := parse.ApiManagementDiagnosticID(d.Id()) + diagnosticId, err := parse.DiagnosticID(d.Id()) if err != nil { return err } diff --git a/azurerm/internal/services/apimanagement/api_management_diagnostic_resource_test.go b/azurerm/internal/services/apimanagement/api_management_diagnostic_resource_test.go new file mode 100644 index 000000000000..810c6d198a08 --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_diagnostic_resource_test.go @@ -0,0 +1,179 @@ +package apimanagement_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/apimanagement/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementDiagnosticResource struct { +} + +func TestAccApiManagementDiagnostic_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_diagnostic", "test") + r := ApiManagementDiagnosticResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementDiagnostic_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_diagnostic", "test") + r := ApiManagementDiagnosticResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementDiagnostic_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_diagnostic", "test") + r := ApiManagementDiagnosticResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (t ApiManagementDiagnosticResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + diagnosticId, err := parse.DiagnosticID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.ApiManagement.DiagnosticClient.Get(ctx, 
diagnosticId.ResourceGroup, diagnosticId.ServiceName, diagnosticId.Name) + if err != nil { + return nil, fmt.Errorf("reading ApiManagement Diagnostic (%s): %+v", diagnosticId.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (ApiManagementDiagnosticResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%[1]d" + location = "%[2]s" +} + +resource "azurerm_application_insights" "test" { + name = "acctestappinsights-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + application_type = "web" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" +} + +resource "azurerm_api_management_logger" "test" { + name = "acctestapimnglogger-%[1]d" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + + application_insights { + instrumentation_key = azurerm_application_insights.test.instrumentation_key + } +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (r ApiManagementDiagnosticResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_diagnostic" "test" { + identifier = "applicationinsights" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + api_management_logger_id = azurerm_api_management_logger.test.id +} +`, r.template(data)) +} + +func (r ApiManagementDiagnosticResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_application_insights" "test2" { + name = "acctestappinsightsUpdate-%[2]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + application_type = "web" +} + +resource "azurerm_api_management_logger" "test2" { + name = "acctestapimngloggerUpdate-%[2]d" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + + application_insights { + instrumentation_key = azurerm_application_insights.test2.instrumentation_key + } +} + +resource "azurerm_api_management_diagnostic" "test" { + identifier = "applicationinsights" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + api_management_logger_id = azurerm_api_management_logger.test2.id +} +`, r.template(data), data.RandomInteger) +} + +func (r ApiManagementDiagnosticResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_diagnostic" "import" { + identifier = azurerm_api_management_diagnostic.test.identifier + resource_group_name = azurerm_api_management_diagnostic.test.resource_group_name + api_management_name = azurerm_api_management_diagnostic.test.api_management_name + api_management_logger_id = azurerm_api_management_diagnostic.test.api_management_logger_id +} +`, r.basic(data)) +} diff --git a/azurerm/internal/services/apimanagement/api_management_group_data_source_test.go b/azurerm/internal/services/apimanagement/api_management_group_data_source_test.go new file mode 100644 index 000000000000..3537b7cf0106 --- /dev/null +++ 
b/azurerm/internal/services/apimanagement/api_management_group_data_source_test.go @@ -0,0 +1,65 @@ +package apimanagement_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type ApiManagementGroupDataSource struct { +} + +func TestAccDataSourceApiManagementGroup_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_api_management_group", "test") + r := ApiManagementGroupDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("display_name").HasValue("Test Group"), + check.That(data.ResourceName).Key("description").HasValue(""), + check.That(data.ResourceName).Key("external_id").HasValue(""), + check.That(data.ResourceName).Key("type").HasValue("custom"), + ), + }, + }) +} + +func (ApiManagementGroupDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" +} + +resource "azurerm_api_management_group" "test" { + name = "acctestAMGroup-%d" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + display_name = "Test Group" +} + +data "azurerm_api_management_group" "test" { + name = azurerm_api_management_group.test.name + api_management_name = azurerm_api_management_group.test.api_management_name + resource_group_name = azurerm_api_management_group.test.resource_group_name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/apimanagement/api_management_group_resource.go b/azurerm/internal/services/apimanagement/api_management_group_resource.go index d823bbc4c50b..539f5153aa12 100644 --- a/azurerm/internal/services/apimanagement/api_management_group_resource.go +++ b/azurerm/internal/services/apimanagement/api_management_group_resource.go @@ -15,12 +15,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApiManagementGroup() *schema.Resource { +func resourceApiManagementGroup() *schema.Resource { return &schema.Resource{ - Create: resourceArmApiManagementGroupCreateUpdate, - Read: resourceArmApiManagementGroupRead, - Update: resourceArmApiManagementGroupCreateUpdate, - Delete: resourceArmApiManagementGroupDelete, + Create: resourceApiManagementGroupCreateUpdate, + Read: resourceApiManagementGroupRead, + Update: resourceApiManagementGroupCreateUpdate, + Delete: resourceApiManagementGroupDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -70,7 +70,7 @@ func resourceArmApiManagementGroup() *schema.Resource { } } -func resourceArmApiManagementGroupCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementGroupCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.GroupClient ctx, cancel := 
timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -119,10 +119,10 @@ func resourceArmApiManagementGroupCreateUpdate(d *schema.ResourceData, meta inte } d.SetId(*resp.ID) - return resourceArmApiManagementGroupRead(d, meta) + return resourceApiManagementGroupRead(d, meta) } -func resourceArmApiManagementGroupRead(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementGroupRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.GroupClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -160,7 +160,7 @@ func resourceArmApiManagementGroupRead(d *schema.ResourceData, meta interface{}) return nil } -func resourceArmApiManagementGroupDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementGroupDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.GroupClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/apimanagement/api_management_group_resource_test.go b/azurerm/internal/services/apimanagement/api_management_group_resource_test.go new file mode 100644 index 000000000000..9e90c2c8258b --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_group_resource_test.go @@ -0,0 +1,197 @@ +package apimanagement_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementGroupResource struct { +} + +func TestAccApiManagementGroup_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_group", "test") + r := ApiManagementGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("display_name").HasValue("Test Group"), + check.That(data.ResourceName).Key("type").HasValue("custom"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementGroup_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_group", "test") + r := ApiManagementGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("display_name").HasValue("Test Group"), + check.That(data.ResourceName).Key("type").HasValue("custom"), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccApiManagementGroup_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_group", "test") + r := ApiManagementGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data, "Test Group", "A test description."), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + 
check.That(data.ResourceName).Key("display_name").HasValue("Test Group"), + check.That(data.ResourceName).Key("description").HasValue("A test description."), + check.That(data.ResourceName).Key("type").HasValue("external"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementGroup_descriptionDisplayNameUpdate(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_group", "test") + r := ApiManagementGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data, "Original Group", "The original description."), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("display_name").HasValue("Original Group"), + check.That(data.ResourceName).Key("description").HasValue("The original description."), + check.That(data.ResourceName).Key("type").HasValue("external"), + ), + }, + { + Config: r.complete(data, "Modified Group", "A modified description."), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("display_name").HasValue("Modified Group"), + check.That(data.ResourceName).Key("description").HasValue("A modified description."), + check.That(data.ResourceName).Key("type").HasValue("external"), + ), + }, + { + Config: r.complete(data, "Original Group", "The original description."), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("display_name").HasValue("Original Group"), + check.That(data.ResourceName).Key("description").HasValue("The original description."), + check.That(data.ResourceName).Key("type").HasValue("external"), + ), + }, + }) +} + +func (t ApiManagementGroupResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + serviceName := id.Path["service"] + name := id.Path["groups"] + + resp, err := clients.ApiManagement.GroupClient.Get(ctx, resourceGroup, serviceName, name) + if err != nil { + return nil, fmt.Errorf("reading ApiManagement Group (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (ApiManagementGroupResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Developer_1" +} + +resource "azurerm_api_management_group" "test" { + name = "acctestAMGroup-%d" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + display_name = "Test Group" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r ApiManagementGroupResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_group" "import" { + name = azurerm_api_management_group.test.name + resource_group_name = azurerm_api_management_group.test.resource_group_name + api_management_name = azurerm_api_management_group.test.api_management_name + display_name = 
azurerm_api_management_group.test.display_name +} +`, r.basic(data)) +} + +func (ApiManagementGroupResource) complete(data acceptance.TestData, displayName, description string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Developer_1" +} + +resource "azurerm_api_management_group" "test" { + name = "acctestAMGroup-%d" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + display_name = "%s" + description = "%s" + type = "external" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, displayName, description) +} diff --git a/azurerm/internal/services/apimanagement/api_management_group_user_resource.go b/azurerm/internal/services/apimanagement/api_management_group_user_resource.go index f431467db6a4..4b70113a5dc7 100644 --- a/azurerm/internal/services/apimanagement/api_management_group_user_resource.go +++ b/azurerm/internal/services/apimanagement/api_management_group_user_resource.go @@ -13,11 +13,11 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApiManagementGroupUser() *schema.Resource { +func resourceApiManagementGroupUser() *schema.Resource { return &schema.Resource{ - Create: resourceArmApiManagementGroupUserCreate, - Read: resourceArmApiManagementGroupUserRead, - Delete: resourceArmApiManagementGroupUserDelete, + Create: resourceApiManagementGroupUserCreate, + Read: resourceApiManagementGroupUserRead, + Delete: resourceApiManagementGroupUserDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -41,7 +41,7 @@ func resourceArmApiManagementGroupUser() *schema.Resource { } } -func resourceArmApiManagementGroupUserCreate(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementGroupUserCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.GroupUsersClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -73,10 +73,10 @@ func resourceArmApiManagementGroupUserCreate(d *schema.ResourceData, meta interf // there's no Read so this is best-effort d.SetId(*resp.ID) - return resourceArmApiManagementGroupUserRead(d, meta) + return resourceApiManagementGroupUserRead(d, meta) } -func resourceArmApiManagementGroupUserRead(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementGroupUserRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.GroupUsersClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -109,7 +109,7 @@ func resourceArmApiManagementGroupUserRead(d *schema.ResourceData, meta interfac return nil } -func resourceArmApiManagementGroupUserDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementGroupUserDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.GroupUsersClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git 
a/azurerm/internal/services/apimanagement/api_management_group_user_resource_test.go b/azurerm/internal/services/apimanagement/api_management_group_user_resource_test.go new file mode 100644 index 000000000000..fa91f8a4837b --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_group_user_resource_test.go @@ -0,0 +1,126 @@ +package apimanagement_test + +import ( + "context" + "fmt" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementGroupUserResource struct { +} + +func TestAccAzureRMApiManagementGroupUser_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_group_user", "test") + r := ApiManagementGroupUserResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMApiManagementGroupUser_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_group_user", "test") + r := ApiManagementGroupUserResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (t ApiManagementGroupUserResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + serviceName := id.Path["service"] + groupName := id.Path["groups"] + userId := id.Path["users"] + + _, err = clients.ApiManagement.GroupUsersClient.CheckEntityExists(ctx, resourceGroup, serviceName, groupName, userId) + if err != nil { + return nil, fmt.Errorf("reading ApiManagement Group User (%s): %+v", id, err) + } + + return utils.Bool(true), nil +} + +func (ApiManagementGroupUserResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Developer_1" +} + +resource "azurerm_api_management_group" "test" { + name = "acctestAMGroup-%d" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + display_name = "Test Group" +} + +resource "azurerm_api_management_user" "test" { + user_id = "acctestuser%d" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + first_name = "Acceptance" + last_name = "Test" + email = "azure-acctest%d@example.com" +} + +resource "azurerm_api_management_group_user" 
"test" { + user_id = azurerm_api_management_user.test.user_id + group_name = azurerm_api_management_group.test.name + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r ApiManagementGroupUserResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_group_user" "import" { + user_id = azurerm_api_management_group_user.test.user_id + group_name = azurerm_api_management_group_user.test.group_name + api_management_name = azurerm_api_management_group_user.test.api_management_name + resource_group_name = azurerm_api_management_group_user.test.resource_group_name +} +`, r.basic(data)) +} diff --git a/azurerm/internal/services/apimanagement/api_management_identity_provider_aad_resource.go b/azurerm/internal/services/apimanagement/api_management_identity_provider_aad_resource.go index c0e40b35e6ad..63104a8fff97 100644 --- a/azurerm/internal/services/apimanagement/api_management_identity_provider_aad_resource.go +++ b/azurerm/internal/services/apimanagement/api_management_identity_provider_aad_resource.go @@ -15,12 +15,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApiManagementIdentityProviderAAD() *schema.Resource { +func resourceApiManagementIdentityProviderAAD() *schema.Resource { return &schema.Resource{ - Create: resourceArmApiManagementIdentityProviderAADCreateUpdate, - Read: resourceArmApiManagementIdentityProviderAADRead, - Update: resourceArmApiManagementIdentityProviderAADCreateUpdate, - Delete: resourceArmApiManagementIdentityProviderAADDelete, + Create: resourceApiManagementIdentityProviderAADCreateUpdate, + Read: resourceApiManagementIdentityProviderAADRead, + Update: resourceApiManagementIdentityProviderAADCreateUpdate, + Delete: resourceApiManagementIdentityProviderAADDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -67,7 +67,7 @@ func resourceArmApiManagementIdentityProviderAAD() *schema.Resource { } } -func resourceArmApiManagementIdentityProviderAADCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementIdentityProviderAADCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.IdentityProviderClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -115,10 +115,10 @@ func resourceArmApiManagementIdentityProviderAADCreateUpdate(d *schema.ResourceD } d.SetId(*resp.ID) - return resourceArmApiManagementIdentityProviderAADRead(d, meta) + return resourceApiManagementIdentityProviderAADRead(d, meta) } -func resourceArmApiManagementIdentityProviderAADRead(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementIdentityProviderAADRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.IdentityProviderClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -154,7 +154,7 @@ func resourceArmApiManagementIdentityProviderAADRead(d *schema.ResourceData, met return nil } -func resourceArmApiManagementIdentityProviderAADDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementIdentityProviderAADDelete(d *schema.ResourceData, meta interface{}) error { client := 
meta.(*clients.Client).ApiManagement.IdentityProviderClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/apimanagement/api_management_identity_provider_aad_resource_test.go b/azurerm/internal/services/apimanagement/api_management_identity_provider_aad_resource_test.go new file mode 100644 index 000000000000..07a7a5765289 --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_identity_provider_aad_resource_test.go @@ -0,0 +1,171 @@ +package apimanagement_test + +import ( + "context" + "fmt" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/apimanagement/mgmt/2019-12-01/apimanagement" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementIdentityProviderAADResource struct { +} + +func TestAccApiManagementIdentityProviderAAD_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_identity_provider_aad", "test") + r := ApiManagementIdentityProviderAADResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("client_secret"), + }) +} + +func TestAccApiManagementIdentityProviderAAD_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_identity_provider_aad", "test") + r := ApiManagementIdentityProviderAADResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("client_id").HasValue("00000000-0000-0000-0000-000000000000"), + check.That(data.ResourceName).Key("client_secret").HasValue("00000000000000000000000000000000"), + check.That(data.ResourceName).Key("allowed_tenants.#").HasValue("1"), + check.That(data.ResourceName).Key("allowed_tenants.0").HasValue(data.Client().TenantID), + ), + }, + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("client_id").HasValue("11111111-1111-1111-1111-111111111111"), + check.That(data.ResourceName).Key("client_secret").HasValue("11111111111111111111111111111111"), + check.That(data.ResourceName).Key("allowed_tenants.#").HasValue("2"), + check.That(data.ResourceName).Key("allowed_tenants.0").HasValue(data.Client().TenantID), + check.That(data.ResourceName).Key("allowed_tenants.1").HasValue(data.Client().TenantID), + ), + }, + data.ImportStep("client_secret"), + }) +} + +func TestAccApiManagementIdentityProviderAAD_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_identity_provider_aad", "test") + r := ApiManagementIdentityProviderAADResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + 
data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (t ApiManagementIdentityProviderAADResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + serviceName := id.Path["service"] + identityProviderName := id.Path["identityProviders"] + + resp, err := clients.ApiManagement.IdentityProviderClient.Get(ctx, resourceGroup, serviceName, apimanagement.IdentityProviderType(identityProviderName)) + if err != nil { + return nil, fmt.Errorf("reading ApiManagement Identity Provider AAD (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (ApiManagementIdentityProviderAADResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-api-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" +} + +resource "azurerm_api_management_identity_provider_aad" "test" { + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + client_id = "00000000-0000-0000-0000-000000000000" + client_secret = "00000000000000000000000000000000" + signin_tenant = "00000000-0000-0000-0000-000000000000" + allowed_tenants = ["%s"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.Client().TenantID) +} + +func (ApiManagementIdentityProviderAADResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-api-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" +} + +resource "azurerm_api_management_identity_provider_aad" "test" { + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + client_id = "11111111-1111-1111-1111-111111111111" + client_secret = "11111111111111111111111111111111" + allowed_tenants = ["%s", "%s"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.Client().TenantID, data.Client().TenantID) +} + +func (r ApiManagementIdentityProviderAADResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_identity_provider_aad" "import" { + resource_group_name = azurerm_api_management_identity_provider_aad.test.resource_group_name + api_management_name = azurerm_api_management_identity_provider_aad.test.api_management_name + client_id = azurerm_api_management_identity_provider_aad.test.client_id + client_secret = azurerm_api_management_identity_provider_aad.test.client_secret + allowed_tenants = azurerm_api_management_identity_provider_aad.test.allowed_tenants +} +`, r.basic(data)) +} diff --git a/azurerm/internal/services/apimanagement/api_management_identity_provider_facebook_resource.go b/azurerm/internal/services/apimanagement/api_management_identity_provider_facebook_resource.go 
index cb9f513a80b8..15db762314c3 100644 --- a/azurerm/internal/services/apimanagement/api_management_identity_provider_facebook_resource.go +++ b/azurerm/internal/services/apimanagement/api_management_identity_provider_facebook_resource.go @@ -15,12 +15,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApiManagementIdentityProviderFacebook() *schema.Resource { +func resourceApiManagementIdentityProviderFacebook() *schema.Resource { return &schema.Resource{ - Create: resourceArmApiManagementIdentityProviderFacebookCreateUpdate, - Read: resourceArmApiManagementIdentityProviderFacebookRead, - Update: resourceArmApiManagementIdentityProviderFacebookCreateUpdate, - Delete: resourceArmApiManagementIdentityProviderFacebookDelete, + Create: resourceApiManagementIdentityProviderFacebookCreateUpdate, + Read: resourceApiManagementIdentityProviderFacebookRead, + Update: resourceApiManagementIdentityProviderFacebookCreateUpdate, + Delete: resourceApiManagementIdentityProviderFacebookDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -53,7 +53,7 @@ func resourceArmApiManagementIdentityProviderFacebook() *schema.Resource { } } -func resourceArmApiManagementIdentityProviderFacebookCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementIdentityProviderFacebookCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.IdentityProviderClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -97,10 +97,10 @@ func resourceArmApiManagementIdentityProviderFacebookCreateUpdate(d *schema.Reso } d.SetId(*resp.ID) - return resourceArmApiManagementIdentityProviderFacebookRead(d, meta) + return resourceApiManagementIdentityProviderFacebookRead(d, meta) } -func resourceArmApiManagementIdentityProviderFacebookRead(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementIdentityProviderFacebookRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.IdentityProviderClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -134,7 +134,7 @@ func resourceArmApiManagementIdentityProviderFacebookRead(d *schema.ResourceData return nil } -func resourceArmApiManagementIdentityProviderFacebookDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementIdentityProviderFacebookDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.IdentityProviderClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/apimanagement/api_management_identity_provider_facebook_resource_test.go b/azurerm/internal/services/apimanagement/api_management_identity_provider_facebook_resource_test.go new file mode 100644 index 000000000000..4424833a6350 --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_identity_provider_facebook_resource_test.go @@ -0,0 +1,161 @@ +package apimanagement_test + +import ( + "context" + "fmt" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/apimanagement/mgmt/2019-12-01/apimanagement" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementIdentityProviderFacebookResource struct { +} + +func TestAccApiManagementIdentityProviderFacebook_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_identity_provider_facebook", "test") + r := ApiManagementIdentityProviderFacebookResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("app_secret"), + }) +} + +func TestAccApiManagementIdentityProviderFacebook_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_identity_provider_facebook", "test") + r := ApiManagementIdentityProviderFacebookResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("app_id").HasValue("00000000000000000000000000000000"), + ), + }, + data.ImportStep("app_secret"), + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("app_id").HasValue("11111111111111111111111111111111"), + ), + }, + data.ImportStep("app_secret"), + }) +} + +func TestAccApiManagementIdentityProviderFacebook_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_identity_provider_facebook", "test") + r := ApiManagementIdentityProviderFacebookResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (t ApiManagementIdentityProviderFacebookResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + serviceName := id.Path["service"] + identityProviderName := id.Path["identityProviders"] + + resp, err := clients.ApiManagement.IdentityProviderClient.Get(ctx, resourceGroup, serviceName, apimanagement.IdentityProviderType(identityProviderName)) + if err != nil { + return nil, fmt.Errorf("reading ApiManagement Identity Provider Facebook (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (ApiManagementIdentityProviderFacebookResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-api-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" +} + +resource "azurerm_api_management_identity_provider_facebook" "test" { + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + app_id = 
"00000000000000000000000000000000" + app_secret = "00000000000000000000000000000000" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (ApiManagementIdentityProviderFacebookResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-api-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" +} + +resource "azurerm_api_management_identity_provider_facebook" "test" { + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + app_id = "11111111111111111111111111111111" + app_secret = "11111111111111111111111111111111" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r ApiManagementIdentityProviderFacebookResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_identity_provider_facebook" "import" { + resource_group_name = azurerm_api_management_identity_provider_facebook.test.resource_group_name + api_management_name = azurerm_api_management_identity_provider_facebook.test.api_management_name + app_id = azurerm_api_management_identity_provider_facebook.test.app_id + app_secret = azurerm_api_management_identity_provider_facebook.test.app_secret +} +`, r.basic(data)) +} diff --git a/azurerm/internal/services/apimanagement/api_management_identity_provider_google_resource.go b/azurerm/internal/services/apimanagement/api_management_identity_provider_google_resource.go index 6d449e9dfbb4..cdaa10e11254 100644 --- a/azurerm/internal/services/apimanagement/api_management_identity_provider_google_resource.go +++ b/azurerm/internal/services/apimanagement/api_management_identity_provider_google_resource.go @@ -16,12 +16,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApiManagementIdentityProviderGoogle() *schema.Resource { +func resourceApiManagementIdentityProviderGoogle() *schema.Resource { return &schema.Resource{ - Create: resourceArmApiManagementIdentityProviderGoogleCreateUpdate, - Read: resourceArmApiManagementIdentityProviderGoogleRead, - Update: resourceArmApiManagementIdentityProviderGoogleCreateUpdate, - Delete: resourceArmApiManagementIdentityProviderGoogleDelete, + Create: resourceApiManagementIdentityProviderGoogleCreateUpdate, + Read: resourceApiManagementIdentityProviderGoogleRead, + Update: resourceApiManagementIdentityProviderGoogleCreateUpdate, + Delete: resourceApiManagementIdentityProviderGoogleDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -54,7 +54,7 @@ func resourceArmApiManagementIdentityProviderGoogle() *schema.Resource { } } -func resourceArmApiManagementIdentityProviderGoogleCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementIdentityProviderGoogleCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.IdentityProviderClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -98,10 +98,10 @@ func resourceArmApiManagementIdentityProviderGoogleCreateUpdate(d *schema.Resour } d.SetId(*resp.ID) - return 
resourceArmApiManagementIdentityProviderGoogleRead(d, meta) + return resourceApiManagementIdentityProviderGoogleRead(d, meta) } -func resourceArmApiManagementIdentityProviderGoogleRead(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementIdentityProviderGoogleRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.IdentityProviderClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -135,7 +135,7 @@ func resourceArmApiManagementIdentityProviderGoogleRead(d *schema.ResourceData, return nil } -func resourceArmApiManagementIdentityProviderGoogleDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementIdentityProviderGoogleDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.IdentityProviderClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/apimanagement/api_management_identity_provider_google_resource_test.go b/azurerm/internal/services/apimanagement/api_management_identity_provider_google_resource_test.go new file mode 100644 index 000000000000..4f3dc3d41c81 --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_identity_provider_google_resource_test.go @@ -0,0 +1,161 @@ +package apimanagement_test + +import ( + "context" + "fmt" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/apimanagement/mgmt/2019-12-01/apimanagement" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementIdentityProviderGoogleResource struct { +} + +func TestAccApiManagementIdentityProviderGoogle_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_identity_provider_google", "test") + r := ApiManagementIdentityProviderGoogleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("client_secret"), + }) +} + +func TestAccApiManagementIdentityProviderGoogle_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_identity_provider_google", "test") + r := ApiManagementIdentityProviderGoogleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("client_id").HasValue("00000000.apps.googleusercontent.com"), + ), + }, + data.ImportStep("client_secret"), + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("client_id").HasValue("11111111.apps.googleusercontent.com"), + ), + }, + data.ImportStep("client_secret"), + }) +} + +func TestAccApiManagementIdentityProviderGoogle_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, 
"azurerm_api_management_identity_provider_google", "test") + r := ApiManagementIdentityProviderGoogleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (t ApiManagementIdentityProviderGoogleResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + serviceName := id.Path["service"] + identityProviderName := id.Path["identityProviders"] + + resp, err := clients.ApiManagement.IdentityProviderClient.Get(ctx, resourceGroup, serviceName, apimanagement.IdentityProviderType(identityProviderName)) + if err != nil { + return nil, fmt.Errorf("reading ApiManagement Identity Provider Google (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (ApiManagementIdentityProviderGoogleResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-api-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" +} + +resource "azurerm_api_management_identity_provider_google" "test" { + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + client_id = "00000000.apps.googleusercontent.com" + client_secret = "00000000000000000000000000000000" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (ApiManagementIdentityProviderGoogleResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-api-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" +} + +resource "azurerm_api_management_identity_provider_google" "test" { + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + client_id = "11111111.apps.googleusercontent.com" + client_secret = "11111111111111111111111111111111" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r ApiManagementIdentityProviderGoogleResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_identity_provider_google" "import" { + resource_group_name = azurerm_api_management_identity_provider_google.test.resource_group_name + api_management_name = azurerm_api_management_identity_provider_google.test.api_management_name + client_id = azurerm_api_management_identity_provider_google.test.client_id + client_secret = azurerm_api_management_identity_provider_google.test.client_secret +} +`, r.basic(data)) +} diff --git a/azurerm/internal/services/apimanagement/api_management_identity_provider_microsoft_resource.go 
b/azurerm/internal/services/apimanagement/api_management_identity_provider_microsoft_resource.go index 619ce0fcfe33..38cd1bde0b75 100644 --- a/azurerm/internal/services/apimanagement/api_management_identity_provider_microsoft_resource.go +++ b/azurerm/internal/services/apimanagement/api_management_identity_provider_microsoft_resource.go @@ -15,12 +15,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApiManagementIdentityProviderMicrosoft() *schema.Resource { +func resourceApiManagementIdentityProviderMicrosoft() *schema.Resource { return &schema.Resource{ - Create: resourceArmApiManagementIdentityProviderMicrosoftCreateUpdate, - Read: resourceArmApiManagementIdentityProviderMicrosoftRead, - Update: resourceArmApiManagementIdentityProviderMicrosoftCreateUpdate, - Delete: resourceArmApiManagementIdentityProviderMicrosoftDelete, + Create: resourceApiManagementIdentityProviderMicrosoftCreateUpdate, + Read: resourceApiManagementIdentityProviderMicrosoftRead, + Update: resourceApiManagementIdentityProviderMicrosoftCreateUpdate, + Delete: resourceApiManagementIdentityProviderMicrosoftDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -53,7 +53,7 @@ func resourceArmApiManagementIdentityProviderMicrosoft() *schema.Resource { } } -func resourceArmApiManagementIdentityProviderMicrosoftCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementIdentityProviderMicrosoftCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.IdentityProviderClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -97,10 +97,10 @@ func resourceArmApiManagementIdentityProviderMicrosoftCreateUpdate(d *schema.Res } d.SetId(*resp.ID) - return resourceArmApiManagementIdentityProviderMicrosoftRead(d, meta) + return resourceApiManagementIdentityProviderMicrosoftRead(d, meta) } -func resourceArmApiManagementIdentityProviderMicrosoftRead(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementIdentityProviderMicrosoftRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.IdentityProviderClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -134,7 +134,7 @@ func resourceArmApiManagementIdentityProviderMicrosoftRead(d *schema.ResourceDat return nil } -func resourceArmApiManagementIdentityProviderMicrosoftDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementIdentityProviderMicrosoftDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.IdentityProviderClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/apimanagement/api_management_identity_provider_microsoft_resource_test.go b/azurerm/internal/services/apimanagement/api_management_identity_provider_microsoft_resource_test.go new file mode 100644 index 000000000000..dd861144ffd1 --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_identity_provider_microsoft_resource_test.go @@ -0,0 +1,161 @@ +package apimanagement_test + +import ( + "context" + "fmt" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/apimanagement/mgmt/2019-12-01/apimanagement" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementIdentityProviderMicrosoftResource struct { +} + +func TestAccApiManagementIdentityProviderMicrosoft_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_identity_provider_microsoft", "test") + r := ApiManagementIdentityProviderMicrosoftResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("client_secret"), + }) +} + +func TestAccApiManagementIdentityProviderMicrosoft_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_identity_provider_microsoft", "test") + r := ApiManagementIdentityProviderMicrosoftResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("client_id").HasValue("00000000-0000-0000-0000-000000000000"), + ), + }, + data.ImportStep("client_secret"), + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("client_id").HasValue("11111111-1111-1111-1111-111111111111"), + ), + }, + data.ImportStep("client_secret"), + }) +} + +func TestAccApiManagementIdentityProviderMicrosoft_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_identity_provider_microsoft", "test") + r := ApiManagementIdentityProviderMicrosoftResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (t ApiManagementIdentityProviderMicrosoftResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + serviceName := id.Path["service"] + identityProviderName := id.Path["identityProviders"] + + resp, err := clients.ApiManagement.IdentityProviderClient.Get(ctx, resourceGroup, serviceName, apimanagement.IdentityProviderType(identityProviderName)) + if err != nil { + return nil, fmt.Errorf("reading ApiManagement Identity Provider Microsoft (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (ApiManagementIdentityProviderMicrosoftResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-api-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" +} + +resource "azurerm_api_management_identity_provider_microsoft" "test" { + 
resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + client_id = "00000000-0000-0000-0000-000000000000" + client_secret = "00000000000000000000000000000000" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (ApiManagementIdentityProviderMicrosoftResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-api-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" +} + +resource "azurerm_api_management_identity_provider_microsoft" "test" { + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + client_id = "11111111-1111-1111-1111-111111111111" + client_secret = "11111111111111111111111111111111" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r ApiManagementIdentityProviderMicrosoftResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_identity_provider_microsoft" "import" { + resource_group_name = azurerm_api_management_identity_provider_microsoft.test.resource_group_name + api_management_name = azurerm_api_management_identity_provider_microsoft.test.api_management_name + client_id = azurerm_api_management_identity_provider_microsoft.test.client_id + client_secret = azurerm_api_management_identity_provider_microsoft.test.client_secret +} +`, r.basic(data)) +} diff --git a/azurerm/internal/services/apimanagement/api_management_identity_provider_twitter_resource.go b/azurerm/internal/services/apimanagement/api_management_identity_provider_twitter_resource.go index 2a22b208687d..0094391cfc18 100644 --- a/azurerm/internal/services/apimanagement/api_management_identity_provider_twitter_resource.go +++ b/azurerm/internal/services/apimanagement/api_management_identity_provider_twitter_resource.go @@ -15,12 +15,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApiManagementIdentityProviderTwitter() *schema.Resource { +func resourceApiManagementIdentityProviderTwitter() *schema.Resource { return &schema.Resource{ - Create: resourceArmApiManagementIdentityProviderTwitterCreateUpdate, - Read: resourceArmApiManagementIdentityProviderTwitterRead, - Update: resourceArmApiManagementIdentityProviderTwitterCreateUpdate, - Delete: resourceArmApiManagementIdentityProviderTwitterDelete, + Create: resourceApiManagementIdentityProviderTwitterCreateUpdate, + Read: resourceApiManagementIdentityProviderTwitterRead, + Update: resourceApiManagementIdentityProviderTwitterCreateUpdate, + Delete: resourceApiManagementIdentityProviderTwitterDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -53,7 +53,7 @@ func resourceArmApiManagementIdentityProviderTwitter() *schema.Resource { } } -func resourceArmApiManagementIdentityProviderTwitterCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementIdentityProviderTwitterCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.IdentityProviderClient ctx, cancel := 
timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -97,10 +97,10 @@ func resourceArmApiManagementIdentityProviderTwitterCreateUpdate(d *schema.Resou } d.SetId(*resp.ID) - return resourceArmApiManagementIdentityProviderTwitterRead(d, meta) + return resourceApiManagementIdentityProviderTwitterRead(d, meta) } -func resourceArmApiManagementIdentityProviderTwitterRead(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementIdentityProviderTwitterRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.IdentityProviderClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -134,7 +134,7 @@ func resourceArmApiManagementIdentityProviderTwitterRead(d *schema.ResourceData, return nil } -func resourceArmApiManagementIdentityProviderTwitterDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementIdentityProviderTwitterDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.IdentityProviderClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/apimanagement/api_management_identity_provider_twitter_resource_test.go b/azurerm/internal/services/apimanagement/api_management_identity_provider_twitter_resource_test.go new file mode 100644 index 000000000000..f24a70513e6e --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_identity_provider_twitter_resource_test.go @@ -0,0 +1,162 @@ +package apimanagement_test + +import ( + "context" + "fmt" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + + "github.com/Azure/azure-sdk-for-go/services/apimanagement/mgmt/2019-12-01/apimanagement" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementIdentityProviderTwitterResource struct { +} + +func TestAccApiManagementIdentityProviderTwitter_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_identity_provider_twitter", "test") + r := ApiManagementIdentityProviderTwitterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("api_secret_key"), + }) +} + +func TestAccApiManagementIdentityProviderTwitter_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_identity_provider_twitter", "test") + r := ApiManagementIdentityProviderTwitterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("api_key").HasValue("00000000000000000000000000000000"), + ), + }, + data.ImportStep("api_secret_key"), + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + 
check.That(data.ResourceName).Key("api_key").HasValue("11111111111111111111111111111111"), + ), + }, + data.ImportStep("api_secret_key"), + }) +} + +func TestAccApiManagementIdentityProviderTwitter_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_identity_provider_twitter", "test") + r := ApiManagementIdentityProviderTwitterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (t ApiManagementIdentityProviderTwitterResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + serviceName := id.Path["service"] + identityProviderName := id.Path["identityProviders"] + + resp, err := clients.ApiManagement.IdentityProviderClient.Get(ctx, resourceGroup, serviceName, apimanagement.IdentityProviderType(identityProviderName)) + if err != nil { + return nil, fmt.Errorf("reading ApiManagement Identity Provider Twitter (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (ApiManagementIdentityProviderTwitterResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-api-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" +} + +resource "azurerm_api_management_identity_provider_twitter" "test" { + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + api_key = "00000000000000000000000000000000" + api_secret_key = "00000000000000000000000000000000" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (ApiManagementIdentityProviderTwitterResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-api-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" +} + +resource "azurerm_api_management_identity_provider_twitter" "test" { + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + api_key = "11111111111111111111111111111111" + api_secret_key = "11111111111111111111111111111111" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r ApiManagementIdentityProviderTwitterResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_identity_provider_twitter" "import" { + resource_group_name = azurerm_api_management_identity_provider_twitter.test.resource_group_name + api_management_name = azurerm_api_management_identity_provider_twitter.test.api_management_name + api_key = 
azurerm_api_management_identity_provider_twitter.test.api_key + api_secret_key = azurerm_api_management_identity_provider_twitter.test.api_secret_key +} +`, r.basic(data)) +} diff --git a/azurerm/internal/services/apimanagement/api_management_logger_resource.go b/azurerm/internal/services/apimanagement/api_management_logger_resource.go index ea333cf24844..dbe8ac49b249 100644 --- a/azurerm/internal/services/apimanagement/api_management_logger_resource.go +++ b/azurerm/internal/services/apimanagement/api_management_logger_resource.go @@ -15,12 +15,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApiManagementLogger() *schema.Resource { +func resourceApiManagementLogger() *schema.Resource { return &schema.Resource{ - Create: resourceArmApiManagementLoggerCreate, - Read: resourceArmApiManagementLoggerRead, - Update: resourceArmApiManagementLoggerUpdate, - Delete: resourceArmApiManagementLoggerDelete, + Create: resourceApiManagementLoggerCreate, + Read: resourceApiManagementLoggerRead, + Update: resourceApiManagementLoggerUpdate, + Delete: resourceApiManagementLoggerDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -96,7 +96,7 @@ func resourceArmApiManagementLogger() *schema.Resource { } } -func resourceArmApiManagementLoggerCreate(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementLoggerCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.LoggerClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -134,10 +134,10 @@ func resourceArmApiManagementLoggerCreate(d *schema.ResourceData, meta interface if len(eventHubRaw) > 0 { parameters.LoggerType = apimanagement.AzureEventHub - parameters.Credentials = expandArmApiManagementLoggerEventHub(eventHubRaw) + parameters.Credentials = expandApiManagementLoggerEventHub(eventHubRaw) } else if len(appInsightsRaw) > 0 { parameters.LoggerType = apimanagement.ApplicationInsights - parameters.Credentials = expandArmApiManagementLoggerApplicationInsights(appInsightsRaw) + parameters.Credentials = expandApiManagementLoggerApplicationInsights(appInsightsRaw) } if _, err := client.CreateOrUpdate(ctx, resourceGroup, serviceName, name, parameters, ""); err != nil { @@ -153,10 +153,10 @@ func resourceArmApiManagementLoggerCreate(d *schema.ResourceData, meta interface } d.SetId(*resp.ID) - return resourceArmApiManagementLoggerRead(d, meta) + return resourceApiManagementLoggerRead(d, meta) } -func resourceArmApiManagementLoggerRead(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementLoggerRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.LoggerClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -186,7 +186,7 @@ func resourceArmApiManagementLoggerRead(d *schema.ResourceData, meta interface{} if properties := resp.LoggerContractProperties; properties != nil { d.Set("buffered", properties.IsBuffered) d.Set("description", properties.Description) - if err := d.Set("eventhub", flattenArmApiManagementLoggerEventHub(d, properties)); err != nil { + if err := d.Set("eventhub", flattenApiManagementLoggerEventHub(d, properties)); err != nil { return fmt.Errorf("setting `eventhub`: %s", err) } } @@ -194,7 +194,7 @@ func resourceArmApiManagementLoggerRead(d *schema.ResourceData, meta interface{} return nil } -func resourceArmApiManagementLoggerUpdate(d 
*schema.ResourceData, meta interface{}) error { +func resourceApiManagementLoggerUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.LoggerClient ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -215,20 +215,20 @@ func resourceArmApiManagementLoggerUpdate(d *schema.ResourceData, meta interface if hasEventHub { parameters.LoggerType = apimanagement.AzureEventHub - parameters.Credentials = expandArmApiManagementLoggerEventHub(eventHubRaw.([]interface{})) + parameters.Credentials = expandApiManagementLoggerEventHub(eventHubRaw.([]interface{})) } else if hasAppInsights { parameters.LoggerType = apimanagement.ApplicationInsights - parameters.Credentials = expandArmApiManagementLoggerApplicationInsights(appInsightsRaw.([]interface{})) + parameters.Credentials = expandApiManagementLoggerApplicationInsights(appInsightsRaw.([]interface{})) } if _, err := client.Update(ctx, resourceGroup, serviceName, name, parameters, ""); err != nil { return fmt.Errorf("updating Logger %q (Resource Group %q / API Management Service %q): %+v", name, resourceGroup, serviceName, err) } - return resourceArmApiManagementLoggerRead(d, meta) + return resourceApiManagementLoggerRead(d, meta) } -func resourceArmApiManagementLoggerDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementLoggerDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.LoggerClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() @@ -251,7 +251,7 @@ func resourceArmApiManagementLoggerDelete(d *schema.ResourceData, meta interface return nil } -func expandArmApiManagementLoggerEventHub(input []interface{}) map[string]*string { +func expandApiManagementLoggerEventHub(input []interface{}) map[string]*string { credentials := make(map[string]*string) eventHub := input[0].(map[string]interface{}) credentials["name"] = utils.String(eventHub["name"].(string)) @@ -259,14 +259,14 @@ func expandArmApiManagementLoggerEventHub(input []interface{}) map[string]*strin return credentials } -func expandArmApiManagementLoggerApplicationInsights(input []interface{}) map[string]*string { +func expandApiManagementLoggerApplicationInsights(input []interface{}) map[string]*string { credentials := make(map[string]*string) ai := input[0].(map[string]interface{}) credentials["instrumentationKey"] = utils.String(ai["instrumentation_key"].(string)) return credentials } -func flattenArmApiManagementLoggerEventHub(d *schema.ResourceData, properties *apimanagement.LoggerContractProperties) []interface{} { +func flattenApiManagementLoggerEventHub(d *schema.ResourceData, properties *apimanagement.LoggerContractProperties) []interface{} { result := make([]interface{}, 0) if name := properties.Credentials["name"]; name != nil { eventHub := make(map[string]interface{}) diff --git a/azurerm/internal/services/apimanagement/api_management_logger_resource_test.go b/azurerm/internal/services/apimanagement/api_management_logger_resource_test.go new file mode 100644 index 000000000000..7558e0c8470e --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_logger_resource_test.go @@ -0,0 +1,349 @@ +package apimanagement_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementLoggerResource struct { +} + +func TestAccApiManagementLogger_basicEventHub(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_logger", "test") + r := ApiManagementLoggerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicEventHub(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("buffered").HasValue("true"), + check.That(data.ResourceName).Key("eventhub.#").HasValue("1"), + check.That(data.ResourceName).Key("eventhub.0.name").Exists(), + check.That(data.ResourceName).Key("eventhub.0.connection_string").Exists(), + ), + }, + { + ResourceName: data.ResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"eventhub.0.connection_string"}, + }, + }) +} + +func TestAccApiManagementLogger_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_logger", "test") + r := ApiManagementLoggerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicEventHub(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("buffered").HasValue("true"), + check.That(data.ResourceName).Key("eventhub.#").HasValue("1"), + check.That(data.ResourceName).Key("eventhub.0.name").Exists(), + check.That(data.ResourceName).Key("eventhub.0.connection_string").Exists(), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccApiManagementLogger_basicApplicationInsights(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_logger", "test") + r := ApiManagementLoggerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicApplicationInsights(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("buffered").HasValue("true"), + check.That(data.ResourceName).Key("eventhub.#").HasValue("0"), + check.That(data.ResourceName).Key("application_insights.#").HasValue("1"), + check.That(data.ResourceName).Key("application_insights.0.instrumentation_key").Exists(), + ), + }, + { + ResourceName: data.ResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"application_insights.#", "application_insights.0.instrumentation_key"}, + }, + }) +} + +func TestAccApiManagementLogger_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_logger", "test") + r := ApiManagementLoggerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data, "Logger from Terraform test", "false"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("description").HasValue("Logger from Terraform test"), + check.That(data.ResourceName).Key("buffered").HasValue("false"), + check.That(data.ResourceName).Key("eventhub.#").HasValue("0"), + check.That(data.ResourceName).Key("application_insights.#").HasValue("1"), + 
check.That(data.ResourceName).Key("application_insights.0.instrumentation_key").Exists(), + ), + }, + { + ResourceName: data.ResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"application_insights.#", "application_insights.0.instrumentation_key"}, + }, + }) +} + +func TestAccApiManagementLogger_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_logger", "test") + r := ApiManagementLoggerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicApplicationInsights(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("buffered").HasValue("true"), + check.That(data.ResourceName).Key("description").HasValue(""), + check.That(data.ResourceName).Key("eventhub.#").HasValue("0"), + check.That(data.ResourceName).Key("application_insights.#").HasValue("1"), + check.That(data.ResourceName).Key("application_insights.0.instrumentation_key").Exists(), + ), + }, + { + Config: r.basicEventHub(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("buffered").HasValue("true"), + check.That(data.ResourceName).Key("description").HasValue(""), + check.That(data.ResourceName).Key("eventhub.#").HasValue("1"), + check.That(data.ResourceName).Key("eventhub.0.name").Exists(), + check.That(data.ResourceName).Key("eventhub.0.connection_string").Exists(), + ), + }, + { + Config: r.complete(data, "Logger from Terraform test", "false"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("buffered").HasValue("false"), + check.That(data.ResourceName).Key("description").HasValue("Logger from Terraform test"), + check.That(data.ResourceName).Key("eventhub.#").HasValue("0"), + check.That(data.ResourceName).Key("application_insights.#").HasValue("1"), + check.That(data.ResourceName).Key("application_insights.0.instrumentation_key").Exists(), + ), + }, + { + Config: r.complete(data, "Logger from Terraform update test", "true"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("buffered").HasValue("true"), + check.That(data.ResourceName).Key("description").HasValue("Logger from Terraform update test"), + check.That(data.ResourceName).Key("eventhub.#").HasValue("0"), + check.That(data.ResourceName).Key("application_insights.#").HasValue("1"), + check.That(data.ResourceName).Key("application_insights.0.instrumentation_key").Exists(), + ), + }, + { + Config: r.complete(data, "Logger from Terraform test", "false"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("buffered").HasValue("false"), + check.That(data.ResourceName).Key("description").HasValue("Logger from Terraform test"), + check.That(data.ResourceName).Key("eventhub.#").HasValue("0"), + check.That(data.ResourceName).Key("application_insights.#").HasValue("1"), + check.That(data.ResourceName).Key("application_insights.0.instrumentation_key").Exists(), + ), + }, + { + Config: r.basicEventHub(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("buffered").HasValue("true"), + check.That(data.ResourceName).Key("description").HasValue(""), + check.That(data.ResourceName).Key("eventhub.#").HasValue("1"), + 
check.That(data.ResourceName).Key("eventhub.0.name").Exists(), + check.That(data.ResourceName).Key("eventhub.0.connection_string").Exists(), + ), + }, + }) +} + +func (t ApiManagementLoggerResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + serviceName := id.Path["service"] + name := id.Path["loggers"] + + resp, err := clients.ApiManagement.LoggerClient.Get(ctx, resourceGroup, serviceName, name) + if err != nil { + return nil, fmt.Errorf("reading ApiManagement Logger (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (ApiManagementLoggerResource) basicEventHub(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Basic" +} + +resource "azurerm_eventhub" "test" { + name = "acctesteventhub-%d" + namespace_name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + partition_count = 2 + message_retention = 1 +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Developer_1" +} + +resource "azurerm_api_management_logger" "test" { + name = "acctestapimnglogger-%d" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + + eventhub { + name = azurerm_eventhub.test.name + connection_string = azurerm_eventhub_namespace.test.default_primary_connection_string + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r ApiManagementLoggerResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_logger" "import" { + name = azurerm_api_management_logger.test.name + api_management_name = azurerm_api_management_logger.test.api_management_name + resource_group_name = azurerm_api_management_logger.test.resource_group_name + + eventhub { + name = azurerm_eventhub.test.name + connection_string = azurerm_eventhub_namespace.test.default_primary_connection_string + } +} +`, r.basicEventHub(data)) +} + +func (ApiManagementLoggerResource) basicApplicationInsights(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_application_insights" "test" { + name = "acctestappinsights-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + application_type = "other" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Developer_1" +} + +resource "azurerm_api_management_logger" "test" { + name = 
"acctestapimnglogger-%d" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + + application_insights { + instrumentation_key = azurerm_application_insights.test.instrumentation_key + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (ApiManagementLoggerResource) complete(data acceptance.TestData, description, buffered string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_application_insights" "test" { + name = "acctestappinsights-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + application_type = "other" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Developer_1" +} + +resource "azurerm_api_management_logger" "test" { + name = "acctestapimnglogger-%d" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + description = "%s" + buffered = %s + + application_insights { + instrumentation_key = azurerm_application_insights.test.instrumentation_key + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, description, buffered) +} diff --git a/azurerm/internal/services/apimanagement/api_management_named_value_resource.go b/azurerm/internal/services/apimanagement/api_management_named_value_resource.go index e631cf84a21d..f13761886efa 100644 --- a/azurerm/internal/services/apimanagement/api_management_named_value_resource.go +++ b/azurerm/internal/services/apimanagement/api_management_named_value_resource.go @@ -15,12 +15,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApiManagementNamedValue() *schema.Resource { +func resourceApiManagementNamedValue() *schema.Resource { return &schema.Resource{ - Create: resourceArmApiManagementNamedValueCreateUpdate, - Read: resourceArmApiManagementNamedValueRead, - Update: resourceArmApiManagementNamedValueCreateUpdate, - Delete: resourceArmApiManagementNamedValueDelete, + Create: resourceApiManagementNamedValueCreateUpdate, + Read: resourceApiManagementNamedValueRead, + Update: resourceApiManagementNamedValueCreateUpdate, + Delete: resourceApiManagementNamedValueDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -69,7 +69,7 @@ func resourceArmApiManagementNamedValue() *schema.Resource { } } -func resourceArmApiManagementNamedValueCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementNamedValueCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.NamedValueClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -121,10 +121,10 @@ func resourceArmApiManagementNamedValueCreateUpdate(d *schema.ResourceData, meta } d.SetId(*resp.ID) - return resourceArmApiManagementNamedValueRead(d, meta) + return resourceApiManagementNamedValueRead(d, meta) } -func resourceArmApiManagementNamedValueRead(d *schema.ResourceData, meta interface{}) error { +func 
resourceApiManagementNamedValueRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.NamedValueClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -165,7 +165,7 @@ func resourceArmApiManagementNamedValueRead(d *schema.ResourceData, meta interfa return nil } -func resourceArmApiManagementNamedValueDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementNamedValueDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.NamedValueClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/apimanagement/api_management_named_value_resource_test.go b/azurerm/internal/services/apimanagement/api_management_named_value_resource_test.go new file mode 100644 index 000000000000..c560b0dda683 --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_named_value_resource_test.go @@ -0,0 +1,137 @@ +package apimanagement_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementNamedValueResource struct { +} + +func TestAccApiManagementNamedValue_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_named_value", "test") + r := ApiManagementNamedValueResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementNamedValue_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_named_value", "test") + r := ApiManagementNamedValueResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("value"), + }) +} + +func (t ApiManagementNamedValueResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + serviceName := id.Path["service"] + name := id.Path["namedValues"] + + resp, err := clients.ApiManagement.NamedValueClient.Get(ctx, resourceGroup, serviceName, name) + if err != nil { + return nil, fmt.Errorf("reading ApiManagement Named Value (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (ApiManagementNamedValueResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name 
= "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Developer_1" +} + +resource "azurerm_api_management_named_value" "test" { + name = "acctestAMProperty-%d" + resource_group_name = azurerm_api_management.test.resource_group_name + api_management_name = azurerm_api_management.test.name + display_name = "TestProperty%d" + value = "Test Value" + tags = ["tag1", "tag2"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (ApiManagementNamedValueResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Developer_1" +} + +resource "azurerm_api_management_named_value" "test" { + name = "acctestAMProperty-%d" + resource_group_name = azurerm_api_management.test.resource_group_name + api_management_name = azurerm_api_management.test.name + display_name = "TestProperty2%d" + value = "Test Value2" + secret = true + tags = ["tag3", "tag4"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/apimanagement/api_management_openid_connect_provider_resource.go b/azurerm/internal/services/apimanagement/api_management_openid_connect_provider_resource.go index 05c31bc6284a..5413d37b74cf 100644 --- a/azurerm/internal/services/apimanagement/api_management_openid_connect_provider_resource.go +++ b/azurerm/internal/services/apimanagement/api_management_openid_connect_provider_resource.go @@ -15,12 +15,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApiManagementOpenIDConnectProvider() *schema.Resource { +func resourceApiManagementOpenIDConnectProvider() *schema.Resource { return &schema.Resource{ - Create: resourceArmApiManagementOpenIDConnectProviderCreateUpdate, - Read: resourceArmApiManagementOpenIDConnectProviderRead, - Update: resourceArmApiManagementOpenIDConnectProviderCreateUpdate, - Delete: resourceArmApiManagementOpenIDConnectProviderDelete, + Create: resourceApiManagementOpenIDConnectProviderCreateUpdate, + Read: resourceApiManagementOpenIDConnectProviderRead, + Update: resourceApiManagementOpenIDConnectProviderCreateUpdate, + Delete: resourceApiManagementOpenIDConnectProviderDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -74,7 +74,7 @@ func resourceArmApiManagementOpenIDConnectProvider() *schema.Resource { } } -func resourceArmApiManagementOpenIDConnectProviderCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementOpenIDConnectProviderCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.OpenIdConnectClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -119,10 +119,10 @@ func resourceArmApiManagementOpenIDConnectProviderCreateUpdate(d *schema.Resourc } d.SetId(*resp.ID) - return resourceArmApiManagementOpenIDConnectProviderRead(d, meta) + return 
resourceApiManagementOpenIDConnectProviderRead(d, meta) } -func resourceArmApiManagementOpenIDConnectProviderRead(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementOpenIDConnectProviderRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.OpenIdConnectClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -159,7 +159,7 @@ func resourceArmApiManagementOpenIDConnectProviderRead(d *schema.ResourceData, m return nil } -func resourceArmApiManagementOpenIDConnectProviderDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementOpenIDConnectProviderDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.OpenIdConnectClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/apimanagement/api_management_openid_connect_provider_resource_test.go b/azurerm/internal/services/apimanagement/api_management_openid_connect_provider_resource_test.go new file mode 100644 index 000000000000..60a9a0c789b9 --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_openid_connect_provider_resource_test.go @@ -0,0 +1,158 @@ +package apimanagement_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementOpenIDConnectProviderResource struct { +} + +func TestAccApiManagementOpenIDConnectProvider_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_openid_connect_provider", "test") + r := ApiManagementOpenIDConnectProviderResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("client_secret"), + }) +} + +func TestAccApiManagementOpenIDConnectProvider_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_openid_connect_provider", "test") + r := ApiManagementOpenIDConnectProviderResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccApiManagementOpenIDConnectProvider_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_openid_connect_provider", "test") + r := ApiManagementOpenIDConnectProviderResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("client_secret"), + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("client_secret"), + }) +} + +func (t 
ApiManagementOpenIDConnectProviderResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + serviceName := id.Path["service"] + name := id.Path["openidConnectProviders"] + + resp, err := clients.ApiManagement.OpenIdConnectClient.Get(ctx, resourceGroup, serviceName, name) + if err != nil { + return nil, fmt.Errorf("reading ApiManagement Open ID Connect (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (r ApiManagementOpenIDConnectProviderResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_openid_connect_provider" "test" { + name = "acctest-%d" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + client_id = "00001111-2222-3333-%d" + client_secret = "%d-cwdavsxbacsaxZX-%d" + display_name = "Initial Name" + metadata_endpoint = "https://azacctest.hashicorptest.com/example/foo" +} +`, r.template(data), data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r ApiManagementOpenIDConnectProviderResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_openid_connect_provider" "import" { + name = azurerm_api_management_openid_connect_provider.test.name + api_management_name = azurerm_api_management_openid_connect_provider.test.api_management_name + resource_group_name = azurerm_api_management_openid_connect_provider.test.resource_group_name + client_id = azurerm_api_management_openid_connect_provider.test.client_id + client_secret = azurerm_api_management_openid_connect_provider.test.client_secret + display_name = azurerm_api_management_openid_connect_provider.test.display_name + metadata_endpoint = azurerm_api_management_openid_connect_provider.test.metadata_endpoint +} +`, r.basic(data)) +} + +func (r ApiManagementOpenIDConnectProviderResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_openid_connect_provider" "test" { + name = "acctest-%d" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + client_id = "00001111-3333-2222-%d" + client_secret = "%d-423egvwdcsjx-%d" + display_name = "Updated Name" + description = "Example description" + metadata_endpoint = "https://azacctest.hashicorptest.com/example/updated" +} +`, r.template(data), data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (ApiManagementOpenIDConnectProviderResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/apimanagement/api_management_policy_resource.go b/azurerm/internal/services/apimanagement/api_management_policy_resource.go new file mode 100644 index 000000000000..e805162dd8c9 --- /dev/null +++ 
b/azurerm/internal/services/apimanagement/api_management_policy_resource.go @@ -0,0 +1,195 @@ +package apimanagement + +import ( + "fmt" + "html" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/apimanagement/mgmt/2019-12-01/apimanagement" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/apimanagement/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceApiManagementPolicy() *schema.Resource { + return &schema.Resource{ + Create: resourceApiManagementPolicyCreateUpdate, + Read: resourceApiManagementPolicyRead, + Update: resourceApiManagementPolicyCreateUpdate, + Delete: resourceApiManagementPolicyDelete, + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "api_management_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateResourceID, + }, + + "xml_content": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ConflictsWith: []string{"xml_link"}, + ExactlyOneOf: []string{"xml_link", "xml_content"}, + DiffSuppressFunc: XmlWithDotNetInterpolationsDiffSuppress, + }, + + "xml_link": { + Type: schema.TypeString, + Optional: true, + ConflictsWith: []string{"xml_content"}, + ExactlyOneOf: []string{"xml_link", "xml_content"}, + }, + }, + } +} + +func resourceApiManagementPolicyCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).ApiManagement.PolicyClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + apiManagementID := d.Get("api_management_id").(string) + id, err := parse.ApiManagementID(apiManagementID) + if err != nil { + return err + } + resourceGroup := id.ResourceGroup + serviceName := id.ServiceName + + /* + Other resources would have a check for d.IsNewResource() at this location, and would error out using `tf.ImportAsExistsError` if the resource already existed. + However, this is a sub-resource, and the API always returns a policy when queried, either a default policy or one configured by the user or by this resource. + Instead of the usual check, the resource documentation clearly states that any existing policy will be overwritten if the resource is used. 
+ */ + + parameters := apimanagement.PolicyContract{} + + xmlContent := d.Get("xml_content").(string) + xmlLink := d.Get("xml_link").(string) + + if xmlLink != "" { + parameters.PolicyContractProperties = &apimanagement.PolicyContractProperties{ + Format: apimanagement.RawxmlLink, + Value: utils.String(xmlLink), + } + } else if xmlContent != "" { + // this is intentionally an else-if since `xml_content` is computed + + // clear out any existing value for xml_link + if !d.IsNewResource() { + d.Set("xml_link", "") + } + + parameters.PolicyContractProperties = &apimanagement.PolicyContractProperties{ + Format: apimanagement.Rawxml, + Value: utils.String(xmlContent), + } + } + + if parameters.PolicyContractProperties == nil { + return fmt.Errorf("Either `xml_content` or `xml_link` must be set") + } + + if _, err := client.CreateOrUpdate(ctx, resourceGroup, serviceName, parameters, ""); err != nil { + return fmt.Errorf("creating or updating Policy (Resource Group %q / API Management Service %q): %+v", resourceGroup, serviceName, err) + } + + resp, err := client.Get(ctx, resourceGroup, serviceName, apimanagement.PolicyExportFormatXML) + if err != nil { + return fmt.Errorf("retrieving Policy (Resource Group %q / API Management Service %q): %+v", resourceGroup, serviceName, err) + } + if resp.ID == nil { + return fmt.Errorf("Cannot read ID for Policy (Resource Group %q / API Management Service %q): %+v", resourceGroup, serviceName, err) + } + d.SetId(*resp.ID) + + return resourceApiManagementPolicyRead(d, meta) +} + +func resourceApiManagementPolicyRead(d *schema.ResourceData, meta interface{}) error { + serviceClient := meta.(*clients.Client).ApiManagement.ServiceClient + client := meta.(*clients.Client).ApiManagement.PolicyClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.PolicyID(d.Id()) + if err != nil { + return err + } + resourceGroup := id.ResourceGroup + serviceName := id.ServiceName + + serviceResp, err := serviceClient.Get(ctx, resourceGroup, serviceName) + if err != nil { + if utils.ResponseWasNotFound(serviceResp.Response) { + log.Printf("API Management Service %q was not found in Resource Group %q - removing Policy from state!", serviceName, resourceGroup) + d.SetId("") + return nil + } + + return fmt.Errorf("making Read request on API Management Service %q (Resource Group %q): %+v", serviceName, resourceGroup, err) + } + + d.Set("api_management_id", serviceResp.ID) + + resp, err := client.Get(ctx, resourceGroup, serviceName, apimanagement.PolicyExportFormatXML) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[DEBUG] Policy (Resource Group %q / API Management Service %q) was not found - removing from state!", resourceGroup, serviceName) + d.SetId("") + return nil + } + + return fmt.Errorf("making Read request for Policy (Resource Group %q / API Management Service %q): %+v", resourceGroup, serviceName, err) + } + + if properties := resp.PolicyContractProperties; properties != nil { + policyContent := "" + if pc := properties.Value; pc != nil { + policyContent = html.UnescapeString(*pc) + } + + // when you submit an `xml_link` to the API, the API downloads this link and stores it as `xml_content` + // as such there is no way to set `xml_link` and we'll let Terraform handle it + d.Set("xml_content", policyContent) + } + + return nil +} + +func resourceApiManagementPolicyDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).ApiManagement.PolicyClient + 
ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.PolicyID(d.Id()) + if err != nil { + return err + } + resourceGroup := id.ResourceGroup + serviceName := id.ServiceName + + if resp, err := client.Delete(ctx, resourceGroup, serviceName, ""); err != nil { + if !utils.ResponseWasNotFound(resp) { + return fmt.Errorf("deleting Policy (Resource Group %q / API Management Service %q): %+v", resourceGroup, serviceName, err) + } + } + + return nil +} diff --git a/azurerm/internal/services/apimanagement/api_management_policy_resource_test.go b/azurerm/internal/services/apimanagement/api_management_policy_resource_test.go new file mode 100644 index 000000000000..f65bfcfdfe5b --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_policy_resource_test.go @@ -0,0 +1,149 @@ +package apimanagement_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/apimanagement/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementPolicyResource struct { +} + +func TestAccApiManagementPolicy_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_policy", "test") + r := ApiManagementPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("xml_link"), + }) +} + +func TestAccApiManagementPolicy_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_policy", "test") + r := ApiManagementPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("xml_link"), + { + Config: r.customPolicy(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("xml_link"), + }) +} + +func TestAccApiManagementPolicy_customPolicy(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_policy", "test") + r := ApiManagementPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.customPolicy(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("xml_link"), + }) +} + +func (t ApiManagementPolicyResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.PolicyID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + serviceName := id.ServiceName + + resp, err := clients.ApiManagement.ServiceClient.Get(ctx, resourceGroup, serviceName) + if err != nil { + return nil, fmt.Errorf("reading ApiManagement Policy (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (ApiManagementPolicyResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + 
features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%[1]d" + location = "%[2]s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" +} + +resource "azurerm_api_management_policy" "test" { + api_management_id = azurerm_api_management.test.id + xml_link = "https://raw.githubusercontent.com/terraform-providers/terraform-provider-azurerm/master/azurerm/internal/services/apimanagement/tests/testdata/api_management_policy_test.xml" +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (ApiManagementPolicyResource) customPolicy(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%[1]d" + location = "%[2]s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" +} + +resource "azurerm_api_management_policy" "test" { + api_management_id = azurerm_api_management.test.id + + xml_content = < + + + + + +XML + +} +`, data.RandomInteger, data.Locations.Primary) +} diff --git a/azurerm/internal/services/apimanagement/api_management_product_api_resource.go b/azurerm/internal/services/apimanagement/api_management_product_api_resource.go index 77d90550f5fa..88e81739f350 100644 --- a/azurerm/internal/services/apimanagement/api_management_product_api_resource.go +++ b/azurerm/internal/services/apimanagement/api_management_product_api_resource.go @@ -13,11 +13,11 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApiManagementProductApi() *schema.Resource { +func resourceApiManagementProductApi() *schema.Resource { return &schema.Resource{ - Create: resourceArmApiManagementProductApiCreate, - Read: resourceArmApiManagementProductApiRead, - Delete: resourceArmApiManagementProductApiDelete, + Create: resourceApiManagementProductApiCreate, + Read: resourceApiManagementProductApiRead, + Delete: resourceApiManagementProductApiDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -41,7 +41,7 @@ func resourceArmApiManagementProductApi() *schema.Resource { } } -func resourceArmApiManagementProductApiCreate(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementProductApiCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ProductApisClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -73,10 +73,10 @@ func resourceArmApiManagementProductApiCreate(d *schema.ResourceData, meta inter // there's no Read so this is best-effort d.SetId(*resp.ID) - return resourceArmApiManagementProductApiRead(d, meta) + return resourceApiManagementProductApiRead(d, meta) } -func resourceArmApiManagementProductApiRead(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementProductApiRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ProductApisClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -117,7 +117,7 @@ func 
resourceArmApiManagementProductApiRead(d *schema.ResourceData, meta interfa return nil } -func resourceArmApiManagementProductApiDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementProductApiDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ProductApisClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/apimanagement/api_management_product_api_resource_test.go b/azurerm/internal/services/apimanagement/api_management_product_api_resource_test.go new file mode 100644 index 000000000000..31b00ef15b5f --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_product_api_resource_test.go @@ -0,0 +1,129 @@ +package apimanagement_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementProductAPIResource struct { +} + +func TestAccApiManagementProductApi_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_product_api", "test") + r := ApiManagementProductAPIResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementProductApi_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_product_api", "test") + r := ApiManagementProductAPIResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (t ApiManagementProductAPIResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + serviceName := id.Path["service"] + productId := id.Path["products"] + apiName := id.Path["apis"] + + _, err = clients.ApiManagement.ProductApisClient.CheckEntityExists(ctx, resourceGroup, serviceName, productId, apiName) + if err != nil { + return nil, fmt.Errorf("reading ApiManagement Policy (%s): %+v", id, err) + } + + return utils.Bool(true), nil +} + +func (ApiManagementProductAPIResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Developer_1" +} + +resource "azurerm_api_management_product" "test" { + product_id = "test-product" + api_management_name = 
azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + display_name = "Test Product" + subscription_required = true + approval_required = false + published = true +} + +resource "azurerm_api_management_api" "test" { + name = "acctestapi-%d" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + display_name = "api1" + path = "api1" + protocols = ["https"] + revision = "1" +} + +resource "azurerm_api_management_product_api" "test" { + product_id = azurerm_api_management_product.test.product_id + api_name = azurerm_api_management_api.test.name + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r ApiManagementProductAPIResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_product_api" "import" { + api_name = azurerm_api_management_product_api.test.api_name + product_id = azurerm_api_management_product_api.test.product_id + api_management_name = azurerm_api_management_product_api.test.api_management_name + resource_group_name = azurerm_api_management_product_api.test.resource_group_name +} +`, r.basic(data)) +} diff --git a/azurerm/internal/services/apimanagement/api_management_product_data_source.go b/azurerm/internal/services/apimanagement/api_management_product_data_source.go index 81786c4c3934..d9fe8baa4a15 100644 --- a/azurerm/internal/services/apimanagement/api_management_product_data_source.go +++ b/azurerm/internal/services/apimanagement/api_management_product_data_source.go @@ -64,6 +64,7 @@ func dataSourceApiManagementProduct() *schema.Resource { }, } } + func dataSourceApiManagementProductRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ProductsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) diff --git a/azurerm/internal/services/apimanagement/api_management_product_data_source_test.go b/azurerm/internal/services/apimanagement/api_management_product_data_source_test.go new file mode 100644 index 000000000000..d3220de2e672 --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_product_data_source_test.go @@ -0,0 +1,74 @@ +package apimanagement_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type ApiManagementProductDataSource struct { +} + +func TestAccDataSourceApiManagementProduct_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_api_management_product", "test") + r := ApiManagementProductDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("product_id").HasValue("test-product"), + check.That(data.ResourceName).Key("display_name").HasValue("Test Product"), + check.That(data.ResourceName).Key("subscription_required").HasValue("true"), + check.That(data.ResourceName).Key("approval_required").HasValue("true"), + check.That(data.ResourceName).Key("published").HasValue("true"), + check.That(data.ResourceName).Key("description").HasValue("This is an example description"), + 
check.That(data.ResourceName).Key("terms").HasValue("These are some example terms and conditions"), + ), + }, + }) +} + +func (ApiManagementProductDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "amtestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_api_management_product" "test" { + product_id = "test-product" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + display_name = "Test Product" + subscription_required = true + approval_required = true + subscriptions_limit = 2 + published = true + description = "This is an example description" + terms = "These are some example terms and conditions" +} + +data "azurerm_api_management_product" "test" { + product_id = azurerm_api_management_product.test.product_id + api_management_name = azurerm_api_management_product.test.api_management_name + resource_group_name = azurerm_api_management_product.test.resource_group_name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/apimanagement/api_management_product_group_resource.go b/azurerm/internal/services/apimanagement/api_management_product_group_resource.go index 896a26924952..464a4708b399 100644 --- a/azurerm/internal/services/apimanagement/api_management_product_group_resource.go +++ b/azurerm/internal/services/apimanagement/api_management_product_group_resource.go @@ -13,11 +13,11 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApiManagementProductGroup() *schema.Resource { +func resourceApiManagementProductGroup() *schema.Resource { return &schema.Resource{ - Create: resourceArmApiManagementProductGroupCreate, - Read: resourceArmApiManagementProductGroupRead, - Delete: resourceArmApiManagementProductGroupDelete, + Create: resourceApiManagementProductGroupCreate, + Read: resourceApiManagementProductGroupRead, + Delete: resourceApiManagementProductGroupDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -41,7 +41,7 @@ func resourceArmApiManagementProductGroup() *schema.Resource { } } -func resourceArmApiManagementProductGroupCreate(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementProductGroupCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ProductGroupsClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -73,10 +73,10 @@ func resourceArmApiManagementProductGroupCreate(d *schema.ResourceData, meta int // there's no Read so this is best-effort d.SetId(*resp.ID) - return resourceArmApiManagementProductGroupRead(d, meta) + return resourceApiManagementProductGroupRead(d, meta) } -func resourceArmApiManagementProductGroupRead(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementProductGroupRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ProductGroupsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -109,7 +109,7 @@ func 
resourceArmApiManagementProductGroupRead(d *schema.ResourceData, meta inter return nil } -func resourceArmApiManagementProductGroupDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementProductGroupDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ProductGroupsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/apimanagement/api_management_product_group_resource_test.go b/azurerm/internal/services/apimanagement/api_management_product_group_resource_test.go new file mode 100644 index 000000000000..aa6a1cd283e8 --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_product_group_resource_test.go @@ -0,0 +1,126 @@ +package apimanagement_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementProductGroupResource struct { +} + +func TestAccApiManagementProductGroup_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_product_group", "test") + r := ApiManagementProductGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementProductGroup_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_product_group", "test") + r := ApiManagementProductGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (t ApiManagementProductGroupResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + serviceName := id.Path["service"] + groupName := id.Path["groups"] + productId := id.Path["products"] + + _, err = clients.ApiManagement.ProductGroupsClient.CheckEntityExists(ctx, resourceGroup, serviceName, productId, groupName) + if err != nil { + return nil, fmt.Errorf("reading ApiManagement Product Group (%s): %+v", id, err) + } + + return utils.Bool(true), nil +} + +func (ApiManagementProductGroupResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Developer_1" +} + +resource "azurerm_api_management_product" "test" { + product_id = 
"test-product" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + display_name = "Test Product" + subscription_required = true + approval_required = false + published = true +} + +resource "azurerm_api_management_group" "test" { + name = "acctestAMGroup-%d" + resource_group_name = azurerm_resource_group.test.name + api_management_name = azurerm_api_management.test.name + display_name = "Test Group" +} + +resource "azurerm_api_management_product_group" "test" { + product_id = azurerm_api_management_product.test.product_id + group_name = azurerm_api_management_group.test.name + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r ApiManagementProductGroupResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_product_group" "import" { + product_id = azurerm_api_management_product_group.test.product_id + group_name = azurerm_api_management_product_group.test.group_name + api_management_name = azurerm_api_management_product_group.test.api_management_name + resource_group_name = azurerm_api_management_product_group.test.resource_group_name +} +`, r.basic(data)) +} diff --git a/azurerm/internal/services/apimanagement/api_management_product_policy_resource.go b/azurerm/internal/services/apimanagement/api_management_product_policy_resource.go index 8fa5ee770450..828e4098c635 100644 --- a/azurerm/internal/services/apimanagement/api_management_product_policy_resource.go +++ b/azurerm/internal/services/apimanagement/api_management_product_policy_resource.go @@ -15,12 +15,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApiManagementProductPolicy() *schema.Resource { +func resourceApiManagementProductPolicy() *schema.Resource { return &schema.Resource{ - Create: resourceArmApiManagementProductPolicyCreateUpdate, - Read: resourceArmApiManagementProductPolicyRead, - Update: resourceArmApiManagementProductPolicyCreateUpdate, - Delete: resourceArmApiManagementProductPolicyDelete, + Create: resourceApiManagementProductPolicyCreateUpdate, + Read: resourceApiManagementProductPolicyRead, + Update: resourceApiManagementProductPolicyCreateUpdate, + Delete: resourceApiManagementProductPolicyDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -56,7 +56,7 @@ func resourceArmApiManagementProductPolicy() *schema.Resource { } } -func resourceArmApiManagementProductPolicyCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementProductPolicyCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ProductPoliciesClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -114,10 +114,10 @@ func resourceArmApiManagementProductPolicyCreateUpdate(d *schema.ResourceData, m } d.SetId(*resp.ID) - return resourceArmApiManagementProductPolicyRead(d, meta) + return resourceApiManagementProductPolicyRead(d, meta) } -func resourceArmApiManagementProductPolicyRead(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementProductPolicyRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ProductPoliciesClient ctx, cancel := 
timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -154,7 +154,7 @@ func resourceArmApiManagementProductPolicyRead(d *schema.ResourceData, meta inte return nil } -func resourceArmApiManagementProductPolicyDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementProductPolicyDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ProductPoliciesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/apimanagement/api_management_product_policy_resource_test.go b/azurerm/internal/services/apimanagement/api_management_product_policy_resource_test.go new file mode 100644 index 000000000000..9b16f8b5995f --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_product_policy_resource_test.go @@ -0,0 +1,195 @@ +package apimanagement_test + +import ( + "context" + "fmt" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/apimanagement/mgmt/2019-12-01/apimanagement" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementProductPolicyResource struct { +} + +func TestAccApiManagementProductPolicy_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_product_policy", "test") + r := ApiManagementProductPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + ResourceName: data.ResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"xml_link"}, + }, + }) +} + +func TestAccApiManagementProductPolicy_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_product_policy", "test") + r := ApiManagementProductPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccApiManagementProductPolicy_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_product_policy", "test") + r := ApiManagementProductPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.updated(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + ResourceName: data.ResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"xml_link"}, + }, + }) +} + +func (t ApiManagementProductPolicyResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := 
id.ResourceGroup + serviceName := id.Path["service"] + productID := id.Path["products"] + + resp, err := clients.ApiManagement.ProductPoliciesClient.Get(ctx, resourceGroup, serviceName, productID, apimanagement.PolicyExportFormatXML) + if err != nil { + return nil, fmt.Errorf("reading ApiManagement Product Policy (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (ApiManagementProductPolicyResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" +} + +resource "azurerm_api_management_product" "test" { + product_id = "test-product" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + display_name = "Test Product" + subscription_required = false + published = false +} + +resource "azurerm_api_management_product_policy" "test" { + product_id = azurerm_api_management_product.test.product_id + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + xml_link = "https://gist.githubusercontent.com/riordanp/ca22f8113afae0eb38cc12d718fd048d/raw/d6ac89a2f35a6881a7729f8cb4883179dc88eea1/example.xml" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r ApiManagementProductPolicyResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_product_policy" "import" { + product_id = azurerm_api_management_product_policy.test.product_id + api_management_name = azurerm_api_management_product_policy.test.api_management_name + resource_group_name = azurerm_api_management_product_policy.test.resource_group_name + xml_link = azurerm_api_management_product_policy.test.xml_link +} +`, r.basic(data)) +} + +func (ApiManagementProductPolicyResource) updated(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" +} + +resource "azurerm_api_management_product" "test" { + product_id = "test-product" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + display_name = "Test Product" + subscription_required = false + published = false +} + +resource "azurerm_api_management_product_policy" "test" { + product_id = azurerm_api_management_product.test.product_id + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + + xml_content = < + + + + + +XML + +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/apimanagement/api_management_product_resource.go b/azurerm/internal/services/apimanagement/api_management_product_resource.go index c38b487b2348..3c5ae14b0fc1 100644 --- 
a/azurerm/internal/services/apimanagement/api_management_product_resource.go +++ b/azurerm/internal/services/apimanagement/api_management_product_resource.go @@ -15,12 +15,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApiManagementProduct() *schema.Resource { +func resourceApiManagementProduct() *schema.Resource { return &schema.Resource{ - Create: resourceArmApiManagementProductCreateUpdate, - Read: resourceArmApiManagementProductRead, - Update: resourceArmApiManagementProductCreateUpdate, - Delete: resourceArmApiManagementProductDelete, + Create: resourceApiManagementProductCreateUpdate, + Read: resourceApiManagementProductRead, + Update: resourceApiManagementProductCreateUpdate, + Delete: resourceApiManagementProductDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -78,7 +78,7 @@ func resourceArmApiManagementProduct() *schema.Resource { } } -func resourceArmApiManagementProductCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementProductCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ProductsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -148,10 +148,10 @@ func resourceArmApiManagementProductCreateUpdate(d *schema.ResourceData, meta in d.SetId(*read.ID) - return resourceArmApiManagementProductRead(d, meta) + return resourceApiManagementProductRead(d, meta) } -func resourceArmApiManagementProductRead(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementProductRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ProductsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -193,7 +193,7 @@ func resourceArmApiManagementProductRead(d *schema.ResourceData, meta interface{ return nil } -func resourceArmApiManagementProductDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementProductDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ProductsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/apimanagement/api_management_product_resource_test.go b/azurerm/internal/services/apimanagement/api_management_product_resource_test.go new file mode 100644 index 000000000000..9b3582d4abdb --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_product_resource_test.go @@ -0,0 +1,360 @@ +package apimanagement_test + +import ( + "context" + "fmt" + "regexp" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementProductResource struct { +} + +func TestAccApiManagementProduct_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_product", "test") + r := ApiManagementProductResource{} + + data.ResourceTest(t, r, 
[]resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("approval_required").HasValue("false"), + check.That(data.ResourceName).Key("description").HasValue(""), + check.That(data.ResourceName).Key("display_name").HasValue("Test Product"), + check.That(data.ResourceName).Key("product_id").HasValue("test-product"), + check.That(data.ResourceName).Key("published").HasValue("false"), + check.That(data.ResourceName).Key("subscription_required").HasValue("false"), + check.That(data.ResourceName).Key("terms").HasValue(""), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementProduct_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_product", "test") + r := ApiManagementProductResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccApiManagementProduct_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_product", "test") + r := ApiManagementProductResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("approval_required").HasValue("false"), + check.That(data.ResourceName).Key("description").HasValue(""), + check.That(data.ResourceName).Key("display_name").HasValue("Test Product"), + check.That(data.ResourceName).Key("product_id").HasValue("test-product"), + check.That(data.ResourceName).Key("published").HasValue("false"), + check.That(data.ResourceName).Key("subscription_required").HasValue("false"), + check.That(data.ResourceName).Key("terms").HasValue(""), + ), + }, + data.ImportStep(), + { + Config: r.updated(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("approval_required").HasValue("true"), + check.That(data.ResourceName).Key("description").HasValue(""), + check.That(data.ResourceName).Key("display_name").HasValue("Test Updated Product"), + check.That(data.ResourceName).Key("product_id").HasValue("test-product"), + check.That(data.ResourceName).Key("published").HasValue("true"), + check.That(data.ResourceName).Key("subscription_required").HasValue("true"), + check.That(data.ResourceName).Key("terms").HasValue(""), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("description").HasValue(""), + check.That(data.ResourceName).Key("display_name").HasValue("Test Product"), + check.That(data.ResourceName).Key("product_id").HasValue("test-product"), + check.That(data.ResourceName).Key("published").HasValue("false"), + check.That(data.ResourceName).Key("subscription_required").HasValue("false"), + check.That(data.ResourceName).Key("terms").HasValue(""), + ), + }, + }) +} + +func TestAccApiManagementProduct_subscriptionsLimit(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_product", "test") + r := ApiManagementProductResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.subscriptionLimits(data), + Check: resource.ComposeTestCheckFunc( + 
check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("approval_required").HasValue("true"), + check.That(data.ResourceName).Key("subscription_required").HasValue("true"), + check.That(data.ResourceName).Key("subscriptions_limit").HasValue("2"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementProduct_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_product", "test") + r := ApiManagementProductResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("approval_required").HasValue("true"), + check.That(data.ResourceName).Key("description").HasValue("This is an example description"), + check.That(data.ResourceName).Key("display_name").HasValue("Test Product"), + check.That(data.ResourceName).Key("product_id").HasValue("test-product"), + check.That(data.ResourceName).Key("published").HasValue("true"), + check.That(data.ResourceName).Key("subscriptions_limit").HasValue("2"), + check.That(data.ResourceName).Key("subscription_required").HasValue("true"), + check.That(data.ResourceName).Key("terms").HasValue("These are some example terms and conditions"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementProduct_approvalRequiredError(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_product", "test") + r := ApiManagementProductResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.approvalRequiredError(data), + ExpectError: regexp.MustCompile("`subscription_required` must be true and `subscriptions_limit` must be greater than 0 to use `approval_required`"), + }, + }) +} + +func (t ApiManagementProductResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + serviceName := id.Path["service"] + productId := id.Path["products"] + + resp, err := clients.ApiManagement.ProductsClient.Get(ctx, resourceGroup, serviceName, productId) + if err != nil { + return nil, fmt.Errorf("reading ApiManagement Product (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (ApiManagementProductResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Developer_1" +} + +resource "azurerm_api_management_product" "test" { + product_id = "test-product" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + display_name = "Test Product" + subscription_required = false + published = false +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r ApiManagementProductResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_product" "import" { + product_id = azurerm_api_management_product.test.product_id + api_management_name = 
azurerm_api_management_product.test.api_management_name + resource_group_name = azurerm_api_management_product.test.resource_group_name + display_name = azurerm_api_management_product.test.display_name + subscription_required = azurerm_api_management_product.test.subscription_required + approval_required = azurerm_api_management_product.test.approval_required + published = azurerm_api_management_product.test.published +} +`, r.basic(data)) +} + +func (ApiManagementProductResource) updated(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Developer_1" +} + +resource "azurerm_api_management_product" "test" { + product_id = "test-product" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + display_name = "Test Updated Product" + subscription_required = true + approval_required = true + subscriptions_limit = 1 + published = true +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (ApiManagementProductResource) subscriptionLimits(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Developer_1" +} + +resource "azurerm_api_management_product" "test" { + product_id = "test-product" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + display_name = "Test Product" + subscription_required = true + approval_required = true + subscriptions_limit = 2 + published = false +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (ApiManagementProductResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Developer_1" +} + +resource "azurerm_api_management_product" "test" { + product_id = "test-product" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + display_name = "Test Product" + subscription_required = true + approval_required = true + published = true + subscriptions_limit = 2 + description = "This is an example description" + terms = "These are some example terms and conditions" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (ApiManagementProductResource) approvalRequiredError(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location 
= "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" +} + +resource "azurerm_api_management_product" "test" { + product_id = "test-product" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + display_name = "Test Product" + approval_required = true + subscription_required = false + published = true + description = "This is an example description" + terms = "These are some example terms and conditions" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/apimanagement/api_management_property_resource.go b/azurerm/internal/services/apimanagement/api_management_property_resource.go index dcde3dc78921..5b0846e4fae6 100644 --- a/azurerm/internal/services/apimanagement/api_management_property_resource.go +++ b/azurerm/internal/services/apimanagement/api_management_property_resource.go @@ -16,12 +16,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApiManagementProperty() *schema.Resource { +func resourceApiManagementProperty() *schema.Resource { return &schema.Resource{ - Create: resourceArmApiManagementPropertyCreateUpdate, - Read: resourceArmApiManagementPropertyRead, - Update: resourceArmApiManagementPropertyCreateUpdate, - Delete: resourceArmApiManagementPropertyDelete, + Create: resourceApiManagementPropertyCreateUpdate, + Read: resourceApiManagementPropertyRead, + Update: resourceApiManagementPropertyCreateUpdate, + Delete: resourceApiManagementPropertyDelete, DeprecationMessage: "This resource has been superseded by `azurerm_api_management_named_value` to reflects changes in the API/SDK and will be removed in version 3.0 of the provider.", @@ -81,7 +81,7 @@ func resourceArmApiManagementProperty() *schema.Resource { } } -func resourceArmApiManagementPropertyCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementPropertyCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.NamedValueClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -133,10 +133,10 @@ func resourceArmApiManagementPropertyCreateUpdate(d *schema.ResourceData, meta i } d.SetId(*resp.ID) - return resourceArmApiManagementPropertyRead(d, meta) + return resourceApiManagementPropertyRead(d, meta) } -func resourceArmApiManagementPropertyRead(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementPropertyRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.NamedValueClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -176,7 +176,7 @@ func resourceArmApiManagementPropertyRead(d *schema.ResourceData, meta interface return nil } -func resourceArmApiManagementPropertyDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementPropertyDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.NamedValueClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/apimanagement/api_management_property_resource_test.go 
b/azurerm/internal/services/apimanagement/api_management_property_resource_test.go new file mode 100644 index 000000000000..549815569805 --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_property_resource_test.go @@ -0,0 +1,150 @@ +package apimanagement_test + +import ( + "context" + "fmt" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementPropertyResource struct { +} + +func TestAccApiManagementProperty_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_property", "test") + r := ApiManagementPropertyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("display_name").HasValue(fmt.Sprintf("TestProperty%d", data.RandomInteger)), + check.That(data.ResourceName).Key("value").HasValue("Test Value"), + check.That(data.ResourceName).Key("tags.0").HasValue("tag1"), + check.That(data.ResourceName).Key("tags.1").HasValue("tag2"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementProperty_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_property", "test") + r := ApiManagementPropertyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("display_name").HasValue(fmt.Sprintf("TestProperty%d", data.RandomInteger)), + check.That(data.ResourceName).Key("value").HasValue("Test Value"), + check.That(data.ResourceName).Key("tags.0").HasValue("tag1"), + check.That(data.ResourceName).Key("tags.1").HasValue("tag2"), + ), + }, + data.ImportStep(), + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("display_name").HasValue(fmt.Sprintf("TestProperty2%d", data.RandomInteger)), + check.That(data.ResourceName).Key("secret").HasValue("true"), + check.That(data.ResourceName).Key("tags.0").HasValue("tag3"), + check.That(data.ResourceName).Key("tags.1").HasValue("tag4"), + ), + }, + data.ImportStep("value"), + }) +} + +func (t ApiManagementPropertyResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + serviceName := id.Path["service"] + name := id.Path["namedValues"] + + resp, err := clients.ApiManagement.NamedValueClient.Get(ctx, resourceGroup, serviceName, name) + if err != nil { + return nil, fmt.Errorf("reading ApiManagement Property (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (ApiManagementPropertyResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource 
"azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Developer_1" +} + +resource "azurerm_api_management_property" "test" { + name = "acctestAMProperty-%d" + resource_group_name = azurerm_api_management.test.resource_group_name + api_management_name = azurerm_api_management.test.name + display_name = "TestProperty%d" + value = "Test Value" + tags = ["tag1", "tag2"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (ApiManagementPropertyResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Developer_1" +} + +resource "azurerm_api_management_property" "test" { + name = "acctestAMProperty-%d" + resource_group_name = azurerm_api_management.test.resource_group_name + api_management_name = azurerm_api_management.test.name + display_name = "TestProperty2%d" + value = "Test Value2" + secret = true + tags = ["tag3", "tag4"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/apimanagement/api_management_resource.go b/azurerm/internal/services/apimanagement/api_management_resource.go index 170c9b0b65f5..686dcf3bdf5b 100644 --- a/azurerm/internal/services/apimanagement/api_management_resource.go +++ b/azurerm/internal/services/apimanagement/api_management_resource.go @@ -1,6 +1,7 @@ package apimanagement import ( + "context" "fmt" "log" "strconv" @@ -9,43 +10,48 @@ import ( "github.com/Azure/azure-sdk-for-go/services/apimanagement/mgmt/2019-12-01/apimanagement" "github.com/hashicorp/go-azure-helpers/response" + "github.com/hashicorp/terraform-plugin-sdk/helper/customdiff" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/apimanagement/parse" + apimValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/apimanagement/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/suppress" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -var apimBackendProtocolSsl3 = "Microsoft.WindowsAzure.ApiManagement.Gateway.Security.Backend.Protocols.Ssl30" -var 
apimBackendProtocolTls10 = "Microsoft.WindowsAzure.ApiManagement.Gateway.Security.Backend.Protocols.Tls10" -var apimBackendProtocolTls11 = "Microsoft.WindowsAzure.ApiManagement.Gateway.Security.Backend.Protocols.Tls11" -var apimFrontendProtocolSsl3 = "Microsoft.WindowsAzure.ApiManagement.Gateway.Security.Protocols.Ssl30" -var apimFrontendProtocolTls10 = "Microsoft.WindowsAzure.ApiManagement.Gateway.Security.Protocols.Tls10" -var apimFrontendProtocolTls11 = "Microsoft.WindowsAzure.ApiManagement.Gateway.Security.Protocols.Tls11" -var apimTripleDesCiphers = "Microsoft.WindowsAzure.ApiManagement.Gateway.Security.Ciphers.TripleDes168" -var apimHttp2Protocol = "Microsoft.WindowsAzure.ApiManagement.Gateway.Protocols.Server.Http2" +var ( + apimBackendProtocolSsl3 = "Microsoft.WindowsAzure.ApiManagement.Gateway.Security.Backend.Protocols.Ssl30" + apimBackendProtocolTls10 = "Microsoft.WindowsAzure.ApiManagement.Gateway.Security.Backend.Protocols.Tls10" + apimBackendProtocolTls11 = "Microsoft.WindowsAzure.ApiManagement.Gateway.Security.Backend.Protocols.Tls11" + apimFrontendProtocolSsl3 = "Microsoft.WindowsAzure.ApiManagement.Gateway.Security.Protocols.Ssl30" + apimFrontendProtocolTls10 = "Microsoft.WindowsAzure.ApiManagement.Gateway.Security.Protocols.Tls10" + apimFrontendProtocolTls11 = "Microsoft.WindowsAzure.ApiManagement.Gateway.Security.Protocols.Tls11" + apimTripleDesCiphers = "Microsoft.WindowsAzure.ApiManagement.Gateway.Security.Ciphers.TripleDes168" + apimHttp2Protocol = "Microsoft.WindowsAzure.ApiManagement.Gateway.Protocols.Server.Http2" +) -func resourceArmApiManagementService() *schema.Resource { +func resourceApiManagementService() *schema.Resource { return &schema.Resource{ - Create: resourceArmApiManagementServiceCreateUpdate, - Read: resourceArmApiManagementServiceRead, - Update: resourceArmApiManagementServiceCreateUpdate, - Delete: resourceArmApiManagementServiceDelete, + Create: resourceApiManagementServiceCreateUpdate, + Read: resourceApiManagementServiceRead, + Update: resourceApiManagementServiceCreateUpdate, + Delete: resourceApiManagementServiceDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(60 * time.Minute), + Create: schema.DefaultTimeout(3 * time.Hour), Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(60 * time.Minute), - Delete: schema.DefaultTimeout(60 * time.Minute), + Update: schema.DefaultTimeout(3 * time.Hour), + Delete: schema.DefaultTimeout(3 * time.Hour), }, Schema: map[string]*schema.Schema{ @@ -68,15 +74,10 @@ func resourceArmApiManagementService() *schema.Resource { }, "sku_name": { - Type: schema.TypeString, - Required: true, - ValidateFunc: azure.MinCapacitySkuNameInSlice([]string{ - string(apimanagement.SkuTypeConsumption), - string(apimanagement.SkuTypeDeveloper), - string(apimanagement.SkuTypeBasic), - string(apimanagement.SkuTypeStandard), - string(apimanagement.SkuTypePremium), - }, 1, false), + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: apimValidate.ApimSkuName(), }, "identity": { @@ -121,7 +122,6 @@ func resourceArmApiManagementService() *schema.Resource { Type: schema.TypeString, Optional: true, Default: string(apimanagement.VirtualNetworkTypeNone), - ForceNew: true, ValidateFunc: validation.StringInSlice([]string{ string(apimanagement.VirtualNetworkTypeNone), string(apimanagement.VirtualNetworkTypeExternal), @@ -138,7 +138,6 @@ func resourceArmApiManagementService() *schema.Resource { 
"subnet_id": { Type: schema.TypeString, Required: true, - ForceNew: true, ValidateFunc: azure.ValidateResourceID, }, }, @@ -297,7 +296,6 @@ func resourceArmApiManagementService() *schema.Resource { "hostname_configuration": { Type: schema.TypeList, Optional: true, - Computed: true, MaxItems: 1, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ @@ -353,7 +351,7 @@ func resourceArmApiManagementService() *schema.Resource { Optional: true, Computed: true, ConflictsWith: []string{"policy.0.xml_link"}, - DiffSuppressFunc: suppress.XmlDiff, + DiffSuppressFunc: XmlWithDotNetInterpolationsDiffSuppress, }, "xml_link": { @@ -465,10 +463,25 @@ func resourceArmApiManagementService() *schema.Resource { "tags": tags.Schema(), }, + + // we can only change `virtual_network_type` from None to Internal Or External, Else the subnet can not be destroyed cause “InUseSubnetCannotBeDeleted” for 3 hours + // we can not change the subnet from subnet1 to subnet2 either, Else the subnet1 can not be destroyed cause “InUseSubnetCannotBeDeleted” for 3 hours + // Issue: https://github.com/Azure/azure-rest-api-specs/issues/10395 + CustomizeDiff: customdiff.All( + customdiff.ForceNewIfChange("virtual_network_type", func(old, new, meta interface{}) bool { + return !(old.(string) == string(apimanagement.VirtualNetworkTypeNone) && + (new.(string) == string(apimanagement.VirtualNetworkTypeInternal) || + new.(string) == string(apimanagement.VirtualNetworkTypeExternal))) + }), + + customdiff.ForceNewIfChange("virtual_network_configuration", func(old, new, meta interface{}) bool { + return !(len(old.([]interface{})) == 0 && len(new.([]interface{})) > 0) + }), + ), } } -func resourceArmApiManagementServiceCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementServiceCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ServiceClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -501,23 +514,28 @@ func resourceArmApiManagementServiceCreateUpdate(d *schema.ResourceData, meta in notificationSenderEmail := d.Get("notification_sender_email").(string) virtualNetworkType := d.Get("virtual_network_type").(string) - customProperties := expandApiManagementCustomProperties(d) + customProperties, err := expandApiManagementCustomProperties(d, sku.Name == apimanagement.SkuTypeConsumption) + if err != nil { + return err + } certificates := expandAzureRmApiManagementCertificates(d) - hostnameConfigurations := expandAzureRmApiManagementHostnameConfigurations(d) properties := apimanagement.ServiceResource{ Location: utils.String(location), ServiceProperties: &apimanagement.ServiceProperties{ - PublisherName: utils.String(publisherName), - PublisherEmail: utils.String(publisherEmail), - CustomProperties: customProperties, - Certificates: certificates, - HostnameConfigurations: hostnameConfigurations, + PublisherName: utils.String(publisherName), + PublisherEmail: utils.String(publisherEmail), + CustomProperties: customProperties, + Certificates: certificates, }, Tags: tags.Expand(t), Sku: sku, } + if _, ok := d.GetOk("hostname_configuration"); ok { + properties.ServiceProperties.HostnameConfigurations = expandAzureRmApiManagementHostnameConfigurations(d) + } + // intentionally not gated since we specify a default value (of None) in the expand, which we need on updates identityRaw := d.Get("identity").([]interface{}) identity, err := expandAzureRmApiManagementIdentity(identityRaw) @@ -571,17 +589,27 @@ 
func resourceArmApiManagementServiceCreateUpdate(d *schema.ResourceData, meta in
 	d.SetId(*read.ID)
 	signInSettingsRaw := d.Get("sign_in").([]interface{})
-	signInSettings := expandApiManagementSignInSettings(signInSettingsRaw)
-	signInClient := meta.(*clients.Client).ApiManagement.SignInClient
-	if _, err := signInClient.CreateOrUpdate(ctx, resourceGroup, name, signInSettings, ""); err != nil {
-		return fmt.Errorf(" setting Sign In settings for API Management Service %q (Resource Group %q): %+v", name, resourceGroup, err)
+	if sku.Name == apimanagement.SkuTypeConsumption && len(signInSettingsRaw) > 0 {
+		return fmt.Errorf("`sign_in` is not supported for sku tier `Consumption`")
+	}
+	if sku.Name != apimanagement.SkuTypeConsumption {
+		signInSettings := expandApiManagementSignInSettings(signInSettingsRaw)
+		signInClient := meta.(*clients.Client).ApiManagement.SignInClient
+		if _, err := signInClient.CreateOrUpdate(ctx, resourceGroup, name, signInSettings, ""); err != nil {
+			return fmt.Errorf(" setting Sign In settings for API Management Service %q (Resource Group %q): %+v", name, resourceGroup, err)
+		}
 	}
 	signUpSettingsRaw := d.Get("sign_up").([]interface{})
-	signUpSettings := expandApiManagementSignUpSettings(signUpSettingsRaw)
-	signUpClient := meta.(*clients.Client).ApiManagement.SignUpClient
-	if _, err := signUpClient.CreateOrUpdate(ctx, resourceGroup, name, signUpSettings, ""); err != nil {
-		return fmt.Errorf(" setting Sign Up settings for API Management Service %q (Resource Group %q): %+v", name, resourceGroup, err)
+	if sku.Name == apimanagement.SkuTypeConsumption && len(signUpSettingsRaw) > 0 {
+		return fmt.Errorf("`sign_up` is not supported for sku tier `Consumption`")
+	}
+	if sku.Name != apimanagement.SkuTypeConsumption {
+		signUpSettings := expandApiManagementSignUpSettings(signUpSettingsRaw)
+		signUpClient := meta.(*clients.Client).ApiManagement.SignUpClient
+		if _, err := signUpClient.CreateOrUpdate(ctx, resourceGroup, name, signUpSettings, ""); err != nil {
+			return fmt.Errorf(" setting Sign Up settings for API Management Service %q (Resource Group %q): %+v", name, resourceGroup, err)
+		}
 	}
 	policyClient := meta.(*clients.Client).ApiManagement.PolicyClient
@@ -607,21 +635,24 @@ func resourceArmApiManagementServiceCreateUpdate(d *schema.ResourceData, meta in
 		}
 	}
-	return resourceArmApiManagementServiceRead(d, meta)
+	return resourceApiManagementServiceRead(d, meta)
 }
-func resourceArmApiManagementServiceRead(d *schema.ResourceData, meta interface{}) error {
+func resourceApiManagementServiceRead(d *schema.ResourceData, meta interface{}) error {
 	client := meta.(*clients.Client).ApiManagement.ServiceClient
+	signInClient := meta.(*clients.Client).ApiManagement.SignInClient
+	signUpClient := meta.(*clients.Client).ApiManagement.SignUpClient
+	environment := meta.(*clients.Client).Account.Environment
 	ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d)
 	defer cancel()
-	id, err := azure.ParseAzureResourceID(d.Id())
+	id, err := parse.ApiManagementID(d.Id())
 	if err != nil {
 		return err
 	}
 	resourceGroup := id.ResourceGroup
-	name := id.Path["service"]
+	name := id.ServiceName
 	resp, err := client.Get(ctx, resourceGroup, name)
 	if err != nil {
@@ -634,18 +665,6 @@ func resourceArmApiManagementServiceRead(d *schema.ResourceData, meta interface{
 		return fmt.Errorf("making Read request on API Management Service %q (Resource Group %q): %+v", name, resourceGroup, err)
 	}
-	signInClient := meta.(*clients.Client).ApiManagement.SignInClient
-	signInSettings, err :=
signInClient.Get(ctx, resourceGroup, name) - if err != nil { - return fmt.Errorf("retrieving Sign In Settings for API Management Service %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - signUpClient := meta.(*clients.Client).ApiManagement.SignUpClient - signUpSettings, err := signUpClient.Get(ctx, resourceGroup, name) - if err != nil { - return fmt.Errorf("retrieving Sign Up Settings for API Management Service %q (Resource Group %q): %+v", name, resourceGroup, err) - } - policyClient := meta.(*clients.Client).ApiManagement.PolicyClient policy, err := policyClient.Get(ctx, resourceGroup, name, apimanagement.PolicyExportFormatXML) if err != nil { @@ -680,15 +699,18 @@ func resourceArmApiManagementServiceRead(d *schema.ResourceData, meta interface{ d.Set("private_ip_addresses", props.PrivateIPAddresses) d.Set("virtual_network_type", props.VirtualNetworkType) - if err := d.Set("security", flattenApiManagementSecurityCustomProperties(props.CustomProperties)); err != nil { - return fmt.Errorf("setting `security`: %+v", err) + if resp.Sku != nil && resp.Sku.Name != "" { + if err := d.Set("security", flattenApiManagementSecurityCustomProperties(props.CustomProperties, resp.Sku.Name == apimanagement.SkuTypeConsumption)); err != nil { + return fmt.Errorf("setting `security`: %+v", err) + } } if err := d.Set("protocols", flattenApiManagementProtocolsCustomProperties(props.CustomProperties)); err != nil { return fmt.Errorf("setting `protocols`: %+v", err) } - hostnameConfigs := flattenApiManagementHostnameConfigurations(props.HostnameConfigurations, d) + apimHostNameSuffix := environment.APIManagementHostNameSuffix + hostnameConfigs := flattenApiManagementHostnameConfigurations(props.HostnameConfigurations, d, name, apimHostNameSuffix) if err := d.Set("hostname_configuration", hostnameConfigs); err != nil { return fmt.Errorf("setting `hostname_configuration`: %+v", err) } @@ -706,32 +728,46 @@ func resourceArmApiManagementServiceRead(d *schema.ResourceData, meta interface{ return fmt.Errorf("setting `sku_name`: %+v", err) } - if err := d.Set("sign_in", flattenApiManagementSignInSettings(signInSettings)); err != nil { - return fmt.Errorf("setting `sign_in`: %+v", err) + if err := d.Set("policy", flattenApiManagementPolicies(d, policy)); err != nil { + return fmt.Errorf("setting `policy`: %+v", err) } - if err := d.Set("sign_up", flattenApiManagementSignUpSettings(signUpSettings)); err != nil { - return fmt.Errorf("setting `sign_up`: %+v", err) - } + if resp.Sku.Name != apimanagement.SkuTypeConsumption { + signInSettings, err := signInClient.Get(ctx, resourceGroup, name) + if err != nil { + return fmt.Errorf("retrieving Sign In Settings for API Management Service %q (Resource Group %q): %+v", name, resourceGroup, err) + } + if err := d.Set("sign_in", flattenApiManagementSignInSettings(signInSettings)); err != nil { + return fmt.Errorf("setting `sign_in`: %+v", err) + } - if err := d.Set("policy", flattenApiManagementPolicies(d, policy)); err != nil { - return fmt.Errorf("setting `policy`: %+v", err) + signUpSettings, err := signUpClient.Get(ctx, resourceGroup, name) + if err != nil { + return fmt.Errorf("retrieving Sign Up Settings for API Management Service %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + if err := d.Set("sign_up", flattenApiManagementSignUpSettings(signUpSettings)); err != nil { + return fmt.Errorf("setting `sign_up`: %+v", err) + } + } else { + d.Set("sign_in", []interface{}{}) + d.Set("sign_up", []interface{}{}) } return tags.FlattenAndSet(d, resp.Tags) 
} -func resourceArmApiManagementServiceDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementServiceDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.ServiceClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.ApiManagementID(d.Id()) if err != nil { return err } resourceGroup := id.ResourceGroup - name := id.Path["service"] + name := id.ServiceName log.Printf("[DEBUG] Deleting API Management Service %q (Resource Grouo %q)", name, resourceGroup) future, err := client.Delete(ctx, resourceGroup, name) @@ -748,9 +784,38 @@ func resourceArmApiManagementServiceDelete(d *schema.ResourceData, meta interfac return nil } +func apiManagementRefreshFunc(ctx context.Context, client *apimanagement.ServiceClient, serviceName, resourceGroup string) resource.StateRefreshFunc { + return func() (interface{}, string, error) { + log.Printf("[DEBUG] Checking to see if API Management Service %q (Resource Group: %q) is available..", serviceName, resourceGroup) + + resp, err := client.Get(ctx, resourceGroup, serviceName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[DEBUG] Retrieving API Management %q (Resource Group: %q) returned 404.", serviceName, resourceGroup) + return nil, "NotFound", nil + } + + return nil, "", fmt.Errorf("Error polling for the state of the API Management Service %q (Resource Group: %q): %+v", serviceName, resourceGroup, err) + } + + state := "" + if props := resp.ServiceProperties; props != nil { + if props.ProvisioningState != nil { + state = *props.ProvisioningState + } + } + + return resp, state, nil + } +} + func expandAzureRmApiManagementHostnameConfigurations(d *schema.ResourceData) *[]apimanagement.HostnameConfiguration { results := make([]apimanagement.HostnameConfiguration, 0) - hostnameVs := d.Get("hostname_configuration").([]interface{}) + vs := d.Get("hostname_configuration") + if vs == nil { + return &results + } + hostnameVs := vs.([]interface{}) for _, hostnameRawVal := range hostnameVs { hostnameV := hostnameRawVal.(map[string]interface{}) @@ -829,7 +894,7 @@ func expandApiManagementCommonHostnameConfiguration(input map[string]interface{} return output } -func flattenApiManagementHostnameConfigurations(input *[]apimanagement.HostnameConfiguration, d *schema.ResourceData) []interface{} { +func flattenApiManagementHostnameConfigurations(input *[]apimanagement.HostnameConfiguration, d *schema.ResourceData, name, apimHostNameSuffix string) []interface{} { results := make([]interface{}, 0) if input == nil { return results @@ -848,6 +913,11 @@ func flattenApiManagementHostnameConfigurations(input *[]apimanagement.HostnameC output["host_name"] = *config.HostName } + // There'll always be a default custom domain with hostName "apim_name.azure-api.net" and Type "Proxy", which should be ignored + if *config.HostName == strings.ToLower(name)+"."+apimHostNameSuffix && config.Type == apimanagement.HostnameTypeProxy { + continue + } + if config.NegotiateClientCertificate != nil { output["negotiate_client_certificate"] = *config.NegotiateClientCertificate } @@ -856,26 +926,7 @@ func flattenApiManagementHostnameConfigurations(input *[]apimanagement.HostnameC output["key_vault_id"] = *config.KeyVaultID } - // Iterate through old state to find sensitive props not returned by API. - // This must be done in order to avoid state diffs. 
- // NOTE: this information won't be available during times like Import, so this is a best-effort. - existingHostnames := d.Get("hostname_configuration").([]interface{}) - if len(existingHostnames) > 0 { - v := existingHostnames[0].(map[string]interface{}) - - if valsRaw, ok := v[strings.ToLower(string(config.Type))]; ok { - vals := valsRaw.([]interface{}) - for _, val := range vals { - oldConfig := val.(map[string]interface{}) - - if oldConfig["host_name"] == *config.HostName { - output["certificate_password"] = oldConfig["certificate_password"] - output["certificate"] = oldConfig["certificate"] - } - } - } - } - + var configType string switch strings.ToLower(string(config.Type)) { case strings.ToLower(string(apimanagement.HostnameTypeProxy)): // only set SSL binding for proxy types @@ -883,21 +934,40 @@ func flattenApiManagementHostnameConfigurations(input *[]apimanagement.HostnameC output["default_ssl_binding"] = *config.DefaultSslBinding } proxyResults = append(proxyResults, output) + configType = "proxy" case strings.ToLower(string(apimanagement.HostnameTypeManagement)): managementResults = append(managementResults, output) + configType = "management" case strings.ToLower(string(apimanagement.HostnameTypePortal)): portalResults = append(portalResults, output) + configType = "portal" case strings.ToLower(string(apimanagement.HostnameTypeDeveloperPortal)): developerPortalResults = append(developerPortalResults, output) + configType = "developer_portal" case strings.ToLower(string(apimanagement.HostnameTypeScm)): scmResults = append(scmResults, output) + configType = "scm" + } + + existingHostnames := d.Get("hostname_configuration").([]interface{}) + if len(existingHostnames) > 0 && configType != "" { + v := existingHostnames[0].(map[string]interface{}) + + if valsRaw, ok := v[configType]; ok { + vals := valsRaw.([]interface{}) + azure.CopyCertificateAndPassword(vals, *config.HostName, output) + } } } + if len(managementResults) == 0 && len(portalResults) == 0 && len(developerPortalResults) == 0 && len(proxyResults) == 0 && len(scmResults) == 0 { + return []interface{}{} + } + return []interface{}{ map[string]interface{}{ "management": managementResults, @@ -1091,7 +1161,7 @@ func flattenApiManagementServiceSkuName(input *apimanagement.ServiceSkuPropertie return fmt.Sprintf("%s_%d", string(input.Name), *input.Capacity) } -func expandApiManagementCustomProperties(d *schema.ResourceData) map[string]*string { +func expandApiManagementCustomProperties(d *schema.ResourceData, skuIsConsumption bool) (map[string]*string, error) { backendProtocolSsl3 := false backendProtocolTls10 := false backendProtocolTls11 := false @@ -1109,16 +1179,26 @@ func expandApiManagementCustomProperties(d *schema.ResourceData) map[string]*str frontendProtocolTls10 = v["enable_frontend_tls10"].(bool) frontendProtocolTls11 = v["enable_frontend_tls11"].(bool) tripleDesCiphers = v["enable_triple_des_ciphers"].(bool) + if skuIsConsumption && frontendProtocolSsl3 { + return nil, fmt.Errorf("`enable_frontend_ssl30` is not support for Sku Tier `Consumption`") + } + + if skuIsConsumption && tripleDesCiphers { + return nil, fmt.Errorf("`enable_triple_des_ciphers` is not support for Sku Tier `Consumption`") + } } customProperties := map[string]*string{ apimBackendProtocolSsl3: utils.String(strconv.FormatBool(backendProtocolSsl3)), apimBackendProtocolTls10: utils.String(strconv.FormatBool(backendProtocolTls10)), apimBackendProtocolTls11: utils.String(strconv.FormatBool(backendProtocolTls11)), - apimFrontendProtocolSsl3: 
utils.String(strconv.FormatBool(frontendProtocolSsl3)), apimFrontendProtocolTls10: utils.String(strconv.FormatBool(frontendProtocolTls10)), apimFrontendProtocolTls11: utils.String(strconv.FormatBool(frontendProtocolTls11)), - apimTripleDesCiphers: utils.String(strconv.FormatBool(tripleDesCiphers)), + } + + if !skuIsConsumption { + customProperties[apimFrontendProtocolSsl3] = utils.String(strconv.FormatBool(frontendProtocolSsl3)) + customProperties[apimTripleDesCiphers] = utils.String(strconv.FormatBool(tripleDesCiphers)) } if vp := d.Get("protocols").([]interface{}); len(vp) > 0 { @@ -1127,7 +1207,7 @@ func expandApiManagementCustomProperties(d *schema.ResourceData) map[string]*str customProperties[apimHttp2Protocol] = utils.String(strconv.FormatBool(enableHttp2)) } - return customProperties + return customProperties, nil } func expandAzureRmApiManagementVirtualNetworkConfigurations(d *schema.ResourceData) *apimanagement.VirtualNetworkConfiguration { @@ -1144,16 +1224,19 @@ func expandAzureRmApiManagementVirtualNetworkConfigurations(d *schema.ResourceDa } } -func flattenApiManagementSecurityCustomProperties(input map[string]*string) []interface{} { +func flattenApiManagementSecurityCustomProperties(input map[string]*string, skuIsConsumption bool) []interface{} { output := make(map[string]interface{}) output["enable_backend_ssl30"] = parseApiManagementNilableDictionary(input, apimBackendProtocolSsl3) output["enable_backend_tls10"] = parseApiManagementNilableDictionary(input, apimBackendProtocolTls10) output["enable_backend_tls11"] = parseApiManagementNilableDictionary(input, apimBackendProtocolTls11) - output["enable_frontend_ssl30"] = parseApiManagementNilableDictionary(input, apimFrontendProtocolSsl3) output["enable_frontend_tls10"] = parseApiManagementNilableDictionary(input, apimFrontendProtocolTls10) output["enable_frontend_tls11"] = parseApiManagementNilableDictionary(input, apimFrontendProtocolTls11) - output["enable_triple_des_ciphers"] = parseApiManagementNilableDictionary(input, apimTripleDesCiphers) + + if !skuIsConsumption { + output["enable_frontend_ssl30"] = parseApiManagementNilableDictionary(input, apimFrontendProtocolSsl3) + output["enable_triple_des_ciphers"] = parseApiManagementNilableDictionary(input, apimTripleDesCiphers) + } return []interface{}{output} } @@ -1180,54 +1263,6 @@ func flattenApiManagementVirtualNetworkConfiguration(input *apimanagement.Virtua return []interface{}{virtualNetworkConfiguration} } -func apiManagementResourceHostnameSchema() map[string]*schema.Schema { - return map[string]*schema.Schema{ - "host_name": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - - "key_vault_id": { - Type: schema.TypeString, - Optional: true, - ValidateFunc: azure.ValidateKeyVaultChildIdVersionOptional, - }, - - "certificate": { - Type: schema.TypeString, - Optional: true, - Sensitive: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - - "certificate_password": { - Type: schema.TypeString, - Optional: true, - Sensitive: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - - "negotiate_client_certificate": { - Type: schema.TypeBool, - Optional: true, - Default: false, - }, - } -} - -func apiManagementResourceHostnameProxySchema() map[string]*schema.Schema { - hostnameSchema := apiManagementResourceHostnameSchema() - - hostnameSchema["default_ssl_binding"] = &schema.Schema{ - Type: schema.TypeBool, - Optional: true, - Computed: true, // Azure has certain logic to set this, which we cannot predict - } - - return 
hostnameSchema -} - func parseApiManagementNilableDictionary(input map[string]*string, key string) bool { log.Printf("Parsing value for %q", key) @@ -1360,7 +1395,7 @@ func expandApiManagementPolicies(input []interface{}) (*apimanagement.PolicyCont if xmlContent != "" { return &apimanagement.PolicyContract{ PolicyContractProperties: &apimanagement.PolicyContractProperties{ - Format: apimanagement.XML, + Format: apimanagement.Rawxml, Value: utils.String(xmlContent), }, }, nil diff --git a/azurerm/internal/services/apimanagement/api_management_resource_test.go b/azurerm/internal/services/apimanagement/api_management_resource_test.go new file mode 100644 index 000000000000..493a3e0716e3 --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_resource_test.go @@ -0,0 +1,1241 @@ +package apimanagement_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/apimanagement/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementResource struct { +} + +func TestAccApiManagement_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management", "test") + r := ApiManagementResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagement_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management", "test") + r := ApiManagementResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccApiManagement_customProps(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management", "test") + r := ApiManagementResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.customProps(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("protocols.0.enable_http2").HasValue("false"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagement_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management", "test") + r := ApiManagementResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + ResourceName: data.ResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{ + "certificate", // not returned from API, sensitive + "hostname_configuration.0.portal.0.certificate", // not returned from API, sensitive + "hostname_configuration.0.portal.0.certificate_password", // not returned from API, sensitive + "hostname_configuration.0.developer_portal.0.certificate", // not returned from API, sensitive + 
"hostname_configuration.0.developer_portal.0.certificate_password", // not returned from API, sensitive + "hostname_configuration.0.proxy.0.certificate", // not returned from API, sensitive + "hostname_configuration.0.proxy.0.certificate_password", // not returned from API, sensitive + "hostname_configuration.0.proxy.1.certificate", // not returned from API, sensitive + "hostname_configuration.0.proxy.1.certificate_password", // not returned from API, sensitive + }, + }, + }) +} + +func TestAccApiManagement_signInSignUpSettings(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management", "test") + r := ApiManagementResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.signInSignUpSettings(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagement_policy(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management", "test") + r := ApiManagementResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.policyXmlContent(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.policyXmlLink(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + ResourceName: data.ResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{ + "policy.0.xml_link", + }, + }, + { + Config: r.policyRemoved(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagement_virtualNetworkInternal(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management", "test") + r := ApiManagementResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.virtualNetworkInternal(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("virtual_network_type").HasValue("Internal"), + check.That(data.ResourceName).Key("private_ip_addresses.#").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagement_virtualNetworkInternalUpdate(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management", "test") + r := ApiManagementResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.virtualNetworkInternal(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagement_virtualNetworkInternalAdditionalLocation(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management", "test") + r := ApiManagementResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.virtualNetworkInternalAdditionalLocation(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("virtual_network_type").HasValue("Internal"), + check.That(data.ResourceName).Key("private_ip_addresses.#").Exists(), + check.That(data.ResourceName).Key("additional_location.0.private_ip_addresses.#").Exists(), + ), + }, + data.ImportStep(), + }) +} + +// Api Management doesn't support hostname keyvault using UserAssigned 
Identity +// There will be a inevitable dependency cycle here when using SystemAssigned Identity +// 1. create SystemAssigned Identity, grant the identity certificate access +// 2. Update the hostname configuration of the keyvault certificate +func TestAccApiManagement_identitySystemAssignedUpdateHostnameConfigurationsVersionedKeyVaultId(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management", "test") + r := ApiManagementResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.identitySystemAssignedUpdateHostnameConfigurationsKeyVaultId(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.identitySystemAssignedUpdateHostnameConfigurationsVersionedKeyVaultIdUpdateCD(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagement_identitySystemAssignedUpdateHostnameConfigurationsVersionlessKeyVaultId(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management", "test") + r := ApiManagementResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.identitySystemAssignedUpdateHostnameConfigurationsKeyVaultId(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.identitySystemAssignedUpdateHostnameConfigurationsVersionlessKeyVaultIdUpdateCD(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagement_consumption(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management", "test") + r := ApiManagementResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.consumption(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t ApiManagementResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ApiManagementID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + name := id.ServiceName + + resp, err := clients.ApiManagement.ServiceClient.Get(ctx, resourceGroup, name) + if err != nil { + return nil, fmt.Errorf("reading ApiManagement (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func TestAccApiManagement_identityUserAssigned(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management", "test") + r := ApiManagementResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.identityUserAssigned(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagement_identityNoneUpdateUserAssigned(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management", "test") + r := ApiManagementResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.identityNone(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.identityUserAssigned(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagement_identityUserAssignedUpdateNone(t *testing.T) { + 
data := acceptance.BuildTestData(t, "azurerm_api_management", "test") + r := ApiManagementResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.identityUserAssigned(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.identityNone(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagement_identitySystemAssigned(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management", "test") + r := ApiManagementResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.identitySystemAssigned(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagement_identitySystemAssignedUpdateNone(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management", "test") + r := ApiManagementResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.identitySystemAssigned(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.identityNone(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagement_identityNoneUpdateSystemAssigned(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management", "test") + r := ApiManagementResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.identityNone(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.identitySystemAssigned(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagement_identitySystemAssignedUserAssigned(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management", "test") + r := ApiManagementResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.identitySystemAssignedUserAssigned(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagement_identitySystemAssignedUserAssignedUpdateNone(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management", "test") + r := ApiManagementResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.identitySystemAssignedUserAssigned(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.identityNone(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagement_identityNoneUpdateSystemAssignedUserAssigned(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management", "test") + r := ApiManagementResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.identityNone(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.identitySystemAssignedUserAssigned(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + 
), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagement_identitySystemAssignedUserAssignedUpdateSystemAssigned(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management", "test") + r := ApiManagementResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.identitySystemAssignedUserAssigned(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.identitySystemAssigned(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagement_identitySystemAssignedUserAssignedUpdateUserAssigned(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management", "test") + r := ApiManagementResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.identitySystemAssignedUserAssigned(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.identityUserAssigned(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (ApiManagementResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Developer_1" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (ApiManagementResource) policyXmlContent(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Developer_1" + + policy { + xml_content = <<XML +<policies> + <inbound> + <find-and-replace from="xyz" to="abc" /> + </inbound> +</policies> +XML + + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (ApiManagementResource) policyXmlLink(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Developer_1" + + policy { + xml_link = "https://gist.githubusercontent.com/tombuildsstuff/4f58581599d2c9f64b236f505a361a67/raw/0d29dcb0167af1e5afe4bd52a6d7f69ba1e05e1f/example.xml" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (ApiManagementResource) policyRemoved(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + 
resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Developer_1" + + policy = [] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r ApiManagementResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management" "import" { + name = azurerm_api_management.test.name + location = azurerm_api_management.test.location + resource_group_name = azurerm_api_management.test.resource_group_name + publisher_name = azurerm_api_management.test.publisher_name + publisher_email = azurerm_api_management.test.publisher_email + + sku_name = "Developer_1" +} +`, r.basic(data)) +} + +func (ApiManagementResource) customProps(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Developer_1" + + security { + enable_frontend_tls10 = true + enable_triple_des_ciphers = true + } +} +`, data.RandomInteger, data.Locations.Secondary, data.RandomInteger) +} + +func (ApiManagementResource) signInSignUpSettings(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Developer_1" + + sign_in { + enabled = true + } + + sign_up { + enabled = true + + terms_of_service { + enabled = true + consent_required = false + text = "Lorem Ipsum Dolor Morty" + } + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (ApiManagementResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test1" { + name = "acctestRG-api1-%d" + location = "%s" +} + +resource "azurerm_resource_group" "test2" { + name = "acctestRG-api2-%d" + location = "%s" +} + +resource "azurerm_resource_group" "test3" { + name = "acctestRG-api3-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + publisher_name = "pub1" + publisher_email = "pub1@email.com" + notification_sender_email = "notification@email.com" + + additional_location { + location = azurerm_resource_group.test2.location + } + + additional_location { + location = azurerm_resource_group.test3.location + } + + certificate { + encoded_certificate = filebase64("testdata/api_management_api_test.pfx") + certificate_password = "terraform" + store_name = "CertificateAuthority" + } + + certificate { + encoded_certificate = filebase64("testdata/api_management_api_test.pfx") + certificate_password = "terraform" + store_name = "Root" + } + + protocols { + enable_http2 = true + } + + security { + enable_backend_tls11 = true + enable_backend_ssl30 = true + enable_backend_tls10 = true + enable_frontend_ssl30 = true + enable_frontend_tls10 = true + enable_frontend_tls11 = true + enable_triple_des_ciphers = true + } + + 
hostname_configuration { + proxy { + host_name = "api.terraform.io" + certificate = filebase64("testdata/api_management_api_test.pfx") + certificate_password = "terraform" + default_ssl_binding = true + negotiate_client_certificate = false + } + + proxy { + host_name = "api2.terraform.io" + certificate = filebase64("testdata/api_management_api2_test.pfx") + certificate_password = "terraform" + negotiate_client_certificate = true + } + + portal { + host_name = "portal.terraform.io" + certificate = filebase64("testdata/api_management_portal_test.pfx") + certificate_password = "terraform" + } + + developer_portal { + host_name = "developer-portal.terraform.io" + certificate = filebase64("testdata/api_management_developer_portal_test.pfx") + } + } + + sku_name = "Premium_1" + + tags = { + "Acceptance" = "Test" + } + + location = azurerm_resource_group.test1.location + resource_group_name = azurerm_resource_group.test1.name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.Locations.Secondary, data.RandomInteger, data.Locations.Ternary, data.RandomInteger) +} + +func (ApiManagementResource) virtualNetworkTemplate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%[1]d" + location = "%[2]s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestVNET-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + address_space = ["10.0.0.0/16"] +} + +resource "azurerm_subnet" "test" { + name = "acctestSNET-%[1]d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefixes = ["10.0.1.0/24"] +} + +resource "azurerm_network_security_group" "test" { + name = "acctest-NSG-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet_network_security_group_association" "test" { + subnet_id = azurerm_subnet.test.id + network_security_group_id = azurerm_network_security_group.test.id +} + +resource "azurerm_network_security_rule" "port_3443" { + name = "Port_3443" + priority = 100 + direction = "Inbound" + access = "Allow" + protocol = "Tcp" + source_port_range = "*" + destination_port_range = "3443" + source_address_prefix = "ApiManagement" + destination_address_prefix = "VirtualNetwork" + resource_group_name = azurerm_resource_group.test.name + network_security_group_name = azurerm_network_security_group.test.name +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (r ApiManagementResource) virtualNetworkInternal(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Developer_1" + + virtual_network_type = "Internal" + virtual_network_configuration { + subnet_id = azurerm_subnet.test.id + } +} +`, r.virtualNetworkTemplate(data), data.RandomInteger) +} + +func (r ApiManagementResource) virtualNetworkInternalAdditionalLocation(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_resource_group" "test2" { + name = "acctestRG2-%[2]d" + location = "%[3]s" +} + +// subnet2 from the second location +resource "azurerm_virtual_network" "test2" { + name = 
"acctestVNET2-%[2]d" + location = azurerm_resource_group.test2.location + resource_group_name = azurerm_resource_group.test2.name + address_space = ["10.1.0.0/16"] +} + +resource "azurerm_subnet" "test2" { + name = "acctestSNET2-%[2]d" + resource_group_name = azurerm_resource_group.test2.name + virtual_network_name = azurerm_virtual_network.test2.name + address_prefix = "10.1.1.0/24" +} + +resource "azurerm_network_security_group" "test2" { + name = "acctest-NSG2-%[2]d" + location = azurerm_resource_group.test2.location + resource_group_name = azurerm_resource_group.test2.name +} + +resource "azurerm_subnet_network_security_group_association" "test2" { + subnet_id = azurerm_subnet.test2.id + network_security_group_id = azurerm_network_security_group.test2.id +} + +resource "azurerm_network_security_rule" "port_3443_2" { + name = "Port_3443" + priority = 100 + direction = "Inbound" + access = "Allow" + protocol = "Tcp" + source_port_range = "*" + destination_port_range = "3443" + source_address_prefix = "ApiManagement" + destination_address_prefix = "VirtualNetwork" + resource_group_name = azurerm_resource_group.test2.name + network_security_group_name = azurerm_network_security_group.test2.name +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%[2]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Premium_1" + + additional_location { + location = azurerm_resource_group.test2.location + virtual_network_configuration { + subnet_id = azurerm_subnet.test2.id + } + } + + virtual_network_type = "Internal" + virtual_network_configuration { + subnet_id = azurerm_subnet.test.id + } +} +`, r.virtualNetworkTemplate(data), data.RandomInteger, data.Locations.Secondary) +} + +func (ApiManagementResource) identityUserAssigned(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_user_assigned_identity" "test" { + name = "acctestUAI-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_api_management" "test" { + depends_on = [azurerm_user_assigned_identity.test] + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Developer_1" + + identity { + type = "UserAssigned" + identity_ids = [ + azurerm_user_assigned_identity.test.id, + ] + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (ApiManagementResource) identitySystemAssigned(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" + identity { + type = "SystemAssigned" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (ApiManagementResource) identitySystemAssignedUserAssigned(data acceptance.TestData) string { + return fmt.Sprintf(` 
+provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_user_assigned_identity" "test" { + name = "acctestUAI-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Developer_1" + + identity { + type = "SystemAssigned, UserAssigned" + identity_ids = [ + azurerm_user_assigned_identity.test.id, + ] + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (ApiManagementResource) identityNone(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Developer_1" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (ApiManagementResource) identitySystemAssignedUpdateHostnameConfigurationsTemplate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} +resource "azurerm_resource_group" "test" { + name = "acctestRG-%[1]d" + location = "%[2]s" +} +data "azurerm_client_config" "current" {} +resource "azurerm_key_vault" "test" { + name = "acctestKV-%[4]s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "standard" +} +resource "azurerm_key_vault_access_policy" "test" { + key_vault_id = azurerm_key_vault.test.id + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + certificate_permissions = [ + "Create", + "Delete", + "Deleteissuers", + "Get", + "Getissuers", + "Import", + "List", + "Listissuers", + "Managecontacts", + "Manageissuers", + "Setissuers", + "Update", + ] + secret_permissions = [ + "Delete", + "Get", + "List", + "Purge", + ] +} +resource "azurerm_key_vault_access_policy" "test2" { + key_vault_id = azurerm_key_vault.test.id + tenant_id = azurerm_api_management.test.identity[0].tenant_id + object_id = azurerm_api_management.test.identity[0].principal_id + secret_permissions = [ + "Get", + "List", + ] +} +resource "azurerm_key_vault_certificate" "test" { + depends_on = [azurerm_key_vault_access_policy.test] + name = "acctestKVCert-%[3]d" + key_vault_id = azurerm_key_vault.test.id + certificate_policy { + issuer_parameters { + name = "Self" + } + key_properties { + exportable = true + key_size = 2048 + key_type = "RSA" + reuse_key = true + } + secret_properties { + content_type = "application/x-pkcs12" + } + x509_certificate_properties { + # Server Authentication = 1.3.6.1.5.5.7.3.1 + # Client Authentication = 1.3.6.1.5.5.7.3.2 + extended_key_usage = ["1.3.6.1.5.5.7.3.1"] + key_usage = [ + "cRLSign", + "dataEncipherment", + "digitalSignature", + "keyAgreement", + "keyCertSign", + "keyEncipherment", + ] + subject_alternative_names { + dns_names = ["api.terraform.io"] + } + subject = 
"CN=api.terraform.io" + validity_in_months = 1 + } + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomString) +} +func (r ApiManagementResource) identitySystemAssignedUpdateHostnameConfigurationsKeyVaultId(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" + identity { + type = "SystemAssigned" + } +} +`, r.identitySystemAssignedUpdateHostnameConfigurationsTemplate(data), data.RandomInteger) +} + +func (r ApiManagementResource) identitySystemAssignedUpdateHostnameConfigurationsVersionlessKeyVaultIdUpdateCD(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" + identity { + type = "SystemAssigned" + } + hostname_configuration { + proxy { + host_name = "api.terraform.io" + key_vault_id = "${azurerm_key_vault.test.vault_uri}secrets/${azurerm_key_vault_certificate.test.name}" + default_ssl_binding = true + negotiate_client_certificate = false + } + } +} +`, r.identitySystemAssignedUpdateHostnameConfigurationsTemplate(data), data.RandomInteger) +} + +func (r ApiManagementResource) identitySystemAssignedUpdateHostnameConfigurationsVersionedKeyVaultIdUpdateCD(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" + identity { + type = "SystemAssigned" + } + hostname_configuration { + proxy { + host_name = "api.terraform.io" + key_vault_id = azurerm_key_vault_certificate.test.secret_id + default_ssl_binding = true + negotiate_client_certificate = false + } + } +} +`, r.identitySystemAssignedUpdateHostnameConfigurationsTemplate(data), data.RandomInteger) +} + +func (ApiManagementResource) consumption(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Consumption_0" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/apimanagement/api_management_subscription_resource.go b/azurerm/internal/services/apimanagement/api_management_subscription_resource.go index e279cf5989b8..a27da233f9c4 100644 --- a/azurerm/internal/services/apimanagement/api_management_subscription_resource.go +++ b/azurerm/internal/services/apimanagement/api_management_subscription_resource.go @@ -16,12 +16,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApiManagementSubscription() *schema.Resource { +func resourceApiManagementSubscription() *schema.Resource { return &schema.Resource{ - 
Create: resourceArmApiManagementSubscriptionCreateUpdate, - Read: resourceArmApiManagementSubscriptionRead, - Update: resourceArmApiManagementSubscriptionCreateUpdate, - Delete: resourceArmApiManagementSubscriptionDelete, + Create: resourceApiManagementSubscriptionCreateUpdate, + Read: resourceApiManagementSubscriptionRead, + Update: resourceApiManagementSubscriptionCreateUpdate, + Delete: resourceApiManagementSubscriptionDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -100,7 +100,7 @@ func resourceArmApiManagementSubscription() *schema.Resource { } } -func resourceArmApiManagementSubscriptionCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementSubscriptionCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.SubscriptionsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -162,10 +162,10 @@ func resourceArmApiManagementSubscriptionCreateUpdate(d *schema.ResourceData, me d.SetId(*resp.ID) - return resourceArmApiManagementSubscriptionRead(d, meta) + return resourceApiManagementSubscriptionRead(d, meta) } -func resourceArmApiManagementSubscriptionRead(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementSubscriptionRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.SubscriptionsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -212,7 +212,7 @@ func resourceArmApiManagementSubscriptionRead(d *schema.ResourceData, meta inter return nil } -func resourceArmApiManagementSubscriptionDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementSubscriptionDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.SubscriptionsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/apimanagement/api_management_subscription_resource_test.go b/azurerm/internal/services/apimanagement/api_management_subscription_resource_test.go new file mode 100644 index 000000000000..811d5e218641 --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_subscription_resource_test.go @@ -0,0 +1,243 @@ +package apimanagement_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementSubscriptionResource struct { +} + +func TestAccApiManagementSubscription_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_subscription", "test") + r := ApiManagementSubscriptionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("allow_tracing").HasValue("true"), + check.That(data.ResourceName).Key("subscription_id").Exists(), + 
check.That(data.ResourceName).Key("primary_key").Exists(), + check.That(data.ResourceName).Key("secondary_key").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementSubscription_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_subscription", "test") + r := ApiManagementSubscriptionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("subscription_id").Exists(), + check.That(data.ResourceName).Key("primary_key").Exists(), + check.That(data.ResourceName).Key("secondary_key").Exists(), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccApiManagementSubscription_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_subscription", "test") + r := ApiManagementSubscriptionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.update(data, "submitted", "true"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("state").HasValue("submitted"), + check.That(data.ResourceName).Key("allow_tracing").HasValue("true"), + check.That(data.ResourceName).Key("subscription_id").Exists(), + check.That(data.ResourceName).Key("primary_key").Exists(), + check.That(data.ResourceName).Key("secondary_key").Exists(), + ), + }, + { + Config: r.update(data, "active", "true"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("state").HasValue("active"), + ), + }, + { + Config: r.update(data, "suspended", "true"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("state").HasValue("suspended"), + ), + }, + { + Config: r.update(data, "cancelled", "true"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("state").HasValue("cancelled"), + ), + }, + { + Config: r.update(data, "active", "false"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("allow_tracing").HasValue("false"), + ), + }, + }) +} + +func TestAccApiManagementSubscription_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_subscription", "test") + r := ApiManagementSubscriptionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("state").HasValue("active"), + check.That(data.ResourceName).Key("allow_tracing").HasValue("false"), + check.That(data.ResourceName).Key("subscription_id").Exists(), + check.That(data.ResourceName).Key("primary_key").Exists(), + check.That(data.ResourceName).Key("secondary_key").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func (t ApiManagementSubscriptionResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + serviceName := id.Path["service"] + subscriptionId := id.Path["subscriptions"] + + resp, err := clients.ApiManagement.SubscriptionsClient.Get(ctx, resourceGroup, 
serviceName, subscriptionId) + if err != nil { + return nil, fmt.Errorf("reading ApiManagement Subscription (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (ApiManagementSubscriptionResource) basic(data acceptance.TestData) string { + template := ApiManagementSubscriptionResource{}.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_subscription" "test" { + resource_group_name = azurerm_api_management.test.resource_group_name + api_management_name = azurerm_api_management.test.name + user_id = azurerm_api_management_user.test.id + product_id = azurerm_api_management_product.test.id + display_name = "Butter Parser API Enterprise Edition" +} +`, template) +} + +func (r ApiManagementSubscriptionResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_subscription" "import" { + subscription_id = azurerm_api_management_subscription.test.subscription_id + resource_group_name = azurerm_api_management_subscription.test.resource_group_name + api_management_name = azurerm_api_management_subscription.test.api_management_name + user_id = azurerm_api_management_subscription.test.user_id + product_id = azurerm_api_management_subscription.test.product_id + display_name = azurerm_api_management_subscription.test.display_name +} +`, r.basic(data)) +} + +func (r ApiManagementSubscriptionResource) update(data acceptance.TestData, state string, allow_tracing string) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_subscription" "test" { + resource_group_name = azurerm_api_management.test.resource_group_name + api_management_name = azurerm_api_management.test.name + user_id = azurerm_api_management_user.test.id + product_id = azurerm_api_management_product.test.id + display_name = "Butter Parser API Enterprise Edition" + state = "%s" + allow_tracing = "%s" +} +`, r.template(data), state, allow_tracing) +} + +func (r ApiManagementSubscriptionResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_subscription" "test" { + resource_group_name = azurerm_api_management.test.resource_group_name + api_management_name = azurerm_api_management.test.name + user_id = azurerm_api_management_user.test.id + product_id = azurerm_api_management_product.test.id + display_name = "Butter Parser API Enterprise Edition" + state = "active" + allow_tracing = "false" +} +`, r.template(data)) +} + +func (ApiManagementSubscriptionResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Developer_1" +} + +resource "azurerm_api_management_product" "test" { + product_id = "test-product" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + display_name = "Test Product" + subscription_required = true + approval_required = false + published = true +} + +resource "azurerm_api_management_user" "test" { + user_id = "acctestuser%d" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + first_name = "Acceptance" + last_name 
= "Test" + email = "azure-acctest%d@example.com" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/apimanagement/api_management_user_data_source.go b/azurerm/internal/services/apimanagement/api_management_user_data_source.go index d4ec762cc02c..2aa97cfd123c 100644 --- a/azurerm/internal/services/apimanagement/api_management_user_data_source.go +++ b/azurerm/internal/services/apimanagement/api_management_user_data_source.go @@ -11,9 +11,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmApiManagementUser() *schema.Resource { +func dataSourceApiManagementUser() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmApiManagementUserRead, + Read: dataSourceApiManagementUserRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -54,7 +54,7 @@ func dataSourceArmApiManagementUser() *schema.Resource { } } -func dataSourceArmApiManagementUserRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceApiManagementUserRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.UsersClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/apimanagement/api_management_user_data_source_test.go b/azurerm/internal/services/apimanagement/api_management_user_data_source_test.go new file mode 100644 index 000000000000..f58a7d9609c8 --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_user_data_source_test.go @@ -0,0 +1,71 @@ +package apimanagement_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type ApiManagementUserDataSource struct { +} + +func TestAccDataSourceApiManagementUser_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_api_management_user", "test") + r := ApiManagementUserDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("user_id").HasValue("test-user"), + check.That(data.ResourceName).Key("first_name").HasValue("Acceptance"), + check.That(data.ResourceName).Key("last_name").HasValue("Test"), + check.That(data.ResourceName).Key("email").HasValue(fmt.Sprintf("azure-acctest%d@example.com", data.RandomInteger)), + check.That(data.ResourceName).Key("state").HasValue("active"), + check.That(data.ResourceName).Key("note").HasValue("Used for testing in dimension C-137."), + ), + }, + }) +} + +func (ApiManagementUserDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "amtestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + publisher_name = "pub1" + publisher_email = "pub1@email.com" + sku_name = "Developer_1" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_api_management_user" "test" { + user_id = "test-user" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + 
first_name = "Acceptance" + last_name = "Test" + email = "azure-acctest%d@example.com" + state = "active" + note = "Used for testing in dimension C-137." +} + +data "azurerm_api_management_user" "test" { + user_id = azurerm_api_management_user.test.user_id + api_management_name = azurerm_api_management_user.test.api_management_name + resource_group_name = azurerm_api_management_user.test.resource_group_name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/apimanagement/api_management_user_resource.go b/azurerm/internal/services/apimanagement/api_management_user_resource.go index bd2bd65cf4cf..77e75f917457 100644 --- a/azurerm/internal/services/apimanagement/api_management_user_resource.go +++ b/azurerm/internal/services/apimanagement/api_management_user_resource.go @@ -15,12 +15,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApiManagementUser() *schema.Resource { +func resourceApiManagementUser() *schema.Resource { return &schema.Resource{ - Create: resourceArmApiManagementUserCreateUpdate, - Read: resourceArmApiManagementUserRead, - Update: resourceArmApiManagementUserCreateUpdate, - Delete: resourceArmApiManagementUserDelete, + Create: resourceApiManagementUserCreateUpdate, + Read: resourceApiManagementUserRead, + Update: resourceApiManagementUserCreateUpdate, + Delete: resourceApiManagementUserDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -92,7 +92,7 @@ func resourceArmApiManagementUser() *schema.Resource { } } -func resourceArmApiManagementUserCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementUserCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.UsersClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -161,10 +161,10 @@ func resourceArmApiManagementUserCreateUpdate(d *schema.ResourceData, meta inter d.SetId(*read.ID) - return resourceArmApiManagementUserRead(d, meta) + return resourceApiManagementUserRead(d, meta) } -func resourceArmApiManagementUserRead(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementUserRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.UsersClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -204,7 +204,7 @@ func resourceArmApiManagementUserRead(d *schema.ResourceData, meta interface{}) return nil } -func resourceArmApiManagementUserDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApiManagementUserDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ApiManagement.UsersClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/apimanagement/api_management_user_resource_test.go b/azurerm/internal/services/apimanagement/api_management_user_resource_test.go new file mode 100644 index 000000000000..2c5f1fdbbd8f --- /dev/null +++ b/azurerm/internal/services/apimanagement/api_management_user_resource_test.go @@ -0,0 +1,352 @@ +package apimanagement_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ApiManagementUserResource struct { +} + +func TestAccApiManagementUser_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_user", "test") + r := ApiManagementUserResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("first_name").HasValue("Acceptance"), + check.That(data.ResourceName).Key("last_name").HasValue("Test"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApiManagementUser_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_user", "test") + r := ApiManagementUserResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccApiManagementUser_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_user", "test") + r := ApiManagementUserResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("first_name").HasValue("Acceptance"), + check.That(data.ResourceName).Key("last_name").HasValue("Test"), + check.That(data.ResourceName).Key("state").HasValue("active"), + ), + }, + { + Config: r.updatedBlocked(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("first_name").HasValue("Acceptance Updated"), + check.That(data.ResourceName).Key("last_name").HasValue("Test Updated"), + check.That(data.ResourceName).Key("state").HasValue("blocked"), + ), + }, + { + Config: r.updatedActive(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("first_name").HasValue("Acceptance"), + check.That(data.ResourceName).Key("last_name").HasValue("Test"), + check.That(data.ResourceName).Key("state").HasValue("active"), + ), + }, + }) +} + +func TestAccApiManagementUser_password(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_user", "test") + r := ApiManagementUserResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.password(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("first_name").HasValue("Acceptance"), + check.That(data.ResourceName).Key("last_name").HasValue("Test"), + ), + }, + { + ResourceName: data.ResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"password"}, + }, + }) +} + +func TestAccApiManagementUser_invite(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_user", "test") + r := ApiManagementUserResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: 
r.invited(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + ResourceName: data.ResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{ + // not returned + "confirmation", + }, + }, + }) +} + +func TestAccApiManagementUser_signup(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_user", "test") + r := ApiManagementUserResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.signUp(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + ResourceName: data.ResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{ + // not returned + "confirmation", + }, + }, + }) +} + +func TestAccApiManagementUser_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_api_management_user", "test") + r := ApiManagementUserResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("first_name").HasValue("Acceptance"), + check.That(data.ResourceName).Key("last_name").HasValue("Test"), + check.That(data.ResourceName).Key("note").HasValue("Used for testing in dimension C-137."), + ), + }, + { + ResourceName: data.ResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{ + // not returned + "confirmation", + }, + }, + }) +} + +func (t ApiManagementUserResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + serviceName := id.Path["service"] + userId := id.Path["users"] + + resp, err := clients.ApiManagement.UsersClient.Get(ctx, resourceGroup, serviceName, userId) + if err != nil { + return nil, fmt.Errorf("reading ApiManagement User (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (ApiManagementUserResource) basic(data acceptance.TestData) string { + template := ApiManagementUserResource{}.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_user" "test" { + user_id = "acctestuser%d" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + first_name = "Acceptance" + last_name = "Test" + email = "azure-acctest%d@example.com" +} +`, template, data.RandomInteger, data.RandomInteger) +} + +func (r ApiManagementUserResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_user" "import" { + user_id = azurerm_api_management_user.test.user_id + api_management_name = azurerm_api_management_user.test.api_management_name + resource_group_name = azurerm_api_management_user.test.resource_group_name + first_name = azurerm_api_management_user.test.first_name + last_name = azurerm_api_management_user.test.last_name + email = azurerm_api_management_user.test.email + state = azurerm_api_management_user.test.state +} +`, r.basic(data)) +} + +func (r ApiManagementUserResource) password(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_user" "test" { + user_id = "acctestuser%d" + api_management_name = azurerm_api_management.test.name + resource_group_name = 
azurerm_resource_group.test.name + first_name = "Acceptance" + last_name = "Test" + email = "azure-acctest%d@example.com" + state = "active" + password = "3991bb15-282d-4b9b-9de3-3d5fc89eb530" +} +`, r.template(data), data.RandomInteger, data.RandomInteger) +} + +func (r ApiManagementUserResource) updatedActive(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_user" "test" { + user_id = "acctestuser%d" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + first_name = "Acceptance" + last_name = "Test" + email = "azure-acctest%d@example.com" + state = "active" +} +`, r.template(data), data.RandomInteger, data.RandomInteger) +} + +func (r ApiManagementUserResource) updatedBlocked(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_user" "test" { + user_id = "acctestuser%d" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + first_name = "Acceptance Updated" + last_name = "Test Updated" + email = "azure-acctest%d@example.com" + state = "blocked" +} +`, r.template(data), data.RandomInteger, data.RandomInteger) +} + +func (r ApiManagementUserResource) invited(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_user" "test" { + user_id = "acctestuser%d" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + first_name = "Acceptance" + last_name = "Test User" + email = "azure-acctest%d@example.com" + state = "blocked" + confirmation = "invite" +} +`, r.template(data), data.RandomInteger, data.RandomInteger) +} + +func (r ApiManagementUserResource) signUp(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_user" "test" { + user_id = "acctestuser%d" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + first_name = "Acceptance" + last_name = "Test User" + email = "azure-acctest%d@example.com" + state = "blocked" + confirmation = "signup" +} +`, r.template(data), data.RandomInteger, data.RandomInteger) +} + +func (r ApiManagementUserResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_api_management_user" "test" { + user_id = "acctestuser%d" + api_management_name = azurerm_api_management.test.name + resource_group_name = azurerm_resource_group.test.name + first_name = "Acceptance" + last_name = "Test" + email = "azure-acctest%d@example.com" + state = "active" + confirmation = "signup" + note = "Used for testing in dimension C-137." 
+} +`, r.template(data), data.RandomInteger, data.RandomInteger) +} + +func (ApiManagementUserResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_api_management" "test" { + name = "acctestAM-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + publisher_name = "pub1" + publisher_email = "pub1@email.com" + + sku_name = "Developer_1" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/apimanagement/tests/diff_suppress_test.go b/azurerm/internal/services/apimanagement/diff_suppress_test.go similarity index 99% rename from azurerm/internal/services/apimanagement/tests/diff_suppress_test.go rename to azurerm/internal/services/apimanagement/diff_suppress_test.go index 93c25aaafa4e..6e58c41d5f80 100644 --- a/azurerm/internal/services/apimanagement/tests/diff_suppress_test.go +++ b/azurerm/internal/services/apimanagement/diff_suppress_test.go @@ -1,4 +1,4 @@ -package tests +package apimanagement_test import ( "log" diff --git a/azurerm/internal/services/apimanagement/parse/api_diagnostic.go b/azurerm/internal/services/apimanagement/parse/api_diagnostic.go new file mode 100644 index 000000000000..a6b60667f93e --- /dev/null +++ b/azurerm/internal/services/apimanagement/parse/api_diagnostic.go @@ -0,0 +1,81 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ApiDiagnosticId struct { + SubscriptionId string + ResourceGroup string + ServiceName string + ApiName string + DiagnosticName string +} + +func NewApiDiagnosticID(subscriptionId, resourceGroup, serviceName, apiName, diagnosticName string) ApiDiagnosticId { + return ApiDiagnosticId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ServiceName: serviceName, + ApiName: apiName, + DiagnosticName: diagnosticName, + } +} + +func (id ApiDiagnosticId) String() string { + segments := []string{ + fmt.Sprintf("Diagnostic Name %q", id.DiagnosticName), + fmt.Sprintf("Api Name %q", id.ApiName), + fmt.Sprintf("Service Name %q", id.ServiceName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Api Diagnostic", segmentsStr) +} + +func (id ApiDiagnosticId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.ApiManagement/service/%s/apis/%s/diagnostics/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ServiceName, id.ApiName, id.DiagnosticName) +} + +// ApiDiagnosticID parses a ApiDiagnostic ID into an ApiDiagnosticId struct +func ApiDiagnosticID(input string) (*ApiDiagnosticId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ApiDiagnosticId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ServiceName, err = id.PopSegment("service"); err != nil { + return nil, err + } + if 
resourceId.ApiName, err = id.PopSegment("apis"); err != nil { + return nil, err + } + if resourceId.DiagnosticName, err = id.PopSegment("diagnostics"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/apimanagement/parse/api_diagnostic_id.go b/azurerm/internal/services/apimanagement/parse/api_diagnostic_id.go deleted file mode 100644 index 3fa26e480ae1..000000000000 --- a/azurerm/internal/services/apimanagement/parse/api_diagnostic_id.go +++ /dev/null @@ -1,43 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type ApiManagementApiDiagnosticId struct { - ResourceGroup string - ServiceName string - ApiName string - Name string -} - -func ApiManagementApiDiagnosticID(input string) (*ApiManagementApiDiagnosticId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("parsing Api Management Diagnostic ID %q: %+v", input, err) - } - - diagnostic := ApiManagementApiDiagnosticId{ - ResourceGroup: id.ResourceGroup, - } - - if diagnostic.ServiceName, err = id.PopSegment("service"); err != nil { - return nil, err - } - - if diagnostic.ApiName, err = id.PopSegment("apis"); err != nil { - return nil, err - } - - if diagnostic.Name, err = id.PopSegment("diagnostics"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &diagnostic, nil -} diff --git a/azurerm/internal/services/apimanagement/parse/api_diagnostic_id_test.go b/azurerm/internal/services/apimanagement/parse/api_diagnostic_id_test.go deleted file mode 100644 index d748c67d0846..000000000000 --- a/azurerm/internal/services/apimanagement/parse/api_diagnostic_id_test.go +++ /dev/null @@ -1,111 +0,0 @@ -package parse - -import "testing" - -func TestApiManagementApiDiagnosticID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *ApiManagementApiDiagnosticId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Service Name", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/", - Expected: nil, - }, - { - Name: "Missing APIs", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1", - Expected: nil, - }, - { - Name: "Missing APIs Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/apis", - Expected: nil, - }, - { - Name: "Missing Diagnostics", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/apis/api1", - Expected: nil, - }, - { - Name: "Missing Diagnostics Value", - Input: 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/apis/api1/diagnostics", - Expected: nil, - }, - { - Name: "Diagnostic ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/apis/api1/diagnostics/applicationinsights", - Expected: &ApiManagementApiDiagnosticId{ - Name: "applicationinsights", - ApiName: "api1", - ServiceName: "service1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/APIs/api1/diagnostics/applicationinsights", - Expected: nil, - }, - { - Name: "From ACC test", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/acctestRG-200904094049117016/providers/Microsoft.ApiManagement/service/acctestAM-200904094049117016/apis/acctestAMA-200904094049117016/diagnostics/applicationinsights", - Expected: &ApiManagementApiDiagnosticId{ - Name: "applicationinsights", - ApiName: "acctestAMA-200904094049117016", - ServiceName: "acctestAM-200904094049117016", - ResourceGroup: "acctestRG-200904094049117016", - }, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := ApiManagementApiDiagnosticID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ApiName != v.Expected.ApiName { - t.Fatalf("Expected %q but got %q for API Name", v.Expected.ApiName, actual.ApiName) - } - - if actual.ServiceName != v.Expected.ServiceName { - t.Fatalf("Expected %q but got %q for Service Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/apimanagement/parse/api_diagnostic_test.go b/azurerm/internal/services/apimanagement/parse/api_diagnostic_test.go new file mode 100644 index 000000000000..629523f42518 --- /dev/null +++ b/azurerm/internal/services/apimanagement/parse/api_diagnostic_test.go @@ -0,0 +1,144 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ApiDiagnosticId{} + +func TestApiDiagnosticIDFormatter(t *testing.T) { + actual := NewApiDiagnosticID("12345678-1234-9876-4563-123456789012", "resGroup1", "service1", "api1", "diagnostic1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/apis/api1/diagnostics/diagnostic1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestApiDiagnosticID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ApiDiagnosticId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/", + Error: true, + }, + + { + // missing value for ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/", + Error: true, + }, + + { + // missing ApiName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/", + Error: true, + }, + + { + // missing value for ApiName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/apis/", + Error: true, + }, + + { + // missing DiagnosticName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/apis/api1/", + Error: true, + }, + + { + // missing value for DiagnosticName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/apis/api1/diagnostics/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/apis/api1/diagnostics/diagnostic1", + Expected: &ApiDiagnosticId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + ServiceName: "service1", + ApiName: "api1", + DiagnosticName: "diagnostic1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.APIMANAGEMENT/SERVICE/SERVICE1/APIS/API1/DIAGNOSTICS/DIAGNOSTIC1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ApiDiagnosticID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ServiceName != v.Expected.ServiceName { + t.Fatalf("Expected %q but got %q for ServiceName", v.Expected.ServiceName, actual.ServiceName) + } + if actual.ApiName != v.Expected.ApiName { + t.Fatalf("Expected %q but got %q for ApiName", v.Expected.ApiName, actual.ApiName) + } + if actual.DiagnosticName != v.Expected.DiagnosticName { + t.Fatalf("Expected %q but got %q for DiagnosticName", v.Expected.DiagnosticName, actual.DiagnosticName) + } + } +} diff --git a/azurerm/internal/services/apimanagement/parse/api_management.go b/azurerm/internal/services/apimanagement/parse/api_management.go new file mode 100644 index 000000000000..b418790e767c --- /dev/null +++ b/azurerm/internal/services/apimanagement/parse/api_management.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ApiManagementId struct { + SubscriptionId string + ResourceGroup string + ServiceName string +} + +func NewApiManagementID(subscriptionId, resourceGroup, serviceName string) ApiManagementId { + return ApiManagementId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ServiceName: serviceName, + } +} + +func (id ApiManagementId) String() string { + segments := []string{ + fmt.Sprintf("Service Name %q", id.ServiceName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Api Management", segmentsStr) +} + +func (id ApiManagementId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.ApiManagement/service/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ServiceName) +} + +// ApiManagementID parses a ApiManagement ID into an ApiManagementId struct +func ApiManagementID(input string) (*ApiManagementId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ApiManagementId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ServiceName, err = id.PopSegment("service"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/apimanagement/parse/api_management_test.go b/azurerm/internal/services/apimanagement/parse/api_management_test.go new file mode 100644 index 000000000000..c70c19fdc6b1 --- /dev/null +++ b/azurerm/internal/services/apimanagement/parse/api_management_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ApiManagementId{} + +func TestApiManagementIDFormatter(t *testing.T) { + actual := NewApiManagementID("12345678-1234-9876-4563-123456789012", "resGroup1", "service1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestApiManagementID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ApiManagementId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/", + Error: true, + }, + + { + // missing value for ServiceName + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1", + Expected: &ApiManagementId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + ServiceName: "service1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.APIMANAGEMENT/SERVICE/SERVICE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ApiManagementID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ServiceName != v.Expected.ServiceName { + t.Fatalf("Expected %q but got %q for ServiceName", v.Expected.ServiceName, actual.ServiceName) + } + } +} diff --git a/azurerm/internal/services/apimanagement/parse/api_version_set.go b/azurerm/internal/services/apimanagement/parse/api_version_set.go new file mode 100644 index 000000000000..988bc2b870b0 --- /dev/null +++ b/azurerm/internal/services/apimanagement/parse/api_version_set.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ApiVersionSetId struct { + SubscriptionId string + ResourceGroup string + ServiceName string + Name string +} + +func NewApiVersionSetID(subscriptionId, resourceGroup, serviceName, name string) ApiVersionSetId { + return ApiVersionSetId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ServiceName: serviceName, + Name: name, + } +} + +func (id ApiVersionSetId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Service Name %q", id.ServiceName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Api Version Set", segmentsStr) +} + +func (id ApiVersionSetId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.ApiManagement/service/%s/apiVersionSets/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ServiceName, id.Name) +} + +// ApiVersionSetID parses a ApiVersionSet ID into an ApiVersionSetId struct +func ApiVersionSetID(input string) (*ApiVersionSetId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ApiVersionSetId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ServiceName, err = id.PopSegment("service"); err != nil { + 
return nil, err + } + if resourceId.Name, err = id.PopSegment("apiVersionSets"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/apimanagement/parse/api_version_set_id.go b/azurerm/internal/services/apimanagement/parse/api_version_set_id.go deleted file mode 100644 index 4341ae7a29e0..000000000000 --- a/azurerm/internal/services/apimanagement/parse/api_version_set_id.go +++ /dev/null @@ -1,34 +0,0 @@ -package parse - -import "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - -type ApiVersionSetId struct { - ResourceGroup string - ServiceName string - Name string -} - -func APIVersionSetID(input string) (*ApiVersionSetId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, err - } - - logger := ApiVersionSetId{ - ResourceGroup: id.ResourceGroup, - } - - if logger.ServiceName, err = id.PopSegment("service"); err != nil { - return nil, err - } - - if logger.Name, err = id.PopSegment("apiVersionSets"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &logger, nil -} diff --git a/azurerm/internal/services/apimanagement/parse/api_version_set_id_test.go b/azurerm/internal/services/apimanagement/parse/api_version_set_id_test.go deleted file mode 100644 index ec41e6e8fca0..000000000000 --- a/azurerm/internal/services/apimanagement/parse/api_version_set_id_test.go +++ /dev/null @@ -1,91 +0,0 @@ -package parse - -import "testing" - -func TestApiVersionSetID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *ApiVersionSetId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Service Name", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/", - Expected: nil, - }, - { - Name: "Missing Diagnostic", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1", - Expected: nil, - }, - { - Name: "Missing Diagnostic Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/apiVersionSets", - Expected: nil, - }, - { - Name: "Diagnostic ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/apiVersionSets/set1", - Expected: &ApiVersionSetId{ - Name: "set1", - ServiceName: "service1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/ApiVersionSets/set1", - Expected: nil, - }, - { - Name: "Legacy ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/api-version-sets/set1", - Expected: nil, - }, - 
} - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := APIVersionSetID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ServiceName != v.Expected.ServiceName { - t.Fatalf("Expected %q but got %q for Service Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/apimanagement/parse/api_version_set_test.go b/azurerm/internal/services/apimanagement/parse/api_version_set_test.go new file mode 100644 index 000000000000..5462ebabb042 --- /dev/null +++ b/azurerm/internal/services/apimanagement/parse/api_version_set_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ApiVersionSetId{} + +func TestApiVersionSetIDFormatter(t *testing.T) { + actual := NewApiVersionSetID("12345678-1234-9876-4563-123456789012", "resGroup1", "service1", "apiVersionSet1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/apiVersionSets/apiVersionSet1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestApiVersionSetID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ApiVersionSetId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/", + Error: true, + }, + + { + // missing value for ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/apiVersionSets/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/apiVersionSets/apiVersionSet1", + Expected: &ApiVersionSetId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + ServiceName: "service1", + Name: "apiVersionSet1", + }, + }, + + { + // upper-cased + Input: 
"/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.APIMANAGEMENT/SERVICE/SERVICE1/APIVERSIONSETS/APIVERSIONSET1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ApiVersionSetID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ServiceName != v.Expected.ServiceName { + t.Fatalf("Expected %q but got %q for ServiceName", v.Expected.ServiceName, actual.ServiceName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/apimanagement/parse/custom_domain.go b/azurerm/internal/services/apimanagement/parse/custom_domain.go new file mode 100644 index 000000000000..7bd9ac7e1a76 --- /dev/null +++ b/azurerm/internal/services/apimanagement/parse/custom_domain.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type CustomDomainId struct { + SubscriptionId string + ResourceGroup string + ServiceName string + Name string +} + +func NewCustomDomainID(subscriptionId, resourceGroup, serviceName, name string) CustomDomainId { + return CustomDomainId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ServiceName: serviceName, + Name: name, + } +} + +func (id CustomDomainId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Service Name %q", id.ServiceName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Custom Domain", segmentsStr) +} + +func (id CustomDomainId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.ApiManagement/service/%s/customDomains/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ServiceName, id.Name) +} + +// CustomDomainID parses a CustomDomain ID into an CustomDomainId struct +func CustomDomainID(input string) (*CustomDomainId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := CustomDomainId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ServiceName, err = id.PopSegment("service"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("customDomains"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/apimanagement/parse/custom_domain_test.go 
b/azurerm/internal/services/apimanagement/parse/custom_domain_test.go new file mode 100644 index 000000000000..b71c29c098ad --- /dev/null +++ b/azurerm/internal/services/apimanagement/parse/custom_domain_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = CustomDomainId{} + +func TestCustomDomainIDFormatter(t *testing.T) { + actual := NewCustomDomainID("12345678-1234-9876-4563-123456789012", "resGroup1", "service1", "customdomain").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/customDomains/customdomain" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestCustomDomainID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *CustomDomainId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/", + Error: true, + }, + + { + // missing value for ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/customDomains/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/customDomains/customdomain", + Expected: &CustomDomainId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + ServiceName: "service1", + Name: "customdomain", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.APIMANAGEMENT/SERVICE/SERVICE1/CUSTOMDOMAINS/CUSTOMDOMAIN", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := CustomDomainID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ServiceName != v.Expected.ServiceName { + 
t.Fatalf("Expected %q but got %q for ServiceName", v.Expected.ServiceName, actual.ServiceName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/apimanagement/parse/diagnostic.go b/azurerm/internal/services/apimanagement/parse/diagnostic.go new file mode 100644 index 000000000000..f04aac4f5c70 --- /dev/null +++ b/azurerm/internal/services/apimanagement/parse/diagnostic.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type DiagnosticId struct { + SubscriptionId string + ResourceGroup string + ServiceName string + Name string +} + +func NewDiagnosticID(subscriptionId, resourceGroup, serviceName, name string) DiagnosticId { + return DiagnosticId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ServiceName: serviceName, + Name: name, + } +} + +func (id DiagnosticId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Service Name %q", id.ServiceName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Diagnostic", segmentsStr) +} + +func (id DiagnosticId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.ApiManagement/service/%s/diagnostics/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ServiceName, id.Name) +} + +// DiagnosticID parses a Diagnostic ID into an DiagnosticId struct +func DiagnosticID(input string) (*DiagnosticId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := DiagnosticId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ServiceName, err = id.PopSegment("service"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("diagnostics"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/apimanagement/parse/diagnostic_id.go b/azurerm/internal/services/apimanagement/parse/diagnostic_id.go deleted file mode 100644 index 553489d3b997..000000000000 --- a/azurerm/internal/services/apimanagement/parse/diagnostic_id.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type ApiManagementDiagnosticId struct { - ResourceGroup string - ServiceName string - Name string -} - -func ApiManagementDiagnosticID(input string) (*ApiManagementDiagnosticId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("parsing Api Management Diagnostic ID %q: %+v", input, err) - } - - diagnostic := ApiManagementDiagnosticId{ - ResourceGroup: id.ResourceGroup, - } - - if diagnostic.ServiceName, err = id.PopSegment("service"); err != nil { - return nil, err - } - - if diagnostic.Name, err = id.PopSegment("diagnostics"); err != nil { - return nil, 
err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &diagnostic, nil -} diff --git a/azurerm/internal/services/apimanagement/parse/diagnostic_id_test.go b/azurerm/internal/services/apimanagement/parse/diagnostic_id_test.go deleted file mode 100644 index d18ba80b06fe..000000000000 --- a/azurerm/internal/services/apimanagement/parse/diagnostic_id_test.go +++ /dev/null @@ -1,86 +0,0 @@ -package parse - -import "testing" - -func TestApiManagementDiagnosticID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *ApiManagementDiagnosticId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Service Name", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/", - Expected: nil, - }, - { - Name: "Missing Diagnostic", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1", - Expected: nil, - }, - { - Name: "Missing Diagnostic Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/diagnostics", - Expected: nil, - }, - { - Name: "Diagnostic ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/diagnostics/diagnostic1", - Expected: &ApiManagementDiagnosticId{ - Name: "diagnostic1", - ServiceName: "service1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/Diagnostics/diagnostic1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := ApiManagementDiagnosticID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ServiceName != v.Expected.ServiceName { - t.Fatalf("Expected %q but got %q for Service Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/apimanagement/parse/diagnostic_test.go b/azurerm/internal/services/apimanagement/parse/diagnostic_test.go new file mode 100644 index 000000000000..dc97b03872f9 --- /dev/null +++ b/azurerm/internal/services/apimanagement/parse/diagnostic_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = DiagnosticId{} + +func TestDiagnosticIDFormatter(t *testing.T) { + actual := 
NewDiagnosticID("12345678-1234-9876-4563-123456789012", "resGroup1", "service1", "diagnostic1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/diagnostics/diagnostic1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestDiagnosticID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *DiagnosticId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/", + Error: true, + }, + + { + // missing value for ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/diagnostics/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/diagnostics/diagnostic1", + Expected: &DiagnosticId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + ServiceName: "service1", + Name: "diagnostic1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.APIMANAGEMENT/SERVICE/SERVICE1/DIAGNOSTICS/DIAGNOSTIC1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := DiagnosticID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ServiceName != v.Expected.ServiceName { + t.Fatalf("Expected %q but got %q for ServiceName", v.Expected.ServiceName, actual.ServiceName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/apimanagement/parse/logger.go b/azurerm/internal/services/apimanagement/parse/logger.go new file mode 100644 index 000000000000..5abbeef85cb8 --- /dev/null +++ b/azurerm/internal/services/apimanagement/parse/logger.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be 
overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type LoggerId struct { + SubscriptionId string + ResourceGroup string + ServiceName string + Name string +} + +func NewLoggerID(subscriptionId, resourceGroup, serviceName, name string) LoggerId { + return LoggerId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ServiceName: serviceName, + Name: name, + } +} + +func (id LoggerId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Service Name %q", id.ServiceName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Logger", segmentsStr) +} + +func (id LoggerId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.ApiManagement/service/%s/loggers/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ServiceName, id.Name) +} + +// LoggerID parses a Logger ID into an LoggerId struct +func LoggerID(input string) (*LoggerId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := LoggerId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ServiceName, err = id.PopSegment("service"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("loggers"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/apimanagement/parse/logger_id.go b/azurerm/internal/services/apimanagement/parse/logger_id.go deleted file mode 100644 index 58f5b8648adf..000000000000 --- a/azurerm/internal/services/apimanagement/parse/logger_id.go +++ /dev/null @@ -1,36 +0,0 @@ -package parse - -import ( - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type ApiManagementLoggerId struct { - ResourceGroup string - ServiceName string - Name string -} - -func ApiManagementLoggerID(input string) (*ApiManagementLoggerId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, err - } - - logger := ApiManagementLoggerId{ - ResourceGroup: id.ResourceGroup, - } - - if logger.ServiceName, err = id.PopSegment("service"); err != nil { - return nil, err - } - - if logger.Name, err = id.PopSegment("loggers"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &logger, nil -} diff --git a/azurerm/internal/services/apimanagement/parse/logger_id_test.go b/azurerm/internal/services/apimanagement/parse/logger_id_test.go deleted file mode 100644 index bc01b629dcbb..000000000000 --- a/azurerm/internal/services/apimanagement/parse/logger_id_test.go +++ /dev/null @@ -1,86 +0,0 @@ -package parse - -import "testing" - -func TestApiManagementLoggerID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *ApiManagementLoggerId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - 
{ - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Service Name", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/", - Expected: nil, - }, - { - Name: "Missing Logger", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1", - Expected: nil, - }, - { - Name: "Missing Logger Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/loggers", - Expected: nil, - }, - { - Name: "Logger ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/loggers/logger1", - Expected: &ApiManagementLoggerId{ - Name: "logger1", - ServiceName: "service1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/Loggers/logger1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := ApiManagementLoggerID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ServiceName != v.Expected.ServiceName { - t.Fatalf("Expected %q but got %q for Service Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/apimanagement/parse/logger_test.go b/azurerm/internal/services/apimanagement/parse/logger_test.go new file mode 100644 index 000000000000..f7e85d4ffeb0 --- /dev/null +++ b/azurerm/internal/services/apimanagement/parse/logger_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = LoggerId{} + +func TestLoggerIDFormatter(t *testing.T) { + actual := NewLoggerID("12345678-1234-9876-4563-123456789012", "resGroup1", "service1", "logger1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/loggers/logger1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestLoggerID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *LoggerId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/", + Error: true, + }, + + { + // missing value for ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/loggers/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/loggers/logger1", + Expected: &LoggerId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + ServiceName: "service1", + Name: "logger1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.APIMANAGEMENT/SERVICE/SERVICE1/LOGGERS/LOGGER1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := LoggerID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ServiceName != v.Expected.ServiceName { + t.Fatalf("Expected %q but got %q for ServiceName", v.Expected.ServiceName, actual.ServiceName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/apimanagement/parse/policy.go b/azurerm/internal/services/apimanagement/parse/policy.go new file mode 100644 index 000000000000..fede00a0878a --- /dev/null +++ b/azurerm/internal/services/apimanagement/parse/policy.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type PolicyId struct { + SubscriptionId string + ResourceGroup string + ServiceName string + Name string +} + +func NewPolicyID(subscriptionId, resourceGroup, serviceName, name string) PolicyId { + return PolicyId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ServiceName: serviceName, + Name: name, + } +} + +func (id PolicyId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Service Name %q", id.ServiceName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Policy", segmentsStr) +} + +func (id PolicyId) ID() string { + fmtString := 
"/subscriptions/%s/resourceGroups/%s/providers/Microsoft.ApiManagement/service/%s/policies/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ServiceName, id.Name) +} + +// PolicyID parses a Policy ID into an PolicyId struct +func PolicyID(input string) (*PolicyId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := PolicyId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ServiceName, err = id.PopSegment("service"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("policies"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/apimanagement/parse/policy_test.go b/azurerm/internal/services/apimanagement/parse/policy_test.go new file mode 100644 index 000000000000..5ba54a3a2d28 --- /dev/null +++ b/azurerm/internal/services/apimanagement/parse/policy_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = PolicyId{} + +func TestPolicyIDFormatter(t *testing.T) { + actual := NewPolicyID("12345678-1234-9876-4563-123456789012", "resGroup1", "service1", "policy1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/policies/policy1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestPolicyID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *PolicyId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/", + Error: true, + }, + + { + // missing value for ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/policies/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/policies/policy1", + Expected: &PolicyId{ + SubscriptionId: 
"12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + ServiceName: "service1", + Name: "policy1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.APIMANAGEMENT/SERVICE/SERVICE1/POLICIES/POLICY1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := PolicyID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ServiceName != v.Expected.ServiceName { + t.Fatalf("Expected %q but got %q for ServiceName", v.Expected.ServiceName, actual.ServiceName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/apimanagement/registration.go b/azurerm/internal/services/apimanagement/registration.go index 1c1160925d1e..c768ce88c945 100644 --- a/azurerm/internal/services/apimanagement/registration.go +++ b/azurerm/internal/services/apimanagement/registration.go @@ -26,41 +26,43 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource { "azurerm_api_management_api_version_set": dataSourceApiManagementApiVersionSet(), "azurerm_api_management_group": dataSourceApiManagementGroup(), "azurerm_api_management_product": dataSourceApiManagementProduct(), - "azurerm_api_management_user": dataSourceArmApiManagementUser(), + "azurerm_api_management_user": dataSourceApiManagementUser(), } } // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_api_management": resourceArmApiManagementService(), - "azurerm_api_management_api": resourceArmApiManagementApi(), - "azurerm_api_management_api_diagnostic": resourceArmApiManagementApiDiagnostic(), - "azurerm_api_management_api_operation": resourceArmApiManagementApiOperation(), - "azurerm_api_management_api_operation_policy": resourceArmApiManagementApiOperationPolicy(), - "azurerm_api_management_api_policy": resourceArmApiManagementApiPolicy(), - "azurerm_api_management_api_schema": resourceArmApiManagementApiSchema(), - "azurerm_api_management_api_version_set": resourceArmApiManagementApiVersionSet(), - "azurerm_api_management_authorization_server": resourceArmApiManagementAuthorizationServer(), - "azurerm_api_management_backend": resourceArmApiManagementBackend(), - "azurerm_api_management_certificate": resourceArmApiManagementCertificate(), - "azurerm_api_management_diagnostic": resourceArmApiManagementDiagnostic(), - "azurerm_api_management_group": resourceArmApiManagementGroup(), - "azurerm_api_management_group_user": resourceArmApiManagementGroupUser(), - "azurerm_api_management_identity_provider_aad": resourceArmApiManagementIdentityProviderAAD(), - "azurerm_api_management_identity_provider_facebook": resourceArmApiManagementIdentityProviderFacebook(), - "azurerm_api_management_identity_provider_google": resourceArmApiManagementIdentityProviderGoogle(), - 
"azurerm_api_management_identity_provider_microsoft": resourceArmApiManagementIdentityProviderMicrosoft(), - "azurerm_api_management_identity_provider_twitter": resourceArmApiManagementIdentityProviderTwitter(), - "azurerm_api_management_logger": resourceArmApiManagementLogger(), - "azurerm_api_management_named_value": resourceArmApiManagementNamedValue(), - "azurerm_api_management_openid_connect_provider": resourceArmApiManagementOpenIDConnectProvider(), - "azurerm_api_management_product": resourceArmApiManagementProduct(), - "azurerm_api_management_product_api": resourceArmApiManagementProductApi(), - "azurerm_api_management_product_group": resourceArmApiManagementProductGroup(), - "azurerm_api_management_product_policy": resourceArmApiManagementProductPolicy(), - "azurerm_api_management_property": resourceArmApiManagementProperty(), - "azurerm_api_management_subscription": resourceArmApiManagementSubscription(), - "azurerm_api_management_user": resourceArmApiManagementUser(), + "azurerm_api_management": resourceApiManagementService(), + "azurerm_api_management_api": resourceApiManagementApi(), + "azurerm_api_management_api_diagnostic": resourceApiManagementApiDiagnostic(), + "azurerm_api_management_api_operation": resourceApiManagementApiOperation(), + "azurerm_api_management_api_operation_policy": resourceApiManagementApiOperationPolicy(), + "azurerm_api_management_api_policy": resourceApiManagementApiPolicy(), + "azurerm_api_management_api_schema": resourceApiManagementApiSchema(), + "azurerm_api_management_api_version_set": resourceApiManagementApiVersionSet(), + "azurerm_api_management_authorization_server": resourceApiManagementAuthorizationServer(), + "azurerm_api_management_backend": resourceApiManagementBackend(), + "azurerm_api_management_certificate": resourceApiManagementCertificate(), + "azurerm_api_management_custom_domain": resourceApiManagementCustomDomain(), + "azurerm_api_management_diagnostic": resourceApiManagementDiagnostic(), + "azurerm_api_management_group": resourceApiManagementGroup(), + "azurerm_api_management_group_user": resourceApiManagementGroupUser(), + "azurerm_api_management_identity_provider_aad": resourceApiManagementIdentityProviderAAD(), + "azurerm_api_management_identity_provider_facebook": resourceApiManagementIdentityProviderFacebook(), + "azurerm_api_management_identity_provider_google": resourceApiManagementIdentityProviderGoogle(), + "azurerm_api_management_identity_provider_microsoft": resourceApiManagementIdentityProviderMicrosoft(), + "azurerm_api_management_identity_provider_twitter": resourceApiManagementIdentityProviderTwitter(), + "azurerm_api_management_logger": resourceApiManagementLogger(), + "azurerm_api_management_named_value": resourceApiManagementNamedValue(), + "azurerm_api_management_openid_connect_provider": resourceApiManagementOpenIDConnectProvider(), + "azurerm_api_management_policy": resourceApiManagementPolicy(), + "azurerm_api_management_product": resourceApiManagementProduct(), + "azurerm_api_management_product_api": resourceApiManagementProductApi(), + "azurerm_api_management_product_group": resourceApiManagementProductGroup(), + "azurerm_api_management_product_policy": resourceApiManagementProductPolicy(), + "azurerm_api_management_property": resourceApiManagementProperty(), + "azurerm_api_management_subscription": resourceApiManagementSubscription(), + "azurerm_api_management_user": resourceApiManagementUser(), } } diff --git a/azurerm/internal/services/apimanagement/resourceids.go 
new file mode 100644
index 000000000000..f08c896a1361
--- /dev/null
+++ b/azurerm/internal/services/apimanagement/resourceids.go
@@ -0,0 +1,9 @@
+package apimanagement
+
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=ApiDiagnostic -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/apis/api1/diagnostics/diagnostic1
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=ApiManagement -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=ApiVersionSet -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/apiVersionSets/apiVersionSet1
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=CustomDomain -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/customDomains/customdomain
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Diagnostic -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/diagnostics/diagnostic1
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Logger -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/loggers/logger1
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Policy -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/policies/policy1
diff --git a/azurerm/internal/services/apimanagement/schema.go b/azurerm/internal/services/apimanagement/schema.go
new file mode 100644
index 000000000000..2cfff58efbd8
--- /dev/null
+++ b/azurerm/internal/services/apimanagement/schema.go
@@ -0,0 +1,58 @@
+package apimanagement
+
+import (
+  "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
+  "github.com/hashicorp/terraform-plugin-sdk/helper/validation"
+
+  "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure"
+  "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/suppress"
+)
+
+func apiManagementResourceHostnameSchema() map[string]*schema.Schema {
+  return map[string]*schema.Schema{
+    "host_name": {
+      Type:             schema.TypeString,
+      Required:         true,
+      DiffSuppressFunc: suppress.CaseDifference,
+      ValidateFunc:     validation.StringIsNotEmpty,
+    },
+
+    "key_vault_id": {
+      Type:         schema.TypeString,
+      Optional:     true,
+      ValidateFunc: azure.ValidateKeyVaultChildIdVersionOptional,
+    },
+
+    "certificate": {
+      Type:         schema.TypeString,
+      Optional:     true,
+      Sensitive:    true,
+      ValidateFunc: validation.StringIsNotEmpty,
+    },
+
+    "certificate_password": {
+      Type:         schema.TypeString,
+      Optional:     true,
+      Sensitive:    true,
+      ValidateFunc: validation.StringIsNotEmpty,
+    },
+
+    "negotiate_client_certificate": {
+      Type:     schema.TypeBool,
+      Optional: true,
+      Default:  false,
+    },
+  }
+}
+
+func apiManagementResourceHostnameProxySchema() map[string]*schema.Schema {
+  hostnameSchema := apiManagementResourceHostnameSchema()
+
+  hostnameSchema["default_ssl_binding"] = &schema.Schema{
+    Type:     schema.TypeBool,
+    Optional: true,
+    Computed: true, // Azure has certain logic to set this, which we cannot predict
+  }
+
+  return hostnameSchema
+}
diff --git a/azurerm/internal/services/apimanagement/tests/testdata/api_management_api2_test.pfx b/azurerm/internal/services/apimanagement/testdata/api_management_api2_test.pfx
similarity index 100%
rename from azurerm/internal/services/apimanagement/tests/testdata/api_management_api2_test.pfx
rename to azurerm/internal/services/apimanagement/testdata/api_management_api2_test.pfx
diff --git a/azurerm/internal/services/apimanagement/tests/testdata/api_management_api_operation_policy.xml b/azurerm/internal/services/apimanagement/testdata/api_management_api_operation_policy.xml
similarity index 100%
rename from azurerm/internal/services/apimanagement/tests/testdata/api_management_api_operation_policy.xml
rename to azurerm/internal/services/apimanagement/testdata/api_management_api_operation_policy.xml
diff --git a/azurerm/internal/services/apimanagement/tests/testdata/api_management_api_schema.xml b/azurerm/internal/services/apimanagement/testdata/api_management_api_schema.xml
similarity index 100%
rename from azurerm/internal/services/apimanagement/tests/testdata/api_management_api_schema.xml
rename to azurerm/internal/services/apimanagement/testdata/api_management_api_schema.xml
diff --git a/azurerm/internal/services/apimanagement/tests/testdata/api_management_api_swagger.json b/azurerm/internal/services/apimanagement/testdata/api_management_api_swagger.json
similarity index 100%
rename from azurerm/internal/services/apimanagement/tests/testdata/api_management_api_swagger.json
rename to azurerm/internal/services/apimanagement/testdata/api_management_api_swagger.json
diff --git a/azurerm/internal/services/apimanagement/tests/testdata/api_management_api_test.pfx b/azurerm/internal/services/apimanagement/testdata/api_management_api_test.pfx
similarity index 100%
rename from azurerm/internal/services/apimanagement/tests/testdata/api_management_api_test.pfx
rename to azurerm/internal/services/apimanagement/testdata/api_management_api_test.pfx
diff --git a/azurerm/internal/services/apimanagement/tests/testdata/api_management_api_wsdl.xml b/azurerm/internal/services/apimanagement/testdata/api_management_api_wsdl.xml
similarity index 100%
rename from azurerm/internal/services/apimanagement/tests/testdata/api_management_api_wsdl.xml
rename to azurerm/internal/services/apimanagement/testdata/api_management_api_wsdl.xml
diff --git a/azurerm/internal/services/apimanagement/testdata/api_management_developer_portal_test.pfx b/azurerm/internal/services/apimanagement/testdata/api_management_developer_portal_test.pfx
new file mode 100644
index 000000000000..c86d2089c0fd
Binary files /dev/null and b/azurerm/internal/services/apimanagement/testdata/api_management_developer_portal_test.pfx differ
diff --git a/azurerm/internal/services/apimanagement/testdata/api_management_policy_test.xml b/azurerm/internal/services/apimanagement/testdata/api_management_policy_test.xml
new file mode 100644
index 000000000000..03b2893c3e66
--- /dev/null
+++ b/azurerm/internal/services/apimanagement/testdata/api_management_policy_test.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
diff --git a/azurerm/internal/services/apimanagement/tests/testdata/api_management_portal_test.pfx b/azurerm/internal/services/apimanagement/testdata/api_management_portal_test.pfx
similarity index 100%
rename from azurerm/internal/services/apimanagement/tests/testdata/api_management_portal_test.pfx
rename to
azurerm/internal/services/apimanagement/testdata/api_management_portal_test.pfx diff --git a/azurerm/internal/services/apimanagement/tests/testdata/api_management_testdata.sh b/azurerm/internal/services/apimanagement/testdata/api_management_testdata.sh similarity index 100% rename from azurerm/internal/services/apimanagement/tests/testdata/api_management_testdata.sh rename to azurerm/internal/services/apimanagement/testdata/api_management_testdata.sh diff --git a/azurerm/internal/services/apimanagement/tests/testdata/keyvaultcert.pfx b/azurerm/internal/services/apimanagement/testdata/keyvaultcert.pfx similarity index 100% rename from azurerm/internal/services/apimanagement/tests/testdata/keyvaultcert.pfx rename to azurerm/internal/services/apimanagement/testdata/keyvaultcert.pfx diff --git a/azurerm/internal/services/apimanagement/tests/api_management_api_data_source_test.go b/azurerm/internal/services/apimanagement/tests/api_management_api_data_source_test.go deleted file mode 100644 index bf7d3bf5f818..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_api_data_source_test.go +++ /dev/null @@ -1,87 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMApiManagementApi_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_api_management_api", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceApiManagementApi_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "display_name", "api1"), - resource.TestCheckResourceAttr(data.ResourceName, "path", "api1"), - resource.TestCheckResourceAttr(data.ResourceName, "protocols.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "protocols.0", "https"), - resource.TestCheckResourceAttr(data.ResourceName, "soap_pass_through", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "subscription_required", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "is_current", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "is_online", "false"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMApiManagementApi_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_api_management_api", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceApiManagementApi_complete(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "display_name", "Butter Parser"), - resource.TestCheckResourceAttr(data.ResourceName, "path", "butter-parser"), - resource.TestCheckResourceAttr(data.ResourceName, "protocols.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "description", "What is my purpose? 
You parse butter."), - resource.TestCheckResourceAttr(data.ResourceName, "service_url", "https://example.com/foo/bar"), - resource.TestCheckResourceAttr(data.ResourceName, "soap_pass_through", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "subscription_key_parameter_names.0.header", "X-Butter-Robot-API-Key"), - resource.TestCheckResourceAttr(data.ResourceName, "subscription_key_parameter_names.0.query", "location"), - resource.TestCheckResourceAttr(data.ResourceName, "is_current", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "is_online", "false"), - ), - }, - }, - }) -} - -func testAccDataSourceApiManagementApi_basic(data acceptance.TestData) string { - template := testAccAzureRMApiManagementApi_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_api_management_api" "test" { - name = azurerm_api_management_api.test.name - api_management_name = azurerm_api_management_api.test.api_management_name - resource_group_name = azurerm_api_management_api.test.resource_group_name - revision = azurerm_api_management_api.test.revision -} -`, template) -} - -func testAccDataSourceApiManagementApi_complete(data acceptance.TestData) string { - template := testAccAzureRMApiManagementApi_complete(data) - return fmt.Sprintf(` -%s - -data "azurerm_api_management_api" "test" { - name = azurerm_api_management_api.test.name - api_management_name = azurerm_api_management_api.test.api_management_name - resource_group_name = azurerm_api_management_api.test.resource_group_name - revision = azurerm_api_management_api.test.revision -} -`, template) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_api_diagnostic_resource_test.go b/azurerm/internal/services/apimanagement/tests/api_management_api_diagnostic_resource_test.go deleted file mode 100644 index 391061039507..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_api_diagnostic_resource_test.go +++ /dev/null @@ -1,246 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/apimanagement/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMApiManagementApiDiagnostic_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_api_diagnostic", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementApiDiagnosticDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementApiDiagnostic_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiDiagnosticExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementApiDiagnostic_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_api_diagnostic", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementApiDiagnosticDestroy, - Steps: []resource.TestStep{ - { - Config: 
testAccAzureRMApiManagementApiDiagnostic_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiDiagnosticExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMApiManagementApiDiagnostic_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiDiagnosticExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementApiDiagnostic_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_api_diagnostic", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementApiDiagnosticDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementApiDiagnostic_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiDiagnosticExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMApiManagementApiDiagnostic_requiresImport), - }, - }) -} - -func testCheckAzureRMApiManagementApiDiagnosticDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.ApiDiagnosticClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_api_management_api_diagnostic" { - continue - } - - diagnosticId, err := parse.ApiManagementApiDiagnosticID(rs.Primary.ID) - if err != nil { - return err - } - resp, err := client.Get(ctx, diagnosticId.ResourceGroup, diagnosticId.ServiceName, diagnosticId.ApiName, diagnosticId.Name) - - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return err - } - } - - return nil - } - return nil -} - -func testCheckAzureRMApiManagementApiDiagnosticExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.ApiDiagnosticClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - diagnosticId, err := parse.ApiManagementApiDiagnosticID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := client.Get(ctx, diagnosticId.ResourceGroup, diagnosticId.ServiceName, diagnosticId.ApiName, diagnosticId.Name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("bad: API Management Diagnostic %q (Resource Group %q / API Management Service %q / API %q) does not exist", diagnosticId.Name, diagnosticId.ResourceGroup, diagnosticId.ServiceName, diagnosticId.ApiName) - } - return fmt.Errorf("bad: Get on apiManagementApiDiagnosticClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMApiManagementApiDiagnostic_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%[1]d" - location = "%[2]s" -} - -resource "azurerm_application_insights" "test" { - name = "acctestappinsights-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - application_type = "web" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = 
azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - sku_name = "Developer_1" -} - -resource "azurerm_api_management_logger" "test" { - name = "acctestapimnglogger-%[1]d" - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - - application_insights { - instrumentation_key = azurerm_application_insights.test.instrumentation_key - } -} - -resource "azurerm_api_management_api" "test" { - name = "acctestAMA-%[1]d" - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - revision = "1" - display_name = "Test API" - path = "test" - protocols = ["https"] - - import { - content_format = "swagger-link-json" - content_value = "http://conferenceapi.azurewebsites.net/?format=json" - } -} -`, data.RandomInteger, data.Locations.Primary) -} - -func testAccAzureRMApiManagementApiDiagnostic_basic(data acceptance.TestData) string { - config := testAccAzureRMApiManagementApiDiagnostic_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_api_diagnostic" "test" { - identifier = "applicationinsights" - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - api_name = azurerm_api_management_api.test.name - api_management_logger_id = azurerm_api_management_logger.test.id -} -`, config) -} - -func testAccAzureRMApiManagementApiDiagnostic_update(data acceptance.TestData) string { - config := testAccAzureRMApiManagementApiDiagnostic_template(data) - return fmt.Sprintf(` -%[1]s - -resource "azurerm_application_insights" "test2" { - name = "acctestappinsightsUpdate-%[2]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - application_type = "web" -} - -resource "azurerm_api_management_logger" "test2" { - name = "acctestapimngloggerUpdate-%[2]d" - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - - application_insights { - instrumentation_key = azurerm_application_insights.test2.instrumentation_key - } -} - -resource "azurerm_api_management_api_diagnostic" "test" { - identifier = "applicationinsights" - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - api_name = azurerm_api_management_api.test.name - api_management_logger_id = azurerm_api_management_logger.test2.id -} -`, config, data.RandomInteger) -} - -func testAccAzureRMApiManagementApiDiagnostic_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMApiManagementApiDiagnostic_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_api_diagnostic" "import" { - identifier = azurerm_api_management_api_diagnostic.test.identifier - resource_group_name = azurerm_api_management_api_diagnostic.test.resource_group_name - api_management_name = azurerm_api_management_api_diagnostic.test.api_management_name - api_name = azurerm_api_management_api.test.name - api_management_logger_id = azurerm_api_management_api_diagnostic.test.api_management_logger_id -} -`, template) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_api_operation_policy_resource_test.go b/azurerm/internal/services/apimanagement/tests/api_management_api_operation_policy_resource_test.go deleted file mode 100644 index 2689afbc6467..000000000000 --- 
a/azurerm/internal/services/apimanagement/tests/api_management_api_operation_policy_resource_test.go +++ /dev/null @@ -1,234 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/apimanagement/mgmt/2019-12-01/apimanagement" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMApiManagementAPIOperationPolicy_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_api_operation_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementAPIOperationPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementAPIOperationPolicy_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementAPIOperationPolicyExists(data.ResourceName), - ), - }, - { - ResourceName: data.ResourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"xml_link"}, - }, - }, - }) -} - -func TestAccAzureRMApiManagementAPIOperationPolicy_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_api_operation_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementAPIOperationPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementAPIOperationPolicy_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementAPIOperationPolicyExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMApiManagementAPIOperationPolicy_requiresImport), - }, - }) -} - -func TestAccAzureRMApiManagementAPIOperationPolicy_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_api_operation_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementAPIOperationPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementAPIOperationPolicy_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementAPIOperationPolicyExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMApiManagementAPIOperationPolicy_updated(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementAPIOperationPolicyExists(data.ResourceName), - ), - }, - { - ResourceName: data.ResourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"xml_link"}, - }, - }, - }) -} - -func TestAccAzureRMApiManagementAPIOperationPolicy_rawXml(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_api_operation_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementAPIOperationPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: 
testAccAzureRMApiManagementAPIOperationPolicy_rawXml(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementAPIOperationPolicyExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMApiManagementAPIOperationPolicyExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.ApiOperationPoliciesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - apiName := rs.Primary.Attributes["api_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - operationID := rs.Primary.Attributes["operation_id"] - - resp, err := conn.Get(ctx, resourceGroup, serviceName, apiName, operationID, apimanagement.PolicyExportFormatXML) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: API Policy (API Management Service %q / API %q / Operation %q / Resource Group %q) does not exist", serviceName, apiName, operationID, resourceGroup) - } - - return fmt.Errorf("Bad: Get on apiManagementAPIOperationPoliciesClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMApiManagementAPIOperationPolicyDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.ApiOperationPoliciesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_api_management_api_operation_policy" { - continue - } - - apiName := rs.Primary.Attributes["api_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - operationID := rs.Primary.Attributes["operation_id"] - - resp, err := conn.Get(ctx, resourceGroup, serviceName, apiName, operationID, apimanagement.PolicyExportFormatXML) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - - return nil - } - - return nil -} - -func testAccAzureRMApiManagementAPIOperationPolicy_basic(data acceptance.TestData) string { - template := testAccAzureRMApiManagementApiOperation_basic(data) - - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_api_operation_policy" "test" { - api_name = azurerm_api_management_api.test.name - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - operation_id = azurerm_api_management_api_operation.test.operation_id - xml_link = "https://gist.githubusercontent.com/riordanp/ca22f8113afae0eb38cc12d718fd048d/raw/d6ac89a2f35a6881a7729f8cb4883179dc88eea1/example.xml" -} -`, template) -} - -func testAccAzureRMApiManagementAPIOperationPolicy_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMApiManagementAPIOperationPolicy_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_api_operation_policy" "import" { - api_name = azurerm_api_management_api_operation_policy.test.api_name - api_management_name = azurerm_api_management_api_operation_policy.test.api_management_name - resource_group_name = azurerm_api_management_api_operation_policy.test.resource_group_name - operation_id = 
azurerm_api_management_api_operation_policy.test.operation_id - xml_link = azurerm_api_management_api_operation_policy.test.xml_link -} -`, template) -} - -func testAccAzureRMApiManagementAPIOperationPolicy_updated(data acceptance.TestData) string { - template := testAccAzureRMApiManagementApiOperation_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_api_operation_policy" "test" { - api_name = azurerm_api_management_api.test.name - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - operation_id = azurerm_api_management_api_operation.test.operation_id - - xml_content = < - - - - - -XML - -} -`, template) -} - -func testAccAzureRMApiManagementAPIOperationPolicy_rawXml(data acceptance.TestData) string { - template := testAccAzureRMApiManagementApiOperation_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_api_operation_policy" "test" { - api_name = azurerm_api_management_api.test.name - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - operation_id = azurerm_api_management_api_operation.test.operation_id - - xml_content = file("testdata/api_management_api_operation_policy.xml") -} -`, template) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_api_operation_resource_test.go b/azurerm/internal/services/apimanagement/tests/api_management_api_operation_resource_test.go deleted file mode 100644 index 51691f37790c..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_api_operation_resource_test.go +++ /dev/null @@ -1,522 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMApiManagementApiOperation_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_api_operation", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementApiOperationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementApiOperation_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiOperationExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementApiOperation_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_api_operation", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementApiOperationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementApiOperation_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiOperationExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMApiManagementApiOperation_requiresImport), - }, - }) -} - -func TestAccAzureRMApiManagementApiOperation_customMethod(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_api_operation", "test") - - 
resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementApiOperationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementApiOperation_customMethod(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiOperationExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "method", "HAMMERTIME"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementApiOperation_headers(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_api_operation", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementApiOperationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementApiOperation_headers(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiOperationExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementApiOperation_requestRepresentations(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_api_operation", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementApiOperationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementApiOperation_requestRepresentation(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiOperationExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMApiManagementApiOperation_requestRepresentationUpdated(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiOperationExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementApiOperation_representations(t *testing.T) { - // TODO: once `azurerm_api_management_schema` is supported add `request.0.representation.0.schema_id` - data := acceptance.BuildTestData(t, "azurerm_api_management_api_operation", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementApiOperationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementApiOperation_representation(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiOperationExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMApiManagementApiOperation_representationUpdated(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiOperationExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMApiManagementApiOperationDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.ApiOperationsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_api_management_api_operation" { - continue - } - - operationId := rs.Primary.Attributes["operation_id"] - apiName := rs.Primary.Attributes["api_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - resourceGroup := 
rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, serviceName, apiName, operationId) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - - return nil - } - - return nil -} - -func testCheckAzureRMApiManagementApiOperationExists(name string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.ApiOperationsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[name] - if !ok { - return fmt.Errorf("Not found: %s", name) - } - - operationId := rs.Primary.Attributes["operation_id"] - apiName := rs.Primary.Attributes["api_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, serviceName, apiName, operationId) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: API Operation %q (API %q / API Management Service %q / Resource Group: %q) does not exist", operationId, apiName, serviceName, resourceGroup) - } - - return fmt.Errorf("Bad: Get on apiManagementApiOperationsClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMApiManagementApiOperation_basic(data acceptance.TestData) string { - template := testAccAzureRMApiManagementApiOperation_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_api_operation" "test" { - operation_id = "acctest-operation" - api_name = azurerm_api_management_api.test.name - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - display_name = "DELETE Resource" - method = "DELETE" - url_template = "/resource" -} -`, template) -} - -func testAccAzureRMApiManagementApiOperation_customMethod(data acceptance.TestData) string { - template := testAccAzureRMApiManagementApiOperation_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_api_operation" "test" { - operation_id = "acctest-operation" - api_name = azurerm_api_management_api.test.name - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - display_name = "HAMMERTIME Resource" - method = "HAMMERTIME" - url_template = "/resource" -} -`, template) -} - -func testAccAzureRMApiManagementApiOperation_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMApiManagementApiOperation_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_api_operation" "import" { - operation_id = azurerm_api_management_api_operation.test.operation_id - api_name = azurerm_api_management_api_operation.test.api_name - api_management_name = azurerm_api_management_api_operation.test.api_management_name - resource_group_name = azurerm_api_management_api_operation.test.resource_group_name - display_name = azurerm_api_management_api_operation.test.display_name - method = azurerm_api_management_api_operation.test.method - url_template = azurerm_api_management_api_operation.test.url_template -} -`, template) -} - -func testAccAzureRMApiManagementApiOperation_requestRepresentation(data acceptance.TestData) string { - template := testAccAzureRMApiManagementApiOperation_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_api_operation" "test" { - 
operation_id = "acctest-operation" - api_name = azurerm_api_management_api.test.name - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - display_name = "Acceptance Test Operation" - method = "DELETE" - url_template = "/user1" - description = "This can only be done by the logged in user." - - request { - description = "Created user object" - - representation { - content_type = "application/json" - type_name = "User" - } - } -} -`, template) -} - -func testAccAzureRMApiManagementApiOperation_requestRepresentationUpdated(data acceptance.TestData) string { - template := testAccAzureRMApiManagementApiOperation_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_api_operation" "test" { - operation_id = "acctest-operation" - api_name = azurerm_api_management_api.test.name - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - display_name = "Acceptance Test Operation" - method = "DELETE" - url_template = "/user1" - description = "This can only be done by the logged in user." - - request { - description = "Created user object" - - representation { - content_type = "application/json" - type_name = "User" - } - } -} -`, template) -} - -func testAccAzureRMApiManagementApiOperation_headers(data acceptance.TestData) string { - template := testAccAzureRMApiManagementApiOperation_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_api_operation" "test" { - operation_id = "acctest-operation" - api_name = azurerm_api_management_api.test.name - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - display_name = "Acceptance Test Operation" - method = "DELETE" - url_template = "/user1" - description = "This can only be done by the logged in user." - - request { - description = "Created user object" - - header { - name = "X-Test-Operation" - required = true - type = "string" - } - - representation { - content_type = "application/json" - type_name = "User" - } - } - - response { - status_code = 200 - description = "successful operation" - - header { - name = "X-Test-Operation" - required = true - type = "string" - } - - representation { - content_type = "application/xml" - - sample = < - - - - - - - -SAMPLE - - } - } -} -`, template) -} - -func testAccAzureRMApiManagementApiOperation_representation(data acceptance.TestData) string { - template := testAccAzureRMApiManagementApiOperation_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_api_operation" "test" { - operation_id = "acctest-operation" - api_name = azurerm_api_management_api.test.name - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - display_name = "Acceptance Test Operation" - method = "DELETE" - url_template = "/user1" - description = "This can only be done by the logged in user." 
- - request { - description = "Created user object" - - representation { - content_type = "application/json" - type_name = "User" - } - } - - response { - status_code = 200 - description = "successful operation" - - representation { - content_type = "application/xml" - - sample = < - - - - - - - -SAMPLE - - } - } -} -`, template) -} - -func testAccAzureRMApiManagementApiOperation_representationUpdated(data acceptance.TestData) string { - template := testAccAzureRMApiManagementApiOperation_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_api_operation" "test" { - operation_id = "acctest-operation" - api_name = azurerm_api_management_api.test.name - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - display_name = "Acceptance Test Operation" - method = "DELETE" - url_template = "/user1" - description = "This can only be done by the logged in user." - - request { - description = "Created user object" - - representation { - content_type = "application/json" - type_name = "User" - } - } - - response { - status_code = 200 - description = "successful operation" - - representation { - content_type = "application/xml" - - sample = < - - - - - - - -SAMPLE - - } - - representation { - content_type = "application/json" - - sample = < - - - - - -XML - -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_api_resource_test.go b/azurerm/internal/services/apimanagement/tests/api_management_api_resource_test.go deleted file mode 100644 index 61dcea96c58f..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_api_resource_test.go +++ /dev/null @@ -1,669 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMApiManagementApi_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_api", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementApiDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementApi_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "soap_pass_through", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "is_current", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "is_online", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "subscription_required", "false"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementApi_wordRevision(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_api", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementApiDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementApi_wordRevision(data), - Check: 
resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "revision", "one-point-oh"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementApi_blankPath(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_api", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementApiDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementApi_blankPath(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "soap_pass_through", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "is_current", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "is_online", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "path", ""), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementApi_version(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_api", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementApiDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementApi_versionSet(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "version", "v1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementApi_oauth2Authorization(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_api", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementApiDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementApi_oauth2Authorization(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementApi_openidAuthentication(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_api", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementApiDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementApi_openidAuthentication(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementApi_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_api", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementApiDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementApi_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMApiManagementApi_requiresImport), - }, - }) -} - -func 
TestAccAzureRMApiManagementApi_soapPassthrough(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_api", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementApiDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementApi_soapPassthrough(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementApi_subscriptionRequired(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_api", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementApiDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementApi_subscriptionRequired(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "subscription_required", "false"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementApi_importSwagger(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_api", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementApiDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementApi_importSwagger(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiExists(data.ResourceName), - ), - }, - { - ResourceName: data.ResourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{ - // not returned from the API - "import", - }, - }, - }, - }) -} - -func TestAccAzureRMApiManagementApi_importWsdl(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_api", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementApiDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementApi_importWsdl(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiExists(data.ResourceName), - ), - }, - { - ResourceName: data.ResourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{ - // not returned from the API - "import", - }, - }, - }, - }) -} - -func TestAccAzureRMApiManagementApi_importUpdate(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_api", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementApiDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementApi_importWsdl(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiExists(data.ResourceName), - ), - }, - { - ResourceName: data.ResourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{ - // not returned from the API - "import", - }, - }, - { - Config: testAccAzureRMApiManagementApi_importSwagger(data), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMApiManagementApiExists(data.ResourceName), - ), - }, - { - ResourceName: data.ResourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{ - // not returned from the API - "import", - }, - }, - }, - }) -} - -func TestAccAzureRMApiManagementApi_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_api", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementApiDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementApi_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMApiManagementApiDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.ApiClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_api_management_api" { - continue - } - - name := rs.Primary.Attributes["name"] - serviceName := rs.Primary.Attributes["api_management_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - revision := rs.Primary.Attributes["revision"] - apiId := fmt.Sprintf("%s;rev=%s", name, revision) - - resp, err := conn.Get(ctx, resourceGroup, serviceName, apiId) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - - return nil - } - - return nil -} - -func testCheckAzureRMApiManagementApiExists(name string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.ApiClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[name] - if !ok { - return fmt.Errorf("Not found: %s", name) - } - - name := rs.Primary.Attributes["name"] - serviceName := rs.Primary.Attributes["api_management_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - revision := rs.Primary.Attributes["revision"] - - apiId := fmt.Sprintf("%s;rev=%s", name, revision) - resp, err := conn.Get(ctx, resourceGroup, serviceName, apiId) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: API %q Revision %q (API Management Service %q / Resource Group: %q) does not exist", name, revision, serviceName, resourceGroup) - } - - return fmt.Errorf("Bad: Get on apiManagementClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMApiManagementApi_basic(data acceptance.TestData) string { - template := testAccAzureRMApiManagementApi_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_api" "test" { - name = "acctestapi-%d" - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - display_name = "api1" - path = "api1" - protocols = ["https"] - revision = "1" -} -`, template, data.RandomInteger) -} - -func testAccAzureRMApiManagementApi_blankPath(data acceptance.TestData) string { - template := testAccAzureRMApiManagementApi_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_api" "test" { - name = "acctestapi-%d" - resource_group_name = azurerm_resource_group.test.name - api_management_name = 
azurerm_api_management.test.name - display_name = "api1" - path = "" - protocols = ["https"] - revision = "1" -} -`, template, data.RandomInteger) -} - -func testAccAzureRMApiManagementApi_wordRevision(data acceptance.TestData) string { - template := testAccAzureRMApiManagementApi_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_api" "test" { - name = "acctestapi-%d" - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - display_name = "api1" - path = "api1" - protocols = ["https"] - revision = "one-point-oh" -} -`, template, data.RandomInteger) -} - -func testAccAzureRMApiManagementApi_soapPassthrough(data acceptance.TestData) string { - template := testAccAzureRMApiManagementApi_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_api" "test" { - name = "acctestapi-%d" - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - display_name = "api1" - path = "api1" - protocols = ["https"] - revision = "1" - soap_pass_through = true -} -`, template, data.RandomInteger) -} - -func testAccAzureRMApiManagementApi_subscriptionRequired(data acceptance.TestData) string { - template := testAccAzureRMApiManagementApi_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_api" "test" { - name = "acctestapi-%d" - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - display_name = "api1" - path = "api1" - protocols = ["https"] - revision = "1" - subscription_required = false -} -`, template, data.RandomInteger) -} - -func testAccAzureRMApiManagementApi_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMApiManagementApi_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_api" "import" { - name = azurerm_api_management_api.test.name - resource_group_name = azurerm_api_management_api.test.resource_group_name - api_management_name = azurerm_api_management_api.test.api_management_name - display_name = azurerm_api_management_api.test.display_name - path = azurerm_api_management_api.test.path - protocols = azurerm_api_management_api.test.protocols - revision = azurerm_api_management_api.test.revision -} -`, template) -} - -func testAccAzureRMApiManagementApi_importSwagger(data acceptance.TestData) string { - template := testAccAzureRMApiManagementApi_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_api" "test" { - name = "acctestapi-%d" - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - display_name = "api1" - path = "api1" - protocols = ["https"] - revision = "1" - - import { - content_value = file("testdata/api_management_api_swagger.json") - content_format = "swagger-json" - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMApiManagementApi_importWsdl(data acceptance.TestData) string { - template := testAccAzureRMApiManagementApi_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_api" "test" { - name = "acctestapi-%d" - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - display_name = "api1" - path = "api1" - protocols = ["https"] - revision = "1" - - import { - content_value = file("testdata/api_management_api_wsdl.xml") - content_format = "wsdl" - - wsdl_selector { - service_name = "Calculator" - 
-      endpoint_name = "CalculatorHttpsSoap11Endpoint"
-    }
-  }
-}
-`, template, data.RandomInteger)
-}
-
-func testAccAzureRMApiManagementApi_complete(data acceptance.TestData) string {
-  template := testAccAzureRMApiManagementApi_template(data)
-  return fmt.Sprintf(`
-%s
-
-resource "azurerm_api_management_api" "test" {
-  name = "acctestapi-%d"
-  resource_group_name = azurerm_resource_group.test.name
-  api_management_name = azurerm_api_management.test.name
-  display_name = "Butter Parser"
-  path = "butter-parser"
-  protocols = ["https", "http"]
-  revision = "3"
-  description = "What is my purpose? You parse butter."
-  service_url = "https://example.com/foo/bar"
-
-  subscription_key_parameter_names {
-    header = "X-Butter-Robot-API-Key"
-    query = "location"
-  }
-}
-`, template, data.RandomInteger)
-}
-
-func testAccAzureRMApiManagementApi_versionSet(data acceptance.TestData) string {
-  template := testAccAzureRMApiManagementApi_template(data)
-  return fmt.Sprintf(`
-%s
-
-resource "azurerm_api_management_api_version_set" "test" {
-  name = "acctestAMAVS-%d"
-  resource_group_name = azurerm_resource_group.test.name
-  api_management_name = azurerm_api_management.test.name
-  display_name = "Butter Parser"
-  versioning_scheme = "Segment"
-}
-
-resource "azurerm_api_management_api" "test" {
-  name = "acctestapi-%d"
-  resource_group_name = azurerm_resource_group.test.name
-  api_management_name = azurerm_api_management.test.name
-  display_name = "api1"
-  path = "api1"
-  protocols = ["https"]
-  revision = "1"
-  version = "v1"
-  version_set_id = azurerm_api_management_api_version_set.test.id
-}
-`, template, data.RandomInteger, data.RandomInteger)
-}
-
-func testAccAzureRMApiManagementApi_oauth2Authorization(data acceptance.TestData) string {
-  template := testAccAzureRMApiManagementApi_template(data)
-  return fmt.Sprintf(`
-%s
-
-resource "azurerm_api_management_authorization_server" "test" {
-  name = "acctestauthsrv-%d"
-  resource_group_name = azurerm_resource_group.test.name
-  api_management_name = azurerm_api_management.test.name
-  display_name = "Test Group"
-  authorization_endpoint = "https://azacctest.hashicorptest.com/client/authorize"
-  client_id = "42424242-4242-4242-4242-424242424242"
-  client_registration_endpoint = "https://azacctest.hashicorptest.com/client/register"
-
-  grant_types = [
-    "implicit",
-  ]
-
-  authorization_methods = [
-    "GET",
-  ]
-}
-
-resource "azurerm_api_management_api" "test" {
-  name = "acctestapi-%d"
-  resource_group_name = azurerm_resource_group.test.name
-  api_management_name = azurerm_api_management.test.name
-  display_name = "api1"
-  path = "api1"
-  protocols = ["https"]
-  revision = "1"
-  oauth2_authorization {
-    authorization_server_name = azurerm_api_management_authorization_server.test.name
-    scope = "acctest"
-  }
-}
-`, template, data.RandomInteger, data.RandomInteger)
-}
-
-func testAccAzureRMApiManagementApi_openidAuthentication(data acceptance.TestData) string {
-  template := testAccAzureRMApiManagementApi_template(data)
-  return fmt.Sprintf(`
-%s
-
-resource "azurerm_api_management_openid_connect_provider" "test" {
-  name = "acctest-%d"
-  api_management_name = azurerm_api_management.test.name
-  resource_group_name = azurerm_resource_group.test.name
-  client_id = "00001111-2222-3333-%d"
-  client_secret = "%d-cwdavsxbacsaxZX-%d"
-  display_name = "Initial Name"
-  metadata_endpoint = "https://azacctest.hashicorptest.com/example/foo"
-}
-
-resource "azurerm_api_management_api" "test" {
-  name = "acctestapi-%d"
-  resource_group_name = azurerm_resource_group.test.name
-  api_management_name = azurerm_api_management.test.name
-  display_name = "api1"
-  path = "api1"
-  protocols = ["https"]
-  revision = "1"
-  openid_authentication {
-    openid_provider_name = azurerm_api_management_openid_connect_provider.test.name
-    bearer_token_sending_methods = [
-      "authorizationHeader",
-      "query",
-    ]
-  }
-}
-`, template, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger)
-}
-
-func testAccAzureRMApiManagementApi_template(data acceptance.TestData) string {
-  return fmt.Sprintf(`
-provider "azurerm" {
-  features {}
-}
-
-resource "azurerm_resource_group" "test" {
-  name = "acctestRG-%d"
-  location = "%s"
-}
-
-resource "azurerm_api_management" "test" {
-  name = "acctestAM-%d"
-  location = azurerm_resource_group.test.location
-  resource_group_name = azurerm_resource_group.test.name
-  publisher_name = "pub1"
-  publisher_email = "pub1@email.com"
-
-  sku_name = "Developer_1"
-}
-`, data.RandomInteger, data.Locations.Primary, data.RandomInteger)
-}
diff --git a/azurerm/internal/services/apimanagement/tests/api_management_api_schema_resource_test.go b/azurerm/internal/services/apimanagement/tests/api_management_api_schema_resource_test.go
deleted file mode 100644
index a6a964cf6c7d..000000000000
--- a/azurerm/internal/services/apimanagement/tests/api_management_api_schema_resource_test.go
+++ /dev/null
@@ -1,172 +0,0 @@
-package tests
-
-import (
-  "fmt"
-  "testing"
-
-  "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
-  "github.com/hashicorp/terraform-plugin-sdk/terraform"
-  "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance"
-  "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
-  "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
-)
-
-func TestAccAzureRMApiManagementApiSchema_basic(t *testing.T) {
-  data := acceptance.BuildTestData(t, "azurerm_api_management_api_schema", "test")
-
-  resource.Test(t, resource.TestCase{
-    PreCheck: func() { acceptance.PreCheck(t) },
-    Providers: acceptance.SupportedProviders,
-    CheckDestroy: testCheckAzureRMApiManagementApiSchemaDestroy,
-    Steps: []resource.TestStep{
-      {
-        Config: testAccAzureRMApiManagementApiSchema_basic(data),
-        Check: resource.ComposeTestCheckFunc(
-          testCheckAzureRMApiManagementApiSchemaExists(data.ResourceName),
-        ),
-      },
-      data.ImportStep(),
-    },
-  })
-}
-
-func TestAccAzureRMApiManagementApiSchema_requiresImport(t *testing.T) {
-  data := acceptance.BuildTestData(t, "azurerm_api_management_api_schema", "test")
-
-  resource.Test(t, resource.TestCase{
-    PreCheck: func() { acceptance.PreCheck(t) },
-    Providers: acceptance.SupportedProviders,
-    CheckDestroy: testCheckAzureRMApiManagementApiSchemaDestroy,
-    Steps: []resource.TestStep{
-      {
-        Config: testAccAzureRMApiManagementApiSchema_basic(data),
-        Check: resource.ComposeTestCheckFunc(
-          testCheckAzureRMApiManagementApiSchemaExists(data.ResourceName),
-        ),
-      },
-      data.RequiresImportErrorStep(testAccAzureRMApiManagementApiSchema_requiresImport),
-    },
-  })
-}
-
-func testCheckAzureRMApiManagementApiSchemaDestroy(s *terraform.State) error {
-  conn := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.ApiSchemasClient
-  ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext
-
-  for _, rs := range s.RootModule().Resources {
-    if rs.Type != "azurerm_api_management_api_schema" {
-      continue
-    }
-
-    schemaID := rs.Primary.Attributes["schema_id"]
-    apiName :=
rs.Primary.Attributes["api_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, serviceName, apiName, schemaID) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - - return nil - } - - return nil -} - -func testCheckAzureRMApiManagementApiSchemaExists(name string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.ApiSchemasClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[name] - if !ok { - return fmt.Errorf("Not found: %s", name) - } - - schemaID := rs.Primary.Attributes["schema_id"] - apiName := rs.Primary.Attributes["api_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, serviceName, apiName, schemaID) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: API Schema %q (API %q / API Management Service %q / Resource Group: %q) does not exist", schemaID, apiName, serviceName, resourceGroup) - } - - return fmt.Errorf("Bad: Get on apiManagementApiSchemasClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMApiManagementApiSchema_basic(data acceptance.TestData) string { - template := testAccAzureRMApiManagementApiSchema_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_api_schema" "test" { - api_name = azurerm_api_management_api.test.name - api_management_name = azurerm_api_management_api.test.api_management_name - resource_group_name = azurerm_api_management_api.test.resource_group_name - schema_id = "acctestSchema%d" - content_type = "application/vnd.ms-azure-apim.xsd+xml" - value = file("testdata/api_management_api_schema.xml") -} -`, template, data.RandomInteger) -} - -func testAccAzureRMApiManagementApiSchema_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMApiManagementApiSchema_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_api_schema" "import" { - api_name = azurerm_api_management_api_schema.test.api_name - api_management_name = azurerm_api_management_api_schema.test.api_management_name - resource_group_name = azurerm_api_management_api_schema.test.resource_group_name - schema_id = azurerm_api_management_api_schema.test.schema_id - content_type = azurerm_api_management_api_schema.test.content_type - value = azurerm_api_management_api_schema.test.value -} -`, template) -} - -func testAccAzureRMApiManagementApiSchema_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - sku_name = "Developer_1" -} - -resource "azurerm_api_management_api" "test" { - name = "acctestapi-%d" - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - display_name = "api1" - path = "api1" - protocols = ["https"] - 
revision = "1" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_api_version_set_data_source_test.go b/azurerm/internal/services/apimanagement/tests/api_management_api_version_set_data_source_test.go deleted file mode 100644 index b5ed626b090a..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_api_version_set_data_source_test.go +++ /dev/null @@ -1,41 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMApiManagementApiVersionSet_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_api_management_api_version_set", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceApiManagementApiVersionSet_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "resource_group_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "api_management_name"), - ), - }, - }, - }) -} - -func testAccDataSourceApiManagementApiVersionSet_basic(data acceptance.TestData) string { - config := testAccAzureRMApiManagementApiVersionSet_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_api_management_api_version_set" "test" { - name = azurerm_api_management_api_version_set.test.name - resource_group_name = azurerm_api_management_api_version_set.test.resource_group_name - api_management_name = azurerm_api_management_api_version_set.test.api_management_name -} -`, config) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_api_version_set_resource_test.go b/azurerm/internal/services/apimanagement/tests/api_management_api_version_set_resource_test.go deleted file mode 100644 index 9c318f48f2ec..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_api_version_set_resource_test.go +++ /dev/null @@ -1,275 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/apimanagement/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMApiManagementApiVersionSet_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_api_version_set", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementApiVersionSetDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementApiVersionSet_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiVersionSetExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementApiVersionSet_requiresImport(t *testing.T) { - data := 
acceptance.BuildTestData(t, "azurerm_api_management_api_version_set", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementApiVersionSetDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementApiVersionSet_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiVersionSetExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMApiManagementApiVersionSet_requiresImport), - }, - }) -} - -func TestAccAzureRMApiManagementApiVersionSet_header(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_api_version_set", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementApiVersionSetDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementApiVersionSet_header(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiVersionSetExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementApiVersionSet_query(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_api_version_set", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementApiVersionSetDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementApiVersionSet_query(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiVersionSetExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementApiVersionSet_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_api_version_set", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementApiVersionSetDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementApiVersionSet_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiVersionSetExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "description", "TestDescription1"), - resource.TestCheckResourceAttr(data.ResourceName, "display_name", fmt.Sprintf("TestApiVersionSet1%d", data.RandomInteger)), - ), - }, - { - Config: testAccAzureRMApiManagementApiVersionSet_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementApiVersionSetExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "description", "TestDescription2"), - resource.TestCheckResourceAttr(data.ResourceName, "display_name", fmt.Sprintf("TestApiVersionSet2%d", data.RandomInteger)), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMApiManagementApiVersionSetDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.ApiVersionSetClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_api_management_api_version_set" { - continue - } - - id, err := parse.APIVersionSetID(rs.Primary.ID) - if err != nil { - return err - } - - resp, 
err := client.Get(ctx, id.ResourceGroup, id.ServiceName, id.Name) - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return err - } - } - - return nil - } - return nil -} - -func testCheckAzureRMApiManagementApiVersionSetExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.ApiVersionSetClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.APIVersionSetID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.ServiceName, id.Name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Api Management Api Version Set %q (Resource Group %q / Api Management Service %q) does not exist", id.Name, id.ResourceGroup, id.ServiceName) - } - return fmt.Errorf("Bad: Get on apiManagementApiVersionSetClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMApiManagementApiVersionSet_basic(data acceptance.TestData) string { - template := testAccAzureRMApiManagementApiVersionSet_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_api_version_set" "test" { - name = "acctestAMAVS-%d" - resource_group_name = azurerm_api_management.test.resource_group_name - api_management_name = azurerm_api_management.test.name - description = "TestDescription1" - display_name = "TestApiVersionSet1%d" - versioning_scheme = "Segment" -} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMApiManagementApiVersionSet_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMApiManagementApiVersionSet_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_api_version_set" "import" { - name = azurerm_api_management_api_version_set.test.name - resource_group_name = azurerm_api_management_api_version_set.test.resource_group_name - api_management_name = azurerm_api_management_api_version_set.test.api_management_name - description = azurerm_api_management_api_version_set.test.description - display_name = azurerm_api_management_api_version_set.test.display_name - versioning_scheme = azurerm_api_management_api_version_set.test.versioning_scheme -} -`, template) -} - -func testAccAzureRMApiManagementApiVersionSet_header(data acceptance.TestData) string { - template := testAccAzureRMApiManagementApiVersionSet_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_api_version_set" "test" { - name = "acctestAMAVS-%d" - resource_group_name = azurerm_api_management.test.resource_group_name - api_management_name = azurerm_api_management.test.name - description = "TestDescription1" - display_name = "TestApiVersionSet1%d" - versioning_scheme = "Header" - version_header_name = "Header1" -} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMApiManagementApiVersionSet_query(data acceptance.TestData) string { - template := testAccAzureRMApiManagementApiVersionSet_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_api_version_set" "test" { - name = "acctestAMAVS-%d" - resource_group_name = azurerm_api_management.test.resource_group_name - api_management_name = azurerm_api_management.test.name - description = "TestDescription1" - display_name = "TestApiVersionSet1%d" - versioning_scheme = "Query" 
- version_query_name = "Query1" -} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMApiManagementApiVersionSet_update(data acceptance.TestData) string { - template := testAccAzureRMApiManagementApiVersionSet_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_api_version_set" "test" { - name = "acctestAMAVS-%d" - resource_group_name = azurerm_api_management.test.resource_group_name - api_management_name = azurerm_api_management.test.name - description = "TestDescription2" - display_name = "TestApiVersionSet2%d" - versioning_scheme = "Segment" -} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMApiManagementApiVersionSet_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - sku_name = "Developer_1" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_authorization_server_resource_test.go b/azurerm/internal/services/apimanagement/tests/api_management_authorization_server_resource_test.go deleted file mode 100644 index 851fee1af709..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_authorization_server_resource_test.go +++ /dev/null @@ -1,227 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMApiManagementAuthorizationServer_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_authorization_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAPIManagementAuthorizationServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementAuthorizationServer_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementAuthorizationServerExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementAuthorizationServer_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_authorization_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAPIManagementAuthorizationServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementAuthorizationServer_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementAuthorizationServerExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMApiManagementAuthorizationServer_requiresImport), - }, - }) -} - -func TestAccAzureRMApiManagementAuthorizationServer_complete(t *testing.T) { - data := 
acceptance.BuildTestData(t, "azurerm_api_management_authorization_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAPIManagementAuthorizationServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementAuthorizationServer_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementAuthorizationServerExists(data.ResourceName), - ), - }, - data.ImportStep("client_secret"), - }, - }) -} - -func testCheckAzureRMAPIManagementAuthorizationServerDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.AuthorizationServersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_api_management_authorization_server" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - resp, err := client.Get(ctx, resourceGroup, serviceName, name) - - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return err - } - } - - return nil - } - return nil -} - -func testCheckAzureRMAPIManagementAuthorizationServerExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.AuthorizationServersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - resp, err := client.Get(ctx, resourceGroup, serviceName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Authorization Server %q (API Management Service %q / Resource Group %q) does not exist", name, serviceName, resourceGroup) - } - return fmt.Errorf("Bad: Get on apiManagementAuthorizationServersClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMApiManagementAuthorizationServer_basic(data acceptance.TestData) string { - template := testAccAzureRMApiManagementAuthorizationServer_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_authorization_server" "test" { - name = "acctestauthsrv-%d" - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - display_name = "Test Group" - authorization_endpoint = "https://azacctest.hashicorptest.com/client/authorize" - client_id = "42424242-4242-4242-4242-424242424242" - client_registration_endpoint = "https://azacctest.hashicorptest.com/client/register" - - grant_types = [ - "implicit", - ] - - authorization_methods = [ - "GET", - ] -} -`, template, data.RandomInteger) -} - -func testAccAzureRMApiManagementAuthorizationServer_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMApiManagementAuthorizationServer_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_authorization_server" "import" { - name = azurerm_api_management_authorization_server.test.name - resource_group_name = azurerm_api_management_authorization_server.test.resource_group_name 
- api_management_name = azurerm_api_management_authorization_server.test.api_management_name - display_name = azurerm_api_management_authorization_server.test.display_name - authorization_endpoint = azurerm_api_management_authorization_server.test.authorization_endpoint - client_id = azurerm_api_management_authorization_server.test.client_id - client_registration_endpoint = azurerm_api_management_authorization_server.test.client_registration_endpoint - grant_types = azurerm_api_management_authorization_server.test.grant_types - - authorization_methods = [ - "GET", - ] -} -`, template) -} - -func testAccAzureRMApiManagementAuthorizationServer_complete(data acceptance.TestData) string { - template := testAccAzureRMApiManagementAuthorizationServer_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_authorization_server" "test" { - name = "acctestauthsrv-%d" - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - display_name = "Test Group" - authorization_endpoint = "https://azacctest.hashicorptest.com/client/authorize" - client_id = "42424242-4242-4242-4242-424242424242" - client_registration_endpoint = "https://azacctest.hashicorptest.com/client/register" - - grant_types = [ - "authorizationCode", - ] - - authorization_methods = [ - "GET", - "POST", - ] - - bearer_token_sending_methods = [ - "authorizationHeader", - ] - - client_secret = "n1n3-m0re-s3a5on5-m0r1y" - default_scope = "read write" - token_endpoint = "https://azacctest.hashicorptest.com/client/token" - resource_owner_username = "rick" - resource_owner_password = "C-193P" - support_state = true -} -`, template, data.RandomInteger) -} - -func testAccAzureRMApiManagementAuthorizationServer_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - sku_name = "Developer_1" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_backend_resource_test.go b/azurerm/internal/services/apimanagement/tests/api_management_backend_resource_test.go deleted file mode 100644 index a50f9ffcacad..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_backend_resource_test.go +++ /dev/null @@ -1,490 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMApiManagementBackend_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_backend", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementBackendDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementBackend_basic(data, "basic"), - Check: 
resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementBackendExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "protocol", "http"), - resource.TestCheckResourceAttr(data.ResourceName, "url", "https://acctest"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementBackend_allProperties(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_backend", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementBackendDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementBackend_allProperties(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementBackendExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "protocol", "http"), - resource.TestCheckResourceAttr(data.ResourceName, "url", "https://acctest"), - resource.TestCheckResourceAttr(data.ResourceName, "description", "description"), - resource.TestCheckResourceAttr(data.ResourceName, "resource_id", "https://resourceid"), - resource.TestCheckResourceAttr(data.ResourceName, "title", "title"), - resource.TestCheckResourceAttr(data.ResourceName, "credentials.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "credentials.0.authorization.0.parameter", "parameter"), - resource.TestCheckResourceAttr(data.ResourceName, "credentials.0.authorization.0.scheme", "scheme"), - resource.TestCheckResourceAttrSet(data.ResourceName, "credentials.0.certificate.0"), - resource.TestCheckResourceAttr(data.ResourceName, "credentials.0.header.header1", "header1value1,header1value2"), - resource.TestCheckResourceAttr(data.ResourceName, "credentials.0.header.header2", "header2value1,header2value2"), - resource.TestCheckResourceAttr(data.ResourceName, "credentials.0.query.query1", "query1value1,query1value2"), - resource.TestCheckResourceAttr(data.ResourceName, "credentials.0.query.query2", "query2value1,query2value2"), - resource.TestCheckResourceAttr(data.ResourceName, "proxy.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "proxy.0.url", "http://192.168.1.1:8080"), - resource.TestCheckResourceAttr(data.ResourceName, "proxy.0.username", "username"), - resource.TestCheckResourceAttr(data.ResourceName, "proxy.0.password", "password"), - resource.TestCheckResourceAttr(data.ResourceName, "tls.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tls.0.validate_certificate_chain", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "tls.0.validate_certificate_name", "true"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementBackend_credentialsNoCertificate(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_backend", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementBackendDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementBackend_credentialsNoCertificate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementBackendExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementBackend_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_backend", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { 
acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementBackendDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementBackend_basic(data, "update"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementBackendExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "protocol", "http"), - resource.TestCheckResourceAttr(data.ResourceName, "url", "https://acctest"), - ), - }, - { - Config: testAccAzureRMApiManagementBackend_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementBackendExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "protocol", "soap"), - resource.TestCheckResourceAttr(data.ResourceName, "url", "https://updatedacctest"), - resource.TestCheckResourceAttr(data.ResourceName, "description", "description"), - resource.TestCheckResourceAttr(data.ResourceName, "resource_id", "https://resourceid"), - resource.TestCheckResourceAttr(data.ResourceName, "proxy.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "proxy.0.url", "http://192.168.1.1:8080"), - resource.TestCheckResourceAttr(data.ResourceName, "proxy.0.username", "username"), - resource.TestCheckResourceAttr(data.ResourceName, "proxy.0.password", "password"), - resource.TestCheckResourceAttr(data.ResourceName, "tls.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tls.0.validate_certificate_chain", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "tls.0.validate_certificate_name", "true"), - ), - }, - { - Config: testAccAzureRMApiManagementBackend_basic(data, "update"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementBackendExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "protocol", "http"), - resource.TestCheckResourceAttr(data.ResourceName, "url", "https://acctest"), - resource.TestCheckResourceAttr(data.ResourceName, "description", ""), - resource.TestCheckResourceAttr(data.ResourceName, "resource_id", ""), - resource.TestCheckResourceAttr(data.ResourceName, "proxy.#", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "tls.#", "0"), - ), - }, - }, - }) -} - -func TestAccAzureRMApiManagementBackend_serviceFabric(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_backend", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementBackendDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementBackend_serviceFabric(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementBackendExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "service_fabric_cluster.0.client_certificate_thumbprint"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementBackend_disappears(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_backend", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementBackendDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementBackend_basic(data, "disappears"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementBackendExists(data.ResourceName), - 
testCheckAzureRMApiManagementBackendDisappears(data.ResourceName), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func TestAccAzureRMApiManagementBackend_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_backend", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementBackendDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementBackend_basic(data, "import"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementBackendExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMApiManagementBackend_requiresImport), - }, - }) -} - -func testCheckAzureRMApiManagementBackendDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.BackendClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_api_management_backend" { - continue - } - - name := rs.Primary.Attributes["name"] - serviceName := rs.Primary.Attributes["api_management_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, serviceName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - - return nil - } - - return nil -} - -func testCheckAzureRMApiManagementBackendExists(name string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.BackendClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[name] - if !ok { - return fmt.Errorf("Not found: %s", name) - } - - name := rs.Primary.Attributes["name"] - serviceName := rs.Primary.Attributes["api_management_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, serviceName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Backend %q (API Management Service %q / Resource Group: %q) does not exist", name, serviceName, resourceGroup) - } - - return fmt.Errorf("Bad: Get on BackendClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMApiManagementBackendDisappears(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.BackendClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - serviceName := rs.Primary.Attributes["api_management_name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for backend: %s", name) - } - - resp, err := conn.Delete(ctx, resourceGroup, serviceName, name, "") - if err != nil { - if utils.ResponseWasNotFound(resp) { - return nil - } - return fmt.Errorf("Bad: Delete on BackendClient: %+v", err) - } - - return nil - } -} - -func 
testAccAzureRMApiManagementBackend_basic(data acceptance.TestData, testName string) string { - template := testAccAzureRMApiManagementBackend_template(data, testName) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_backend" "test" { - name = "acctestapi-%d" - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - protocol = "http" - url = "https://acctest" -} -`, template, data.RandomInteger) -} - -func testAccAzureRMApiManagementBackend_update(data acceptance.TestData) string { - template := testAccAzureRMApiManagementBackend_template(data, "update") - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_backend" "test" { - name = "acctestapi-%d" - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - protocol = "soap" - url = "https://updatedacctest" - description = "description" - resource_id = "https://resourceid" - proxy { - url = "http://192.168.1.1:8080" - username = "username" - password = "password" - } - tls { - validate_certificate_chain = false - validate_certificate_name = true - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMApiManagementBackend_allProperties(data acceptance.TestData) string { - template := testAccAzureRMApiManagementBackend_template(data, "all") - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_certificate" "test" { - name = "example-cert" - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - data = filebase64("testdata/keyvaultcert.pfx") - password = "" -} - -resource "azurerm_api_management_backend" "test" { - name = "acctestapi-%d" - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - protocol = "http" - url = "https://acctest" - description = "description" - resource_id = "https://resourceid" - title = "title" - credentials { - authorization { - parameter = "parameter" - scheme = "scheme" - } - certificate = [ - azurerm_api_management_certificate.test.thumbprint, - ] - header = { - header1 = "header1value1,header1value2" - header2 = "header2value1,header2value2" - } - query = { - query1 = "query1value1,query1value2" - query2 = "query2value1,query2value2" - } - } - proxy { - url = "http://192.168.1.1:8080" - username = "username" - password = "password" - } - tls { - validate_certificate_chain = false - validate_certificate_name = true - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMApiManagementBackend_serviceFabric(data acceptance.TestData) string { - template := testAccAzureRMApiManagementBackend_template(data, "sf") - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_certificate" "test" { - name = "example-cert" - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - data = filebase64("testdata/keyvaultcert.pfx") - password = "" -} - -resource "azurerm_api_management_backend" "test" { - name = "acctestapi-%d" - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - protocol = "http" - url = "fabric:/mytestapp/acctest" - service_fabric_cluster { - client_certificate_thumbprint = azurerm_api_management_certificate.test.thumbprint - management_endpoints = [ - "https://acctestsf.com", - ] - max_partition_resolution_retries = 5 - server_certificate_thumbprints = [ - "thumb1", - "thumb2", - ] - } -} -`, template, 
data.RandomInteger) -} - -func testAccAzureRMApiManagementBackend_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMApiManagementBackend_basic(data, "requiresimport") - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_backend" "import" { - name = azurerm_api_management_backend.test.name - resource_group_name = azurerm_api_management_backend.test.resource_group_name - api_management_name = azurerm_api_management_backend.test.api_management_name - protocol = azurerm_api_management_backend.test.protocol - url = azurerm_api_management_backend.test.url -} -`, template) -} - -func testAccAzureRMApiManagementBackend_template(data acceptance.TestData, testName string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d-%s" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d-%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - sku_name = "Developer_1" -} -`, data.RandomInteger, testName, data.Locations.Primary, data.RandomInteger, testName) -} - -func testAccAzureRMApiManagementBackend_credentialsNoCertificate(data acceptance.TestData) string { - template := testAccAzureRMApiManagementBackend_template(data, "all") - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_backend" "test" { - name = "acctestapi-%d" - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - protocol = "http" - url = "https://acctest" - description = "description" - resource_id = "https://resourceid" - title = "title" - credentials { - authorization { - parameter = "parameter" - scheme = "scheme" - } - header = { - header1 = "header1value1,header1value2" - header2 = "header2value1,header2value2" - } - query = { - query1 = "query1value1,query1value2" - query2 = "query2value1,query2value2" - } - } - proxy { - url = "http://192.168.1.1:8080" - username = "username" - password = "password" - } - tls { - validate_certificate_chain = false - validate_certificate_name = true - } -} -`, template, data.RandomInteger) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_certificate_resource_test.go b/azurerm/internal/services/apimanagement/tests/api_management_certificate_resource_test.go deleted file mode 100644 index 60175a0da619..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_certificate_resource_test.go +++ /dev/null @@ -1,159 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMApiManagementCertificate_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_certificate", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAPIManagementCertificateDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementCertificate_basic(data), - Check: 
resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementCertificateExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "expiration"), - resource.TestCheckResourceAttrSet(data.ResourceName, "subject"), - resource.TestCheckResourceAttrSet(data.ResourceName, "thumbprint"), - ), - }, - { - ResourceName: data.ResourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{ - // not returned from the API - "data", - "password", - }, - }, - }, - }) -} - -func TestAccAzureRMApiManagementCertificate_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_certificate", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAPIManagementCertificateDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementCertificate_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementCertificateExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMApiManagementCertificate_requiresImport), - }, - }) -} - -func testCheckAzureRMAPIManagementCertificateDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.CertificatesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_api_management_certificate" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - resp, err := client.Get(ctx, resourceGroup, serviceName, name) - - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return err - } - } - - return nil - } - return nil -} - -func testCheckAzureRMAPIManagementCertificateExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.CertificatesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - resp, err := client.Get(ctx, resourceGroup, serviceName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: API Management Certificate %q (Resource Group %q / API Management Service %q) does not exist", name, resourceGroup, serviceName) - } - return fmt.Errorf("Bad: Get on apiManagementCertificatesClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMApiManagementCertificate_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - sku_name = "Developer_1" -} - -resource "azurerm_api_management_certificate" "test" { - name = "example-cert" - api_management_name = 
azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - data = filebase64("testdata/keyvaultcert.pfx") - password = "" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMApiManagementCertificate_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMApiManagementCertificate_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_certificate" "import" { - name = azurerm_api_management_certificate.test.name - api_management_name = azurerm_api_management_certificate.test.api_management_name - resource_group_name = azurerm_api_management_certificate.test.resource_group_name - data = azurerm_api_management_certificate.test.data - password = azurerm_api_management_certificate.test.password -} -`, template) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_data_source_test.go b/azurerm/internal/services/apimanagement/tests/api_management_data_source_test.go deleted file mode 100644 index c6c2b106f56b..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_data_source_test.go +++ /dev/null @@ -1,154 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMApiManagement_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_api_management", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceApiManagement_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "publisher_email", "pub1@email.com"), - resource.TestCheckResourceAttr(data.ResourceName, "publisher_name", "pub1"), - resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "Developer_1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - resource.TestCheckResourceAttrSet(data.ResourceName, "public_ip_addresses.#"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMApiManagement_virtualNetwork(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_api_management", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceApiManagement_virtualNetwork(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "publisher_email", "pub1@email.com"), - resource.TestCheckResourceAttr(data.ResourceName, "publisher_name", "pub1"), - resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "Premium_1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - resource.TestCheckResourceAttrSet(data.ResourceName, "public_ip_addresses.#"), - resource.TestCheckResourceAttrSet(data.ResourceName, "private_ip_addresses.#"), - resource.TestCheckResourceAttrSet(data.ResourceName, "additional_location.0.public_ip_addresses.#"), - resource.TestCheckResourceAttrSet(data.ResourceName, "additional_location.0.private_ip_addresses.#"), - ), - }, - }, - }) -} - -func testAccDataSourceApiManagement_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - 
name = "amtestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - publisher_name = "pub1" - publisher_email = "pub1@email.com" - sku_name = "Developer_1" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -data "azurerm_api_management" "test" { - name = azurerm_api_management.test.name - resource_group_name = azurerm_api_management.test.resource_group_name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccDataSourceApiManagement_virtualNetwork(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test1" { - name = "amestRG1-%d" - location = "%s" -} - -resource "azurerm_resource_group" "test2" { - name = "amestRG2-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test1" { - name = "amtestVNET1-%d" - location = azurerm_resource_group.test1.location - resource_group_name = azurerm_resource_group.test1.name - address_space = ["10.0.0.0/16"] -} - -resource "azurerm_subnet" "test1" { - name = "amtestSNET1-%d" - resource_group_name = azurerm_resource_group.test1.name - virtual_network_name = azurerm_virtual_network.test1.name - address_prefix = "10.0.1.0/24" -} - -resource "azurerm_virtual_network" "test2" { - name = "amtestVNET2-%d" - location = azurerm_resource_group.test2.location - resource_group_name = azurerm_resource_group.test2.name - address_space = ["10.1.0.0/16"] -} - -resource "azurerm_subnet" "test2" { - name = "amtestSNET2-%d" - resource_group_name = azurerm_resource_group.test2.name - virtual_network_name = azurerm_virtual_network.test2.name - address_prefix = "10.1.1.0/24" -} - -resource "azurerm_api_management" "test" { - name = "amtestAM-%d" - location = azurerm_resource_group.test1.location - resource_group_name = azurerm_resource_group.test1.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Premium_1" - - additional_location { - location = azurerm_resource_group.test2.location - virtual_network_configuration { - subnet_id = azurerm_subnet.test2.id - } - } - - virtual_network_type = "Internal" - virtual_network_configuration { - subnet_id = azurerm_subnet.test1.id - } -} - -data "azurerm_api_management" "test" { - name = azurerm_api_management.test.name - resource_group_name = azurerm_api_management.test.resource_group_name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.Locations.Secondary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_diagnostic_resource_test.go b/azurerm/internal/services/apimanagement/tests/api_management_diagnostic_resource_test.go deleted file mode 100644 index c11dc02228d5..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_diagnostic_resource_test.go +++ /dev/null @@ -1,228 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/apimanagement/parse" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMApiManagementDiagnostic_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_diagnostic", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementDiagnosticDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementDiagnostic_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementDiagnosticExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementDiagnostic_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_diagnostic", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementDiagnosticDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementDiagnostic_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementDiagnosticExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMApiManagementDiagnostic_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementDiagnosticExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementDiagnostic_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_diagnostic", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementDiagnostic_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementDiagnosticExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMApiManagementDiagnostic_requiresImport), - }, - }) -} - -func testCheckAzureRMApiManagementDiagnosticDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.DiagnosticClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_api_management_diagnostic" { - continue - } - - diagnosticId, err := parse.ApiManagementDiagnosticID(rs.Primary.ID) - if err != nil { - return err - } - resp, err := client.Get(ctx, diagnosticId.ResourceGroup, diagnosticId.ServiceName, diagnosticId.Name) - - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return err - } - } - - return nil - } - return nil -} - -func testCheckAzureRMApiManagementDiagnosticExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.DiagnosticClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - diagnosticId, err := parse.ApiManagementDiagnosticID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := client.Get(ctx, diagnosticId.ResourceGroup, diagnosticId.ServiceName, diagnosticId.Name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) 
{ - return fmt.Errorf("bad: API Management Diagnostic %q (Resource Group %q / API Management Service %q) does not exist", diagnosticId.Name, diagnosticId.ResourceGroup, diagnosticId.ServiceName) - } - return fmt.Errorf("bad: Get on apiManagementDiagnosticClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMApiManagementDiagnostic_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%[1]d" - location = "%[2]s" -} - -resource "azurerm_application_insights" "test" { - name = "acctestappinsights-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - application_type = "web" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - sku_name = "Developer_1" -} - -resource "azurerm_api_management_logger" "test" { - name = "acctestapimnglogger-%[1]d" - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - - application_insights { - instrumentation_key = azurerm_application_insights.test.instrumentation_key - } -} -`, data.RandomInteger, data.Locations.Primary) -} - -func testAccAzureRMApiManagementDiagnostic_basic(data acceptance.TestData) string { - config := testAccAzureRMApiManagementDiagnostic_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_diagnostic" "test" { - identifier = "applicationinsights" - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - api_management_logger_id = azurerm_api_management_logger.test.id -} -`, config) -} - -func testAccAzureRMApiManagementDiagnostic_update(data acceptance.TestData) string { - config := testAccAzureRMApiManagementDiagnostic_template(data) - return fmt.Sprintf(` -%[1]s - -resource "azurerm_application_insights" "test2" { - name = "acctestappinsightsUpdate-%[2]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - application_type = "web" -} - -resource "azurerm_api_management_logger" "test2" { - name = "acctestapimngloggerUpdate-%[2]d" - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - - application_insights { - instrumentation_key = azurerm_application_insights.test2.instrumentation_key - } -} - -resource "azurerm_api_management_diagnostic" "test" { - identifier = "applicationinsights" - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - api_management_logger_id = azurerm_api_management_logger.test2.id -} -`, config, data.RandomInteger) -} - -func testAccAzureRMApiManagementDiagnostic_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMApiManagementDiagnostic_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_diagnostic" "import" { - identifier = azurerm_api_management_diagnostic.test.identifier - resource_group_name = azurerm_api_management_diagnostic.test.resource_group_name - api_management_name = azurerm_api_management_diagnostic.test.api_management_name - api_management_logger_id = azurerm_api_management_diagnostic.test.api_management_logger_id -} -`, template) -} diff --git 
a/azurerm/internal/services/apimanagement/tests/api_management_group_data_source_test.go b/azurerm/internal/services/apimanagement/tests/api_management_group_data_source_test.go deleted file mode 100644 index 376eae63d1df..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_group_data_source_test.go +++ /dev/null @@ -1,64 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMApiManagementGroup_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_api_management_group", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceApiManagementGroup_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "display_name", "Test Group"), - resource.TestCheckResourceAttr(data.ResourceName, "description", ""), - resource.TestCheckResourceAttr(data.ResourceName, "external_id", ""), - resource.TestCheckResourceAttr(data.ResourceName, "type", "custom"), - ), - }, - }, - }) -} - -func testAccDataSourceApiManagementGroup_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - sku_name = "Developer_1" -} - -resource "azurerm_api_management_group" "test" { - name = "acctestAMGroup-%d" - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - display_name = "Test Group" -} - -data "azurerm_api_management_group" "test" { - name = azurerm_api_management_group.test.name - api_management_name = azurerm_api_management_group.test.api_management_name - resource_group_name = azurerm_api_management_group.test.resource_group_name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_group_resource_test.go b/azurerm/internal/services/apimanagement/tests/api_management_group_resource_test.go deleted file mode 100644 index 420cb1aaf8f8..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_group_resource_test.go +++ /dev/null @@ -1,243 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMApiManagementGroup_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_group", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAPIManagementGroupDestroy, - Steps: []resource.TestStep{ - { 
- Config: testAccAzureRMApiManagementGroup_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementGroupExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "display_name", "Test Group"), - resource.TestCheckResourceAttr(data.ResourceName, "type", "custom"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementGroup_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_group", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAPIManagementGroupDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementGroup_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementGroupExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "display_name", "Test Group"), - resource.TestCheckResourceAttr(data.ResourceName, "type", "custom"), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMApiManagementGroup_requiresImport), - }, - }) -} - -func TestAccAzureRMApiManagementGroup_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_group", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAPIManagementGroupDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementGroup_complete(data, "Test Group", "A test description."), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementGroupExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "display_name", "Test Group"), - resource.TestCheckResourceAttr(data.ResourceName, "description", "A test description."), - resource.TestCheckResourceAttr(data.ResourceName, "type", "external"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementGroup_descriptionDisplayNameUpdate(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_group", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAPIManagementGroupDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementGroup_complete(data, "Original Group", "The original description."), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementGroupExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "display_name", "Original Group"), - resource.TestCheckResourceAttr(data.ResourceName, "description", "The original description."), - resource.TestCheckResourceAttr(data.ResourceName, "type", "external"), - ), - }, - { - Config: testAccAzureRMApiManagementGroup_complete(data, "Modified Group", "A modified description."), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementGroupExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "display_name", "Modified Group"), - resource.TestCheckResourceAttr(data.ResourceName, "description", "A modified description."), - resource.TestCheckResourceAttr(data.ResourceName, "type", "external"), - ), - }, - { - Config: testAccAzureRMApiManagementGroup_complete(data, "Original Group", "The original description."), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMAPIManagementGroupExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "display_name", "Original Group"), - resource.TestCheckResourceAttr(data.ResourceName, "description", "The original description."), - resource.TestCheckResourceAttr(data.ResourceName, "type", "external"), - ), - }, - }, - }) -} - -func testCheckAzureRMAPIManagementGroupDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.GroupClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_api_management_group" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - resp, err := client.Get(ctx, resourceGroup, serviceName, name) - - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return err - } - } - - return nil - } - return nil -} - -func testCheckAzureRMAPIManagementGroupExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.GroupClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - resp, err := client.Get(ctx, resourceGroup, serviceName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: API Management Group %q (Resource Group %q / API Management Service %q) does not exist", name, resourceGroup, serviceName) - } - return fmt.Errorf("Bad: Get on apiManagement.GroupClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMApiManagementGroup_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Developer_1" -} - -resource "azurerm_api_management_group" "test" { - name = "acctestAMGroup-%d" - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - display_name = "Test Group" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMApiManagementGroup_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMApiManagementGroup_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_group" "import" { - name = azurerm_api_management_group.test.name - resource_group_name = azurerm_api_management_group.test.resource_group_name - api_management_name = azurerm_api_management_group.test.api_management_name - display_name = azurerm_api_management_group.test.display_name -} -`, template) -} - -func testAccAzureRMApiManagementGroup_complete(data acceptance.TestData, displayName, description string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource 
"azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Developer_1" -} - -resource "azurerm_api_management_group" "test" { - name = "acctestAMGroup-%d" - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - display_name = "%s" - description = "%s" - type = "external" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, displayName, description) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_group_user_resource_test.go b/azurerm/internal/services/apimanagement/tests/api_management_group_user_resource_test.go deleted file mode 100644 index fac6ab3ba89e..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_group_user_resource_test.go +++ /dev/null @@ -1,163 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMApiManagementGroupUser_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_group_user", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAPIManagementGroupUserDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementGroupUser_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementGroupUserExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementGroupUser_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_group_user", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAPIManagementGroupUserDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementGroupUser_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementGroupUserExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMApiManagementGroupUser_requiresImport), - }, - }) -} - -func testCheckAzureRMAPIManagementGroupUserDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.GroupUsersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_api_management_group_user" { - continue - } - - userId := rs.Primary.Attributes["user_id"] - groupName := rs.Primary.Attributes["group_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - resp, err := client.CheckEntityExists(ctx, resourceGroup, serviceName, groupName, userId) - if err != nil { - if !utils.ResponseWasNotFound(resp) { - return err - } 
- } - - return nil - } - return nil -} - -func testCheckAzureRMAPIManagementGroupUserExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.GroupUsersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - userId := rs.Primary.Attributes["user_id"] - groupName := rs.Primary.Attributes["group_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - resp, err := client.CheckEntityExists(ctx, resourceGroup, serviceName, groupName, userId) - if err != nil { - if utils.ResponseWasNotFound(resp) { - return fmt.Errorf("Bad: User %q / Group %q (API Management Service %q / Resource Group %q) does not exist", userId, groupName, serviceName, resourceGroup) - } - return fmt.Errorf("Bad: Get on apiManagement.GroupUsersClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMApiManagementGroupUser_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Developer_1" -} - -resource "azurerm_api_management_group" "test" { - name = "acctestAMGroup-%d" - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - display_name = "Test Group" -} - -resource "azurerm_api_management_user" "test" { - user_id = "acctestuser%d" - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - first_name = "Acceptance" - last_name = "Test" - email = "azure-acctest%d@example.com" -} - -resource "azurerm_api_management_group_user" "test" { - user_id = azurerm_api_management_user.test.user_id - group_name = azurerm_api_management_group.test.name - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMApiManagementGroupUser_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMApiManagementGroupUser_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_group_user" "import" { - user_id = azurerm_api_management_group_user.test.user_id - group_name = azurerm_api_management_group_user.test.group_name - api_management_name = azurerm_api_management_group_user.test.api_management_name - resource_group_name = azurerm_api_management_group_user.test.resource_group_name -} -`, template) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_identity_provider_aad_resource_test.go b/azurerm/internal/services/apimanagement/tests/api_management_identity_provider_aad_resource_test.go deleted file mode 100644 index f4f03d70837e..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_identity_provider_aad_resource_test.go +++ /dev/null @@ -1,211 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - 
"github.com/Azure/azure-sdk-for-go/services/apimanagement/mgmt/2019-12-01/apimanagement" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMApiManagementIdentityProviderAAD_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_identity_provider_aad", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementIdentityProviderAADDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementIdentityProviderAAD_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementIdentityProviderAADExists(data.ResourceName), - ), - }, - data.ImportStep("client_secret"), - }, - }) -} - -func TestAccAzureRMApiManagementIdentityProviderAAD_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_identity_provider_aad", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementIdentityProviderAADDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementIdentityProviderAAD_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementIdentityProviderAADExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "client_id", "00000000-0000-0000-0000-000000000000"), - resource.TestCheckResourceAttr(data.ResourceName, "client_secret", "00000000000000000000000000000000"), - resource.TestCheckResourceAttr(data.ResourceName, "allowed_tenants.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "allowed_tenants.0", data.Client().TenantID), - ), - }, - { - Config: testAccAzureRMApiManagementIdentityProviderAAD_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementIdentityProviderAADExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "client_id", "11111111-1111-1111-1111-111111111111"), - resource.TestCheckResourceAttr(data.ResourceName, "client_secret", "11111111111111111111111111111111"), - resource.TestCheckResourceAttr(data.ResourceName, "allowed_tenants.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "allowed_tenants.0", data.Client().TenantID), - resource.TestCheckResourceAttr(data.ResourceName, "allowed_tenants.1", data.Client().TenantID), - ), - }, - data.ImportStep("client_secret"), - }, - }) -} - -func TestAccAzureRMApiManagementIdentityProviderAAD_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_identity_provider_aad", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementIdentityProviderAADDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementIdentityProviderAAD_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementIdentityProviderAADExists(data.ResourceName), - ), - }, - 
data.RequiresImportErrorStep(testAccAzureRMApiManagementIdentityProviderAAD_requiresImport), - }, - }) -} - -func testCheckAzureRMApiManagementIdentityProviderAADDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.IdentityProviderClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_api_management_identity_provider_aad" { - continue - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - resp, err := client.Get(ctx, resourceGroup, serviceName, apimanagement.Aad) - - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return err - } - } - - return nil - } - return nil -} - -func testCheckAzureRMApiManagementIdentityProviderAADExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.IdentityProviderClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - resp, err := client.Get(ctx, resourceGroup, serviceName, apimanagement.Aad) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: API Management Identity Provider %q (Resource Group %q / API Management Service %q) does not exist", apimanagement.Aad, resourceGroup, serviceName) - } - return fmt.Errorf("Bad: Get on apiManagementIdentityProviderClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMApiManagementIdentityProviderAAD_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-api-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - sku_name = "Developer_1" -} - -resource "azurerm_api_management_identity_provider_aad" "test" { - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - client_id = "00000000-0000-0000-0000-000000000000" - client_secret = "00000000000000000000000000000000" - signin_tenant = "00000000-0000-0000-0000-000000000000" - allowed_tenants = ["%s"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.Client().TenantID) -} - -func testAccAzureRMApiManagementIdentityProviderAAD_update(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-api-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - sku_name = "Developer_1" -} - -resource "azurerm_api_management_identity_provider_aad" "test" { - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - client_id = 
"11111111-1111-1111-1111-111111111111" - client_secret = "11111111111111111111111111111111" - allowed_tenants = ["%s", "%s"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.Client().TenantID, data.Client().TenantID) -} - -func testAccAzureRMApiManagementIdentityProviderAAD_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMApiManagementIdentityProviderAAD_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_identity_provider_aad" "import" { - resource_group_name = azurerm_api_management_identity_provider_aad.test.resource_group_name - api_management_name = azurerm_api_management_identity_provider_aad.test.api_management_name - client_id = azurerm_api_management_identity_provider_aad.test.client_id - client_secret = azurerm_api_management_identity_provider_aad.test.client_secret - allowed_tenants = azurerm_api_management_identity_provider_aad.test.allowed_tenants -} -`, template) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_identity_provider_facebook_resource_test.go b/azurerm/internal/services/apimanagement/tests/api_management_identity_provider_facebook_resource_test.go deleted file mode 100644 index f41de65225e8..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_identity_provider_facebook_resource_test.go +++ /dev/null @@ -1,202 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/apimanagement/mgmt/2019-12-01/apimanagement" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMApiManagementIdentityProviderFacebook_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_identity_provider_facebook", "test") - config := testAccAzureRMApiManagementIdentityProviderFacebook_basic(data) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementIdentityProviderFacebookDestroy, - Steps: []resource.TestStep{ - { - Config: config, - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementIdentityProviderFacebookExists(data.ResourceName), - ), - }, - data.ImportStep("app_secret"), - }, - }) -} - -func TestAccAzureRMApiManagementIdentityProviderFacebook_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_identity_provider_facebook", "test") - config := testAccAzureRMApiManagementIdentityProviderFacebook_basic(data) - updateConfig := testAccAzureRMApiManagementIdentityProviderFacebook_update(data) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementIdentityProviderFacebookDestroy, - Steps: []resource.TestStep{ - { - Config: config, - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementIdentityProviderFacebookExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "app_id", "00000000000000000000000000000000"), - ), - }, - data.ImportStep("app_secret"), - { - Config: updateConfig, - Check: 
resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementIdentityProviderFacebookExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "app_id", "11111111111111111111111111111111"), - ), - }, - data.ImportStep("app_secret"), - }, - }) -} - -func TestAccAzureRMApiManagementIdentityProviderFacebook_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_identity_provider_facebook", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementIdentityProviderFacebookDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementIdentityProviderFacebook_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementIdentityProviderFacebookExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMApiManagementIdentityProviderFacebook_requiresImport), - }, - }) -} - -func testCheckAzureRMApiManagementIdentityProviderFacebookDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.IdentityProviderClient - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_api_management_identity_provider_facebook" { - continue - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - resp, err := client.Get(ctx, resourceGroup, serviceName, apimanagement.Facebook) - - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return err - } - } - - return nil - } - return nil -} - -func testCheckAzureRMApiManagementIdentityProviderFacebookExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.IdentityProviderClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - resp, err := client.Get(ctx, resourceGroup, serviceName, apimanagement.Facebook) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: API Management Identity Provider %q (Resource Group %q / API Management Service %q) does not exist", apimanagement.Facebook, resourceGroup, serviceName) - } - return fmt.Errorf("Bad: Get on apiManagementIdentityProviderClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMApiManagementIdentityProviderFacebook_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-api-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - sku_name = "Developer_1" -} - -resource "azurerm_api_management_identity_provider_facebook" "test" { - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - app_id = "00000000000000000000000000000000" - app_secret = 
"00000000000000000000000000000000" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMApiManagementIdentityProviderFacebook_update(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-api-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - sku_name = "Developer_1" -} - -resource "azurerm_api_management_identity_provider_facebook" "test" { - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - app_id = "11111111111111111111111111111111" - app_secret = "11111111111111111111111111111111" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMApiManagementIdentityProviderFacebook_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMApiManagementIdentityProviderFacebook_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_identity_provider_facebook" "import" { - resource_group_name = azurerm_api_management_identity_provider_facebook.test.resource_group_name - api_management_name = azurerm_api_management_identity_provider_facebook.test.api_management_name - app_id = azurerm_api_management_identity_provider_facebook.test.app_id - app_secret = azurerm_api_management_identity_provider_facebook.test.app_secret -} -`, template) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_identity_provider_google_resource_test.go b/azurerm/internal/services/apimanagement/tests/api_management_identity_provider_google_resource_test.go deleted file mode 100644 index 2a5dd8d9ab19..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_identity_provider_google_resource_test.go +++ /dev/null @@ -1,201 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/apimanagement/mgmt/2019-12-01/apimanagement" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMApiManagementIdentityProviderGoogle_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_identity_provider_google", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementIdentityProviderGoogleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementIdentityProviderGoogle_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementIdentityProviderGoogleExists(data.ResourceName), - ), - }, - data.ImportStep("client_secret"), - }, - }) -} - -func TestAccAzureRMApiManagementIdentityProviderGoogle_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_identity_provider_google", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: 
acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementIdentityProviderGoogleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementIdentityProviderGoogle_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementIdentityProviderGoogleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "client_id", "00000000.apps.googleusercontent.com"), - ), - }, - data.ImportStep("client_secret"), - { - Config: testAccAzureRMApiManagementIdentityProviderGoogle_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementIdentityProviderGoogleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "client_id", "11111111.apps.googleusercontent.com"), - ), - }, - data.ImportStep("client_secret"), - }, - }) -} - -func TestAccAzureRMApiManagementIdentityProviderGoogle_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_identity_provider_google", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementIdentityProviderGoogleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementIdentityProviderGoogle_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementIdentityProviderGoogleExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMApiManagementIdentityProviderGoogle_requiresImport), - }, - }) -} - -func testCheckAzureRMApiManagementIdentityProviderGoogleDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.IdentityProviderClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_api_management_identity_provider_google" { - continue - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - resp, err := client.Get(ctx, resourceGroup, serviceName, apimanagement.Google) - - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return err - } - } - - return nil - } - return nil -} - -func testCheckAzureRMApiManagementIdentityProviderGoogleExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.IdentityProviderClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - resp, err := client.Get(ctx, resourceGroup, serviceName, apimanagement.Google) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: API Management Identity Provider %q (Resource Group %q / API Management Service %q) does not exist", apimanagement.Google, resourceGroup, serviceName) - } - return fmt.Errorf("Bad: Get on apiManagementIdentityProviderClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMApiManagementIdentityProviderGoogle_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = 
"acctestRG-api-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - sku_name = "Developer_1" -} - -resource "azurerm_api_management_identity_provider_google" "test" { - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - client_id = "00000000.apps.googleusercontent.com" - client_secret = "00000000000000000000000000000000" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMApiManagementIdentityProviderGoogle_update(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-api-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - sku_name = "Developer_1" -} - -resource "azurerm_api_management_identity_provider_google" "test" { - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - client_id = "11111111.apps.googleusercontent.com" - client_secret = "11111111111111111111111111111111" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMApiManagementIdentityProviderGoogle_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMApiManagementIdentityProviderGoogle_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_identity_provider_google" "import" { - resource_group_name = azurerm_api_management_identity_provider_google.test.resource_group_name - api_management_name = azurerm_api_management_identity_provider_google.test.api_management_name - client_id = azurerm_api_management_identity_provider_google.test.client_id - client_secret = azurerm_api_management_identity_provider_google.test.client_secret -} -`, template) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_identity_provider_microsoft_resource_test.go b/azurerm/internal/services/apimanagement/tests/api_management_identity_provider_microsoft_resource_test.go deleted file mode 100644 index dfc3375d30f3..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_identity_provider_microsoft_resource_test.go +++ /dev/null @@ -1,202 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/apimanagement/mgmt/2019-12-01/apimanagement" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMApiManagementIdentityProviderMicrosoft_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_identity_provider_microsoft", "test") - config := testAccAzureRMApiManagementIdentityProviderMicrosoft_basic(data) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: 
acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementIdentityProviderMicrosoftDestroy, - Steps: []resource.TestStep{ - { - Config: config, - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementIdentityProviderMicrosoftExists(data.ResourceName), - ), - }, - data.ImportStep("client_secret"), - }, - }) -} - -func TestAccAzureRMApiManagementIdentityProviderMicrosoft_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_identity_provider_microsoft", "test") - config := testAccAzureRMApiManagementIdentityProviderMicrosoft_basic(data) - updateConfig := testAccAzureRMApiManagementIdentityProviderMicrosoft_update(data) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementIdentityProviderMicrosoftDestroy, - Steps: []resource.TestStep{ - { - Config: config, - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementIdentityProviderMicrosoftExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "client_id", "00000000-0000-0000-0000-000000000000"), - ), - }, - data.ImportStep("client_secret"), - { - Config: updateConfig, - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementIdentityProviderMicrosoftExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "client_id", "11111111-1111-1111-1111-111111111111"), - ), - }, - data.ImportStep("client_secret"), - }, - }) -} - -func TestAccAzureRMApiManagementIdentityProviderMicrosoft_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_identity_provider_microsoft", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementIdentityProviderMicrosoftDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementIdentityProviderMicrosoft_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementIdentityProviderMicrosoftExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMApiManagementIdentityProviderMicrosoft_requiresImport), - }, - }) -} - -func testCheckAzureRMApiManagementIdentityProviderMicrosoftDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.IdentityProviderClient - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_api_management_identity_provider_microsoft" { - continue - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - resp, err := client.Get(ctx, resourceGroup, serviceName, apimanagement.Microsoft) - - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return err - } - } - - return nil - } - return nil -} - -func testCheckAzureRMApiManagementIdentityProviderMicrosoftExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - client := 
acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.IdentityProviderClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - resp, err := client.Get(ctx, resourceGroup, serviceName, apimanagement.Microsoft) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: API Management Identity Provider %q (Resource Group %q / API Management Service %q) does not exist", apimanagement.Microsoft, resourceGroup, serviceName) - } - return fmt.Errorf("Bad: Get on apiManagementIdentityProviderClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMApiManagementIdentityProviderMicrosoft_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-api-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - sku_name = "Developer_1" -} - -resource "azurerm_api_management_identity_provider_microsoft" "test" { - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - client_id = "00000000-0000-0000-0000-000000000000" - client_secret = "00000000000000000000000000000000" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMApiManagementIdentityProviderMicrosoft_update(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-api-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - sku_name = "Developer_1" -} - -resource "azurerm_api_management_identity_provider_microsoft" "test" { - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - client_id = "11111111-1111-1111-1111-111111111111" - client_secret = "11111111111111111111111111111111" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMApiManagementIdentityProviderMicrosoft_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMApiManagementIdentityProviderMicrosoft_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_identity_provider_microsoft" "import" { - resource_group_name = azurerm_api_management_identity_provider_microsoft.test.resource_group_name - api_management_name = azurerm_api_management_identity_provider_microsoft.test.api_management_name - client_id = azurerm_api_management_identity_provider_microsoft.test.client_id - client_secret = azurerm_api_management_identity_provider_microsoft.test.client_secret -} -`, template) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_identity_provider_twitter_resource_test.go b/azurerm/internal/services/apimanagement/tests/api_management_identity_provider_twitter_resource_test.go deleted file mode 100644 index e6393d1f4dcf..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_identity_provider_twitter_resource_test.go +++ /dev/null @@ -1,202 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - 
"github.com/Azure/azure-sdk-for-go/services/apimanagement/mgmt/2019-12-01/apimanagement" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMApiManagementIdentityProviderTwitter_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_identity_provider_twitter", "test") - config := testAccAzureRMApiManagementIdentityProviderTwitter_basic(data) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementIdentityProviderTwitterDestroy, - Steps: []resource.TestStep{ - { - Config: config, - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementIdentityProviderTwtterExists(data.ResourceName), - ), - }, - data.ImportStep("api_secret_key"), - }, - }) -} - -func TestAccAzureRMApiManagementIdentityProviderTwitter_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_identity_provider_twitter", "test") - config := testAccAzureRMApiManagementIdentityProviderTwitter_basic(data) - updateConfig := testAccAzureRMApiManagementIdentityProviderTwitter_update(data) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementIdentityProviderTwitterDestroy, - Steps: []resource.TestStep{ - { - Config: config, - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementIdentityProviderTwtterExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "api_key", "00000000000000000000000000000000"), - ), - }, - data.ImportStep("api_secret_key"), - { - Config: updateConfig, - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementIdentityProviderTwtterExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "api_key", "11111111111111111111111111111111"), - ), - }, - data.ImportStep("api_secret_key"), - }, - }) -} - -func TestAccAzureRMApiManagementIdentityProviderTwitter_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_identity_provider_twitter", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementIdentityProviderTwitterDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementIdentityProviderTwitter_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementIdentityProviderTwtterExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMApiManagementIdentityProviderTwitter_requiresImport), - }, - }) -} - -func testCheckAzureRMApiManagementIdentityProviderTwitterDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.IdentityProviderClient - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_api_management_identity_provider_twitter" { - continue - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] 
- - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - resp, err := client.Get(ctx, resourceGroup, serviceName, apimanagement.Twitter) - - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return err - } - } - - return nil - } - return nil -} - -func testCheckAzureRMApiManagementIdentityProviderTwtterExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.IdentityProviderClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - resp, err := client.Get(ctx, resourceGroup, serviceName, apimanagement.Twitter) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: API Management Identity Provider %q (Resource Group %q / API Management Service %q) does not exist", apimanagement.Twitter, resourceGroup, serviceName) - } - return fmt.Errorf("Bad: Get on apiManagementIdentityProviderClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMApiManagementIdentityProviderTwitter_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-api-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - sku_name = "Developer_1" -} - -resource "azurerm_api_management_identity_provider_twitter" "test" { - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - api_key = "00000000000000000000000000000000" - api_secret_key = "00000000000000000000000000000000" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMApiManagementIdentityProviderTwitter_update(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-api-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - sku_name = "Developer_1" -} - -resource "azurerm_api_management_identity_provider_twitter" "test" { - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - api_key = "11111111111111111111111111111111" - api_secret_key = "11111111111111111111111111111111" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMApiManagementIdentityProviderTwitter_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMApiManagementIdentityProviderTwitter_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_identity_provider_twitter" "import" { - resource_group_name = azurerm_api_management_identity_provider_twitter.test.resource_group_name - api_management_name = azurerm_api_management_identity_provider_twitter.test.api_management_name - 
api_key = azurerm_api_management_identity_provider_twitter.test.api_key - api_secret_key = azurerm_api_management_identity_provider_twitter.test.api_secret_key -} -`, template) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_logger_resource_test.go b/azurerm/internal/services/apimanagement/tests/api_management_logger_resource_test.go deleted file mode 100644 index 6b77d31ae6fb..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_logger_resource_test.go +++ /dev/null @@ -1,397 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMApiManagementLogger_basicEventHub(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_logger", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementLoggerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementLogger_basicEventHub(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementLoggerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "buffered", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "eventhub.#", "1"), - resource.TestCheckResourceAttrSet(data.ResourceName, "eventhub.0.name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "eventhub.0.connection_string"), - ), - }, - { - ResourceName: data.ResourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"eventhub.0.connection_string"}, - }, - }, - }) -} - -func TestAccAzureRMApiManagementLogger_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_logger", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementLoggerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementLogger_basicEventHub(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementLoggerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "buffered", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "eventhub.#", "1"), - resource.TestCheckResourceAttrSet(data.ResourceName, "eventhub.0.name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "eventhub.0.connection_string"), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMApiManagementLogger_requiresImport), - }, - }) -} - -func TestAccAzureRMApiManagementLogger_basicApplicationInsights(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_logger", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementLoggerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementLogger_basicApplicationInsights(data), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMApiManagementLoggerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "buffered", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "eventhub.#", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "application_insights.#", "1"), - resource.TestCheckResourceAttrSet(data.ResourceName, "application_insights.0.instrumentation_key"), - ), - }, - { - ResourceName: data.ResourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"application_insights.#", "application_insights.0.instrumentation_key"}, - }, - }, - }) -} - -func TestAccAzureRMApiManagementLogger_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_logger", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementLoggerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementLogger_complete(data, "Logger from Terraform test", "false"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementLoggerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "description", "Logger from Terraform test"), - resource.TestCheckResourceAttr(data.ResourceName, "buffered", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "eventhub.#", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "application_insights.#", "1"), - resource.TestCheckResourceAttrSet(data.ResourceName, "application_insights.0.instrumentation_key"), - ), - }, - { - ResourceName: data.ResourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"application_insights.#", "application_insights.0.instrumentation_key"}, - }, - }, - }) -} - -func TestAccAzureRMApiManagementLogger_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_logger", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementLoggerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementLogger_basicApplicationInsights(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementLoggerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "buffered", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "description", ""), - resource.TestCheckResourceAttr(data.ResourceName, "eventhub.#", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "application_insights.#", "1"), - resource.TestCheckResourceAttrSet(data.ResourceName, "application_insights.0.instrumentation_key"), - ), - }, - { - Config: testAccAzureRMApiManagementLogger_basicEventHub(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementLoggerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "buffered", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "description", ""), - resource.TestCheckResourceAttr(data.ResourceName, "eventhub.#", "1"), - resource.TestCheckResourceAttrSet(data.ResourceName, "eventhub.0.name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "eventhub.0.connection_string"), - ), - }, - { - Config: testAccAzureRMApiManagementLogger_complete(data, "Logger from Terraform test", "false"), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMApiManagementLoggerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "buffered", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "description", "Logger from Terraform test"), - resource.TestCheckResourceAttr(data.ResourceName, "eventhub.#", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "application_insights.#", "1"), - resource.TestCheckResourceAttrSet(data.ResourceName, "application_insights.0.instrumentation_key"), - ), - }, - { - Config: testAccAzureRMApiManagementLogger_complete(data, "Logger from Terraform update test", "true"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementLoggerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "buffered", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "description", "Logger from Terraform update test"), - resource.TestCheckResourceAttr(data.ResourceName, "eventhub.#", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "application_insights.#", "1"), - resource.TestCheckResourceAttrSet(data.ResourceName, "application_insights.0.instrumentation_key"), - ), - }, - { - Config: testAccAzureRMApiManagementLogger_complete(data, "Logger from Terraform test", "false"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementLoggerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "buffered", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "description", "Logger from Terraform test"), - resource.TestCheckResourceAttr(data.ResourceName, "eventhub.#", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "application_insights.#", "1"), - resource.TestCheckResourceAttrSet(data.ResourceName, "application_insights.0.instrumentation_key"), - ), - }, - { - Config: testAccAzureRMApiManagementLogger_basicEventHub(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementLoggerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "buffered", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "description", ""), - resource.TestCheckResourceAttr(data.ResourceName, "eventhub.#", "1"), - resource.TestCheckResourceAttrSet(data.ResourceName, "eventhub.0.name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "eventhub.0.connection_string"), - ), - }, - }, - }) -} - -func testCheckAzureRMApiManagementLoggerExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.LoggerClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("API Management Logger not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - if resp, err := client.Get(ctx, resourceGroup, serviceName, name); err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Logger %q (Resource Group %q / API Management Service %q) does not exist", name, resourceGroup, serviceName) - } - return fmt.Errorf("Bad: Get on apiManagement.LoggerClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMApiManagementLoggerDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.LoggerClient - ctx := 
acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_api_management_logger" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - if resp, err := client.Get(ctx, resourceGroup, serviceName, name); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Get on apiManagement.LoggerClient: %+v", err) - } - } - - return nil - } - - return nil -} - -func testAccAzureRMApiManagementLogger_basicEventHub(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Basic" -} - -resource "azurerm_eventhub" "test" { - name = "acctesteventhub-%d" - namespace_name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - partition_count = 2 - message_retention = 1 -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Developer_1" -} - -resource "azurerm_api_management_logger" "test" { - name = "acctestapimnglogger-%d" - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - - eventhub { - name = azurerm_eventhub.test.name - connection_string = azurerm_eventhub_namespace.test.default_primary_connection_string - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMApiManagementLogger_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMApiManagementLogger_basicEventHub(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_logger" "import" { - name = azurerm_api_management_logger.test.name - api_management_name = azurerm_api_management_logger.test.api_management_name - resource_group_name = azurerm_api_management_logger.test.resource_group_name - - eventhub { - name = azurerm_eventhub.test.name - connection_string = azurerm_eventhub_namespace.test.default_primary_connection_string - } -} -`, template) -} - -func testAccAzureRMApiManagementLogger_basicApplicationInsights(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_application_insights" "test" { - name = "acctestappinsights-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - application_type = "other" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Developer_1" -} - -resource "azurerm_api_management_logger" "test" { - name = "acctestapimnglogger-%d" - api_management_name = azurerm_api_management.test.name - 
resource_group_name = azurerm_resource_group.test.name - - application_insights { - instrumentation_key = azurerm_application_insights.test.instrumentation_key - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMApiManagementLogger_complete(data acceptance.TestData, description, buffered string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_application_insights" "test" { - name = "acctestappinsights-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - application_type = "other" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Developer_1" -} - -resource "azurerm_api_management_logger" "test" { - name = "acctestapimnglogger-%d" - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - description = "%s" - buffered = %s - - application_insights { - instrumentation_key = azurerm_application_insights.test.instrumentation_key - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, description, buffered) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_named_value_resource_test.go b/azurerm/internal/services/apimanagement/tests/api_management_named_value_resource_test.go deleted file mode 100644 index e95e6aad7a00..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_named_value_resource_test.go +++ /dev/null @@ -1,178 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMApiManagementNamedValue_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_named_value", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAPIManagementNamedValueDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementNamedValue_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementNamedValueExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementNamedValue_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_named_value", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAPIManagementNamedValueDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementNamedValue_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementNamedValueExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: 
testAccAzureRMApiManagementNamedValue_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementNamedValueExists(data.ResourceName), - ), - }, - data.ImportStep("value"), - }, - }) -} - -func testCheckAzureRMAPIManagementNamedValueDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.NamedValueClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_api_management_named_value" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - resp, err := client.Get(ctx, resourceGroup, serviceName, name) - - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return err - } - } - - return nil - } - return nil -} - -func testCheckAzureRMAPIManagementNamedValueExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.NamedValueClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - resp, err := client.Get(ctx, resourceGroup, serviceName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: API Management Property %q (Resource Group %q / API Management Service %q) does not exist", name, resourceGroup, serviceName) - } - return fmt.Errorf("Bad: Get on apiManagement.NamedValueClient: %+v", err) - } - - return nil - } -} - -/* - - */ - -func testAccAzureRMApiManagementNamedValue_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Developer_1" -} - -resource "azurerm_api_management_named_value" "test" { - name = "acctestAMProperty-%d" - resource_group_name = azurerm_api_management.test.resource_group_name - api_management_name = azurerm_api_management.test.name - display_name = "TestProperty%d" - value = "Test Value" - tags = ["tag1", "tag2"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMApiManagementNamedValue_update(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Developer_1" -} - -resource "azurerm_api_management_named_value" "test" { - name = "acctestAMProperty-%d" - resource_group_name = azurerm_api_management.test.resource_group_name - 
api_management_name = azurerm_api_management.test.name - display_name = "TestProperty2%d" - value = "Test Value2" - secret = true - tags = ["tag3", "tag4"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_openid_connect_provider_resource_test.go b/azurerm/internal/services/apimanagement/tests/api_management_openid_connect_provider_resource_test.go deleted file mode 100644 index c9e30f0af2fd..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_openid_connect_provider_resource_test.go +++ /dev/null @@ -1,200 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMApiManagementOpenIDConnectProvider_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_openid_connect_provider", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementOpenIDConnectProviderDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementOpenIDConnectProvider_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementOpenIDConnectProviderExists(data.ResourceName), - ), - }, - data.ImportStep("client_secret"), - }, - }) -} - -func TestAccAzureRMApiManagementOpenIDConnectProvider_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_openid_connect_provider", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementOpenIDConnectProviderDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementOpenIDConnectProvider_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementOpenIDConnectProviderExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMApiManagementOpenIDConnectProvider_requiresImport), - }, - }) -} - -func TestAccAzureRMApiManagementOpenIDConnectProvider_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_openid_connect_provider", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementOpenIDConnectProviderDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementOpenIDConnectProvider_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementOpenIDConnectProviderExists(data.ResourceName), - ), - }, - data.ImportStep("client_secret"), - { - Config: testAccAzureRMApiManagementOpenIDConnectProvider_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementOpenIDConnectProviderExists(data.ResourceName), - ), - }, - data.ImportStep("client_secret"), - }, - }) -} - -func testCheckAzureRMApiManagementOpenIDConnectProviderExists(resourceName string) resource.TestCheckFunc { 
- return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.OpenIdConnectClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("API Management OpenID Connect Provider not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - if resp, err := client.Get(ctx, resourceGroup, serviceName, name); err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: OpenID Connect Provider %q (Resource Group %q / API Management Service %q) does not exist", name, resourceGroup, serviceName) - } - return fmt.Errorf("Bad: Get on apiManagement.OpenIdConnectClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMApiManagementOpenIDConnectProviderDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.OpenIdConnectClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_api_management_openid_connect_provider" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - if resp, err := client.Get(ctx, resourceGroup, serviceName, name); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Get on apiManagement.OpenIdConnectClient: %+v", err) - } - } - - return nil - } - - return nil -} - -func testAccAzureRMApiManagementOpenIDConnectProvider_basic(data acceptance.TestData) string { - template := testAccAzureRMApiManagementOpenIDConnectProvider_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_openid_connect_provider" "test" { - name = "acctest-%d" - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - client_id = "00001111-2222-3333-%d" - client_secret = "%d-cwdavsxbacsaxZX-%d" - display_name = "Initial Name" - metadata_endpoint = "https://azacctest.hashicorptest.com/example/foo" -} -`, template, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMApiManagementOpenIDConnectProvider_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMApiManagementOpenIDConnectProvider_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_openid_connect_provider" "import" { - name = azurerm_api_management_openid_connect_provider.test.name - api_management_name = azurerm_api_management_openid_connect_provider.test.api_management_name - resource_group_name = azurerm_api_management_openid_connect_provider.test.resource_group_name - client_id = azurerm_api_management_openid_connect_provider.test.client_id - client_secret = azurerm_api_management_openid_connect_provider.test.client_secret - display_name = azurerm_api_management_openid_connect_provider.test.display_name - metadata_endpoint = azurerm_api_management_openid_connect_provider.test.metadata_endpoint -} -`, template) -} - -func testAccAzureRMApiManagementOpenIDConnectProvider_complete(data acceptance.TestData) string { - template := testAccAzureRMApiManagementOpenIDConnectProvider_template(data) - return fmt.Sprintf(` -%s - -resource 
"azurerm_api_management_openid_connect_provider" "test" { - name = "acctest-%d" - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - client_id = "00001111-3333-2222-%d" - client_secret = "%d-423egvwdcsjx-%d" - display_name = "Updated Name" - description = "Example description" - metadata_endpoint = "https://azacctest.hashicorptest.com/example/updated" -} -`, template, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMApiManagementOpenIDConnectProvider_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - sku_name = "Developer_1" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_product_api_resource_test.go b/azurerm/internal/services/apimanagement/tests/api_management_product_api_resource_test.go deleted file mode 100644 index aadf456bfaae..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_product_api_resource_test.go +++ /dev/null @@ -1,167 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMApiManagementProductApi_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_product_api", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAPIManagementProductApiDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementProductApi_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementProductApiExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementProductApi_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_product_api", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAPIManagementProductApiDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementProductApi_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementProductApiExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMApiManagementProductApi_requiresImport), - }, - }) -} - -func testCheckAzureRMAPIManagementProductApiDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.ProductApisClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_api_management_product_api" { - 
continue - } - - apiName := rs.Primary.Attributes["api_name"] - productId := rs.Primary.Attributes["product_id"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - resp, err := client.CheckEntityExists(ctx, resourceGroup, serviceName, productId, apiName) - if err != nil { - if !utils.ResponseWasNotFound(resp) { - return err - } - } - - return nil - } - return nil -} - -func testCheckAzureRMAPIManagementProductApiExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.ProductApisClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - apiName := rs.Primary.Attributes["api_name"] - productId := rs.Primary.Attributes["product_id"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - resp, err := client.CheckEntityExists(ctx, resourceGroup, serviceName, productId, apiName) - if err != nil { - if utils.ResponseWasNotFound(resp) { - return fmt.Errorf("Bad: API %q / Product %q (API Management Service %q / Resource Group %q) does not exist", apiName, productId, serviceName, resourceGroup) - } - return fmt.Errorf("Bad: Get on apiManagement.ProductApisClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMApiManagementProductApi_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Developer_1" -} - -resource "azurerm_api_management_product" "test" { - product_id = "test-product" - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - display_name = "Test Product" - subscription_required = true - approval_required = false - published = true -} - -resource "azurerm_api_management_api" "test" { - name = "acctestapi-%d" - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - display_name = "api1" - path = "api1" - protocols = ["https"] - revision = "1" -} - -resource "azurerm_api_management_product_api" "test" { - product_id = azurerm_api_management_product.test.product_id - api_name = azurerm_api_management_api.test.name - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMApiManagementProductApi_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMApiManagementProductApi_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_product_api" "import" { - api_name = azurerm_api_management_product_api.test.api_name - product_id = azurerm_api_management_product_api.test.product_id - api_management_name = azurerm_api_management_product_api.test.api_management_name - resource_group_name = azurerm_api_management_product_api.test.resource_group_name -} -`, template) -} 
diff --git a/azurerm/internal/services/apimanagement/tests/api_management_product_data_source_test.go b/azurerm/internal/services/apimanagement/tests/api_management_product_data_source_test.go deleted file mode 100644 index ad89dbed8310..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_product_data_source_test.go +++ /dev/null @@ -1,73 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMApiManagementProduct_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_api_management_product", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceApiManagementProduct_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "product_id", "test-product"), - resource.TestCheckResourceAttr(data.ResourceName, "display_name", "Test Product"), - resource.TestCheckResourceAttr(data.ResourceName, "subscription_required", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "approval_required", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "published", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "description", "This is an example description"), - resource.TestCheckResourceAttr(data.ResourceName, "terms", "These are some example terms and conditions"), - ), - }, - }, - }) -} - -func testAccDataSourceApiManagementProduct_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "amtestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - publisher_name = "pub1" - publisher_email = "pub1@email.com" - sku_name = "Developer_1" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_api_management_product" "test" { - product_id = "test-product" - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - display_name = "Test Product" - subscription_required = true - approval_required = true - subscriptions_limit = 2 - published = true - description = "This is an example description" - terms = "These are some example terms and conditions" -} - -data "azurerm_api_management_product" "test" { - product_id = azurerm_api_management_product.test.product_id - api_management_name = azurerm_api_management_product.test.api_management_name - resource_group_name = azurerm_api_management_product.test.resource_group_name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_product_group_resource_test.go b/azurerm/internal/services/apimanagement/tests/api_management_product_group_resource_test.go deleted file mode 100644 index 89565686b736..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_product_group_resource_test.go +++ /dev/null @@ -1,164 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMApiManagementProductGroup_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_product_group", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAPIManagementProductGroupDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementProductGroup_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementProductGroupExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementProductGroup_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_product_group", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAPIManagementProductGroupDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementProductGroup_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementProductGroupExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMApiManagementProductGroup_requiresImport), - }, - }) -} - -func testCheckAzureRMAPIManagementProductGroupDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.ProductGroupsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_api_management_product_group" { - continue - } - - productId := rs.Primary.Attributes["product_id"] - groupName := rs.Primary.Attributes["group_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - resp, err := client.CheckEntityExists(ctx, resourceGroup, serviceName, productId, groupName) - if err != nil { - if !utils.ResponseWasNotFound(resp) { - return err - } - } - - return nil - } - return nil -} - -func testCheckAzureRMAPIManagementProductGroupExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.ProductGroupsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - productId := rs.Primary.Attributes["product_id"] - groupName := rs.Primary.Attributes["group_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - resp, err := client.CheckEntityExists(ctx, resourceGroup, serviceName, productId, groupName) - if err != nil { - if utils.ResponseWasNotFound(resp) { - return fmt.Errorf("Bad: Product %q / Group %q (API Management Service %q / Resource Group %q) does not exist", productId, groupName, serviceName, resourceGroup) - } - return fmt.Errorf("Bad: Get on apiManagement.ProductGroupsClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMApiManagementProductGroup_basic(data acceptance.TestData) string { - return 
fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Developer_1" -} - -resource "azurerm_api_management_product" "test" { - product_id = "test-product" - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - display_name = "Test Product" - subscription_required = true - approval_required = false - published = true -} - -resource "azurerm_api_management_group" "test" { - name = "acctestAMGroup-%d" - resource_group_name = azurerm_resource_group.test.name - api_management_name = azurerm_api_management.test.name - display_name = "Test Group" -} - -resource "azurerm_api_management_product_group" "test" { - product_id = azurerm_api_management_product.test.product_id - group_name = azurerm_api_management_group.test.name - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMApiManagementProductGroup_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMApiManagementProductGroup_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_product_group" "import" { - product_id = azurerm_api_management_product_group.test.product_id - group_name = azurerm_api_management_product_group.test.group_name - api_management_name = azurerm_api_management_product_group.test.api_management_name - resource_group_name = azurerm_api_management_product_group.test.resource_group_name -} -`, template) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_product_policy_resource_test.go b/azurerm/internal/services/apimanagement/tests/api_management_product_policy_resource_test.go deleted file mode 100644 index 6249e8121d1b..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_product_policy_resource_test.go +++ /dev/null @@ -1,240 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/apimanagement/mgmt/2019-12-01/apimanagement" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMApiManagementProductPolicy_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_product_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementProductPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementProductPolicy_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementProductPolicyExists(data.ResourceName), - ), - }, - { - ResourceName: data.ResourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"xml_link"}, - }, - }, - 
}) -} - -func TestAccAzureRMApiManagementProductPolicy_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_product_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementProductPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementProductPolicy_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementProductPolicyExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMApiManagementProductPolicy_requiresImport), - }, - }) -} - -func TestAccAzureRMApiManagementProductPolicy_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_product_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementProductPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementProductPolicy_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementProductPolicyExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMApiManagementProductPolicy_updated(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementProductPolicyExists(data.ResourceName), - ), - }, - { - ResourceName: data.ResourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"xml_link"}, - }, - }, - }) -} - -func testCheckAzureRMApiManagementProductPolicyExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.ProductPoliciesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - productID := rs.Primary.Attributes["product_id"] - serviceName := rs.Primary.Attributes["api_management_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, serviceName, productID, apimanagement.PolicyExportFormatXML) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Product Policy (API Management Service %q / Product %q/ Resource Group %q) does not exist", serviceName, productID, resourceGroup) - } - - return fmt.Errorf("Bad: Get on apiManagement.ProductPoliciesClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMApiManagementProductPolicyDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.ProductPoliciesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_api_management_product_policy" { - continue - } - - productID := rs.Primary.Attributes["product_id"] - serviceName := rs.Primary.Attributes["api_management_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - resp, err := conn.Get(ctx, resourceGroup, serviceName, productID, apimanagement.PolicyExportFormatXML) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - - return nil - } - - return nil -} - -func 
testAccAzureRMApiManagementProductPolicy_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - sku_name = "Developer_1" -} - -resource "azurerm_api_management_product" "test" { - product_id = "test-product" - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - display_name = "Test Product" - subscription_required = false - published = false -} - -resource "azurerm_api_management_product_policy" "test" { - product_id = azurerm_api_management_product.test.product_id - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - xml_link = "https://gist.githubusercontent.com/riordanp/ca22f8113afae0eb38cc12d718fd048d/raw/d6ac89a2f35a6881a7729f8cb4883179dc88eea1/example.xml" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMApiManagementProductPolicy_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMApiManagementProductPolicy_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_product_policy" "import" { - product_id = azurerm_api_management_product_policy.test.product_id - api_management_name = azurerm_api_management_product_policy.test.api_management_name - resource_group_name = azurerm_api_management_product_policy.test.resource_group_name - xml_link = azurerm_api_management_product_policy.test.xml_link -} -`, template) -} - -func testAccAzureRMApiManagementProductPolicy_updated(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - sku_name = "Developer_1" -} - -resource "azurerm_api_management_product" "test" { - product_id = "test-product" - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - display_name = "Test Product" - subscription_required = false - published = false -} - -resource "azurerm_api_management_product_policy" "test" { - product_id = azurerm_api_management_product.test.product_id - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - - xml_content = <<XML -<policies> - <inbound> - <find-and-replace from="xyz" to="abc" /> - </inbound> -</policies> -XML - -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_product_resource_test.go b/azurerm/internal/services/apimanagement/tests/api_management_product_resource_test.go deleted file mode 100644 index 491d848d51dc..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_product_resource_test.go +++ /dev/null @@ -1,419 +0,0 @@ -package tests - -import ( - "fmt" - "regexp" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMApiManagementProduct_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_product", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementProductDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementProduct_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementProductExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "approval_required", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "description", ""), - resource.TestCheckResourceAttr(data.ResourceName, "display_name", "Test Product"), - resource.TestCheckResourceAttr(data.ResourceName, "product_id", "test-product"), - resource.TestCheckResourceAttr(data.ResourceName, "published", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "subscription_required", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "terms", ""), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementProduct_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_product", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementProductDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementProduct_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementProductExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMApiManagementProduct_requiresImport), - }, - }) -} - -func testCheckAzureRMApiManagementProductDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.ProductsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_api_management_product" { - continue - } - - productId := rs.Primary.Attributes["product_id"] - serviceName := rs.Primary.Attributes["api_management_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - resp, err := conn.Get(ctx, resourceGroup, serviceName, productId) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - - return nil - } - - return nil -} - -func TestAccAzureRMApiManagementProduct_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_product", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementProductDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementProduct_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementProductExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "approval_required", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "description", ""), - resource.TestCheckResourceAttr(data.ResourceName, 
"display_name", "Test Product"), - resource.TestCheckResourceAttr(data.ResourceName, "product_id", "test-product"), - resource.TestCheckResourceAttr(data.ResourceName, "published", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "subscription_required", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "terms", ""), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMApiManagementProduct_updated(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementProductExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "approval_required", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "description", ""), - resource.TestCheckResourceAttr(data.ResourceName, "display_name", "Test Updated Product"), - resource.TestCheckResourceAttr(data.ResourceName, "product_id", "test-product"), - resource.TestCheckResourceAttr(data.ResourceName, "published", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "subscription_required", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "terms", ""), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMApiManagementProduct_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementProductExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "description", ""), - resource.TestCheckResourceAttr(data.ResourceName, "display_name", "Test Product"), - resource.TestCheckResourceAttr(data.ResourceName, "product_id", "test-product"), - resource.TestCheckResourceAttr(data.ResourceName, "published", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "subscription_required", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "terms", ""), - ), - }, - }, - }) -} - -func TestAccAzureRMApiManagementProduct_subscriptionsLimit(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_product", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementProductDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementProduct_subscriptionLimits(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementProductExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "approval_required", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "subscription_required", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "subscriptions_limit", "2"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementProduct_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_product", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementProductDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementProduct_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementProductExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "approval_required", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "description", "This is an example description"), - resource.TestCheckResourceAttr(data.ResourceName, "display_name", "Test Product"), - resource.TestCheckResourceAttr(data.ResourceName, "product_id", "test-product"), - 
resource.TestCheckResourceAttr(data.ResourceName, "published", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "subscriptions_limit", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "subscription_required", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "terms", "These are some example terms and conditions"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementProduct_approvalRequiredError(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_product", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementProductDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementProduct_approvalRequiredError(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementProductExists(data.ResourceName)), - ExpectError: regexp.MustCompile("`subscription_required` must be true and `subscriptions_limit` must be greater than 0 to use `approval_required`"), - }, - }, - }) -} - -func testCheckAzureRMApiManagementProductExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.ProductsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - productId := rs.Primary.Attributes["product_id"] - serviceName := rs.Primary.Attributes["api_management_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, serviceName, productId) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Product %q (API Management Service %q / Resource Group %q) does not exist", productId, serviceName, resourceGroup) - } - - return fmt.Errorf("Bad: Get on apiManagement.ProductsClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMApiManagementProduct_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Developer_1" -} - -resource "azurerm_api_management_product" "test" { - product_id = "test-product" - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - display_name = "Test Product" - subscription_required = false - published = false -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMApiManagementProduct_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMApiManagementProduct_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_product" "import" { - product_id = azurerm_api_management_product.test.product_id - api_management_name = azurerm_api_management_product.test.api_management_name - resource_group_name = azurerm_api_management_product.test.resource_group_name - display_name = 
azurerm_api_management_product.test.display_name - subscription_required = azurerm_api_management_product.test.subscription_required - approval_required = azurerm_api_management_product.test.approval_required - published = azurerm_api_management_product.test.published -} -`, template) -} - -func testAccAzureRMApiManagementProduct_updated(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Developer_1" -} - -resource "azurerm_api_management_product" "test" { - product_id = "test-product" - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - display_name = "Test Updated Product" - subscription_required = true - approval_required = true - subscriptions_limit = 1 - published = true -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMApiManagementProduct_subscriptionLimits(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Developer_1" -} - -resource "azurerm_api_management_product" "test" { - product_id = "test-product" - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - display_name = "Test Product" - subscription_required = true - approval_required = true - subscriptions_limit = 2 - published = false -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMApiManagementProduct_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Developer_1" -} - -resource "azurerm_api_management_product" "test" { - product_id = "test-product" - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - display_name = "Test Product" - subscription_required = true - approval_required = true - published = true - subscriptions_limit = 2 - description = "This is an example description" - terms = "These are some example terms and conditions" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMApiManagementProduct_approvalRequiredError(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - 
resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - sku_name = "Developer_1" -} - -resource "azurerm_api_management_product" "test" { - product_id = "test-product" - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - display_name = "Test Product" - approval_required = true - subscription_required = false - published = true - description = "This is an example description" - terms = "These are some example terms and conditions" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_property_resource_test.go b/azurerm/internal/services/apimanagement/tests/api_management_property_resource_test.go deleted file mode 100644 index 36becd0fd4d9..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_property_resource_test.go +++ /dev/null @@ -1,190 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMApiManagementProperty_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_property", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAPIManagementPropertyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementProperty_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementPropertyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "display_name", fmt.Sprintf("TestProperty%d", data.RandomInteger)), - resource.TestCheckResourceAttr(data.ResourceName, "value", "Test Value"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.0", "tag1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.1", "tag2"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementProperty_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_property", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAPIManagementPropertyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementProperty_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementPropertyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "display_name", fmt.Sprintf("TestProperty%d", data.RandomInteger)), - resource.TestCheckResourceAttr(data.ResourceName, "value", "Test Value"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.0", "tag1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.1", "tag2"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMApiManagementProperty_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementPropertyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "display_name", fmt.Sprintf("TestProperty2%d", 
data.RandomInteger)), - resource.TestCheckResourceAttr(data.ResourceName, "secret", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.0", "tag3"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.1", "tag4"), - ), - }, - data.ImportStep("value"), - }, - }) -} - -func testCheckAzureRMAPIManagementPropertyDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.NamedValueClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_api_management_property" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - resp, err := client.Get(ctx, resourceGroup, serviceName, name) - - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return err - } - } - - return nil - } - return nil -} - -func testCheckAzureRMAPIManagementPropertyExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.NamedValueClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - resp, err := client.Get(ctx, resourceGroup, serviceName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: API Management Property %q (Resource Group %q / API Management Service %q) does not exist", name, resourceGroup, serviceName) - } - return fmt.Errorf("Bad: Get on apiManagement.NamedValueClient: %+v", err) - } - - return nil - } -} - -/* - - */ - -func testAccAzureRMApiManagementProperty_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Developer_1" -} - -resource "azurerm_api_management_property" "test" { - name = "acctestAMProperty-%d" - resource_group_name = azurerm_api_management.test.resource_group_name - api_management_name = azurerm_api_management.test.name - display_name = "TestProperty%d" - value = "Test Value" - tags = ["tag1", "tag2"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMApiManagementProperty_update(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Developer_1" -} - -resource "azurerm_api_management_property" "test" { - name = "acctestAMProperty-%d" - resource_group_name = 
azurerm_api_management.test.resource_group_name - api_management_name = azurerm_api_management.test.name - display_name = "TestProperty2%d" - value = "Test Value2" - secret = true - tags = ["tag3", "tag4"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_resource_test.go b/azurerm/internal/services/apimanagement/tests/api_management_resource_test.go deleted file mode 100644 index 976e06b4097a..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_resource_test.go +++ /dev/null @@ -1,1326 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMApiManagement_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagement_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagement_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagement_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMApiManagement_requiresImport), - }, - }) -} - -func TestAccAzureRMApiManagement_customProps(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagement_customProps(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "protocols.0.enable_http2", "false"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagement_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagement_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - { - ResourceName: data.ResourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{ - 
"certificate", // not returned from API, sensitive - "hostname_configuration.0.portal.0.certificate", // not returned from API, sensitive - "hostname_configuration.0.portal.0.certificate_password", // not returned from API, sensitive - "hostname_configuration.0.developer_portal.0.certificate", // not returned from API, sensitive - "hostname_configuration.0.developer_portal.0.certificate_password", // not returned from API, sensitive - "hostname_configuration.0.proxy.0.certificate", // not returned from API, sensitive - "hostname_configuration.0.proxy.0.certificate_password", // not returned from API, sensitive - "hostname_configuration.0.proxy.1.certificate", // not returned from API, sensitive - "hostname_configuration.0.proxy.1.certificate_password", // not returned from API, sensitive - }, - }, - }, - }) -} - -func TestAccAzureRMApiManagement_signInSignUpSettings(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagement_signInSignUpSettings(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagement_policy(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagement_policyXmlContent(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMApiManagement_policyXmlLink(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - { - ResourceName: data.ResourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{ - "policy.0.xml_link", - }, - }, - { - Config: testAccAzureRMApiManagement_policyRemoved(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagement_virtualNetworkInternal(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagement_virtualNetworkInternal(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "virtual_network_type", "Internal"), - resource.TestCheckResourceAttrSet(data.ResourceName, "private_ip_addresses.#"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagement_virtualNetworkInternalAdditionalLocation(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - 
CheckDestroy: testCheckAzureRMApiManagementDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagement_virtualNetworkInternalAdditionalLocation(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "virtual_network_type", "Internal"), - resource.TestCheckResourceAttrSet(data.ResourceName, "private_ip_addresses.#"), - resource.TestCheckResourceAttrSet(data.ResourceName, "additional_location.0.private_ip_addresses.#"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagement_identitySystemAssignedUpdateHostnameConfigurationsVersionedKeyVaultId(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagement_identitySystemAssigned(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMApiManagement_identitySystemAssignedUpdateHostnameConfigurationsVersionedKeyVaultId(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagement_identitySystemAssignedUpdateHostnameConfigurationsVersionlessKeyVaultId(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagement_identitySystemAssigned(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMApiManagement_identitySystemAssignedUpdateHostnameConfigurationsVersionlessKeyVaultId(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMApiManagementDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.ServiceClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_api_management" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - resp, err := conn.Get(ctx, resourceGroup, name) - - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - - return nil - } - - return nil -} - -func testCheckAzureRMApiManagementExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.ServiceClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - apiMangementName := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := 
rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Api Management: %s", apiMangementName) - } - - resp, err := conn.Get(ctx, resourceGroup, apiMangementName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Api Management %q (resource group: %q) does not exist", apiMangementName, resourceGroup) - } - - return fmt.Errorf("Bad: Get on apiManagementClient: %+v", err) - } - - return nil - } -} - -func TestAccAzureRMApiManagement_identityUserAssigned(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagement_identityUserAssigned(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagement_identityNoneUpdateUserAssigned(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagement_identityNone(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMApiManagement_identityUserAssigned(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagement_identityUserAssignedUpdateNone(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagement_identityUserAssigned(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMApiManagement_identityNone(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagement_identitySystemAssigned(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagement_identitySystemAssigned(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagement_identitySystemAssignedUpdateNone(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - 
CheckDestroy: testCheckAzureRMApiManagementDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagement_identitySystemAssigned(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMApiManagement_identityNone(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagement_identityNoneUpdateSystemAssigned(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagement_identityNone(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMApiManagement_identitySystemAssigned(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagement_identitySystemAssignedUserAssigned(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagement_identitySystemAssignedUserAssigned(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagement_identitySystemAssignedUserAssignedUpdateNone(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagement_identitySystemAssignedUserAssigned(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMApiManagement_identityNone(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagement_identityNoneUpdateSystemAssignedUserAssigned(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagement_identityNone(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMApiManagement_identitySystemAssignedUserAssigned(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func 
TestAccAzureRMApiManagement_identitySystemAssignedUserAssignedUpdateSystemAssigned(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagement_identitySystemAssignedUserAssigned(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMApiManagement_identitySystemAssigned(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagement_identitySystemAssignedUserAssignedUpdateUserAssigned(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagement_identitySystemAssignedUserAssigned(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMApiManagement_identityUserAssigned(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testAccAzureRMApiManagement_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Developer_1" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMApiManagement_policyXmlContent(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Developer_1" - - policy { - xml_content = <<XML -<policies> - <inbound> - </inbound> -</policies> -XML - - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMApiManagement_policyXmlLink(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Developer_1" - - policy { - xml_link = "https://gist.githubusercontent.com/tombuildsstuff/4f58581599d2c9f64b236f505a361a67/raw/0d29dcb0167af1e5afe4bd52a6d7f69ba1e05e1f/example.xml" - } -} -`, 
data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMApiManagement_policyRemoved(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Developer_1" - - policy = [] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMApiManagement_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMApiManagement_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management" "import" { - name = azurerm_api_management.test.name - location = azurerm_api_management.test.location - resource_group_name = azurerm_api_management.test.resource_group_name - publisher_name = azurerm_api_management.test.publisher_name - publisher_email = azurerm_api_management.test.publisher_email - - sku_name = "Developer_1" -} -`, template) -} - -func testAccAzureRMApiManagement_customProps(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Developer_1" - - security { - enable_frontend_tls10 = true - enable_triple_des_ciphers = true - } -} -`, data.RandomInteger, data.Locations.Secondary, data.RandomInteger) -} - -func testAccAzureRMApiManagement_signInSignUpSettings(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Developer_1" - - sign_in { - enabled = true - } - - sign_up { - enabled = true - - terms_of_service { - enabled = true - consent_required = false - text = "Lorem Ipsum Dolor Morty" - } - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMApiManagement_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test1" { - name = "acctestRG-api1-%d" - location = "%s" -} - -resource "azurerm_resource_group" "test2" { - name = "acctestRG-api2-%d" - location = "%s" -} - -resource "azurerm_resource_group" "test3" { - name = "acctestRG-api3-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - publisher_name = "pub1" - publisher_email = "pub1@email.com" - notification_sender_email = "notification@email.com" - - additional_location { - location = azurerm_resource_group.test2.location - } - - additional_location { - location = azurerm_resource_group.test3.location - } - - certificate { - encoded_certificate = filebase64("testdata/api_management_api_test.pfx") - certificate_password = 
"terraform" - store_name = "CertificateAuthority" - } - - certificate { - encoded_certificate = filebase64("testdata/api_management_api_test.pfx") - certificate_password = "terraform" - store_name = "Root" - } - - protocols { - enable_http2 = true - } - - security { - enable_backend_tls11 = true - enable_backend_ssl30 = true - enable_backend_tls10 = true - enable_frontend_ssl30 = true - enable_frontend_tls10 = true - enable_frontend_tls11 = true - enable_triple_des_ciphers = true - } - - hostname_configuration { - proxy { - host_name = "api.terraform.io" - certificate = filebase64("testdata/api_management_api_test.pfx") - certificate_password = "terraform" - default_ssl_binding = true - negotiate_client_certificate = false - } - - proxy { - host_name = "api2.terraform.io" - certificate = filebase64("testdata/api_management_api2_test.pfx") - certificate_password = "terraform" - negotiate_client_certificate = true - } - - portal { - host_name = "portal.terraform.io" - certificate = filebase64("testdata/api_management_portal_test.pfx") - certificate_password = "terraform" - } - - #developer_portal { - # host_name = "developer-portal.terraform.io" - # certificate = filebase64("testdata/api_management_developer_portal_test.pfx") - # certificate_password = "terraform" - #} - } - - sku_name = "Premium_1" - - tags = { - "Acceptance" = "Test" - } - - location = azurerm_resource_group.test1.location - resource_group_name = azurerm_resource_group.test1.name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.Locations.Secondary, data.RandomInteger, data.Locations.Ternary, data.RandomInteger) -} - -func testAccAzureRMApiManagement_virtualNetworkInternal(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctestVNET-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - address_space = ["10.0.0.0/16"] -} - -resource "azurerm_subnet" "test" { - name = "acctestSNET-%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.1.0/24" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Developer_1" - - virtual_network_type = "Internal" - virtual_network_configuration { - subnet_id = azurerm_subnet.test.id - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMApiManagement_virtualNetworkInternalAdditionalLocation(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test1" { - name = "acctestRG1-%d" - location = "%s" -} - -resource "azurerm_resource_group" "test2" { - name = "acctestRG2-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test1" { - name = "acctestVNET1-%d" - location = azurerm_resource_group.test1.location - resource_group_name = azurerm_resource_group.test1.name - address_space = ["10.0.0.0/16"] -} - -resource "azurerm_subnet" "test1" { - name = "acctestSNET1-%d" - resource_group_name = azurerm_resource_group.test1.name - virtual_network_name = 
azurerm_virtual_network.test1.name - address_prefix = "10.0.1.0/24" -} - -resource "azurerm_virtual_network" "test2" { - name = "acctestVNET2-%d" - location = azurerm_resource_group.test2.location - resource_group_name = azurerm_resource_group.test2.name - address_space = ["10.1.0.0/16"] -} - -resource "azurerm_subnet" "test2" { - name = "acctestSNET2-%d" - resource_group_name = azurerm_resource_group.test2.name - virtual_network_name = azurerm_virtual_network.test2.name - address_prefix = "10.1.1.0/24" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test1.location - resource_group_name = azurerm_resource_group.test1.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Premium_1" - - additional_location { - location = azurerm_resource_group.test2.location - virtual_network_configuration { - subnet_id = azurerm_subnet.test2.id - } - } - - virtual_network_type = "Internal" - virtual_network_configuration { - subnet_id = azurerm_subnet.test1.id - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.Locations.Secondary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMApiManagement_identityUserAssigned(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_user_assigned_identity" "test" { - name = "acctestUAI-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_api_management" "test" { - depends_on = [azurerm_user_assigned_identity.test] - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Developer_1" - - identity { - type = "UserAssigned" - identity_ids = [ - azurerm_user_assigned_identity.test.id, - ] - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMApiManagement_identitySystemAssigned(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - sku_name = "Developer_1" - identity { - type = "SystemAssigned" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMApiManagement_identitySystemAssignedUserAssigned(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_user_assigned_identity" "test" { - name = "acctestUAI-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Developer_1" 
- - identity { - type = "SystemAssigned, UserAssigned" - identity_ids = [ - azurerm_user_assigned_identity.test.id, - ] - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMApiManagement_identityNone(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Developer_1" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMApiManagement_identitySystemAssignedUpdateHostnameConfigurationsVersionlessKeyVaultId(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} -resource "azurerm_resource_group" "test" { - name = "acctestRG-%[1]d" - location = "%[2]s" -} -data "azurerm_client_config" "current" {} -resource "azurerm_key_vault" "test" { - name = "acctestKV-%[4]s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - sku_name = "standard" -} -resource "azurerm_key_vault_access_policy" "test" { - key_vault_id = azurerm_key_vault.test.id - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - certificate_permissions = [ - "Create", - "Delete", - "Deleteissuers", - "Get", - "Getissuers", - "Import", - "List", - "Listissuers", - "Managecontacts", - "Manageissuers", - "Setissuers", - "Update", - ] - secret_permissions = [ - "Delete", - "Get", - "List", - "Purge", - ] - depends_on = [azurerm_key_vault.test] -} -resource "azurerm_key_vault_access_policy" "test2" { - key_vault_id = azurerm_key_vault.test.id - tenant_id = azurerm_api_management.test.identity[0].tenant_id - object_id = azurerm_api_management.test.identity[0].principal_id - secret_permissions = [ - "Get", - "List", - ] - depends_on = [azurerm_key_vault.test] -} -resource "azurerm_key_vault_certificate" "test" { - depends_on = [azurerm_key_vault_access_policy.test] - name = "acctestKVCert-%[3]d" - key_vault_id = azurerm_key_vault.test.id - certificate_policy { - issuer_parameters { - name = "Self" - } - key_properties { - exportable = true - key_size = 2048 - key_type = "RSA" - reuse_key = true - } - secret_properties { - content_type = "application/x-pkcs12" - } - x509_certificate_properties { - # Server Authentication = 1.3.6.1.5.5.7.3.1 - # Client Authentication = 1.3.6.1.5.5.7.3.2 - extended_key_usage = ["1.3.6.1.5.5.7.3.1"] - key_usage = [ - "cRLSign", - "dataEncipherment", - "digitalSignature", - "keyAgreement", - "keyCertSign", - "keyEncipherment", - ] - subject_alternative_names { - dns_names = ["api.terraform.io"] - } - subject = "CN=api.terraform.io" - validity_in_months = 1 - } - } -} -resource "azurerm_api_management" "test" { - name = "acctestAM-%[3]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - sku_name = "Developer_1" - identity { - type = "SystemAssigned" - } - hostname_configuration { - proxy { - host_name = "api.terraform.io" - key_vault_id = 
"${azurerm_key_vault.test.vault_uri}secrets/${azurerm_key_vault_certificate.test.name}" - default_ssl_binding = true - negotiate_client_certificate = false - } - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomString) -} - -func testAccAzureRMApiManagement_identitySystemAssignedUpdateHostnameConfigurationsVersionedKeyVaultId(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} -resource "azurerm_resource_group" "test" { - name = "acctestRG-%[1]d" - location = "%[2]s" -} -data "azurerm_client_config" "current" {} -resource "azurerm_key_vault" "test" { - name = "acctestKV-%[4]s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - sku_name = "standard" -} -resource "azurerm_key_vault_access_policy" "test" { - key_vault_id = azurerm_key_vault.test.id - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - certificate_permissions = [ - "Create", - "Delete", - "Deleteissuers", - "Get", - "Getissuers", - "Import", - "List", - "Listissuers", - "Managecontacts", - "Manageissuers", - "Setissuers", - "Update", - ] - secret_permissions = [ - "Delete", - "Get", - "List", - "Purge", - ] - depends_on = [azurerm_key_vault.test] -} -resource "azurerm_key_vault_access_policy" "test2" { - key_vault_id = azurerm_key_vault.test.id - tenant_id = azurerm_api_management.test.identity[0].tenant_id - object_id = azurerm_api_management.test.identity[0].principal_id - secret_permissions = [ - "Get", - "List", - ] - depends_on = [azurerm_key_vault.test] -} -resource "azurerm_key_vault_certificate" "test" { - depends_on = [azurerm_key_vault_access_policy.test, azurerm_key_vault.test] - name = "acctestKVCert-%[3]d" - key_vault_id = azurerm_key_vault.test.id - certificate_policy { - issuer_parameters { - name = "Self" - } - key_properties { - exportable = true - key_size = 2048 - key_type = "RSA" - reuse_key = true - } - secret_properties { - content_type = "application/x-pkcs12" - } - x509_certificate_properties { - # Server Authentication = 1.3.6.1.5.5.7.3.1 - # Client Authentication = 1.3.6.1.5.5.7.3.2 - extended_key_usage = ["1.3.6.1.5.5.7.3.1"] - key_usage = [ - "cRLSign", - "dataEncipherment", - "digitalSignature", - "keyAgreement", - "keyCertSign", - "keyEncipherment", - ] - subject_alternative_names { - dns_names = ["api.terraform.io"] - } - subject = "CN=api.terraform.io" - validity_in_months = 1 - } - } -} -resource "azurerm_api_management" "test" { - name = "acctestAM-%[3]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - sku_name = "Developer_1" - identity { - type = "SystemAssigned" - } - hostname_configuration { - proxy { - host_name = "api.terraform.io" - key_vault_id = azurerm_key_vault_certificate.test.secret_id - default_ssl_binding = true - negotiate_client_certificate = false - } - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomString) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_subscription_resource_test.go b/azurerm/internal/services/apimanagement/tests/api_management_subscription_resource_test.go deleted file mode 100644 index 818fb9f6ea7e..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_subscription_resource_test.go +++ 
/dev/null @@ -1,289 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMApiManagementSubscription_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_subscription", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAPIManagementSubscriptionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementSubscription_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementSubscriptionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "allow_tracing", "true"), - resource.TestCheckResourceAttrSet(data.ResourceName, "subscription_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_key"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementSubscription_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_subscription", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAPIManagementSubscriptionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementSubscription_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementSubscriptionExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "subscription_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_key"), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMApiManagementSubscription_requiresImport), - }, - }) -} - -func TestAccAzureRMApiManagementSubscription_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_subscription", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAPIManagementSubscriptionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementSubscription_update(data, "submitted", "true"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementSubscriptionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "state", "submitted"), - resource.TestCheckResourceAttr(data.ResourceName, "allow_tracing", "true"), - resource.TestCheckResourceAttrSet(data.ResourceName, "subscription_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_key"), - ), - }, - { - Config: testAccAzureRMApiManagementSubscription_update(data, "active", "true"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementSubscriptionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "state", "active"), - ), - }, - { - Config: 
testAccAzureRMApiManagementSubscription_update(data, "suspended", "true"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementSubscriptionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "state", "suspended"), - ), - }, - { - Config: testAccAzureRMApiManagementSubscription_update(data, "cancelled", "true"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementSubscriptionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "state", "cancelled"), - ), - }, - { - Config: testAccAzureRMApiManagementSubscription_update(data, "active", "false"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementSubscriptionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "allow_tracing", "false"), - ), - }, - }, - }) -} - -func TestAccAzureRMApiManagementSubscription_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_subscription", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAPIManagementSubscriptionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementSubscription_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMAPIManagementSubscriptionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "state", "active"), - resource.TestCheckResourceAttr(data.ResourceName, "allow_tracing", "false"), - resource.TestCheckResourceAttrSet(data.ResourceName, "subscription_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_key"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMAPIManagementSubscriptionDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.SubscriptionsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_api_management_subscription" { - continue - } - - subscriptionId := rs.Primary.Attributes["subscription_id"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - resp, err := client.Get(ctx, resourceGroup, serviceName, subscriptionId) - - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return err - } - } - } - return nil -} - -func testCheckAzureRMAPIManagementSubscriptionExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.SubscriptionsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - subscriptionId := rs.Primary.Attributes["subscription_id"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["api_management_name"] - - resp, err := client.Get(ctx, resourceGroup, serviceName, subscriptionId) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Subscription %q (API Management Service %q / Resource Group %q) does not exist", subscriptionId, serviceName, resourceGroup) - } - return fmt.Errorf("Bad: Get on 
apiManagement.SubscriptionsClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMApiManagementSubscription_basic(data acceptance.TestData) string { - template := testAccAzureRMApiManagementSubscription_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_subscription" "test" { - resource_group_name = azurerm_api_management.test.resource_group_name - api_management_name = azurerm_api_management.test.name - user_id = azurerm_api_management_user.test.id - product_id = azurerm_api_management_product.test.id - display_name = "Butter Parser API Enterprise Edition" -} -`, template) -} - -func testAccAzureRMApiManagementSubscription_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMApiManagementSubscription_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_subscription" "import" { - subscription_id = azurerm_api_management_subscription.test.subscription_id - resource_group_name = azurerm_api_management_subscription.test.resource_group_name - api_management_name = azurerm_api_management_subscription.test.api_management_name - user_id = azurerm_api_management_subscription.test.user_id - product_id = azurerm_api_management_subscription.test.product_id - display_name = azurerm_api_management_subscription.test.display_name -} -`, template) -} - -func testAccAzureRMApiManagementSubscription_update(data acceptance.TestData, state string, allow_tracing string) string { - template := testAccAzureRMApiManagementSubscription_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_subscription" "test" { - resource_group_name = azurerm_api_management.test.resource_group_name - api_management_name = azurerm_api_management.test.name - user_id = azurerm_api_management_user.test.id - product_id = azurerm_api_management_product.test.id - display_name = "Butter Parser API Enterprise Edition" - state = "%s" - allow_tracing = "%s" -} -`, template, state, allow_tracing) -} - -func testAccAzureRMApiManagementSubscription_complete(data acceptance.TestData) string { - template := testAccAzureRMApiManagementSubscription_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_subscription" "test" { - resource_group_name = azurerm_api_management.test.resource_group_name - api_management_name = azurerm_api_management.test.name - user_id = azurerm_api_management_user.test.id - product_id = azurerm_api_management_product.test.id - display_name = "Butter Parser API Enterprise Edition" - state = "active" - allow_tracing = "false" -} -`, template) -} - -func testAccAzureRMApiManagementSubscription_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Developer_1" -} - -resource "azurerm_api_management_product" "test" { - product_id = "test-product" - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - display_name = "Test Product" - subscription_required = true - approval_required = false - published = true -} - -resource "azurerm_api_management_user" "test" { - user_id = "acctestuser%d" - api_management_name = azurerm_api_management.test.name - 
resource_group_name = azurerm_resource_group.test.name - first_name = "Acceptance" - last_name = "Test" - email = "azure-acctest%d@example.com" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_user_data_source_test.go b/azurerm/internal/services/apimanagement/tests/api_management_user_data_source_test.go deleted file mode 100644 index 3432d3b70f63..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_user_data_source_test.go +++ /dev/null @@ -1,70 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMApiManagementUser_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_api_management_user", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceApiManagementUser_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "user_id", "test-user"), - resource.TestCheckResourceAttr(data.ResourceName, "first_name", "Acceptance"), - resource.TestCheckResourceAttr(data.ResourceName, "last_name", "Test"), - resource.TestCheckResourceAttr(data.ResourceName, "email", fmt.Sprintf("azure-acctest%d@example.com", data.RandomInteger)), - resource.TestCheckResourceAttr(data.ResourceName, "state", "active"), - resource.TestCheckResourceAttr(data.ResourceName, "note", "Used for testing in dimension C-137."), - ), - }, - }, - }) -} - -func testAccDataSourceApiManagementUser_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "amtestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - publisher_name = "pub1" - publisher_email = "pub1@email.com" - sku_name = "Developer_1" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_api_management_user" "test" { - user_id = "test-user" - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - first_name = "Acceptance" - last_name = "Test" - email = "azure-acctest%d@example.com" - state = "active" - note = "Used for testing in dimension C-137." 
-} - -data "azurerm_api_management_user" "test" { - user_id = azurerm_api_management_user.test.user_id - api_management_name = azurerm_api_management_user.test.api_management_name - resource_group_name = azurerm_api_management_user.test.resource_group_name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/apimanagement/tests/api_management_user_resource_test.go b/azurerm/internal/services/apimanagement/tests/api_management_user_resource_test.go deleted file mode 100644 index bcc4d68a38fe..000000000000 --- a/azurerm/internal/services/apimanagement/tests/api_management_user_resource_test.go +++ /dev/null @@ -1,419 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMApiManagementUser_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_user", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementUserDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementUser_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementUserExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "first_name", "Acceptance"), - resource.TestCheckResourceAttr(data.ResourceName, "last_name", "Test"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApiManagementUser_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_user", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementUserDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementUser_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementUserExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMApiManagementUser_requiresImport), - }, - }) -} - -func TestAccAzureRMApiManagementUser_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_user", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementUserDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementUser_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementUserExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "first_name", "Acceptance"), - resource.TestCheckResourceAttr(data.ResourceName, "last_name", "Test"), - resource.TestCheckResourceAttr(data.ResourceName, "state", "active"), - ), - }, - { - Config: testAccAzureRMApiManagementUser_updatedBlocked(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementUserExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "first_name", "Acceptance Updated"), - 
resource.TestCheckResourceAttr(data.ResourceName, "last_name", "Test Updated"), - resource.TestCheckResourceAttr(data.ResourceName, "state", "blocked"), - ), - }, - { - Config: testAccAzureRMApiManagementUser_updatedActive(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementUserExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "first_name", "Acceptance"), - resource.TestCheckResourceAttr(data.ResourceName, "last_name", "Test"), - resource.TestCheckResourceAttr(data.ResourceName, "state", "active"), - ), - }, - }, - }) -} - -func TestAccAzureRMApiManagementUser_password(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_user", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementUserDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementUser_password(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementUserExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "first_name", "Acceptance"), - resource.TestCheckResourceAttr(data.ResourceName, "last_name", "Test"), - ), - }, - { - ResourceName: data.ResourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"password"}, - }, - }, - }) -} - -func TestAccAzureRMApiManagementUser_invite(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_user", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementUserDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementUser_invited(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementUserExists(data.ResourceName), - ), - }, - { - ResourceName: data.ResourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{ - // not returned - "confirmation", - }, - }, - }, - }) -} - -func TestAccAzureRMApiManagementUser_signup(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_user", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementUserDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementUser_signUp(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementUserExists(data.ResourceName), - ), - }, - { - ResourceName: data.ResourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{ - // not returned - "confirmation", - }, - }, - }, - }) -} - -func TestAccAzureRMApiManagementUser_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_api_management_user", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApiManagementUserDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApiManagementUser_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApiManagementUserExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "first_name", "Acceptance"), - resource.TestCheckResourceAttr(data.ResourceName, 
"last_name", "Test"), - resource.TestCheckResourceAttr(data.ResourceName, "note", "Used for testing in dimension C-137."), - ), - }, - { - ResourceName: data.ResourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{ - // not returned - "confirmation", - }, - }, - }, - }) -} - -func testCheckAzureRMApiManagementUserDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.UsersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_api_management_user" { - continue - } - - userId := rs.Primary.Attributes["user_id"] - serviceName := rs.Primary.Attributes["api_management_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - resp, err := conn.Get(ctx, resourceGroup, serviceName, userId) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - - return nil - } - - return nil -} - -func testCheckAzureRMApiManagementUserExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).ApiManagement.UsersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - userId := rs.Primary.Attributes["user_id"] - serviceName := rs.Primary.Attributes["api_management_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, serviceName, userId) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: User %q (API Management Service %q / Resource Group %q) does not exist", userId, serviceName, resourceGroup) - } - - return fmt.Errorf("Bad: Get on apiManagement.UsersClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMApiManagementUser_basic(data acceptance.TestData) string { - template := testAccAzureRMApiManagementUser_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_user" "test" { - user_id = "acctestuser%d" - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - first_name = "Acceptance" - last_name = "Test" - email = "azure-acctest%d@example.com" -} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMApiManagementUser_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMApiManagementUser_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_user" "import" { - user_id = azurerm_api_management_user.test.user_id - api_management_name = azurerm_api_management_user.test.api_management_name - resource_group_name = azurerm_api_management_user.test.resource_group_name - first_name = azurerm_api_management_user.test.first_name - last_name = azurerm_api_management_user.test.last_name - email = azurerm_api_management_user.test.email - state = azurerm_api_management_user.test.state -} -`, template) -} - -func testAccAzureRMApiManagementUser_password(data acceptance.TestData) string { - template := testAccAzureRMApiManagementUser_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_user" "test" { - user_id = "acctestuser%d" - api_management_name = azurerm_api_management.test.name - 
resource_group_name = azurerm_resource_group.test.name - first_name = "Acceptance" - last_name = "Test" - email = "azure-acctest%d@example.com" - state = "active" - password = "3991bb15-282d-4b9b-9de3-3d5fc89eb530" -} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMApiManagementUser_updatedActive(data acceptance.TestData) string { - template := testAccAzureRMApiManagementUser_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_user" "test" { - user_id = "acctestuser%d" - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - first_name = "Acceptance" - last_name = "Test" - email = "azure-acctest%d@example.com" - state = "active" -} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMApiManagementUser_updatedBlocked(data acceptance.TestData) string { - template := testAccAzureRMApiManagementUser_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_user" "test" { - user_id = "acctestuser%d" - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - first_name = "Acceptance Updated" - last_name = "Test Updated" - email = "azure-acctest%d@example.com" - state = "blocked" -} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMApiManagementUser_invited(data acceptance.TestData) string { - template := testAccAzureRMApiManagementUser_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_user" "test" { - user_id = "acctestuser%d" - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - first_name = "Acceptance" - last_name = "Test User" - email = "azure-acctest%d@example.com" - state = "blocked" - confirmation = "invite" -} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMApiManagementUser_signUp(data acceptance.TestData) string { - template := testAccAzureRMApiManagementUser_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_user" "test" { - user_id = "acctestuser%d" - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - first_name = "Acceptance" - last_name = "Test User" - email = "azure-acctest%d@example.com" - state = "blocked" - confirmation = "signup" -} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMApiManagementUser_complete(data acceptance.TestData) string { - template := testAccAzureRMApiManagementUser_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_api_management_user" "test" { - user_id = "acctestuser%d" - api_management_name = azurerm_api_management.test.name - resource_group_name = azurerm_resource_group.test.name - first_name = "Acceptance" - last_name = "Test" - email = "azure-acctest%d@example.com" - state = "active" - confirmation = "signup" - note = "Used for testing in dimension C-137." 
-} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMApiManagementUser_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_api_management" "test" { - name = "acctestAM-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - publisher_name = "pub1" - publisher_email = "pub1@email.com" - - sku_name = "Developer_1" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/apimanagement/tests/testdata/api_management_developer_portal_test.pfx b/azurerm/internal/services/apimanagement/tests/testdata/api_management_developer_portal_test.pfx deleted file mode 100644 index cede6279330b..000000000000 Binary files a/azurerm/internal/services/apimanagement/tests/testdata/api_management_developer_portal_test.pfx and /dev/null differ diff --git a/azurerm/internal/services/apimanagement/validate/api_diagnostic_id.go b/azurerm/internal/services/apimanagement/validate/api_diagnostic_id.go new file mode 100644 index 000000000000..e310e0d53791 --- /dev/null +++ b/azurerm/internal/services/apimanagement/validate/api_diagnostic_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/apimanagement/parse" +) + +func ApiDiagnosticID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ApiDiagnosticID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/apimanagement/validate/api_diagnostic_id_test.go b/azurerm/internal/services/apimanagement/validate/api_diagnostic_id_test.go new file mode 100644 index 000000000000..882fa530daa8 --- /dev/null +++ b/azurerm/internal/services/apimanagement/validate/api_diagnostic_id_test.go @@ -0,0 +1,100 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestApiDiagnosticID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/", + Valid: false, + }, + + { + // missing value for ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/", + Valid: false, + }, + + { + // missing ApiName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/", + Valid: false, + }, + + { + // missing value for ApiName + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/apis/", + Valid: false, + }, + + { + // missing DiagnosticName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/apis/api1/", + Valid: false, + }, + + { + // missing value for DiagnosticName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/apis/api1/diagnostics/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/apis/api1/diagnostics/diagnostic1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.APIMANAGEMENT/SERVICE/SERVICE1/APIS/API1/DIAGNOSTICS/DIAGNOSTIC1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ApiDiagnosticID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/apimanagement/validate/api_management.go b/azurerm/internal/services/apimanagement/validate/api_management.go new file mode 100644 index 000000000000..3b64b0268bd9 --- /dev/null +++ b/azurerm/internal/services/apimanagement/validate/api_management.go @@ -0,0 +1,15 @@ +package validate + +import ( + "regexp" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" +) + +func ApimSkuName() schema.SchemaValidateFunc { + return validation.StringMatch( + regexp.MustCompile(`^Consumption_0$|^Basic_(1|2)$|^Developer_1$|^Premium_([1-9]|10)$|^Standard_[1-4]$`), + `This is not a valid Api Management sku name.`, + ) +} diff --git a/azurerm/internal/services/apimanagement/validate/api_management_id.go b/azurerm/internal/services/apimanagement/validate/api_management_id.go new file mode 100644 index 000000000000..99d8b7c03f23 --- /dev/null +++ b/azurerm/internal/services/apimanagement/validate/api_management_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/apimanagement/parse" +) + +func ApiManagementID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ApiManagementID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/apimanagement/validate/api_management_id_test.go b/azurerm/internal/services/apimanagement/validate/api_management_id_test.go new file mode 100644 index 000000000000..9bd4414a6d9d --- /dev/null +++ b/azurerm/internal/services/apimanagement/validate/api_management_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestApiManagementID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + 
{ + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/", + Valid: false, + }, + + { + // missing value for ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.APIMANAGEMENT/SERVICE/SERVICE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ApiManagementID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/apimanagement/validate/api_management_test.go b/azurerm/internal/services/apimanagement/validate/api_management_test.go new file mode 100644 index 000000000000..6a49b591d1f8 --- /dev/null +++ b/azurerm/internal/services/apimanagement/validate/api_management_test.go @@ -0,0 +1,77 @@ +package validate + +import "testing" + +func TestApimSkuName(t *testing.T) { + tests := []struct { + name string + input string + valid bool + }{ + { + name: "Consumption_0", + input: "Consumption_0", + valid: true, + }, + { + name: "Consumption_1", + input: "Consumption_1", + valid: false, + }, + { + name: "Basic_3", + input: "Basic_3", + valid: false, + }, + { + name: "Basic_1", + input: "Basic_1", + valid: true, + }, + { + name: "Developer_1", + input: "Developer_1", + valid: true, + }, + { + name: "Premium_0", + input: "Premium_0", + valid: false, + }, + { + name: "Premium_11", + input: "Premium_11", + valid: false, + }, + { + name: "Premium_7", + input: "Premium_7", + valid: true, + }, + { + name: "Standard_7", + input: "Standard_7", + valid: false, + }, + { + name: "standard_2", + input: "standard_2", + valid: false, + }, + { + name: "PREMIUM_7", + input: "PREMIUM_7", + valid: false, + }, + } + var validationFunction = ApimSkuName() + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + _, err := validationFunction(tt.input, "") + valid := err == nil + if valid != tt.valid { + t.Errorf("Expected valid status %t but got %t for input %s", tt.valid, valid, tt.input) + } + }) + } +} diff --git a/azurerm/internal/services/apimanagement/validate/api_version_set_id.go b/azurerm/internal/services/apimanagement/validate/api_version_set_id.go new file mode 100644 index 000000000000..788464371f31 --- /dev/null +++ b/azurerm/internal/services/apimanagement/validate/api_version_set_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/apimanagement/parse" +) + +func ApiVersionSetID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + 
if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ApiVersionSetID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/apimanagement/validate/api_version_set_id_test.go b/azurerm/internal/services/apimanagement/validate/api_version_set_id_test.go new file mode 100644 index 000000000000..2805b2bc04cb --- /dev/null +++ b/azurerm/internal/services/apimanagement/validate/api_version_set_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestApiVersionSetID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/", + Valid: false, + }, + + { + // missing value for ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/apiVersionSets/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/apiVersionSets/apiVersionSet1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.APIMANAGEMENT/SERVICE/SERVICE1/APIVERSIONSETS/APIVERSIONSET1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ApiVersionSetID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/apimanagement/validate/apimanagement.go b/azurerm/internal/services/apimanagement/validate/apimanagement.go deleted file mode 100644 index 13ffe7e59543..000000000000 --- a/azurerm/internal/services/apimanagement/validate/apimanagement.go +++ /dev/null @@ -1,21 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/apimanagement/parse" -) - -func ApiManagementLoggerID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return warnings, errors - } - - if _, err := parse.ApiManagementLoggerID(v); err != nil { - errors = append(errors, fmt.Errorf("can not parse %q as a Api Management Logger id: %v", k, err)) - } - - return warnings, 
errors -} diff --git a/azurerm/internal/services/apimanagement/validate/custom_domain_id.go b/azurerm/internal/services/apimanagement/validate/custom_domain_id.go new file mode 100644 index 000000000000..8c6e190a3d94 --- /dev/null +++ b/azurerm/internal/services/apimanagement/validate/custom_domain_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/apimanagement/parse" +) + +func CustomDomainID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.CustomDomainID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/apimanagement/validate/custom_domain_id_test.go b/azurerm/internal/services/apimanagement/validate/custom_domain_id_test.go new file mode 100644 index 000000000000..331c57c78fe1 --- /dev/null +++ b/azurerm/internal/services/apimanagement/validate/custom_domain_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestCustomDomainID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/", + Valid: false, + }, + + { + // missing value for ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/customDomains/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/customDomains/customdomain", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.APIMANAGEMENT/SERVICE/SERVICE1/CUSTOMDOMAINS/CUSTOMDOMAIN", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := CustomDomainID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/apimanagement/validate/diagnostic_id.go b/azurerm/internal/services/apimanagement/validate/diagnostic_id.go new file mode 100644 index 000000000000..165a5e0ca152 --- 
/dev/null +++ b/azurerm/internal/services/apimanagement/validate/diagnostic_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/apimanagement/parse" +) + +func DiagnosticID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.DiagnosticID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/apimanagement/validate/diagnostic_id_test.go b/azurerm/internal/services/apimanagement/validate/diagnostic_id_test.go new file mode 100644 index 000000000000..c78876202886 --- /dev/null +++ b/azurerm/internal/services/apimanagement/validate/diagnostic_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestDiagnosticID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/", + Valid: false, + }, + + { + // missing value for ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/diagnostics/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/diagnostics/diagnostic1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.APIMANAGEMENT/SERVICE/SERVICE1/DIAGNOSTICS/DIAGNOSTIC1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := DiagnosticID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/apimanagement/validate/logger_id.go b/azurerm/internal/services/apimanagement/validate/logger_id.go new file mode 100644 index 000000000000..1d925da466be --- /dev/null +++ b/azurerm/internal/services/apimanagement/validate/logger_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/apimanagement/parse" +) + +func LoggerID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.LoggerID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/apimanagement/validate/logger_id_test.go b/azurerm/internal/services/apimanagement/validate/logger_id_test.go new file mode 100644 index 000000000000..30043ca9bc73 --- /dev/null +++ b/azurerm/internal/services/apimanagement/validate/logger_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestLoggerID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/", + Valid: false, + }, + + { + // missing value for ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/loggers/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/loggers/logger1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.APIMANAGEMENT/SERVICE/SERVICE1/LOGGERS/LOGGER1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := LoggerID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/apimanagement/validate/policy_id.go b/azurerm/internal/services/apimanagement/validate/policy_id.go new file mode 100644 index 000000000000..98d33b659f54 --- /dev/null +++ b/azurerm/internal/services/apimanagement/validate/policy_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/apimanagement/parse" +) + +func PolicyID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + 
} + + if _, err := parse.PolicyID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/apimanagement/validate/policy_id_test.go b/azurerm/internal/services/apimanagement/validate/policy_id_test.go new file mode 100644 index 000000000000..d137418ff7bb --- /dev/null +++ b/azurerm/internal/services/apimanagement/validate/policy_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestPolicyID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/", + Valid: false, + }, + + { + // missing value for ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/policies/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ApiManagement/service/service1/policies/policy1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.APIMANAGEMENT/SERVICE/SERVICE1/POLICIES/POLICY1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := PolicyID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/appconfiguration/app_configuration_data_source.go b/azurerm/internal/services/appconfiguration/app_configuration_data_source.go index c4dd39a1c719..c8d598142bce 100644 --- a/azurerm/internal/services/appconfiguration/app_configuration_data_source.go +++ b/azurerm/internal/services/appconfiguration/app_configuration_data_source.go @@ -7,6 +7,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/appconfiguration/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/appconfiguration/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" @@ -25,7 +26,7 @@ func 
dataSourceAppConfiguration() *schema.Resource { "name": { Type: schema.TypeString, Required: true, - ValidateFunc: validate.AppConfigurationName, + ValidateFunc: validate.ConfigurationStoreName, }, "resource_group_name": azure.SchemaResourceGroupNameForDataSource(), @@ -145,6 +146,7 @@ func dataSourceAppConfiguration() *schema.Resource { func dataSourceAppConfigurationRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).AppConfiguration.AppConfigurationsClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -165,7 +167,7 @@ func dataSourceAppConfigurationRead(d *schema.ResourceData, meta interface{}) er return fmt.Errorf("Failed to receive access keys for App Configuration %q (Resource Group %q): %+v", name, resourceGroup, err) } - d.SetId(*resp.ID) + d.SetId(parse.NewConfigurationStoreID(subscriptionId, resourceGroup, name).ID()) if location := resp.Location; location != nil { d.Set("location", azure.NormalizeLocation(*location)) diff --git a/azurerm/internal/services/appconfiguration/app_configuration_data_source_test.go b/azurerm/internal/services/appconfiguration/app_configuration_data_source_test.go new file mode 100644 index 000000000000..f4535e1a60fa --- /dev/null +++ b/azurerm/internal/services/appconfiguration/app_configuration_data_source_test.go @@ -0,0 +1,56 @@ +package appconfiguration_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type AppConfigurationDataSource struct { +} + +func TestAccAppConfigurationDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_app_configuration", "test") + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: AppConfigurationResource{}.standard(data), + }, + { + Config: AppConfigurationDataSource{}.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(AppConfigurationResource{}), + check.That(data.ResourceName).Key("endpoint").Exists(), + check.That(data.ResourceName).Key("location").Exists(), + check.That(data.ResourceName).Key("sku").Exists(), + check.That(data.ResourceName).Key("primary_read_key.0.connection_string").Exists(), + check.That(data.ResourceName).Key("primary_read_key.0.id").Exists(), + check.That(data.ResourceName).Key("primary_read_key.0.secret").Exists(), + check.That(data.ResourceName).Key("primary_write_key.0.connection_string").Exists(), + check.That(data.ResourceName).Key("primary_write_key.0.id").Exists(), + check.That(data.ResourceName).Key("primary_write_key.0.secret").Exists(), + check.That(data.ResourceName).Key("secondary_read_key.0.connection_string").Exists(), + check.That(data.ResourceName).Key("secondary_read_key.0.id").Exists(), + check.That(data.ResourceName).Key("secondary_read_key.0.secret").Exists(), + check.That(data.ResourceName).Key("secondary_write_key.0.connection_string").Exists(), + check.That(data.ResourceName).Key("secondary_write_key.0.id").Exists(), + check.That(data.ResourceName).Key("secondary_write_key.0.secret").Exists(), + ), + }, + }) +} + +func (AppConfigurationDataSource) basic(data acceptance.TestData) string { + template := AppConfigurationResource{}.standard(data) + return fmt.Sprintf(` +%s + +data "azurerm_app_configuration" 
"test" { + name = azurerm_app_configuration.test.name + resource_group_name = azurerm_app_configuration.test.resource_group_name +} +`, template) +} diff --git a/azurerm/internal/services/appconfiguration/app_configuration_resource.go b/azurerm/internal/services/appconfiguration/app_configuration_resource.go index e8d06fca31fd..63091dcfc724 100644 --- a/azurerm/internal/services/appconfiguration/app_configuration_resource.go +++ b/azurerm/internal/services/appconfiguration/app_configuration_resource.go @@ -21,12 +21,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmAppConfiguration() *schema.Resource { +func resourceAppConfiguration() *schema.Resource { return &schema.Resource{ - Create: resourceArmAppConfigurationCreate, - Read: resourceArmAppConfigurationRead, - Update: resourceArmAppConfigurationUpdate, - Delete: resourceArmAppConfigurationDelete, + Create: resourceAppConfigurationCreate, + Read: resourceAppConfigurationRead, + Update: resourceAppConfigurationUpdate, + Delete: resourceAppConfigurationDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -36,7 +36,7 @@ func resourceArmAppConfiguration() *schema.Resource { }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.AppConfigurationID(id) + _, err := parse.ConfigurationStoreID(id) return err }), @@ -45,7 +45,7 @@ func resourceArmAppConfiguration() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.AppConfigurationName, + ValidateFunc: validate.ConfigurationStoreName, }, "location": azure.SchemaLocation(), @@ -195,8 +195,9 @@ func resourceArmAppConfiguration() *schema.Resource { } } -func resourceArmAppConfigurationCreate(d *schema.ResourceData, meta interface{}) error { +func resourceAppConfigurationCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).AppConfiguration.AppConfigurationsClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -204,16 +205,15 @@ func resourceArmAppConfigurationCreate(d *schema.ResourceData, meta interface{}) name := d.Get("name").(string) resourceGroup := d.Get("resource_group_name").(string) - + resourceId := parse.NewConfigurationStoreID(subscriptionId, resourceGroup, name).ID() existing, err := client.Get(ctx, resourceGroup, name) if err != nil { if !utils.ResponseWasNotFound(existing.Response) { return fmt.Errorf("Error checking for presence of existing App Configuration %q (Resource Group %q): %s", name, resourceGroup, err) } } - - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_app_configuration", *existing.ID) + if !utils.ResponseWasNotFound(existing.Response) { + return tf.ImportAsExistsError("azurerm_app_configuration", resourceId) } parameters := appconfiguration.ConfigurationStore{ @@ -235,26 +235,17 @@ func resourceArmAppConfigurationCreate(d *schema.ResourceData, meta interface{}) return fmt.Errorf("Error waiting for creation of App Configuration %q (Resource Group %q): %+v", name, resourceGroup, err) } - read, err := client.Get(ctx, resourceGroup, name) - if err != nil { - return fmt.Errorf("Error retrieving App Configuration %q (Resource Group %q): %+v", name, resourceGroup, err) - } - if read.ID == nil { - return fmt.Errorf("Cannot read App Configuration %s (resource Group %q) ID", name, resourceGroup) - } - - 
d.SetId(*read.ID) - - return resourceArmAppConfigurationRead(d, meta) + d.SetId(resourceId) + return resourceAppConfigurationRead(d, meta) } -func resourceArmAppConfigurationUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceAppConfigurationUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).AppConfiguration.AppConfigurationsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() log.Printf("[INFO] preparing arguments for Azure ARM App Configuration update.") - id, err := parse.AppConfigurationID(d.Id()) + id, err := parse.ConfigurationStoreID(d.Id()) if err != nil { return err } @@ -289,15 +280,15 @@ func resourceArmAppConfigurationUpdate(d *schema.ResourceData, meta interface{}) d.SetId(*read.ID) - return resourceArmAppConfigurationRead(d, meta) + return resourceAppConfigurationRead(d, meta) } -func resourceArmAppConfigurationRead(d *schema.ResourceData, meta interface{}) error { +func resourceAppConfigurationRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).AppConfiguration.AppConfigurationsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.AppConfigurationID(d.Id()) + id, err := parse.ConfigurationStoreID(d.Id()) if err != nil { return err } @@ -317,7 +308,7 @@ func resourceArmAppConfigurationRead(d *schema.ResourceData, meta interface{}) e return fmt.Errorf("Failed to receive access keys for App Configuration %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } - d.Set("name", resp.Name) + d.Set("name", id.Name) d.Set("resource_group_name", id.ResourceGroup) if location := resp.Location; location != nil { d.Set("location", azure.NormalizeLocation(*location)) @@ -346,12 +337,12 @@ func resourceArmAppConfigurationRead(d *schema.ResourceData, meta interface{}) e return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmAppConfigurationDelete(d *schema.ResourceData, meta interface{}) error { +func resourceAppConfigurationDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).AppConfiguration.AppConfigurationsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.AppConfigurationID(d.Id()) + id, err := parse.ConfigurationStoreID(d.Id()) if err != nil { return err } @@ -456,6 +447,7 @@ func expandAppConfigurationIdentity(identities []interface{}) *appconfiguration. 
Type: identityType, } } + func flattenAppConfigurationIdentity(identity *appconfiguration.ResourceIdentity) []interface{} { if identity == nil || identity.Type == appconfiguration.None { return []interface{}{} diff --git a/azurerm/internal/services/appconfiguration/app_configuration_resource_test.go b/azurerm/internal/services/appconfiguration/app_configuration_resource_test.go new file mode 100644 index 000000000000..d4faa2f18a3a --- /dev/null +++ b/azurerm/internal/services/appconfiguration/app_configuration_resource_test.go @@ -0,0 +1,290 @@ +package appconfiguration_test + +import ( + "context" + "fmt" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/appconfiguration/parse" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type AppConfigurationResource struct { +} + +func TestAccAppConfiguration_free(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_app_configuration", "test") + r := AppConfigurationResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.free(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAppConfiguration_standard(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_app_configuration", "test") + r := AppConfigurationResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.standard(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAppConfiguration_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_app_configuration", "test") + r := AppConfigurationResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.free(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccAppConfiguration_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_app_configuration", "test") + r := AppConfigurationResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAppConfiguration_identity(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_app_configuration", "test") + r := AppConfigurationResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.identity(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAppConfiguration_identityUpdated(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_app_configuration", "test") + r := AppConfigurationResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.standard(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: 
r.identity(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("identity.#").HasValue("1"), + check.That(data.ResourceName).Key("identity.0.type").HasValue("SystemAssigned"), + check.That(data.ResourceName).Key("identity.0.principal_id").Exists(), + check.That(data.ResourceName).Key("identity.0.tenant_id").Exists(), + ), + }, + data.ImportStep(), + { + Config: r.standard(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAppConfiguration_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_app_configuration", "test") + r := AppConfigurationResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.completeUpdated(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + }) +} + +func (t AppConfigurationResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ConfigurationStoreID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.AppConfiguration.AppConfigurationsClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving App Configuration %q (resource group: %q): %+v", id.Name, id.ResourceGroup, err) + } + + return utils.Bool(resp.ConfigurationStoreProperties != nil), nil +} + +func (AppConfigurationResource) free(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-appconfig-%d" + location = "%s" +} + +resource "azurerm_app_configuration" "test" { + name = "testacc-appconf%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + sku = "free" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (AppConfigurationResource) standard(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-appconfig-%d" + location = "%s" +} + +resource "azurerm_app_configuration" "test" { + name = "testaccappconf%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + sku = "standard" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r AppConfigurationResource) requiresImport(data acceptance.TestData) string { + template := r.standard(data) + return fmt.Sprintf(` +%s + +resource "azurerm_app_configuration" "import" { + name = azurerm_app_configuration.test.name + resource_group_name = azurerm_app_configuration.test.resource_group_name + location = azurerm_app_configuration.test.location + sku = azurerm_app_configuration.test.sku +} +`, template) +} + +func (AppConfigurationResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-appconfig-%d" + location = "%s" +} + +resource "azurerm_app_configuration" "test" { + name = "testaccappconf%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + sku = "standard" + + tags = { + environment = "development" + } +} +`, 
data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (AppConfigurationResource) identity(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-appconfig-%d" + location = "%s" +} + +resource "azurerm_app_configuration" "test" { + name = "testaccappconf%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + sku = "standard" + + identity { + type = "SystemAssigned" + } + + tags = { + ENVironment = "DEVelopment" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (AppConfigurationResource) completeUpdated(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-appconfig-%d" + location = "%s" +} + +resource "azurerm_app_configuration" "test" { + name = "testaccappconf%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + sku = "standard" + + tags = { + Environment = "Production" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/appconfiguration/parse/app_configuration.go b/azurerm/internal/services/appconfiguration/parse/app_configuration.go deleted file mode 100644 index ccfbadc5449c..000000000000 --- a/azurerm/internal/services/appconfiguration/parse/app_configuration.go +++ /dev/null @@ -1,33 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type AppConfigurationId struct { - ResourceGroup string - Name string -} - -func AppConfigurationID(input string) (*AppConfigurationId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse App Configuration Server ID %q: %+v", input, err) - } - - server := AppConfigurationId{ - ResourceGroup: id.ResourceGroup, - } - - if server.Name, err = id.PopSegment("configurationStores"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &server, nil -} diff --git a/azurerm/internal/services/appconfiguration/parse/app_configuration_test.go b/azurerm/internal/services/appconfiguration/parse/app_configuration_test.go deleted file mode 100644 index 911e91ad25c2..000000000000 --- a/azurerm/internal/services/appconfiguration/parse/app_configuration_test.go +++ /dev/null @@ -1,73 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestAppConfigurationID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *AppConfigurationId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Stores Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.AppConfiguration/configurationStores/", - Expected: nil, - }, - { - Name: "App Configuration ID", - Input: 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.AppConfiguration/configurationStores/Store1", - Expected: &AppConfigurationId{ - Name: "Store1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.AppConfiguration/ConfigurationStores/Store1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := AppConfigurationID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/appconfiguration/parse/configuration_store.go b/azurerm/internal/services/appconfiguration/parse/configuration_store.go new file mode 100644 index 000000000000..bb7e30ae6738 --- /dev/null +++ b/azurerm/internal/services/appconfiguration/parse/configuration_store.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ConfigurationStoreId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewConfigurationStoreID(subscriptionId, resourceGroup, name string) ConfigurationStoreId { + return ConfigurationStoreId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id ConfigurationStoreId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Configuration Store", segmentsStr) +} + +func (id ConfigurationStoreId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.AppConfiguration/configurationStores/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// ConfigurationStoreID parses a ConfigurationStore ID into an ConfigurationStoreId struct +func ConfigurationStoreID(input string) (*ConfigurationStoreId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ConfigurationStoreId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("configurationStores"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/appconfiguration/parse/configuration_store_test.go b/azurerm/internal/services/appconfiguration/parse/configuration_store_test.go new file mode 100644 index 000000000000..8b783cbda90e --- /dev/null +++ b/azurerm/internal/services/appconfiguration/parse/configuration_store_test.go @@ -0,0 +1,112 @@ +package parse + +// 
NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ConfigurationStoreId{} + +func TestConfigurationStoreIDFormatter(t *testing.T) { + actual := NewConfigurationStoreID("12345678-1234-9876-4563-123456789012", "group1", "store1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.AppConfiguration/configurationStores/store1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestConfigurationStoreID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ConfigurationStoreId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.AppConfiguration/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.AppConfiguration/configurationStores/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.AppConfiguration/configurationStores/store1", + Expected: &ConfigurationStoreId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "group1", + Name: "store1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.APPCONFIGURATION/CONFIGURATIONSTORES/STORE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ConfigurationStoreID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/appconfiguration/registration.go b/azurerm/internal/services/appconfiguration/registration.go index 2c6d8d31992d..725171049148 100644 --- a/azurerm/internal/services/appconfiguration/registration.go +++ b/azurerm/internal/services/appconfiguration/registration.go @@ -28,6 +28,6 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource { // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_app_configuration": resourceArmAppConfiguration(), + 
"azurerm_app_configuration": resourceAppConfiguration(), } } diff --git a/azurerm/internal/services/appconfiguration/resourceids.go b/azurerm/internal/services/appconfiguration/resourceids.go new file mode 100644 index 000000000000..b49b87985aba --- /dev/null +++ b/azurerm/internal/services/appconfiguration/resourceids.go @@ -0,0 +1,3 @@ +package appconfiguration + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=ConfigurationStore -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.AppConfiguration/configurationStores/store1 diff --git a/azurerm/internal/services/appconfiguration/tests/app_configuration_data_source_test.go b/azurerm/internal/services/appconfiguration/tests/app_configuration_data_source_test.go deleted file mode 100644 index 5bf1e9d87942..000000000000 --- a/azurerm/internal/services/appconfiguration/tests/app_configuration_data_source_test.go +++ /dev/null @@ -1,57 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccAppConfigurationDataSource_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_app_configuration", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAppConfigurationDestroy, - Steps: []resource.TestStep{ - { - Config: testAppConfigurationResource_standard(data), - }, - { - Config: testAppConfigurationDataSource_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAppConfigurationExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "location"), - resource.TestCheckResourceAttrSet(data.ResourceName, "sku"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_read_key.0.connection_string"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_read_key.0.id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_read_key.0.secret"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_write_key.0.connection_string"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_write_key.0.id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_write_key.0.secret"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_read_key.0.connection_string"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_read_key.0.id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_read_key.0.secret"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_write_key.0.connection_string"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_write_key.0.id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_write_key.0.secret"), - ), - }, - }, - }) -} - -func testAppConfigurationDataSource_basic(data acceptance.TestData) string { - template := testAppConfigurationResource_standard(data) - return fmt.Sprintf(` -%s - -data "azurerm_app_configuration" "test" { - name = azurerm_app_configuration.test.name - resource_group_name = azurerm_app_configuration.test.resource_group_name -} -`, template) -} diff --git a/azurerm/internal/services/appconfiguration/tests/app_configuration_resource_test.go 
b/azurerm/internal/services/appconfiguration/tests/app_configuration_resource_test.go deleted file mode 100644 index 7d960e64c16f..000000000000 --- a/azurerm/internal/services/appconfiguration/tests/app_configuration_resource_test.go +++ /dev/null @@ -1,356 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/appconfiguration/parse" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAppConfigurationResource_free(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_app_configuration", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAppConfigurationDestroy, - Steps: []resource.TestStep{ - { - Config: testAppConfigurationResource_free(data), - Check: resource.ComposeTestCheckFunc( - testCheckAppConfigurationExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAppConfigurationResource_standard(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_app_configuration", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAppConfigurationDestroy, - Steps: []resource.TestStep{ - { - Config: testAppConfigurationResource_standard(data), - Check: resource.ComposeTestCheckFunc( - testCheckAppConfigurationExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAppConfigurationResource_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_app_configuration", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAppConfigurationDestroy, - Steps: []resource.TestStep{ - { - Config: testAppConfigurationResource_free(data), - Check: resource.ComposeTestCheckFunc( - testCheckAppConfigurationExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAppConfigurationResource_requiresImport), - }, - }) -} - -func TestAccAppConfigurationResource_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_app_configuration", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAppConfigurationDestroy, - Steps: []resource.TestStep{ - { - Config: testAppConfigurationResource_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAppConfigurationExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAppConfigurationResource_identity(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_app_configuration", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAppConfigurationDestroy, - Steps: []resource.TestStep{ - { - Config: testAppConfigurationResource_identity(data), - Check: resource.ComposeTestCheckFunc( - 
testCheckAppConfigurationExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAppConfigurationResource_identityUpdated(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_app_configuration", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAppConfigurationDestroy, - Steps: []resource.TestStep{ - { - Config: testAppConfigurationResource_standard(data), - Check: resource.ComposeTestCheckFunc( - testCheckAppConfigurationExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAppConfigurationResource_identity(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "identity.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "identity.0.type", "SystemAssigned"), - resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.principal_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.tenant_id"), - ), - }, - data.ImportStep(), - { - Config: testAppConfigurationResource_standard(data), - Check: resource.ComposeTestCheckFunc( - testCheckAppConfigurationExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAppConfigurationResource_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_app_configuration", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAppConfigurationDestroy, - Steps: []resource.TestStep{ - { - Config: testAppConfigurationResource_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAppConfigurationExists(data.ResourceName), - ), - }, - { - Config: testAppConfigurationResource_completeUpdated(data), - Check: resource.ComposeTestCheckFunc( - testCheckAppConfigurationExists(data.ResourceName), - ), - }, - }, - }) -} - -func testCheckAppConfigurationDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).AppConfiguration.AppConfigurationsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_app_configuration" { - continue - } - - id, err := parse.AppConfigurationID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.Name) - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return err - } - } - - return nil - } - - return nil -} - -func testCheckAppConfigurationExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).AppConfiguration.AppConfigurationsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.AppConfigurationID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.Name) - if err != nil { - return fmt.Errorf("Bad: Get on appConfigurationsClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: App Configuration %q (resource group: %q) does not exist", id.Name, id.ResourceGroup) - } - - return nil - } -} - -func 
testAppConfigurationResource_free(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_app_configuration" "test" { - name = "testacc-appconf%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - sku = "free" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAppConfigurationResource_standard(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_app_configuration" "test" { - name = "testaccappconf%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - sku = "standard" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAppConfigurationResource_requiresImport(data acceptance.TestData) string { - template := testAppConfigurationResource_free(data) - return fmt.Sprintf(` -%s - -resource "azurerm_app_configuration" "import" { - name = azurerm_app_configuration.test.name - resource_group_name = azurerm_app_configuration.test.resource_group_name - location = azurerm_app_configuration.test.location - sku = azurerm_app_configuration.test.sku -} -`, template) -} - -func testAppConfigurationResource_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_app_configuration" "test" { - name = "testaccappconf%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - sku = "free" - - tags = { - environment = "development" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAppConfigurationResource_identity(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_app_configuration" "test" { - name = "testaccappconf%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - sku = "standard" - - identity { - type = "SystemAssigned" - } - - tags = { - environment = "development" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAppConfigurationResource_completeUpdated(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_app_configuration" "test" { - name = "testaccappconf%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - sku = "free" - - tags = { - environment = "production" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/appconfiguration/validate/configuration_store_id.go b/azurerm/internal/services/appconfiguration/validate/configuration_store_id.go new file mode 100644 index 000000000000..9ae4995e247e --- /dev/null +++ b/azurerm/internal/services/appconfiguration/validate/configuration_store_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this 
file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/appconfiguration/parse" +) + +func ConfigurationStoreID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ConfigurationStoreID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/appconfiguration/validate/configuration_store_id_test.go b/azurerm/internal/services/appconfiguration/validate/configuration_store_id_test.go new file mode 100644 index 000000000000..3e2f5dab2caf --- /dev/null +++ b/azurerm/internal/services/appconfiguration/validate/configuration_store_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestConfigurationStoreID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.AppConfiguration/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.AppConfiguration/configurationStores/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.AppConfiguration/configurationStores/store1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.APPCONFIGURATION/CONFIGURATIONSTORES/STORE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ConfigurationStoreID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/appconfiguration/validate/configuration_store_name.go b/azurerm/internal/services/appconfiguration/validate/configuration_store_name.go new file mode 100644 index 000000000000..85104f0e2cb4 --- /dev/null +++ b/azurerm/internal/services/appconfiguration/validate/configuration_store_name.go @@ -0,0 +1,15 @@ +package validate + +import ( + "fmt" + "regexp" +) + +func ConfigurationStoreName(v interface{}, k string) (warnings []string, errors []error) { + value := v.(string) + if matched := regexp.MustCompile(`^[a-zA-Z0-9-]{5,50}$`).Match([]byte(value)); !matched { + errors = append(errors, fmt.Errorf("%q may only contain alphanumeric characters and dashes and must be between 5-50 chars", k)) + } + + return warnings, errors +} diff --git a/azurerm/internal/services/appconfiguration/validate/configuration_store_name_test.go 
b/azurerm/internal/services/appconfiguration/validate/configuration_store_name_test.go new file mode 100644 index 000000000000..4ecaf6b42b70 --- /dev/null +++ b/azurerm/internal/services/appconfiguration/validate/configuration_store_name_test.go @@ -0,0 +1,60 @@ +package validate + +import ( + "testing" +) + +func TestAppConfigurationName(t *testing.T) { + cases := []struct { + Value string + ErrCount int + }{ + { + Value: "four", + ErrCount: 1, + }, + { + Value: "5five", + ErrCount: 0, + }, + { + Value: "hello-world", + ErrCount: 0, + }, + { + Value: "hello_world", + ErrCount: 1, + }, + { + Value: "helloWorld", + ErrCount: 0, + }, + { + Value: "helloworld12", + ErrCount: 0, + }, + { + Value: "hello@world", + ErrCount: 1, + }, + { + Value: "qfvbdsbvipqdbwsbddbdcwqffewsqwcdw21ddwqwd3324120", + ErrCount: 0, + }, + { + Value: "qfvbdsbvipqdbwsbddbdcwqffewsqwcdw21ddwqwd332412020", + ErrCount: 0, + }, + { + Value: "qfvbdsbvipqdbwsbddbdcwqfjjfewsqwcdw21ddwqwd33241201", + ErrCount: 1, + }, + } + + for _, tc := range cases { + _, errors := ConfigurationStoreName(tc.Value, "azurerm_app_configuration") + if len(errors) != tc.ErrCount { + t.Fatalf("Expected the Azure App Configuration Name to trigger a validation error: %v", tc) + } + } +} diff --git a/azurerm/internal/services/appconfiguration/validate/name.go b/azurerm/internal/services/appconfiguration/validate/name.go deleted file mode 100644 index 8b3df0f8afcc..000000000000 --- a/azurerm/internal/services/appconfiguration/validate/name.go +++ /dev/null @@ -1,15 +0,0 @@ -package validate - -import ( - "fmt" - "regexp" -) - -func AppConfigurationName(v interface{}, k string) (warnings []string, errors []error) { - value := v.(string) - if matched := regexp.MustCompile(`^[a-zA-Z0-9-]{5,50}$`).Match([]byte(value)); !matched { - errors = append(errors, fmt.Errorf("%q may only contain alphanumeric characters and dashes and must be between 5-50 chars", k)) - } - - return warnings, errors -} diff --git a/azurerm/internal/services/appconfiguration/validate/name_test.go b/azurerm/internal/services/appconfiguration/validate/name_test.go deleted file mode 100644 index 1c40161a2aab..000000000000 --- a/azurerm/internal/services/appconfiguration/validate/name_test.go +++ /dev/null @@ -1,60 +0,0 @@ -package validate - -import ( - "testing" -) - -func TestAppConfigurationName(t *testing.T) { - cases := []struct { - Value string - ErrCount int - }{ - { - Value: "four", - ErrCount: 1, - }, - { - Value: "5five", - ErrCount: 0, - }, - { - Value: "hello-world", - ErrCount: 0, - }, - { - Value: "hello_world", - ErrCount: 1, - }, - { - Value: "helloWorld", - ErrCount: 0, - }, - { - Value: "helloworld12", - ErrCount: 0, - }, - { - Value: "hello@world", - ErrCount: 1, - }, - { - Value: "qfvbdsbvipqdbwsbddbdcwqffewsqwcdw21ddwqwd3324120", - ErrCount: 0, - }, - { - Value: "qfvbdsbvipqdbwsbddbdcwqffewsqwcdw21ddwqwd332412020", - ErrCount: 0, - }, - { - Value: "qfvbdsbvipqdbwsbddbdcwqfjjfewsqwcdw21ddwqwd33241201", - ErrCount: 1, - }, - } - - for _, tc := range cases { - _, errors := AppConfigurationName(tc.Value, "azurerm_app_configuration") - if len(errors) != tc.ErrCount { - t.Fatalf("Expected the Azure App Configuration Name to trigger a validation error: %v", tc) - } - } -} diff --git a/azurerm/internal/services/applicationinsights/application_insights_analytics_item_resource.go b/azurerm/internal/services/applicationinsights/application_insights_analytics_item_resource.go index 0c6aaeb2e5f3..6839da892219 100644 --- 
a/azurerm/internal/services/applicationinsights/application_insights_analytics_item_resource.go +++ b/azurerm/internal/services/applicationinsights/application_insights_analytics_item_resource.go @@ -12,12 +12,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" ) -func resourceArmApplicationInsightsAnalyticsItem() *schema.Resource { +func resourceApplicationInsightsAnalyticsItem() *schema.Resource { return &schema.Resource{ - Create: resourceArmApplicationInsightsAnalyticsItemCreate, - Read: resourceArmApplicationInsightsAnalyticsItemRead, - Update: resourceArmApplicationInsightsAnalyticsItemUpdate, - Delete: resourceArmApplicationInsightsAnalyticsItemDelete, + Create: resourceApplicationInsightsAnalyticsItemCreate, + Read: resourceApplicationInsightsAnalyticsItemRead, + Update: resourceApplicationInsightsAnalyticsItemUpdate, + Delete: resourceApplicationInsightsAnalyticsItemDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -94,13 +94,15 @@ func resourceArmApplicationInsightsAnalyticsItem() *schema.Resource { } } -func resourceArmApplicationInsightsAnalyticsItemCreate(d *schema.ResourceData, meta interface{}) error { - return resourceArmApplicationInsightsAnalyticsItemCreateUpdate(d, meta, false) +func resourceApplicationInsightsAnalyticsItemCreate(d *schema.ResourceData, meta interface{}) error { + return resourceApplicationInsightsAnalyticsItemCreateUpdate(d, meta, false) } -func resourceArmApplicationInsightsAnalyticsItemUpdate(d *schema.ResourceData, meta interface{}) error { - return resourceArmApplicationInsightsAnalyticsItemCreateUpdate(d, meta, true) + +func resourceApplicationInsightsAnalyticsItemUpdate(d *schema.ResourceData, meta interface{}) error { + return resourceApplicationInsightsAnalyticsItemCreateUpdate(d, meta, true) } -func resourceArmApplicationInsightsAnalyticsItemCreateUpdate(d *schema.ResourceData, meta interface{}, overwrite bool) error { + +func resourceApplicationInsightsAnalyticsItemCreateUpdate(d *schema.ResourceData, meta interface{}, overwrite bool) error { client := meta.(*clients.Client).AppInsights.AnalyticsItemsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -159,10 +161,10 @@ func resourceArmApplicationInsightsAnalyticsItemCreateUpdate(d *schema.ResourceD generatedID := appInsightsID + resourcesArmApplicationInsightsAnalyticsItemGenerateIDSuffix(itemScope, *result.ID) d.SetId(generatedID) - return resourceArmApplicationInsightsAnalyticsItemRead(d, meta) + return resourceApplicationInsightsAnalyticsItemRead(d, meta) } -func resourceArmApplicationInsightsAnalyticsItemRead(d *schema.ResourceData, meta interface{}) error { +func resourceApplicationInsightsAnalyticsItemRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).AppInsights.AnalyticsItemsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -196,7 +198,7 @@ func resourceArmApplicationInsightsAnalyticsItemRead(d *schema.ResourceData, met return nil } -func resourceArmApplicationInsightsAnalyticsItemDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApplicationInsightsAnalyticsItemDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).AppInsights.AnalyticsItemsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git 
a/azurerm/internal/services/applicationinsights/application_insights_analytics_item_resource_test.go b/azurerm/internal/services/applicationinsights/application_insights_analytics_item_resource_test.go new file mode 100644 index 000000000000..7db37cb70cde --- /dev/null +++ b/azurerm/internal/services/applicationinsights/application_insights_analytics_item_resource_test.go @@ -0,0 +1,217 @@ +package applicationinsights_test + +import ( + "context" + "fmt" + "net/http" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/applicationinsights" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type AppInsightsAnalyticsItemResource struct { +} + +func TestAccApplicationInsightsAnalyticsItem_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_application_insights_analytics_item", "test") + r := AppInsightsAnalyticsItemResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("testquery"), + check.That(data.ResourceName).Key("scope").HasValue("shared"), + check.That(data.ResourceName).Key("type").HasValue("query"), + check.That(data.ResourceName).Key("content").HasValue("requests #test"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApplicationInsightsAnalyticsItem_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_application_insights_analytics_item", "test") + r := AppInsightsAnalyticsItemResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("testquery"), + check.That(data.ResourceName).Key("scope").HasValue("shared"), + check.That(data.ResourceName).Key("type").HasValue("query"), + check.That(data.ResourceName).Key("content").HasValue("requests #test"), + ), + }, + data.ImportStep(), + { + Config: r.basic2(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("testquery"), + check.That(data.ResourceName).Key("scope").HasValue("shared"), + check.That(data.ResourceName).Key("type").HasValue("query"), + check.That(data.ResourceName).Key("content").HasValue("requests #updated"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApplicationInsightsAnalyticsItem_multiple(t *testing.T) { + r1 := acceptance.BuildTestData(t, "azurerm_application_insights_analytics_item", "test1") + r2 := acceptance.BuildTestData(t, "azurerm_application_insights_analytics_item", "test2") + r3 := acceptance.BuildTestData(t, "azurerm_application_insights_analytics_item", "test3") + r := AppInsightsAnalyticsItemResource{} + + r1.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.multiple(r1), + Check: resource.ComposeTestCheckFunc( + check.That(r1.ResourceName).ExistsInAzure(r), + check.That(r2.ResourceName).ExistsInAzure(r), + 
check.That(r3.ResourceName).ExistsInAzure(r), + check.That(r1.ResourceName).Key("name").HasValue("testquery1"), + check.That(r1.ResourceName).Key("scope").HasValue("shared"), + check.That(r1.ResourceName).Key("type").HasValue("query"), + check.That(r1.ResourceName).Key("content").HasValue("requests #test1"), + check.That(r2.ResourceName).Key("name").HasValue("testquery2"), + check.That(r2.ResourceName).Key("scope").HasValue("user"), + check.That(r2.ResourceName).Key("type").HasValue("query"), + check.That(r2.ResourceName).Key("content").HasValue("requests #test2"), + check.That(r3.ResourceName).Key("name").HasValue("testfunction1"), + check.That(r3.ResourceName).Key("scope").HasValue("shared"), + check.That(r3.ResourceName).Key("type").HasValue("function"), + check.That(r3.ResourceName).Key("content").HasValue("requests #test3"), + check.That(r3.ResourceName).Key("function_alias").HasValue("myfunction"), + ), + }, + r1.ImportStep(), + r2.ImportStep(), + r3.ImportStep(), + }) +} + +func (t AppInsightsAnalyticsItemResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + resGroup, appInsightsName, itemScopePath, itemID, err := applicationinsights.ResourcesArmApplicationInsightsAnalyticsItemParseID(state.ID) + if err != nil { + return nil, fmt.Errorf("Failed to parse ID (id: %s): %+v", state.ID, err) + } + + resp, err := clients.AppInsights.AnalyticsItemsClient.Get(ctx, resGroup, appInsightsName, itemScopePath, itemID, "") + if err != nil { + return nil, fmt.Errorf("retrieving Application Insights AnalyticsItem %q (resource group: %q, app insight: %s, item scope: %s): %+v", resGroup, appInsightsName, itemScopePath, itemID, err) + } + + return utils.Bool(resp.StatusCode != http.StatusNotFound), nil +} + +func (AppInsightsAnalyticsItemResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-appinsights-%d" + location = "%s" +} + +resource "azurerm_application_insights" "test" { + name = "acctestappinsights-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + application_type = "web" +} + +resource "azurerm_application_insights_analytics_item" "test" { + name = "testquery" + application_insights_id = azurerm_application_insights.test.id + content = "requests #test" + scope = "shared" + type = "query" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (AppInsightsAnalyticsItemResource) basic2(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-appinsights-%d" + location = "%s" +} + +resource "azurerm_application_insights" "test" { + name = "acctestappinsights-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + application_type = "web" +} + +resource "azurerm_application_insights_analytics_item" "test" { + name = "testquery" + application_insights_id = azurerm_application_insights.test.id + content = "requests #updated" + scope = "shared" + type = "query" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (AppInsightsAnalyticsItemResource) multiple(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-appinsights-%d" + location 
= "%s" +} + +resource "azurerm_application_insights" "test" { + name = "acctestappinsights-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + application_type = "web" +} + +resource "azurerm_application_insights_analytics_item" "test1" { + name = "testquery1" + application_insights_id = azurerm_application_insights.test.id + content = "requests #test1" + scope = "shared" + type = "query" +} + +resource "azurerm_application_insights_analytics_item" "test2" { + name = "testquery2" + application_insights_id = azurerm_application_insights.test.id + content = "requests #test2" + scope = "user" + type = "query" +} + +resource "azurerm_application_insights_analytics_item" "test3" { + name = "testfunction1" + application_insights_id = azurerm_application_insights.test.id + content = "requests #test3" + scope = "shared" + type = "function" + function_alias = "myfunction" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/applicationinsights/application_insights_api_key_resource.go b/azurerm/internal/services/applicationinsights/application_insights_api_key_resource.go index 4291599759cb..0b07d6148ab0 100644 --- a/azurerm/internal/services/applicationinsights/application_insights_api_key_resource.go +++ b/azurerm/internal/services/applicationinsights/application_insights_api_key_resource.go @@ -15,11 +15,11 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApplicationInsightsAPIKey() *schema.Resource { +func resourceApplicationInsightsAPIKey() *schema.Resource { return &schema.Resource{ - Create: resourceArmApplicationInsightsAPIKeyCreate, - Read: resourceArmApplicationInsightsAPIKeyRead, - Delete: resourceArmApplicationInsightsAPIKeyDelete, + Create: resourceApplicationInsightsAPIKeyCreate, + Read: resourceApplicationInsightsAPIKeyRead, + Delete: resourceApplicationInsightsAPIKeyDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -77,7 +77,7 @@ func resourceArmApplicationInsightsAPIKey() *schema.Resource { } } -func resourceArmApplicationInsightsAPIKeyCreate(d *schema.ResourceData, meta interface{}) error { +func resourceApplicationInsightsAPIKeyCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).AppInsights.APIKeysClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -149,10 +149,10 @@ func resourceArmApplicationInsightsAPIKeyCreate(d *schema.ResourceData, meta int // API key can only retrieved at key creation d.Set("api_key", result.APIKey) - return resourceArmApplicationInsightsAPIKeyRead(d, meta) + return resourceApplicationInsightsAPIKeyRead(d, meta) } -func resourceArmApplicationInsightsAPIKeyRead(d *schema.ResourceData, meta interface{}) error { +func resourceApplicationInsightsAPIKeyRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).AppInsights.APIKeysClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -193,7 +193,7 @@ func resourceArmApplicationInsightsAPIKeyRead(d *schema.ResourceData, meta inter return nil } -func resourceArmApplicationInsightsAPIKeyDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApplicationInsightsAPIKeyDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).AppInsights.APIKeysClient ctx, cancel := 
timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/applicationinsights/application_insights_api_key_resource_test.go b/azurerm/internal/services/applicationinsights/application_insights_api_key_resource_test.go new file mode 100644 index 000000000000..cfd09096c74a --- /dev/null +++ b/azurerm/internal/services/applicationinsights/application_insights_api_key_resource_test.go @@ -0,0 +1,178 @@ +package applicationinsights_test + +import ( + "context" + "fmt" + "net/http" + "regexp" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type AppInsightsAPIKey struct { +} + +func TestAccApplicationInsightsAPIKey_no_permission(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_application_insights_api_key", "test") + r := AppInsightsAPIKey{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "[]", "[]"), + ExpectError: regexp.MustCompile("The API Key needs to have a Role"), + }, + }) +} + +func TestAccApplicationInsightsAPIKey_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_application_insights_api_key", "test") + r := AppInsightsAPIKey{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "[]", `["annotations"]`), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("read_permissions.#").HasValue("0"), + check.That(data.ResourceName).Key("write_permissions.#").HasValue("1"), + ), + }, + { + Config: r.requiresImport(data, "[]", `["annotations"]`), + ExpectError: acceptance.RequiresImportError("azurerm_application_insights_api_key"), + }, + }) +} + +func TestAccApplicationInsightsAPIKey_read_telemetry_permissions(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_application_insights_api_key", "test") + r := AppInsightsAPIKey{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, `["aggregate", "api", "draft", "extendqueries", "search"]`, "[]"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("read_permissions.#").HasValue("5"), + check.That(data.ResourceName).Key("write_permissions.#").HasValue("0"), + ), + }, + data.ImportStep("api_key"), + }) +} + +func TestAccApplicationInsightsAPIKey_write_annotations_permission(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_application_insights_api_key", "test") + r := AppInsightsAPIKey{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "[]", `["annotations"]`), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("read_permissions.#").HasValue("0"), + check.That(data.ResourceName).Key("write_permissions.#").HasValue("1"), + ), + }, + data.ImportStep("api_key"), + }) +} + +func TestAccApplicationInsightsAPIKey_authenticate_permission(t *testing.T) { + data := acceptance.BuildTestData(t, 
"azurerm_application_insights_api_key", "test") + r := AppInsightsAPIKey{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, `["agentconfig"]`, "[]"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("read_permissions.#").HasValue("1"), + check.That(data.ResourceName).Key("write_permissions.#").HasValue("0"), + ), + }, + data.ImportStep("api_key"), + }) +} + +func TestAccApplicationInsightsAPIKey_full_permissions(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_application_insights_api_key", "test") + r := AppInsightsAPIKey{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, `["agentconfig", "aggregate", "api", "draft", "extendqueries", "search"]`, `["annotations"]`), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("read_permissions.#").HasValue("6"), + check.That(data.ResourceName).Key("write_permissions.#").HasValue("1"), + ), + }, + data.ImportStep("api_key"), + }) +} + +func (t AppInsightsAPIKey) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.Attributes["id"]) + if err != nil { + return nil, err + } + keyID := id.Path["APIKeys"] + resGroup := id.ResourceGroup + appInsightsName := id.Path["components"] + + resp, err := clients.AppInsights.APIKeysClient.Get(ctx, resGroup, appInsightsName, keyID) + if err != nil { + return nil, fmt.Errorf("retrieving Application Insights API Key '%q' (resource group: '%q') does not exist", keyID, resGroup) + } + + return utils.Bool(resp.StatusCode != http.StatusNotFound), nil +} + +func (AppInsightsAPIKey) basic(data acceptance.TestData, readPerms, writePerms string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_application_insights" "test" { + name = "acctestappinsights-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + application_type = "web" +} + +resource "azurerm_application_insights_api_key" "test" { + name = "acctestappinsightsapikey-%d" + application_insights_id = azurerm_application_insights.test.id + read_permissions = %s + write_permissions = %s +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, readPerms, writePerms) +} + +func (AppInsightsAPIKey) requiresImport(data acceptance.TestData, readPerms, writePerms string) string { + template := AppInsightsAPIKey{}.basic(data, readPerms, writePerms) + return fmt.Sprintf(` +%s + +resource "azurerm_application_insights_api_key" "import" { + name = azurerm_application_insights_api_key.test.name + application_insights_id = azurerm_application_insights_api_key.test.application_insights_id + read_permissions = azurerm_application_insights_api_key.test.read_permissions + write_permissions = azurerm_application_insights_api_key.test.write_permissions +} +`, template) +} diff --git a/azurerm/internal/services/applicationinsights/application_insights_data_source.go b/azurerm/internal/services/applicationinsights/application_insights_data_source.go index 363afb4347e3..ef808e9776b1 100644 --- a/azurerm/internal/services/applicationinsights/application_insights_data_source.go +++ 
b/azurerm/internal/services/applicationinsights/application_insights_data_source.go @@ -8,12 +8,14 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/applicationinsights/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmApplicationInsights() *schema.Resource { +func dataSourceApplicationInsights() *schema.Resource { return &schema.Resource{ Read: dataSourceArmApplicationInsightsRead, @@ -74,6 +76,7 @@ func dataSourceArmApplicationInsights() *schema.Resource { func dataSourceArmApplicationInsightsRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).AppInsights.ComponentsClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -83,20 +86,24 @@ func dataSourceArmApplicationInsightsRead(d *schema.ResourceData, meta interface resp, err := client.Get(ctx, resGroup, name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Error: Application Insights bucket %q (Resource Group %q) was not found", name, resGroup) + return fmt.Errorf("Application Insights %q (Resource Group %q) was not found", name, resGroup) } - return fmt.Errorf("Error making Read request on Application Insights bucket %q (Resource Group %q): %+v", name, resGroup, err) + return fmt.Errorf("retrieving Application Insights %q (Resource Group %q): %+v", name, resGroup, err) } - d.SetId(*resp.ID) - d.Set("instrumentation_key", resp.InstrumentationKey) - d.Set("connection_string", resp.ConnectionString) - d.Set("location", resp.Location) - d.Set("app_id", resp.AppID) - d.Set("application_type", resp.ApplicationType) - if v := resp.RetentionInDays; v != nil { - d.Set("retention_in_days", v) + d.SetId(parse.NewComponentID(subscriptionId, resGroup, name).ID()) + d.Set("location", location.NormalizeNilable(resp.Location)) + if props := resp.ApplicationInsightsComponentProperties; props != nil { + d.Set("app_id", props.AppID) + d.Set("application_type", props.ApplicationType) + d.Set("connection_string", props.ConnectionString) + d.Set("instrumentation_key", props.InstrumentationKey) + retentionInDays := 0 + if props.RetentionInDays != nil { + retentionInDays = int(*props.RetentionInDays) + } + d.Set("retention_in_days", retentionInDays) } return tags.FlattenAndSet(d, resp.Tags) } diff --git a/azurerm/internal/services/applicationinsights/application_insights_data_source_test.go b/azurerm/internal/services/applicationinsights/application_insights_data_source_test.go new file mode 100644 index 000000000000..58765f04228f --- /dev/null +++ b/azurerm/internal/services/applicationinsights/application_insights_data_source_test.go @@ -0,0 +1,61 @@ +package applicationinsights_test + +import ( + "fmt" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + + 
"github.com/hashicorp/terraform-plugin-sdk/helper/resource" +) + +type AppInsightsDataSource struct { +} + +func TestAccApplicationInsightsDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_application_insights", "test") + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: AppInsightsDataSource{}.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("instrumentation_key").Exists(), + check.That(data.ResourceName).Key("app_id").Exists(), + check.That(data.ResourceName).Key("location").Exists(), + check.That(data.ResourceName).Key("application_type").HasValue("other"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.foo").HasValue("bar"), + ), + }, + }) +} + +func (AppInsightsDataSource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-appinsights-%[1]d" + location = "%[2]s" +} + +resource "azurerm_application_insights" "test" { + name = "acctestappinsights-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + application_type = "other" + + tags = { + "foo" = "bar" + } +} + +data "azurerm_application_insights" "test" { + resource_group_name = azurerm_resource_group.test.name + name = azurerm_application_insights.test.name +} +`, data.RandomInteger, data.Locations.Primary) +} diff --git a/azurerm/internal/services/applicationinsights/application_insights_resource.go b/azurerm/internal/services/applicationinsights/application_insights_resource.go index 1b0213ae7335..59bd5de1009f 100644 --- a/azurerm/internal/services/applicationinsights/application_insights_resource.go +++ b/azurerm/internal/services/applicationinsights/application_insights_resource.go @@ -12,20 +12,23 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/applicationinsights/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApplicationInsights() *schema.Resource { +func resourceApplicationInsights() *schema.Resource { return &schema.Resource{ - Create: resourceArmApplicationInsightsCreateUpdate, - Read: resourceArmApplicationInsightsRead, - Update: resourceArmApplicationInsightsCreateUpdate, - Delete: resourceArmApplicationInsightsDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, + Create: resourceApplicationInsightsCreateUpdate, + Read: resourceApplicationInsightsRead, + Update: resourceApplicationInsightsCreateUpdate, + Delete: resourceApplicationInsightsDelete, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.ComponentID(id) + return err + }), Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -126,11 +129,11 @@ func resourceArmApplicationInsights() *schema.Resource { } } -func 
resourceArmApplicationInsightsCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceApplicationInsightsCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).AppInsights.ComponentsClient billingClient := meta.(*clients.Client).AppInsights.BillingClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() log.Printf("[INFO] preparing arguments for AzureRM Application Insights creation.") @@ -138,6 +141,7 @@ func resourceArmApplicationInsightsCreateUpdate(d *schema.ResourceData, meta int name := d.Get("name").(string) resGroup := d.Get("resource_group_name").(string) + resourceId := parse.NewComponentID(subscriptionId, resGroup, name).ID() if d.IsNewResource() { existing, err := client.Get(ctx, resGroup, name) if err != nil { @@ -146,8 +150,8 @@ func resourceArmApplicationInsightsCreateUpdate(d *schema.ResourceData, meta int } } - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_application_insights", *existing.ID) + if !utils.ResponseWasNotFound(existing.Response) { + return tf.ImportAsExistsError("azurerm_application_insights", resourceId) } } @@ -211,43 +215,40 @@ func resourceArmApplicationInsightsCreateUpdate(d *schema.ResourceData, meta int return fmt.Errorf("Error update Application Insights Billing Feature %q (Resource Group %q): %+v", name, resGroup, err) } - d.SetId(*read.ID) + d.SetId(resourceId) - return resourceArmApplicationInsightsRead(d, meta) + return resourceApplicationInsightsRead(d, meta) } -func resourceArmApplicationInsightsRead(d *schema.ResourceData, meta interface{}) error { +func resourceApplicationInsightsRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).AppInsights.ComponentsClient billingClient := meta.(*clients.Client).AppInsights.BillingClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.ComponentID(d.Id()) if err != nil { return err } log.Printf("[DEBUG] Reading AzureRM Application Insights '%s'", id) - resGroup := id.ResourceGroup - name := id.Path["components"] - - resp, err := client.Get(ctx, resGroup, name) + resp, err := client.Get(ctx, id.ResourceGroup, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("Error making Read request on AzureRM Application Insights '%s': %+v", name, err) + return fmt.Errorf("Error making Read request on AzureRM Application Insights '%s': %+v", id.Name, err) } - billingResp, err := billingClient.Get(ctx, resGroup, name) + billingResp, err := billingClient.Get(ctx, id.ResourceGroup, id.Name) if err != nil { - return fmt.Errorf("Error making Read request on AzureRM Application Insights Billing Feature '%s': %+v", name, err) + return fmt.Errorf("Error making Read request on AzureRM Application Insights Billing Feature '%s': %+v", id.Name, err) } - d.Set("name", name) - d.Set("resource_group_name", resGroup) + d.Set("name", id.Name) + d.Set("resource_group_name", id.ResourceGroup) if location := resp.Location; location != nil { d.Set("location", azure.NormalizeLocation(*location)) } @@ -272,26 +273,24 @@ func resourceArmApplicationInsightsRead(d *schema.ResourceData, meta interface{} return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmApplicationInsightsDelete(d *schema.ResourceData, meta interface{}) error { 
+func resourceApplicationInsightsDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).AppInsights.ComponentsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.ComponentID(d.Id()) if err != nil { return err } - resGroup := id.ResourceGroup - name := id.Path["components"] - log.Printf("[DEBUG] Deleting AzureRM Application Insights '%s' (resource group '%s')", name, resGroup) + log.Printf("[DEBUG] Deleting AzureRM Application Insights %q (resource group %q)", id.Name, id.ResourceGroup) - resp, err := client.Delete(ctx, resGroup, name) + resp, err := client.Delete(ctx, id.ResourceGroup, id.Name) if err != nil { if resp.StatusCode == http.StatusNotFound { return nil } - return fmt.Errorf("Error issuing AzureRM delete request for Application Insights '%s': %+v", name, err) + return fmt.Errorf("Error issuing AzureRM delete request for Application Insights %q: %+v", id.Name, err) } return err diff --git a/azurerm/internal/services/applicationinsights/application_insights_resource_test.go b/azurerm/internal/services/applicationinsights/application_insights_resource_test.go new file mode 100644 index 000000000000..61c13ae7d5e6 --- /dev/null +++ b/azurerm/internal/services/applicationinsights/application_insights_resource_test.go @@ -0,0 +1,248 @@ +package applicationinsights_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/applicationinsights/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type AppInsightsResource struct { +} + +func TestAccApplicationInsights_basicWeb(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_application_insights", "test") + r := AppInsightsResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "web"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("application_type").HasValue("web"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApplicationInsights_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_application_insights", "test") + r := AppInsightsResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "web"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("application_type").HasValue("web"), + ), + }, + { + Config: r.requiresImport(data, "web"), + ExpectError: acceptance.RequiresImportError("azurerm_application_insights"), + }, + }) +} + +func TestAccApplicationInsights_basicJava(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_application_insights", "test") + r := AppInsightsResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "java"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("application_type").HasValue("java"), + ), 
+ }, + data.ImportStep(), + }) +} + +func TestAccApplicationInsights_basicMobileCenter(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_application_insights", "test") + r := AppInsightsResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "MobileCenter"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("application_type").HasValue("MobileCenter"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApplicationInsights_basicOther(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_application_insights", "test") + r := AppInsightsResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "other"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("application_type").HasValue("other"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApplicationInsights_basicPhone(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_application_insights", "test") + r := AppInsightsResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "phone"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("application_type").HasValue("phone"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApplicationInsights_basicStore(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_application_insights", "test") + r := AppInsightsResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "store"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("application_type").HasValue("store"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApplicationInsights_basiciOS(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_application_insights", "test") + r := AppInsightsResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "ios"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("application_type").HasValue("ios"), + ), + }, + data.ImportStep(), + }) +} + +func (t AppInsightsResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ComponentID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.AppInsights.ComponentsClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Application Insights %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + return utils.Bool(resp.ApplicationInsightsComponentProperties != nil), nil +} + +func TestAccApplicationInsights_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_application_insights", "test") + r := AppInsightsResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data, "web"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("application_type").HasValue("web"), + check.That(data.ResourceName).Key("retention_in_days").HasValue("120"), + check.That(data.ResourceName).Key("sampling_percentage").HasValue("50"), + check.That(data.ResourceName).Key("daily_data_cap_in_gb").HasValue("50"), + 
check.That(data.ResourceName).Key("daily_data_cap_notifications_disabled").HasValue("true"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.Hello").HasValue("World"), + ), + }, + data.ImportStep(), + }) +} + +func (AppInsightsResource) basic(data acceptance.TestData, applicationType string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-appinsights-%d" + location = "%s" +} + +resource "azurerm_application_insights" "test" { + name = "acctestappinsights-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + application_type = "%s" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, applicationType) +} + +func (AppInsightsResource) requiresImport(data acceptance.TestData, applicationType string) string { + template := AppInsightsResource{}.basic(data, applicationType) + return fmt.Sprintf(` +%s + +resource "azurerm_application_insights" "import" { + name = azurerm_application_insights.test.name + location = azurerm_application_insights.test.location + resource_group_name = azurerm_application_insights.test.resource_group_name + application_type = azurerm_application_insights.test.application_type +} +`, template) +} + +func (AppInsightsResource) complete(data acceptance.TestData, applicationType string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-appinsights-%d" + location = "%s" +} + +resource "azurerm_application_insights" "test" { + name = "acctestappinsights-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + application_type = "%s" + retention_in_days = 120 + sampling_percentage = 50 + daily_data_cap_in_gb = 50 + daily_data_cap_notifications_disabled = true + disable_ip_masking = true + + tags = { + Hello = "World" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, applicationType) +} diff --git a/azurerm/internal/services/applicationinsights/application_insights_webtests_resource.go b/azurerm/internal/services/applicationinsights/application_insights_webtests_resource.go index cb3c32172d11..a758196fbf75 100644 --- a/azurerm/internal/services/applicationinsights/application_insights_webtests_resource.go +++ b/azurerm/internal/services/applicationinsights/application_insights_webtests_resource.go @@ -14,21 +14,24 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/applicationinsights/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/suppress" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmApplicationInsightsWebTests() *schema.Resource { +func resourceApplicationInsightsWebTests() *schema.Resource { return &schema.Resource{ - Create: 
resourceArmApplicationInsightsWebTestsCreateUpdate, - Read: resourceArmApplicationInsightsWebTestsRead, - Update: resourceArmApplicationInsightsWebTestsCreateUpdate, - Delete: resourceArmApplicationInsightsWebTestsDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, + Create: resourceApplicationInsightsWebTestsCreateUpdate, + Read: resourceApplicationInsightsWebTestsRead, + Update: resourceApplicationInsightsWebTestsCreateUpdate, + Delete: resourceApplicationInsightsWebTestsDelete, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.WebTestID(id) + return err + }), Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -126,7 +129,7 @@ func resourceArmApplicationInsightsWebTests() *schema.Resource { } } -func resourceArmApplicationInsightsWebTestsCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceApplicationInsightsWebTestsCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).AppInsights.WebTestsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -135,20 +138,16 @@ func resourceArmApplicationInsightsWebTestsCreateUpdate(d *schema.ResourceData, name := d.Get("name").(string) resGroup := d.Get("resource_group_name").(string) - appInsightsID := d.Get("application_insights_id").(string) - - id, err := azure.ParseAzureResourceID(appInsightsID) + appInsightsId, err := parse.ComponentID(d.Get("application_insights_id").(string)) if err != nil { return err } - appInsightsName := id.Path["components"] - if d.IsNewResource() { existing, err := client.Get(ctx, resGroup, name) if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing Application Insights WebTests %q (Resource Group %q): %s", name, resGroup, err) + return fmt.Errorf("checking for presence of existing Application Insights WebTests %q (Resource Group %q): %s", name, resGroup, err) } } @@ -169,7 +168,7 @@ func resourceArmApplicationInsightsWebTestsCreateUpdate(d *schema.ResourceData, testConf := d.Get("configuration").(string) t := d.Get("tags").(map[string]interface{}) - tagKey := fmt.Sprintf("hidden-link:/subscriptions/%s/resourceGroups/%s/providers/microsoft.insights/components/%s", client.SubscriptionID, resGroup, appInsightsName) + tagKey := fmt.Sprintf("hidden-link:%s", appInsightsId.ID()) t[tagKey] = "Resource" webTest := insights.WebTest{ @@ -195,37 +194,34 @@ func resourceArmApplicationInsightsWebTestsCreateUpdate(d *schema.ResourceData, resp, err := client.CreateOrUpdate(ctx, resGroup, name, webTest) if err != nil { - return fmt.Errorf("Error creating Application Insights WebTest %q (Resource Group %q): %+v", name, resGroup, err) + return fmt.Errorf("creating/updating Application Insights WebTest %q (Resource Group %q): %+v", name, resGroup, err) } d.SetId(*resp.ID) - return resourceArmApplicationInsightsWebTestsRead(d, meta) + return resourceApplicationInsightsWebTestsRead(d, meta) } -func resourceArmApplicationInsightsWebTestsRead(d *schema.ResourceData, meta interface{}) error { +func resourceApplicationInsightsWebTestsRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).AppInsights.WebTestsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.WebTestID(d.Id()) if err != nil { return err } 
- log.Printf("[DEBUG] Reading AzureRM Application Insights WebTests '%s'", id) - - resGroup := id.ResourceGroup - name := id.Path["webtests"] + log.Printf("[DEBUG] Reading AzureRM Application Insights WebTests %q", id) - resp, err := client.Get(ctx, resGroup, name) + resp, err := client.Get(ctx, id.ResourceGroup, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[DEBUG] Application Insights WebTest %q was not found in Resource Group %q - removing from state!", name, resGroup) + log.Printf("[DEBUG] Application Insights WebTest %q was not found in Resource Group %q - removing from state!", id.Name, id.ResourceGroup) d.SetId("") return nil } - return fmt.Errorf("Error retrieving Application Insights WebTests %q (Resource Group %q): %+v", name, resGroup, err) + return fmt.Errorf("retrieving Application Insights WebTests %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } appInsightsId := "" @@ -235,8 +231,8 @@ func resourceArmApplicationInsightsWebTestsRead(d *schema.ResourceData, meta int } } d.Set("application_insights_id", appInsightsId) - d.Set("name", resp.Name) - d.Set("resource_group_name", resGroup) + d.Set("name", id.Name) + d.Set("resource_group_name", id.ResourceGroup) d.Set("kind", resp.Kind) if location := resp.Location; location != nil { @@ -267,26 +263,24 @@ func resourceArmApplicationInsightsWebTestsRead(d *schema.ResourceData, meta int return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmApplicationInsightsWebTestsDelete(d *schema.ResourceData, meta interface{}) error { +func resourceApplicationInsightsWebTestsDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).AppInsights.WebTestsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.WebTestID(d.Id()) if err != nil { return err } - resGroup := id.ResourceGroup - name := id.Path["webtests"] - log.Printf("[DEBUG] Deleting AzureRM Application Insights WebTest '%s' (resource group '%s')", name, resGroup) + log.Printf("[DEBUG] Deleting AzureRM Application Insights WebTest '%s' (resource group '%s')", id.Name, id.ResourceGroup) - resp, err := client.Delete(ctx, resGroup, name) + resp, err := client.Delete(ctx, id.ResourceGroup, id.Name) if err != nil { if resp.StatusCode == http.StatusNotFound { return nil } - return fmt.Errorf("Error issuing AzureRM delete request for Application Insights WebTest '%s': %+v", name, err) + return fmt.Errorf("Error issuing AzureRM delete request for Application Insights WebTest '%s': %+v", id.Name, err) } return err diff --git a/azurerm/internal/services/applicationinsights/application_insights_webtests_resource_test.go b/azurerm/internal/services/applicationinsights/application_insights_webtests_resource_test.go new file mode 100644 index 000000000000..72cf8fb80023 --- /dev/null +++ b/azurerm/internal/services/applicationinsights/application_insights_webtests_resource_test.go @@ -0,0 +1,214 @@ +package applicationinsights_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/applicationinsights/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type AppInsightsWebTestsResource struct { +} + +func TestAccApplicationInsightsWebTests_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_application_insights_web_test", "test") + r := AppInsightsWebTestsResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApplicationInsightsWebTests_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_application_insights_web_test", "test") + r := AppInsightsWebTestsResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccApplicationInsightsWebTests_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_application_insights_web_test", "test") + r := AppInsightsWebTestsResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("geo_locations.#").HasValue("1"), + check.That(data.ResourceName).Key("frequency").HasValue("300"), + check.That(data.ResourceName).Key("timeout").HasValue("30"), + ), + }, + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("geo_locations.#").HasValue("2"), + check.That(data.ResourceName).Key("frequency").HasValue("900"), + check.That(data.ResourceName).Key("timeout").HasValue("120"), + ), + }, + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("geo_locations.#").HasValue("1"), + check.That(data.ResourceName).Key("frequency").HasValue("300"), + check.That(data.ResourceName).Key("timeout").HasValue("30"), + ), + }, + }) +} + +func TestAccApplicationInsightsWebTests_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_application_insights_web_test", "test") + r := AppInsightsWebTestsResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (t AppInsightsWebTestsResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.WebTestID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.AppInsights.WebTestsClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Application Insights '%q' (resource group: '%q') does not exist", id.ResourceGroup, id.Name) + } + + return utils.Bool(resp.WebTestProperties != nil), nil +} + +func (AppInsightsWebTestsResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-appinsights-%d" + location = "%s" +} + +resource "azurerm_application_insights" "test" { + name = 
"acctestappinsights-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + application_type = "web" +} + +resource "azurerm_application_insights_web_test" "test" { + name = "acctestappinsightswebtests-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + application_insights_id = azurerm_application_insights.test.id + kind = "ping" + geo_locations = ["us-tx-sn1-azr"] + + configuration = < + + + + +XML + + lifecycle { + ignore_changes = ["tags"] + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (AppInsightsWebTestsResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-appinsights-%d" + location = "%s" +} + +resource "azurerm_application_insights" "test" { + name = "acctestappinsights-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + application_type = "web" +} + +resource "azurerm_application_insights_web_test" "test" { + name = "acctestappinsightswebtests-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + application_insights_id = azurerm_application_insights.test.id + kind = "ping" + frequency = 900 + timeout = 120 + enabled = true + geo_locations = ["us-tx-sn1-azr", "us-il-ch1-azr"] + + configuration = < + + + + +XML + + lifecycle { + ignore_changes = ["tags"] + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (AppInsightsWebTestsResource) requiresImport(data acceptance.TestData) string { + template := AppInsightsWebTestsResource{}.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_application_insights_web_test" "import" { + name = azurerm_application_insights_web_test.test.name + location = azurerm_application_insights_web_test.test.location + resource_group_name = azurerm_application_insights_web_test.test.resource_group_name + application_insights_id = azurerm_application_insights_web_test.test.application_insights_id + kind = azurerm_application_insights_web_test.test.kind + configuration = azurerm_application_insights_web_test.test.configuration + geo_locations = azurerm_application_insights_web_test.test.geo_locations +} +`, template) +} diff --git a/azurerm/internal/services/applicationinsights/parse/application_insights.go b/azurerm/internal/services/applicationinsights/parse/application_insights.go deleted file mode 100644 index 0953801e91f2..000000000000 --- a/azurerm/internal/services/applicationinsights/parse/application_insights.go +++ /dev/null @@ -1,59 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type ApplicationInsightsId struct { - ResourceGroup string - Name string -} - -func ApplicationInsightsID(input string) (*ApplicationInsightsId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("parsing Application Insights ID %q: %+v", input, err) - } - - appId := ApplicationInsightsId{ - ResourceGroup: id.ResourceGroup, - } - - if appId.Name, err = id.PopSegment("components"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &appId, nil -} - -type ApplicationInsightsWebTestId struct 
{ - ResourceGroup string - Name string -} - -func ApplicationInsightsWebTestID(input string) (*ApplicationInsightsWebTestId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("parsing Application Insights Web Test ID %q: %+v", input, err) - } - - testid := ApplicationInsightsWebTestId{ - ResourceGroup: id.ResourceGroup, - } - - if testid.Name, err = id.PopSegment("webtests"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &testid, nil -} diff --git a/azurerm/internal/services/applicationinsights/parse/application_insights_test.go b/azurerm/internal/services/applicationinsights/parse/application_insights_test.go deleted file mode 100644 index f34c57d9fbb1..000000000000 --- a/azurerm/internal/services/applicationinsights/parse/application_insights_test.go +++ /dev/null @@ -1,139 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestApplicationInsightsID(t *testing.T) { - testData := []struct { - Name string - Input string - Error bool - Expect *ApplicationInsightsId - }{ - { - Name: "Empty", - Input: "", - Error: true, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Error: true, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Error: true, - }, - { - Name: "No Provider", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers", - Error: true, - }, - { - Name: "No component", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/microsoft.insights/components", - Error: true, - }, - { - Name: "Correct", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/microsoft.insights/components/appinsights1", - Expect: &ApplicationInsightsId{ - ResourceGroup: "group1", - Name: "appinsights1", - }, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := ApplicationInsightsID(v.Input) - if err != nil { - if v.Error { - continue - } - - t.Fatalf("Expect a value but got an error: %s", err) - } - if v.Error { - t.Fatal("Expect an error but didn't get") - } - - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) - } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) - } - } -} - -func TestApplicationInsightsWebTestID(t *testing.T) { - testData := []struct { - Name string - Input string - Error bool - Expect *ApplicationInsightsWebTestId - }{ - { - Name: "Empty", - Input: "", - Error: true, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Error: true, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Error: true, - }, - { - Name: "No Provider", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers", - Error: true, - }, - { - Name: "No webtest", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/microsoft.insights/webtests", - Error: true, - }, - { - Name: "Correct", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/microsoft.insights/webtests/test1", 
- Expect: &ApplicationInsightsWebTestId{ - ResourceGroup: "group1", - Name: "test1", - }, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := ApplicationInsightsWebTestID(v.Input) - if err != nil { - if v.Error { - continue - } - - t.Fatalf("Expect a value but got an error: %s", err) - } - if v.Error { - t.Fatal("Expect an error but didn't get") - } - - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) - } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) - } - } -} diff --git a/azurerm/internal/services/applicationinsights/parse/component.go b/azurerm/internal/services/applicationinsights/parse/component.go new file mode 100644 index 000000000000..b439bc144bf9 --- /dev/null +++ b/azurerm/internal/services/applicationinsights/parse/component.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ComponentId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewComponentID(subscriptionId, resourceGroup, name string) ComponentId { + return ComponentId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id ComponentId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Component", segmentsStr) +} + +func (id ComponentId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/microsoft.insights/components/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// ComponentID parses a Component ID into an ComponentId struct +func ComponentID(input string) (*ComponentId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ComponentId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("components"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/applicationinsights/parse/component_test.go b/azurerm/internal/services/applicationinsights/parse/component_test.go new file mode 100644 index 000000000000..c3340ea2ee90 --- /dev/null +++ b/azurerm/internal/services/applicationinsights/parse/component_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ComponentId{} + +func TestComponentIDFormatter(t *testing.T) { + actual := NewComponentID("12345678-1234-9876-4563-123456789012", "group1", "component1").ID() + expected := 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/microsoft.insights/components/component1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestComponentID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ComponentId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/microsoft.insights/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/microsoft.insights/components/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/microsoft.insights/components/component1", + Expected: &ComponentId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "group1", + Name: "component1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.INSIGHTS/COMPONENTS/COMPONENT1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ComponentID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/applicationinsights/parse/web_test_id.go b/azurerm/internal/services/applicationinsights/parse/web_test_id.go new file mode 100644 index 000000000000..3bf2d442e318 --- /dev/null +++ b/azurerm/internal/services/applicationinsights/parse/web_test_id.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type WebTestId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewWebTestID(subscriptionId, resourceGroup, name string) WebTestId { + return WebTestId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id WebTestId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Web Test", segmentsStr) +} + +func (id WebTestId) ID() string { + fmtString := 
"/subscriptions/%s/resourceGroups/%s/providers/microsoft.insights/webtests/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// WebTestID parses a WebTest ID into an WebTestId struct +func WebTestID(input string) (*WebTestId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := WebTestId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("webtests"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/applicationinsights/parse/web_test_id_test.go b/azurerm/internal/services/applicationinsights/parse/web_test_id_test.go new file mode 100644 index 000000000000..76bb83fef761 --- /dev/null +++ b/azurerm/internal/services/applicationinsights/parse/web_test_id_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = WebTestId{} + +func TestWebTestIDFormatter(t *testing.T) { + actual := NewWebTestID("12345678-1234-9876-4563-123456789012", "group1", "test1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/microsoft.insights/webtests/test1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestWebTestID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *WebTestId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/microsoft.insights/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/microsoft.insights/webtests/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/microsoft.insights/webtests/test1", + Expected: &WebTestId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "group1", + Name: "test1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.INSIGHTS/WEBTESTS/TEST1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := WebTestID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if 
actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/applicationinsights/registration.go b/azurerm/internal/services/applicationinsights/registration.go index ae2acee73a02..6b4f385236be 100644 --- a/azurerm/internal/services/applicationinsights/registration.go +++ b/azurerm/internal/services/applicationinsights/registration.go @@ -21,16 +21,16 @@ func (r Registration) WebsiteCategories() []string { // SupportedDataSources returns the supported Data Sources supported by this Service func (r Registration) SupportedDataSources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_application_insights": dataSourceArmApplicationInsights(), + "azurerm_application_insights": dataSourceApplicationInsights(), } } // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_application_insights_api_key": resourceArmApplicationInsightsAPIKey(), - "azurerm_application_insights": resourceArmApplicationInsights(), - "azurerm_application_insights_analytics_item": resourceArmApplicationInsightsAnalyticsItem(), - "azurerm_application_insights_web_test": resourceArmApplicationInsightsWebTests(), + "azurerm_application_insights_api_key": resourceApplicationInsightsAPIKey(), + "azurerm_application_insights": resourceApplicationInsights(), + "azurerm_application_insights_analytics_item": resourceApplicationInsightsAnalyticsItem(), + "azurerm_application_insights_web_test": resourceApplicationInsightsWebTests(), } } diff --git a/azurerm/internal/services/applicationinsights/resourceids.go b/azurerm/internal/services/applicationinsights/resourceids.go new file mode 100644 index 000000000000..b184c6025654 --- /dev/null +++ b/azurerm/internal/services/applicationinsights/resourceids.go @@ -0,0 +1,4 @@ +package applicationinsights + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Component -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/microsoft.insights/components/component1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=WebTest -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/microsoft.insights/webtests/test1 diff --git a/azurerm/internal/services/applicationinsights/tests/application_insights_analytics_item_resource_test.go b/azurerm/internal/services/applicationinsights/tests/application_insights_analytics_item_resource_test.go deleted file mode 100644 index a4b4778cd49f..000000000000 --- a/azurerm/internal/services/applicationinsights/tests/application_insights_analytics_item_resource_test.go +++ /dev/null @@ -1,274 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/applicationinsights" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMApplicationInsightsAnalyticsItem_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_application_insights_analytics_item", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApplicationInsightAnalyticsItemDestroy(), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApplicationInsightsAnalyticsItem_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApplicationInsightsAnalyticsItemExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "testquery"), - resource.TestCheckResourceAttr(data.ResourceName, "scope", "shared"), - resource.TestCheckResourceAttr(data.ResourceName, "type", "query"), - resource.TestCheckResourceAttr(data.ResourceName, "content", "requests #test"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApplicationInsightsAnalyticsItem_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_application_insights_analytics_item", "test") - config1 := testAccAzureRMApplicationInsightsAnalyticsItem_basic(data) - config2 := testAccAzureRMApplicationInsightsAnalyticsItem_basic2(data) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApplicationInsightAnalyticsItemDestroy(), - Steps: []resource.TestStep{ - { - Config: config1, - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApplicationInsightsAnalyticsItemExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "testquery"), - resource.TestCheckResourceAttr(data.ResourceName, "scope", "shared"), - resource.TestCheckResourceAttr(data.ResourceName, "type", "query"), - resource.TestCheckResourceAttr(data.ResourceName, "content", "requests #test"), - ), - }, - data.ImportStep(), - { - Config: config2, - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApplicationInsightsAnalyticsItemExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "testquery"), - resource.TestCheckResourceAttr(data.ResourceName, "scope", "shared"), - resource.TestCheckResourceAttr(data.ResourceName, "type", "query"), - resource.TestCheckResourceAttr(data.ResourceName, "content", "requests #updated"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApplicationInsightsAnalyticsItem_multiple(t *testing.T) { - r1 := acceptance.BuildTestData(t, "azurerm_application_insights_analytics_item", "test1") - r2 := acceptance.BuildTestData(t, "azurerm_application_insights_analytics_item", "test2") - r3 := acceptance.BuildTestData(t, "azurerm_application_insights_analytics_item", "test3") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApplicationInsightAnalyticsItemDestroy(), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApplicationInsightsAnalyticsItem_multiple(r1), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApplicationInsightsAnalyticsItemExists(r1.ResourceName), - testCheckAzureRMApplicationInsightsAnalyticsItemExists(r1.ResourceName), - 
testCheckAzureRMApplicationInsightsAnalyticsItemExists(r1.ResourceName), - resource.TestCheckResourceAttr(r1.ResourceName, "name", "testquery1"), - resource.TestCheckResourceAttr(r1.ResourceName, "scope", "shared"), - resource.TestCheckResourceAttr(r1.ResourceName, "type", "query"), - resource.TestCheckResourceAttr(r1.ResourceName, "content", "requests #test1"), - resource.TestCheckResourceAttr(r2.ResourceName, "name", "testquery2"), - resource.TestCheckResourceAttr(r2.ResourceName, "scope", "user"), - resource.TestCheckResourceAttr(r2.ResourceName, "type", "query"), - resource.TestCheckResourceAttr(r2.ResourceName, "content", "requests #test2"), - resource.TestCheckResourceAttr(r3.ResourceName, "name", "testfunction1"), - resource.TestCheckResourceAttr(r3.ResourceName, "scope", "shared"), - resource.TestCheckResourceAttr(r3.ResourceName, "type", "function"), - resource.TestCheckResourceAttr(r3.ResourceName, "content", "requests #test3"), - resource.TestCheckResourceAttr(r3.ResourceName, "function_alias", "myfunction"), - ), - }, - r1.ImportStep(), - r2.ImportStep(), - r3.ImportStep(), - }, - }) -} - -func testCheckAzureRMApplicationInsightAnalyticsItemDestroy() resource.TestCheckFunc { - return func(s *terraform.State) error { - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_application_insights_analytics_item" { - continue - } - name := rs.Primary.Attributes["name"] - - exists, err := testCheckAzureRMApplicationInsightsAnalyticsItemExistsInternal(rs) - if err != nil { - return fmt.Errorf("Error checking if item has been destroyed: %s", err) - } - if exists { - return fmt.Errorf("Bad: Application Insights AnalyticsItem '%q' still exists", name) - } - } - - return nil - } -} - -func testCheckAzureRMApplicationInsightsAnalyticsItemExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - name := rs.Primary.Attributes["name"] - - exists, err := testCheckAzureRMApplicationInsightsAnalyticsItemExistsInternal(rs) - if err != nil { - return fmt.Errorf("Error checking if item exists: %s", err) - } - if !exists { - return fmt.Errorf("Bad: Application Insights AnalyticsItem '%q' does not exist", name) - } - - return nil - } -} - -func testCheckAzureRMApplicationInsightsAnalyticsItemExistsInternal(rs *terraform.ResourceState) (bool, error) { - conn := acceptance.AzureProvider.Meta().(*clients.Client).AppInsights.AnalyticsItemsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - id := rs.Primary.Attributes["id"] - - resGroup, appInsightsName, itemScopePath, itemID, err := applicationinsights.ResourcesArmApplicationInsightsAnalyticsItemParseID(id) - if err != nil { - return false, fmt.Errorf("Failed to parse ID (id: %s): %+v", id, err) - } - - response, err := conn.Get(ctx, resGroup, appInsightsName, itemScopePath, itemID, "") - if err != nil { - if response.Response.IsHTTPStatus(404) { - return false, nil - } - return false, fmt.Errorf("Bad: Get on appInsightsAnalyticsItemsClient (id: %s): %+v", id, err) - } - _ = response - - return true, nil -} - -func testAccAzureRMApplicationInsightsAnalyticsItem_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_application_insights" "test" { - name = "acctestappinsights-%d" - location = 
azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - application_type = "web" -} - -resource "azurerm_application_insights_analytics_item" "test" { - name = "testquery" - application_insights_id = azurerm_application_insights.test.id - content = "requests #test" - scope = "shared" - type = "query" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMApplicationInsightsAnalyticsItem_basic2(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_application_insights" "test" { - name = "acctestappinsights-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - application_type = "web" -} - -resource "azurerm_application_insights_analytics_item" "test" { - name = "testquery" - application_insights_id = azurerm_application_insights.test.id - content = "requests #updated" - scope = "shared" - type = "query" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMApplicationInsightsAnalyticsItem_multiple(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_application_insights" "test" { - name = "acctestappinsights-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - application_type = "web" -} - -resource "azurerm_application_insights_analytics_item" "test1" { - name = "testquery1" - application_insights_id = azurerm_application_insights.test.id - content = "requests #test1" - scope = "shared" - type = "query" -} - -resource "azurerm_application_insights_analytics_item" "test2" { - name = "testquery2" - application_insights_id = azurerm_application_insights.test.id - content = "requests #test2" - scope = "user" - type = "query" -} - -resource "azurerm_application_insights_analytics_item" "test3" { - name = "testfunction1" - application_insights_id = azurerm_application_insights.test.id - content = "requests #test3" - scope = "shared" - type = "function" - function_alias = "myfunction" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/applicationinsights/tests/application_insights_api_key_resource_test.go b/azurerm/internal/services/applicationinsights/tests/application_insights_api_key_resource_test.go deleted file mode 100644 index c5405f9c5f28..000000000000 --- a/azurerm/internal/services/applicationinsights/tests/application_insights_api_key_resource_test.go +++ /dev/null @@ -1,242 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "regexp" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMApplicationInsightsAPIKey_no_permission(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_application_insights_api_key", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, 
- Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApplicationInsightsAPIKeyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApplicationInsightsAPIKey_basic(data, "[]", "[]"), - ExpectError: regexp.MustCompile("The API Key needs to have a Role"), - }, - }, - }) -} - -func TestAccAzureRMApplicationInsightsAPIKey_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_application_insights_api_key", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApplicationInsightsAPIKeyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApplicationInsightsAPIKey_basic(data, "[]", `["annotations"]`), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApplicationInsightsAPIKeyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "read_permissions.#", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "write_permissions.#", "1"), - ), - }, - { - Config: testAccAzureRMApplicationInsightsAPIKey_requiresImport(data, "[]", `["annotations"]`), - ExpectError: acceptance.RequiresImportError("azurerm_application_insights_api_key"), - }, - }, - }) -} - -func TestAccAzureRMApplicationInsightsAPIKey_read_telemetry_permissions(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_application_insights_api_key", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApplicationInsightsAPIKeyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApplicationInsightsAPIKey_basic(data, `["aggregate", "api", "draft", "extendqueries", "search"]`, "[]"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApplicationInsightsAPIKeyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "read_permissions.#", "5"), - resource.TestCheckResourceAttr(data.ResourceName, "write_permissions.#", "0"), - ), - }, - data.ImportStep("api_key"), - }, - }) -} - -func TestAccAzureRMApplicationInsightsAPIKey_write_annotations_permission(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_application_insights_api_key", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApplicationInsightsAPIKeyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApplicationInsightsAPIKey_basic(data, "[]", `["annotations"]`), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApplicationInsightsAPIKeyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "read_permissions.#", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "write_permissions.#", "1"), - ), - }, - data.ImportStep("api_key"), - }, - }) -} - -func TestAccAzureRMApplicationInsightsAPIKey_authenticate_permission(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_application_insights_api_key", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApplicationInsightsAPIKeyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApplicationInsightsAPIKey_basic(data, `["agentconfig"]`, "[]"), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMApplicationInsightsAPIKeyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "read_permissions.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "write_permissions.#", "0"), - ), - }, - data.ImportStep("api_key"), - }, - }) -} - -func TestAccAzureRMApplicationInsightsAPIKey_full_permissions(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_application_insights_api_key", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApplicationInsightsAPIKeyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApplicationInsightsAPIKey_basic(data, `["agentconfig", "aggregate", "api", "draft", "extendqueries", "search"]`, `["annotations"]`), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApplicationInsightsAPIKeyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "read_permissions.#", "6"), - resource.TestCheckResourceAttr(data.ResourceName, "write_permissions.#", "1"), - ), - }, - data.ImportStep("api_key"), - }, - }) -} - -func testCheckAzureRMApplicationInsightsAPIKeyDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).AppInsights.APIKeysClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_application_insights_api_key" { - continue - } - - name := rs.Primary.Attributes["name"] - id, err := azure.ParseAzureResourceID(rs.Primary.Attributes["id"]) - if err != nil { - return err - } - resGroup := id.ResourceGroup - appInsightsName := id.Path["components"] - - resp, err := conn.Get(ctx, resGroup, appInsightsName, name) - - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Application Insights API Key still exists:\n%#v", resp) - } - } - - return nil -} - -func testCheckAzureRMApplicationInsightsAPIKeyExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).AppInsights.APIKeysClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := azure.ParseAzureResourceID(rs.Primary.Attributes["id"]) - if err != nil { - return err - } - keyID := id.Path["APIKeys"] - resGroup := id.ResourceGroup - appInsightsName := id.Path["components"] - - resp, err := conn.Get(ctx, resGroup, appInsightsName, keyID) - if err != nil { - return fmt.Errorf("Bad: Get on appInsightsAPIKeyClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Application Insights API Key '%q' (resource group: '%q') does not exist", keyID, resGroup) - } - - return nil - } -} - -func testAccAzureRMApplicationInsightsAPIKey_basic(data acceptance.TestData, readPerms, writePerms string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_application_insights" "test" { - name = "acctestappinsights-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - application_type = "web" -} - -resource 
"azurerm_application_insights_api_key" "test" { - name = "acctestappinsightsapikey-%d" - application_insights_id = azurerm_application_insights.test.id - read_permissions = %s - write_permissions = %s -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, readPerms, writePerms) -} - -func testAccAzureRMApplicationInsightsAPIKey_requiresImport(data acceptance.TestData, readPerms, writePerms string) string { - template := testAccAzureRMApplicationInsightsAPIKey_basic(data, readPerms, writePerms) - return fmt.Sprintf(` -%s - -resource "azurerm_application_insights_api_key" "import" { - name = azurerm_application_insights_api_key.test.name - application_insights_id = azurerm_application_insights_api_key.test.application_insights_id - read_permissions = azurerm_application_insights_api_key.test.read_permissions - write_permissions = azurerm_application_insights_api_key.test.write_permissions -} -`, template) -} diff --git a/azurerm/internal/services/applicationinsights/tests/application_insights_data_source_test.go b/azurerm/internal/services/applicationinsights/tests/application_insights_data_source_test.go deleted file mode 100644 index 81b463feb386..000000000000 --- a/azurerm/internal/services/applicationinsights/tests/application_insights_data_source_test.go +++ /dev/null @@ -1,61 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" -) - -func TestAccDataSourceApplicationInsights_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_application_insights", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccResourceApplicationInsights_complete(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "instrumentation_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "app_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "location"), - resource.TestCheckResourceAttr(data.ResourceName, "application_type", "other"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.foo", "bar"), - ), - }, - }, - }) -} - -func testAccResourceApplicationInsights_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%[1]d" - location = "%[2]s" -} - -resource "azurerm_application_insights" "test" { - name = "acctestappinsights-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - application_type = "other" - - tags = { - "foo" = "bar" - } -} - -data "azurerm_application_insights" "test" { - resource_group_name = azurerm_resource_group.test.name - name = azurerm_application_insights.test.name -} -`, data.RandomInteger, data.Locations.Primary) -} diff --git a/azurerm/internal/services/applicationinsights/tests/application_insights_resource_test.go b/azurerm/internal/services/applicationinsights/tests/application_insights_resource_test.go deleted file mode 100644 index 51026dd098de..000000000000 --- a/azurerm/internal/services/applicationinsights/tests/application_insights_resource_test.go +++ /dev/null @@ -1,320 +0,0 @@ -package tests - -import ( 
- "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMApplicationInsights_basicWeb(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_application_insights", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApplicationInsightsDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApplicationInsights_basic(data, "web"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApplicationInsightsExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "application_type", "web"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApplicationInsights_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_application_insights", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApplicationInsightsDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApplicationInsights_basic(data, "web"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApplicationInsightsExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "application_type", "web"), - ), - }, - { - Config: testAccAzureRMApplicationInsights_requiresImport(data, "web"), - ExpectError: acceptance.RequiresImportError("azurerm_application_insights"), - }, - }, - }) -} - -func TestAccAzureRMApplicationInsights_basicJava(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_application_insights", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApplicationInsightsDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApplicationInsights_basic(data, "java"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApplicationInsightsExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "application_type", "java"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApplicationInsights_basicMobileCenter(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_application_insights", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApplicationInsightsDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApplicationInsights_basic(data, "MobileCenter"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApplicationInsightsExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "application_type", "MobileCenter"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApplicationInsights_basicOther(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_application_insights", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApplicationInsightsDestroy, - Steps: 
[]resource.TestStep{ - { - Config: testAccAzureRMApplicationInsights_basic(data, "other"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApplicationInsightsExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "application_type", "other"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApplicationInsights_basicPhone(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_application_insights", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApplicationInsightsDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApplicationInsights_basic(data, "phone"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApplicationInsightsExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "application_type", "phone"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApplicationInsights_basicStore(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_application_insights", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApplicationInsightsDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApplicationInsights_basic(data, "store"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApplicationInsightsExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "application_type", "store"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApplicationInsights_basiciOS(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_application_insights", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApplicationInsightsDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApplicationInsights_basic(data, "ios"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApplicationInsightsExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "application_type", "ios"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMApplicationInsightsDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).AppInsights.ComponentsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_application_insights" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, name) - - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Application Insights still exists:\n%#v", resp.ApplicationInsightsComponentProperties) - } - } - - return nil -} - -func testCheckAzureRMApplicationInsightsExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).AppInsights.ComponentsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - 
} - - name := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for App Insights: %s", name) - } - - resp, err := conn.Get(ctx, resourceGroup, name) - if err != nil { - return fmt.Errorf("Bad: Get on appInsightsClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Application Insights '%q' (resource group: '%q') does not exist", name, resourceGroup) - } - - return nil - } -} - -func TestAccAzureRMApplicationInsights_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_application_insights", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApplicationInsightsDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApplicationInsights_complete(data, "web"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApplicationInsightsExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "application_type", "web"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_in_days", "120"), - resource.TestCheckResourceAttr(data.ResourceName, "sampling_percentage", "50"), - resource.TestCheckResourceAttr(data.ResourceName, "daily_data_cap_in_gb", "50"), - resource.TestCheckResourceAttr(data.ResourceName, "daily_data_cap_notifications_disabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.Hello", "World"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testAccAzureRMApplicationInsights_basic(data acceptance.TestData, applicationType string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_application_insights" "test" { - name = "acctestappinsights-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - application_type = "%s" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, applicationType) -} - -func testAccAzureRMApplicationInsights_requiresImport(data acceptance.TestData, applicationType string) string { - template := testAccAzureRMApplicationInsights_basic(data, applicationType) - return fmt.Sprintf(` -%s - -resource "azurerm_application_insights" "import" { - name = azurerm_application_insights.test.name - location = azurerm_application_insights.test.location - resource_group_name = azurerm_application_insights.test.resource_group_name - application_type = azurerm_application_insights.test.application_type -} -`, template) -} - -func testAccAzureRMApplicationInsights_complete(data acceptance.TestData, applicationType string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_application_insights" "test" { - name = "acctestappinsights-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - application_type = "%s" - retention_in_days = 120 - sampling_percentage = 50 - daily_data_cap_in_gb = 50 - daily_data_cap_notifications_disabled = true - disable_ip_masking = true - - tags = { - Hello = "World" - } -} -`, data.RandomInteger, 
data.Locations.Primary, data.RandomInteger, applicationType) -} diff --git a/azurerm/internal/services/applicationinsights/tests/application_insights_webtests_resource_test.go b/azurerm/internal/services/applicationinsights/tests/application_insights_webtests_resource_test.go deleted file mode 100644 index b33dddc9f1b5..000000000000 --- a/azurerm/internal/services/applicationinsights/tests/application_insights_webtests_resource_test.go +++ /dev/null @@ -1,263 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMApplicationInsightsWebTests_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_application_insights_web_test", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApplicationInsightsWebTestsDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApplicationInsightsWebTests_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApplicationInsightsWebTestExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApplicationInsightsWebTests_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_application_insights_web_test", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApplicationInsightsWebTestsDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApplicationInsightsWebTests_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApplicationInsightsWebTestExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMApplicationInsightsWebTests_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_application_insights_web_test", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApplicationInsightsWebTestsDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApplicationInsightsWebTests_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApplicationInsightsWebTestExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "geo_locations.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "frequency", "300"), - resource.TestCheckResourceAttr(data.ResourceName, "timeout", "30"), - ), - }, - { - Config: testAccAzureRMApplicationInsightsWebTests_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApplicationInsightsWebTestExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "geo_locations.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "frequency", "900"), - resource.TestCheckResourceAttr(data.ResourceName, "timeout", "120"), - ), - }, - { - Config: testAccAzureRMApplicationInsightsWebTests_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApplicationInsightsWebTestExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "geo_locations.#", 
"1"), - resource.TestCheckResourceAttr(data.ResourceName, "frequency", "300"), - resource.TestCheckResourceAttr(data.ResourceName, "timeout", "30"), - ), - }, - }, - }) -} - -func TestAccAzureRMApplicationInsightsWebTests_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_application_insights_web_test", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMApplicationInsightsWebTestsDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMApplicationInsightsWebTests_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMApplicationInsightsWebTestExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMApplicationInsightsWebTests_requiresImport), - }, - }) -} - -func testCheckAzureRMApplicationInsightsWebTestsDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).AppInsights.WebTestsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_application_insights_web_test" { - continue - } - - name := rs.Primary.Attributes["name"] - resGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resGroup, name) - - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Application Insights WebTest still exists:\n%#v", resp) - } - } - - return nil -} - -func testCheckAzureRMApplicationInsightsWebTestExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).AppInsights.WebTestsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up a WebTest - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resGroup, name) - if err != nil { - return fmt.Errorf("Bad: Get on appInsightsWebTestClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Application Insights WebTest '%q' (resource group: '%q') does not exist", name, resGroup) - } - - return nil - } -} - -func testAccAzureRMApplicationInsightsWebTests_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_application_insights" "test" { - name = "acctestappinsights-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - application_type = "web" -} - -resource "azurerm_application_insights_web_test" "test" { - name = "acctestappinsightswebtests-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - application_insights_id = azurerm_application_insights.test.id - kind = "ping" - geo_locations = ["us-tx-sn1-azr"] - - configuration = < - - - - -XML - - lifecycle { - ignore_changes = ["tags"] - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMApplicationInsightsWebTests_complete(data acceptance.TestData) 
string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_application_insights" "test" { - name = "acctestappinsights-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - application_type = "web" -} - -resource "azurerm_application_insights_web_test" "test" { - name = "acctestappinsightswebtests-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - application_insights_id = azurerm_application_insights.test.id - kind = "ping" - frequency = 900 - timeout = 120 - enabled = true - geo_locations = ["us-tx-sn1-azr", "us-il-ch1-azr"] - - configuration = < - - - - -XML - - lifecycle { - ignore_changes = ["tags"] - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMApplicationInsightsWebTests_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMApplicationInsightsWebTests_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_application_insights_web_test" "import" { - name = azurerm_application_insights_web_test.test.name - location = azurerm_application_insights_web_test.test.location - resource_group_name = azurerm_application_insights_web_test.test.resource_group_name - application_insights_id = azurerm_application_insights_web_test.test.application_insights_id - kind = azurerm_application_insights_web_test.test.kind - configuration = azurerm_application_insights_web_test.test.configuration - geo_locations = azurerm_application_insights_web_test.test.geo_locations -} -`, template) -} diff --git a/azurerm/internal/services/applicationinsights/validate/application_insights.go b/azurerm/internal/services/applicationinsights/validate/application_insights.go deleted file mode 100644 index 2722004664db..000000000000 --- a/azurerm/internal/services/applicationinsights/validate/application_insights.go +++ /dev/null @@ -1,37 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/applicationinsights/parse" -) - -func ApplicationInsightsID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.ApplicationInsightsID(v); err != nil { - errors = append(errors, fmt.Errorf("parsing %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} - -func ApplicationInsightsWebTestID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.ApplicationInsightsWebTestID(v); err != nil { - errors = append(errors, fmt.Errorf("parsing %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} diff --git a/azurerm/internal/services/applicationinsights/validate/component_id.go b/azurerm/internal/services/applicationinsights/validate/component_id.go new file mode 100644 index 000000000000..87ec14ac1b29 --- /dev/null +++ b/azurerm/internal/services/applicationinsights/validate/component_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/applicationinsights/parse" +) + +func ComponentID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ComponentID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/applicationinsights/validate/component_id_test.go b/azurerm/internal/services/applicationinsights/validate/component_id_test.go new file mode 100644 index 000000000000..dec4f88baf1e --- /dev/null +++ b/azurerm/internal/services/applicationinsights/validate/component_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestComponentID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/microsoft.insights/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/microsoft.insights/components/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/microsoft.insights/components/component1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.INSIGHTS/COMPONENTS/COMPONENT1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ComponentID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/applicationinsights/validate/web_test_id.go b/azurerm/internal/services/applicationinsights/validate/web_test_id.go new file mode 100644 index 000000000000..d682fed2df7e --- /dev/null +++ b/azurerm/internal/services/applicationinsights/validate/web_test_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/applicationinsights/parse" +) + +func WebTestID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.WebTestID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/applicationinsights/validate/web_test_id_test.go b/azurerm/internal/services/applicationinsights/validate/web_test_id_test.go new file mode 100644 index 000000000000..e800aeb7fb76 --- /dev/null +++ 
b/azurerm/internal/services/applicationinsights/validate/web_test_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestWebTestID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/microsoft.insights/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/microsoft.insights/webtests/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/microsoft.insights/webtests/test1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.INSIGHTS/WEBTESTS/TEST1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := WebTestID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/appplatform/parse/spring_cloud_app.go b/azurerm/internal/services/appplatform/parse/spring_cloud_app.go deleted file mode 100644 index be2038282915..000000000000 --- a/azurerm/internal/services/appplatform/parse/spring_cloud_app.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type SpringCloudAppId struct { - ResourceGroup string - ServiceName string - Name string -} - -func SpringCloudAppID(input string) (*SpringCloudAppId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("parsing Spring Cloud App ID %q: %+v", input, err) - } - - app := SpringCloudAppId{ - ResourceGroup: id.ResourceGroup, - } - - if app.ServiceName, err = id.PopSegment("Spring"); err != nil { - return nil, err - } - - if app.Name, err = id.PopSegment("apps"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &app, nil -} diff --git a/azurerm/internal/services/appplatform/parse/spring_cloud_app_test.go b/azurerm/internal/services/appplatform/parse/spring_cloud_app_test.go deleted file mode 100644 index c45cbf6619f2..000000000000 --- a/azurerm/internal/services/appplatform/parse/spring_cloud_app_test.go +++ /dev/null @@ -1,93 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestSpringCloudAppID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *SpringCloudAppId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "No Spring Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.AppPlatform/Spring/", - Expected: nil, - }, - { - Name: "Missing Apps Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.AppPlatform/Spring/spring1/", - Expected: nil, - }, - { - Name: "Missing Apps Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.AppPlatform/Spring/spring1/apps/", - Expected: nil, - }, - { - Name: "Spring Cloud App ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.AppPlatform/Spring/spring1/apps/app1", - Expected: &SpringCloudAppId{ - ResourceGroup: "resGroup1", - ServiceName: "spring1", - Name: "app1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.AppPlatform/Spring/spring1/Apps/app1", - Expected: nil, - }, - { - Name: "invalid app name Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.AppPlatform/Spring/spring1/Apps/App1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := SpringCloudAppID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - - if actual.ServiceName != v.Expected.ServiceName { - t.Fatalf("Expected %q but got %q for ServiceName", v.Expected.ServiceName, actual.ServiceName) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - } -} diff --git a/azurerm/internal/services/appplatform/parse/spring_cloud_certificate.go b/azurerm/internal/services/appplatform/parse/spring_cloud_certificate.go deleted file mode 100644 index 8a4d7cabd02c..000000000000 --- a/azurerm/internal/services/appplatform/parse/spring_cloud_certificate.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type SpringCloudCertificateId struct { - ResourceGroup string - ServiceName string - Name string -} - -func SpringCloudCertificateID(input string) (*SpringCloudCertificateId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("parsing Spring Cloud Certificate ID %q: %+v", input, err) - } - - cert := SpringCloudCertificateId{ - ResourceGroup: id.ResourceGroup, - } - - if cert.ServiceName, err = id.PopSegment("Spring"); err != nil { - return nil, err - } - - if cert.Name, err = id.PopSegment("certificates"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &cert, nil -} diff --git a/azurerm/internal/services/appplatform/parse/spring_cloud_certificate_test.go b/azurerm/internal/services/appplatform/parse/spring_cloud_certificate_test.go deleted file mode 100644 index 
8ee82159a983..000000000000 --- a/azurerm/internal/services/appplatform/parse/spring_cloud_certificate_test.go +++ /dev/null @@ -1,88 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestSpringCloudCertificateID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *SpringCloudCertificateId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "No Spring Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.AppPlatform/Spring/", - Expected: nil, - }, - { - Name: "Missing Certificates Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.AppPlatform/Spring/spring1/", - Expected: nil, - }, - { - Name: "Missing Certificates Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.AppPlatform/Spring/spring1/certificates/", - Expected: nil, - }, - { - Name: "Spring Cloud Certificate ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.AppPlatform/Spring/spring1/certificates/cert1", - Expected: &SpringCloudCertificateId{ - ResourceGroup: "resGroup1", - ServiceName: "spring1", - Name: "cert1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.AppPlatform/Spring/spring1/Certificates/cert1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := SpringCloudCertificateID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - - if actual.ServiceName != v.Expected.ServiceName { - t.Fatalf("Expected %q but got %q for ServiceName", v.Expected.ServiceName, actual.ServiceName) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - } -} diff --git a/azurerm/internal/services/appplatform/parse/spring_cloud_service.go b/azurerm/internal/services/appplatform/parse/spring_cloud_service.go deleted file mode 100644 index b2f2564ffb6b..000000000000 --- a/azurerm/internal/services/appplatform/parse/spring_cloud_service.go +++ /dev/null @@ -1,33 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type SpringCloudServiceId struct { - ResourceGroup string - Name string -} - -func SpringCloudServiceID(input string) (*SpringCloudServiceId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("unable to parse Spring Cloud Service ID %q: %+v", input, err) - } - - server := SpringCloudServiceId{ - ResourceGroup: id.ResourceGroup, - } - - if server.Name, err = id.PopSegment("Spring"); err != nil { - return nil, err - } - - if err := 
id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &server, nil -} diff --git a/azurerm/internal/services/appplatform/parse/spring_cloud_service_test.go b/azurerm/internal/services/appplatform/parse/spring_cloud_service_test.go deleted file mode 100644 index ffa5305e7227..000000000000 --- a/azurerm/internal/services/appplatform/parse/spring_cloud_service_test.go +++ /dev/null @@ -1,73 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestSpringCloudServiceID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *SpringCloudServiceId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Spring Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.AppPlatform/Spring/", - Expected: nil, - }, - { - Name: "Spring Cloud Service ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.AppPlatform/Spring/spring1", - Expected: &SpringCloudServiceId{ - Name: "spring1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.AppPlatform/spring/spring1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := SpringCloudServiceID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/appplatform/registration.go b/azurerm/internal/services/appplatform/registration.go deleted file mode 100644 index f935e85abc7c..000000000000 --- a/azurerm/internal/services/appplatform/registration.go +++ /dev/null @@ -1,35 +0,0 @@ -package appplatform - -import ( - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" -) - -type Registration struct{} - -// Name is the name of this Service -func (r Registration) Name() string { - return "App Platform" -} - -// WebsiteCategories returns a list of categories which can be used for the sidebar -func (r Registration) WebsiteCategories() []string { - return []string{ - "Spring Cloud", - } -} - -// SupportedDataSources returns the supported Data Sources supported by this Service -func (r Registration) SupportedDataSources() map[string]*schema.Resource { - return map[string]*schema.Resource{ - "azurerm_spring_cloud_service": dataSourceArmSpringCloudService(), - } -} - -// SupportedResources returns the supported Resources supported by this Service -func (r Registration) SupportedResources() map[string]*schema.Resource { - return map[string]*schema.Resource{ - "azurerm_spring_cloud_app": resourceArmSpringCloudApp(), - "azurerm_spring_cloud_certificate": resourceArmSpringCloudCertificate(), - 
"azurerm_spring_cloud_service": resourceArmSpringCloudService(), - } -} diff --git a/azurerm/internal/services/appplatform/spring_cloud_certificate_resource.go b/azurerm/internal/services/appplatform/spring_cloud_certificate_resource.go deleted file mode 100644 index 42d29c0201cf..000000000000 --- a/azurerm/internal/services/appplatform/spring_cloud_certificate_resource.go +++ /dev/null @@ -1,149 +0,0 @@ -package appplatform - -import ( - "fmt" - "log" - "time" - - "github.com/Azure/azure-sdk-for-go/services/preview/appplatform/mgmt/2019-05-01-preview/appplatform" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/appplatform/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/appplatform/validate" - azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmSpringCloudCertificate() *schema.Resource { - return &schema.Resource{ - Create: resourceArmSpringCloudCertificateCreate, - Read: resourceArmSpringCloudCertificateRead, - Delete: resourceArmSpringCloudCertificateDelete, - - Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.SpringCloudCertificateID(id) - return err - }), - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - - "resource_group_name": azure.SchemaResourceGroupName(), - - "service_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validate.SpringCloudServiceName, - }, - - "key_vault_certificate_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateKeyVaultChildId, - }, - }, - } -} - -func resourceArmSpringCloudCertificateCreate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).AppPlatform.CertificatesClient - ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := d.Get("name").(string) - resourceGroup := d.Get("resource_group_name").(string) - serviceName := d.Get("service_name").(string) - - existing, err := client.Get(ctx, resourceGroup, serviceName, name) - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for presence of existing Spring Cloud Service Certificate %q (Spring Cloud Service %q / Resource Group %q): %+v", name, serviceName, resourceGroup, err) - } - } - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_spring_cloud_certificate", *existing.ID) - } - - keyVaultCertificateId, _ := azure.ParseKeyVaultChildID(d.Get("key_vault_certificate_id").(string)) - cert := appplatform.CertificateResource{ - Properties: 
&appplatform.CertificateProperties{ - VaultURI: &keyVaultCertificateId.KeyVaultBaseUrl, - KeyVaultCertName: &keyVaultCertificateId.Name, - }, - } - - if _, err := client.CreateOrUpdate(ctx, resourceGroup, serviceName, name, cert); err != nil { - return fmt.Errorf("creating Spring Cloud Certificate %q (Spring Cloud Service %q / Resource Group %q): %+v", name, serviceName, resourceGroup, err) - } - - resp, err := client.Get(ctx, resourceGroup, serviceName, name) - if err != nil { - return fmt.Errorf("retrieving Spring Cloud Certificate %q (Spring Cloud Service %q / Resource Group %q): %+v", name, serviceName, resourceGroup, err) - } - if resp.ID == nil || *resp.ID == "" { - return fmt.Errorf("read Spring Cloud Certificate %q (Spring Cloud Service %q / Resource Group %q) ID", name, serviceName, resourceGroup) - } - d.SetId(*resp.ID) - - return resourceArmSpringCloudCertificateRead(d, meta) -} - -func resourceArmSpringCloudCertificateRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).AppPlatform.CertificatesClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.SpringCloudCertificateID(d.Id()) - if err != nil { - return err - } - resp, err := client.Get(ctx, id.ResourceGroup, id.ServiceName, id.Name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[INFO] Spring Cloud Certificate %q does not exist - removing from state", d.Id()) - d.SetId("") - return nil - } - return fmt.Errorf("reading Spring Cloud Certificate %q (Spring Cloud Service %q / Resource Group %q): %+v", id.Name, id.ServiceName, id.ResourceGroup, err) - } - - d.Set("name", resp.Name) - d.Set("resource_group_name", id.ResourceGroup) - d.Set("service_name", id.ServiceName) - - return nil -} - -func resourceArmSpringCloudCertificateDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).AppPlatform.CertificatesClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.SpringCloudCertificateID(d.Id()) - if err != nil { - return err - } - - if _, err := client.Delete(ctx, id.ResourceGroup, id.ServiceName, id.Name); err != nil { - return fmt.Errorf("deleting Spring Cloud Certificate %q (Spring Cloud Service %q / Resource Group %q): %+v", id.Name, id.ServiceName, id.ResourceGroup, err) - } - - return nil -} diff --git a/azurerm/internal/services/appplatform/spring_cloud_service_data_source.go b/azurerm/internal/services/appplatform/spring_cloud_service_data_source.go deleted file mode 100644 index 0d2383d9979c..000000000000 --- a/azurerm/internal/services/appplatform/spring_cloud_service_data_source.go +++ /dev/null @@ -1,146 +0,0 @@ -package appplatform - -import ( - "fmt" - "time" - - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/appplatform/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func dataSourceArmSpringCloudService() *schema.Resource { - return &schema.Resource{ - Read: dataSourceArmSpringCloudServiceRead, - - Timeouts: 
&schema.ResourceTimeout{ - Read: schema.DefaultTimeout(5 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validate.SpringCloudServiceName, - }, - - "location": azure.SchemaLocationForDataSource(), - - "resource_group_name": azure.SchemaResourceGroupNameForDataSource(), - - "config_server_git_setting": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "uri": { - Type: schema.TypeString, - Computed: true, - }, - - "label": { - Type: schema.TypeString, - Computed: true, - }, - - "search_paths": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - }, - - "http_basic_auth": DataSourceSchemaConfigServerHttpBasicAuth(), - - "ssh_auth": DataSourceSchemaConfigServerSSHAuth(), - - "repository": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Computed: true, - }, - "uri": { - Type: schema.TypeString, - Computed: true, - }, - "label": { - Type: schema.TypeString, - Computed: true, - }, - "pattern": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - }, - "search_paths": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - }, - - "http_basic_auth": DataSourceSchemaConfigServerHttpBasicAuth(), - - "ssh_auth": DataSourceSchemaConfigServerSSHAuth(), - }, - }, - }, - }, - }, - }, - - "tags": tags.SchemaDataSource(), - }, - } -} - -func dataSourceArmSpringCloudServiceRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).AppPlatform.ServicesClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := d.Get("name").(string) - resourceGroup := d.Get("resource_group_name").(string) - - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Error: Spring Cloud %q (Resource Group %q) was not found", name, resourceGroup) - } - return fmt.Errorf("Error reading Spring Cloud %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - if resp.ID == nil || *resp.ID == "" { - return fmt.Errorf("Error retrieving Spring Cloud Service %q (Resource Group %q): ID was nil or empty", name, resourceGroup) - } - - d.SetId(*resp.ID) - - d.Set("name", resp.Name) - d.Set("resource_group_name", resourceGroup) - if location := resp.Location; location != nil { - d.Set("location", azure.NormalizeLocation(*location)) - } - - if resp.Properties != nil && resp.Properties.ConfigServerProperties != nil && resp.Properties.ConfigServerProperties.ConfigServer != nil { - if props := resp.Properties.ConfigServerProperties.ConfigServer.GitProperty; props != nil { - if err := d.Set("config_server_git_setting", flattenArmSpringCloudConfigServerGitProperty(props, d)); err != nil { - return fmt.Errorf("failure setting AzureRM Spring Cloud Service Config Server error: %+v", err) - } - } - } - - return tags.FlattenAndSet(d, resp.Tags) -} diff --git a/azurerm/internal/services/appplatform/tests/spring_cloud_app_resource_test.go b/azurerm/internal/services/appplatform/tests/spring_cloud_app_resource_test.go deleted file mode 100644 index 4f25acf30c64..000000000000 --- a/azurerm/internal/services/appplatform/tests/spring_cloud_app_resource_test.go +++ /dev/null @@ -1,216 +0,0 @@ -package tests - 
-import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMSpringCloudApp_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_spring_cloud_app", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMSpringCloudAppDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMSpringCloudApp_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMSpringCloudAppExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMSpringCloudApp_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_spring_cloud_app", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMSpringCloudAppDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMSpringCloudApp_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMSpringCloudAppExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMSpringCloudApp_requiresImport), - }, - }) -} - -func TestAccAzureRMSpringCloudApp_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_spring_cloud_app", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMSpringCloudAppDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMSpringCloudApp_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMSpringCloudAppExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.principal_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.tenant_id"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMSpringCloudApp_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_spring_cloud_app", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMSpringCloudAppDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMSpringCloudApp_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMSpringCloudAppExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMSpringCloudApp_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMSpringCloudAppExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMSpringCloudApp_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMSpringCloudAppExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMSpringCloudAppExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Spring Cloud App not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - 
resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["service_name"] - - client := acceptance.AzureProvider.Meta().(*clients.Client).AppPlatform.AppsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - if resp, err := client.Get(ctx, resourceGroup, serviceName, name, ""); err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("bad: Spring Cloud App %q (Spring Cloud Name %q / Resource Group %q) does not exist", name, serviceName, resourceGroup) - } - return fmt.Errorf("bad: Get on AppPlatform.AppsClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMSpringCloudAppDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).AppPlatform.AppsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_spring_cloud_app" { - continue - } - - name := rs.Primary.Attributes["name"] - resGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["service_name"] - - if resp, err := client.Get(ctx, resGroup, serviceName, name, ""); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("bad: Get on AppPlatform.AppsClient: %+v", err) - } - } - - return nil - } - - return nil -} - -func testAccAzureRMSpringCloudApp_basic(data acceptance.TestData) string { - template := testAccAzureRMSpringCloudApp_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_spring_cloud_app" "test" { - name = "acctest-sca-%d" - resource_group_name = azurerm_spring_cloud_service.test.resource_group_name - service_name = azurerm_spring_cloud_service.test.name -} -`, template, data.RandomInteger) -} - -func testAccAzureRMSpringCloudApp_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMSpringCloudApp_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_spring_cloud_app" "import" { - name = azurerm_spring_cloud_app.test.name - resource_group_name = azurerm_spring_cloud_app.test.resource_group_name - service_name = azurerm_spring_cloud_app.test.service_name -} -`, template) -} - -func testAccAzureRMSpringCloudApp_complete(data acceptance.TestData) string { - template := testAccAzureRMSpringCloudApp_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_spring_cloud_app" "test" { - name = "acctest-sca-%d" - resource_group_name = azurerm_spring_cloud_service.test.resource_group_name - service_name = azurerm_spring_cloud_service.test.name - - identity { - type = "SystemAssigned" - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMSpringCloudApp_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-spring-%d" - location = "%s" -} - -resource "azurerm_spring_cloud_service" "test" { - name = "acctest-sc-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/appplatform/tests/spring_cloud_certificate_resource_test.go b/azurerm/internal/services/appplatform/tests/spring_cloud_certificate_resource_test.go deleted file mode 100644 index 1292f8612eb1..000000000000 --- a/azurerm/internal/services/appplatform/tests/spring_cloud_certificate_resource_test.go +++ /dev/null @@ -1,222 +0,0 @@ -package 
tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMSpringCloudCertificate_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_spring_cloud_certificate", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMSpringCloudCertificateDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMSpringCloudCertificate_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMSpringCloudCertificateExists(data.ResourceName), - ), - }, - data.ImportStep("key_vault_certificate_id"), - }, - }) -} - -func TestAccAzureRMSpringCloudCertificate_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_spring_cloud_certificate", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMSpringCloudCertificateDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMSpringCloudCertificate_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMSpringCloudCertificateExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMSpringCloudCertificate_requiresImport), - }, - }) -} - -func testCheckAzureRMSpringCloudCertificateExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Spring Cloud Certificate not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["service_name"] - - client := acceptance.AzureProvider.Meta().(*clients.Client).AppPlatform.CertificatesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - if resp, err := client.Get(ctx, resourceGroup, serviceName, name); err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("bad: Spring Cloud Certificate %q (Spring Cloud Name %q / Resource Group %q) does not exist", name, serviceName, resourceGroup) - } - return fmt.Errorf("bad: Get on AppPlatform.CertificatesClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMSpringCloudCertificateDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).AppPlatform.CertificatesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_spring_cloud_certificate" { - continue - } - - name := rs.Primary.Attributes["name"] - resGroup := rs.Primary.Attributes["resource_group_name"] - serviceName := rs.Primary.Attributes["service_name"] - resp, err := client.Get(ctx, resGroup, serviceName, name) - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("bad: Get on AppPlatform.CertificatesClient: %+v", err) - } - return nil - } - return fmt.Errorf("expected no spring cloud certificate but found %+v", resp) - } - - return nil -} - -func 
testAccAzureRMSpringCloudCertificate_basic(data acceptance.TestData) string { - template := testAccAzureRMSpringCloudCertificate_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_spring_cloud_certificate" "test" { - name = "acctest-scc-%d" - resource_group_name = azurerm_spring_cloud_service.test.resource_group_name - service_name = azurerm_spring_cloud_service.test.name - key_vault_certificate_id = azurerm_key_vault_certificate.test.id -} -`, template, data.RandomInteger) -} - -func testAccAzureRMSpringCloudCertificate_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMSpringCloudCertificate_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_spring_cloud_certificate" "import" { - name = azurerm_spring_cloud_certificate.test.name - resource_group_name = azurerm_spring_cloud_certificate.test.resource_group_name - service_name = azurerm_spring_cloud_certificate.test.service_name - key_vault_certificate_id = azurerm_spring_cloud_certificate.test.key_vault_certificate_id -} -`, template) -} - -func testAccAzureRMSpringCloudCertificate_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-spring-%d" - location = "%s" -} - -data "azurerm_client_config" "current" { -} - -data "azuread_service_principal" "test" { - display_name = "Azure Spring Cloud Domain-Management" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkeyvault%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - sku_name = "standard" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - secret_permissions = ["set"] - certificate_permissions = ["create", "delete", "get", "update"] - } - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azuread_service_principal.test.object_id - secret_permissions = ["get", "list"] - certificate_permissions = ["get", "list"] - } -} - -resource "azurerm_key_vault_certificate" "test" { - name = "acctestcert%s" - key_vault_id = azurerm_key_vault.test.id - - certificate_policy { - issuer_parameters { - name = "Self" - } - - key_properties { - exportable = true - key_size = 2048 - key_type = "RSA" - reuse_key = true - } - - lifetime_action { - action { - action_type = "AutoRenew" - } - - trigger { - days_before_expiry = 30 - } - } - - secret_properties { - content_type = "application/x-pkcs12" - } - - x509_certificate_properties { - key_usage = [ - "cRLSign", - "dataEncipherment", - "digitalSignature", - "keyAgreement", - "keyCertSign", - "keyEncipherment", - ] - - subject = "CN=contoso.com" - validity_in_months = 12 - } - } -} - -resource "azurerm_spring_cloud_service" "test" { - name = "acctest-sc-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString, data.RandomInteger) -} diff --git a/azurerm/internal/services/appplatform/tests/spring_cloud_service_data_source_test.go b/azurerm/internal/services/appplatform/tests/spring_cloud_service_data_source_test.go deleted file mode 100644 index db5c6950f68c..000000000000 --- a/azurerm/internal/services/appplatform/tests/spring_cloud_service_data_source_test.go +++ /dev/null @@ -1,38 +0,0 @@ -package 
tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMSpringCloudService_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_spring_cloud_service", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceSpringCloudService_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "id"), - ), - }, - }, - }) -} - -func testAccDataSourceSpringCloudService_basic(data acceptance.TestData) string { - config := testAccAzureRMSpringCloudService_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_spring_cloud_service" "test" { - name = azurerm_spring_cloud_service.test.name - resource_group_name = azurerm_spring_cloud_service.test.resource_group_name -} -`, config) -} diff --git a/azurerm/internal/services/appplatform/tests/spring_cloud_service_resource_test.go b/azurerm/internal/services/appplatform/tests/spring_cloud_service_resource_test.go deleted file mode 100644 index 9ebd3d437b22..000000000000 --- a/azurerm/internal/services/appplatform/tests/spring_cloud_service_resource_test.go +++ /dev/null @@ -1,378 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMSpringCloudService_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_spring_cloud_service", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMSpringCloudServiceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMSpringCloudService_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMSpringCloudServiceExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMSpringCloudService_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_spring_cloud_service", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMSpringCloudServiceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMSpringCloudService_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMSpringCloudServiceExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMSpringCloudService_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMSpringCloudServiceExists(data.ResourceName), - ), - }, - data.ImportStep( - // those field returned by api are "*" - // import state verify ignore those fields - "config_server_git_setting.0.ssh_auth.0.private_key", - "config_server_git_setting.0.ssh_auth.0.host_key", - "config_server_git_setting.0.ssh_auth.0.host_key_algorithm", - "config_server_git_setting.0.repository.0.http_basic_auth.0.username", - 
"config_server_git_setting.0.repository.0.http_basic_auth.0.password", - ), - { - Config: testAccAzureRMSpringCloudService_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMSpringCloudServiceExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMSpringCloudService_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_spring_cloud_service", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMSpringCloudServiceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMSpringCloudService_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMSpringCloudServiceExists(data.ResourceName), - ), - }, - data.ImportStep( - // those field returned by api are "*" - // import state verify ignore those fields - "config_server_git_setting.0.ssh_auth.0.private_key", - "config_server_git_setting.0.ssh_auth.0.host_key", - "config_server_git_setting.0.ssh_auth.0.host_key_algorithm", - "config_server_git_setting.0.repository.0.http_basic_auth.0.username", - "config_server_git_setting.0.repository.0.http_basic_auth.0.password", - ), - }, - }) -} - -func TestAccAzureRMSpringCloudService_virtualNetwork(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_spring_cloud_service", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMSpringCloudServiceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMSpringCloudService_virtualNetwork(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMSpringCloudServiceExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "network.0.service_runtime_network_resource_group"), - resource.TestCheckResourceAttrSet(data.ResourceName, "network.0.app_network_resource_group"), - ), - }, - data.ImportStep( - // those field returned by api are "*" - // import state verify ignore those fields - "config_server_git_setting.0.ssh_auth.0.private_key", - "config_server_git_setting.0.ssh_auth.0.host_key", - "config_server_git_setting.0.ssh_auth.0.host_key_algorithm", - ), - }, - }) -} - -func TestAccAzureRMSpringCloudService_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_spring_cloud_service", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMSpringCloudServiceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMSpringCloudService_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMSpringCloudServiceExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMSpringCloudService_requiresImport), - }, - }) -} - -func testCheckAzureRMSpringCloudServiceExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Spring Cloud not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - client := acceptance.AzureProvider.Meta().(*clients.Client).AppPlatform.ServicesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - if resp, err := client.Get(ctx, 
resourceGroup, name); err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Spring Cloud Service %q (Resource Group %q) does not exist", name, resourceGroup) - } - return fmt.Errorf("Bad: Get on AppPlatform.ServicesClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMSpringCloudServiceDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).AppPlatform.ServicesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_spring_cloud_service" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - if resp, err := client.Get(ctx, resourceGroup, name); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Get on AppPlatform.ServicesClient: %+v", err) - } - } - - return nil - } - - return nil -} - -func testAccAzureRMSpringCloudService_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-spring-%d" - location = "%s" -} - -resource "azurerm_spring_cloud_service" "test" { - name = "acctest-sc-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMSpringCloudService_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-spring-%d" - location = "%s" -} - -resource "azurerm_application_insights" "test" { - name = "acctestai-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - application_type = "web" -} - -resource "azurerm_spring_cloud_service" "test" { - name = "acctest-sc-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - config_server_git_setting { - uri = "git@bitbucket.org:Azure-Samples/piggymetrics.git" - label = "config" - search_paths = ["dir1", "dir4"] - - ssh_auth { - private_key = file("testdata/private_key") - host_key = file("testdata/host_key") - host_key_algorithm = "ssh-rsa" - strict_host_key_checking_enabled = false - } - - repository { - name = "repo1" - uri = "https://github.com/Azure-Samples/piggymetrics" - label = "config" - search_paths = ["dir1", "dir2"] - http_basic_auth { - username = "username" - password = "password" - } - } - - repository { - name = "repo2" - uri = "https://github.com/Azure-Samples/piggymetrics" - label = "config" - search_paths = ["dir1", "dir2"] - } - } - - trace { - instrumentation_key = azurerm_application_insights.test.instrumentation_key - } - - tags = { - Env = "Test" - version = "1" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMSpringCloudService_virtualNetwork(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-spring-%d" - location = "%s" -} - -resource "azurerm_application_insights" "test" { - name = "acctestai-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - application_type = "web" -} - -resource 
"azurerm_virtual_network" "test" { - name = "acctestvirtnet%d" - address_space = ["10.1.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test1" { - name = "internal1" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.1.0.0/24" -} - -resource "azurerm_subnet" "test2" { - name = "internal2" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.1.1.0/24" -} - -data "azuread_service_principal" "test" { - display_name = "Azure Spring Cloud Resource Provider" -} - -resource "azurerm_role_assignment" "test" { - scope = azurerm_virtual_network.test.id - role_definition_name = "Owner" - principal_id = data.azuread_service_principal.test.object_id -} - -resource "azurerm_spring_cloud_service" "test" { - name = "acctest-sc-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - network { - app_subnet_id = azurerm_subnet.test1.id - service_runtime_subnet_id = azurerm_subnet.test2.id - cidr_ranges = ["10.4.0.0/16", "10.5.0.0/16", "10.3.0.1/16"] - } - - config_server_git_setting { - uri = "git@bitbucket.org:Azure-Samples/piggymetrics.git" - label = "config" - search_paths = ["dir1", "dir4"] - - ssh_auth { - private_key = file("testdata/private_key") - host_key = file("testdata/host_key") - host_key_algorithm = "ssh-rsa" - strict_host_key_checking_enabled = false - } - } - - trace { - instrumentation_key = azurerm_application_insights.test.instrumentation_key - } - - tags = { - Env = "Test" - } - - depends_on = [azurerm_role_assignment.test] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMSpringCloudService_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMSpringCloudService_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_spring_cloud_service" "import" { - name = azurerm_spring_cloud_service.test.name - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} -`, template) -} diff --git a/azurerm/internal/services/appplatform/validate/spring_cloud_service.go b/azurerm/internal/services/appplatform/validate/spring_cloud_service.go deleted file mode 100644 index d89800969e8b..000000000000 --- a/azurerm/internal/services/appplatform/validate/spring_cloud_service.go +++ /dev/null @@ -1,42 +0,0 @@ -package validate - -import ( - "fmt" - "regexp" - "strings" -) - -func SpringCloudServiceName(i interface{}, k string) (_ []string, errors []error) { - v, ok := i.(string) - if !ok { - return nil, append(errors, fmt.Errorf("expected type of %s to be string", k)) - } - - // The name attribute rules are : - // 1. can contain only lowercase letters, numbers and hyphens. - // 2. The first character must be a letter. - // 3. The last character must be a letter or number - // 3. The value must be between 4 and 32 characters long - - if !regexp.MustCompile(`^([a-z])([a-z\d-]{2,30})([a-z\d])$`).MatchString(v) { - errors = append(errors, fmt.Errorf("%s must begin with a letter, end with a letter or number, contain only lowercase letters, numbers and hyphens. 
The value must be between 4 and 32 characters long.", k)) - } - - return nil, errors -} - -func ConfigServerURI(i interface{}, k string) (_ []string, errors []error) { - v, ok := i.(string) - if !ok { - return nil, append(errors, fmt.Errorf("expected type of %s to be string", k)) - } - - // the config server URI should be started with http://, https://, git@, or ssh:// - if !strings.HasPrefix(v, "http://") && - !strings.HasPrefix(v, "https://") && - !strings.HasPrefix(v, "git@") && - !strings.HasPrefix(v, "ssh://") { - errors = append(errors, fmt.Errorf("%s should be started with http://, https://, git@, or ssh://", k)) - } - return nil, errors -} diff --git a/azurerm/internal/services/attestation/attestation_provider_data_source.go b/azurerm/internal/services/attestation/attestation_provider_data_source.go index dfbbc4454bc4..d9dedeb6196f 100644 --- a/azurerm/internal/services/attestation/attestation_provider_data_source.go +++ b/azurerm/internal/services/attestation/attestation_provider_data_source.go @@ -8,6 +8,7 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/attestation/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" @@ -48,6 +49,7 @@ func dataSourceAttestationProvider() *schema.Resource { func dataSourceArmAttestationProviderRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Attestation.ProviderClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -59,9 +61,11 @@ func dataSourceArmAttestationProviderRead(d *schema.ResourceData, meta interface if utils.ResponseWasNotFound(resp.Response) { return fmt.Errorf("Attestation Provider %q (Resource Group %q) was not found", name, resourceGroup) } - return fmt.Errorf("retrieving Attestation %q (Resource Group %q): %+v", name, resourceGroup, err) + return fmt.Errorf("retrieving Attestation Provider %q (Resource Group %q): %+v", name, resourceGroup, err) } + d.SetId(parse.NewProviderID(subscriptionId, resourceGroup, name).ID()) + d.Set("name", name) d.Set("resource_group_name", resourceGroup) d.Set("location", location.NormalizeNilable(resp.Location)) @@ -71,10 +75,5 @@ func dataSourceArmAttestationProviderRead(d *schema.ResourceData, meta interface d.Set("trust_model", props.TrustModel) } - if resp.ID == nil || *resp.ID == "" { - return fmt.Errorf("empty or nil ID returned for Attestation Provider %q (Resource Group %q)", name, resourceGroup) - } - d.SetId(*resp.ID) - return tags.FlattenAndSet(d, resp.Tags) } diff --git a/azurerm/internal/services/attestation/attestation_provider_data_source_test.go b/azurerm/internal/services/attestation/attestation_provider_data_source_test.go new file mode 100644 index 000000000000..0e5457f80535 --- /dev/null +++ b/azurerm/internal/services/attestation/attestation_provider_data_source_test.go @@ -0,0 +1,41 @@ +package attestation_test + +import ( + "fmt" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/acctest" + 
"github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type AttestationProviderDataSource struct { +} + +func TestAccAttestationProviderDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_attestation_provider", "test") + randStr := strings.ToLower(acctest.RandString(10)) + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: AttestationProviderDataSource{}.basic(data, randStr), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(AttestationProviderResource{}), + ), + }, + }) +} + +func (AttestationProviderDataSource) basic(data acceptance.TestData, randStr string) string { + config := AttestationProviderResource{}.basic(data, randStr) + return fmt.Sprintf(` +%s + +data "azurerm_attestation_provider" "test" { + name = azurerm_attestation_provider.test.name + resource_group_name = azurerm_attestation_provider.test.resource_group_name +} +`, config) +} diff --git a/azurerm/internal/services/attestation/attestation_provider_resource.go b/azurerm/internal/services/attestation/attestation_provider_resource.go index 87948c482983..0917ae35f8af 100644 --- a/azurerm/internal/services/attestation/attestation_provider_resource.go +++ b/azurerm/internal/services/attestation/attestation_provider_resource.go @@ -21,12 +21,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmAttestationProvider() *schema.Resource { +func resourceAttestationProvider() *schema.Resource { return &schema.Resource{ - Create: resourceArmAttestationProviderCreate, - Read: resourceArmAttestationProviderRead, - Update: resourceArmAttestationProviderUpdate, - Delete: resourceArmAttestationProviderDelete, + Create: resourceAttestationProviderCreate, + Read: resourceAttestationProviderRead, + Update: resourceAttestationProviderUpdate, + Delete: resourceAttestationProviderDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -36,7 +36,7 @@ func resourceArmAttestationProvider() *schema.Resource { }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.AttestationId(id) + _, err := parse.ProviderID(id) return err }), @@ -45,7 +45,7 @@ func resourceArmAttestationProvider() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.AttestationName, + ValidateFunc: validate.AttestationProviderName, }, "resource_group_name": azure.SchemaResourceGroupName(), @@ -73,22 +73,25 @@ func resourceArmAttestationProvider() *schema.Resource { }, } } -func resourceArmAttestationProviderCreate(d *schema.ResourceData, meta interface{}) error { + +func resourceAttestationProviderCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Attestation.ProviderClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() name := d.Get("name").(string) resourceGroup := d.Get("resource_group_name").(string) + resourceId := parse.NewProviderID(subscriptionId, resourceGroup, name).ID() existing, err := client.Get(ctx, resourceGroup, name) if err != nil { if !utils.ResponseWasNotFound(existing.Response) { return fmt.Errorf("checking for presence of existing Attestation Provider %q (Resource 
Group %q): %+v", name, resourceGroup, err) } } - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_attestation_provider", *existing.ID) + if !utils.ResponseWasNotFound(existing.Response) { + return tf.ImportAsExistsError("azurerm_attestation_provider", resourceId) } props := attestation.ServiceCreationParams{ @@ -118,40 +121,31 @@ func resourceArmAttestationProviderCreate(d *schema.ResourceData, meta interface return fmt.Errorf("creating Attestation Provider %q (Resource Group %q): %+v", name, resourceGroup, err) } - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - return fmt.Errorf("retrieving Attestation Provider %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - if resp.ID == nil || *resp.ID == "" { - return fmt.Errorf("empty or nil ID returned for Attestation Provider %q (Resource Group %q)", name, resourceGroup) - } - - d.SetId(*resp.ID) - return resourceArmAttestationProviderRead(d, meta) + d.SetId(resourceId) + return resourceAttestationProviderRead(d, meta) } -func resourceArmAttestationProviderRead(d *schema.ResourceData, meta interface{}) error { +func resourceAttestationProviderRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Attestation.ProviderClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.AttestationId(d.Id()) + id, err := parse.ProviderID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.Name) + resp, err := client.Get(ctx, id.ResourceGroup, id.AttestationProviderName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[INFO] attestation %q does not exist - removing from state", d.Id()) + log.Printf("[INFO] Attestation Provider %q does not exist - removing from state", d.Id()) d.SetId("") return nil } - return fmt.Errorf("retrieving Attestation Provider %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("retrieving Attestation Provider %q (Resource Group %q): %+v", id.AttestationProviderName, id.ResourceGroup, err) } - d.Set("name", id.Name) + d.Set("name", id.AttestationProviderName) d.Set("resource_group_name", id.ResourceGroup) d.Set("location", location.NormalizeNilable(resp.Location)) @@ -163,12 +157,12 @@ func resourceArmAttestationProviderRead(d *schema.ResourceData, meta interface{} return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmAttestationProviderUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceAttestationProviderUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Attestation.ProviderClient ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.AttestationId(d.Id()) + id, err := parse.ProviderID(d.Id()) if err != nil { return err } @@ -178,24 +172,24 @@ func resourceArmAttestationProviderUpdate(d *schema.ResourceData, meta interface updateParams.Tags = tags.Expand(d.Get("tags").(map[string]interface{})) } - if _, err := client.Update(ctx, id.ResourceGroup, id.Name, updateParams); err != nil { - return fmt.Errorf("updating Attestation Provider %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + if _, err := client.Update(ctx, id.ResourceGroup, id.AttestationProviderName, updateParams); err != nil { + return fmt.Errorf("updating Attestation Provider %q (Resource Group %q): %+v", id.AttestationProviderName, id.ResourceGroup, err) } - return 
resourceArmAttestationProviderRead(d, meta) + return resourceAttestationProviderRead(d, meta) } -func resourceArmAttestationProviderDelete(d *schema.ResourceData, meta interface{}) error { +func resourceAttestationProviderDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Attestation.ProviderClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.AttestationId(d.Id()) + id, err := parse.ProviderID(d.Id()) if err != nil { return err } - if _, err := client.Delete(ctx, id.ResourceGroup, id.Name); err != nil { - return fmt.Errorf("deleting Attestation Provider %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + if _, err := client.Delete(ctx, id.ResourceGroup, id.AttestationProviderName); err != nil { + return fmt.Errorf("deleting Attestation Provider %q (Resource Group %q): %+v", id.AttestationProviderName, id.ResourceGroup, err) } return nil } diff --git a/azurerm/internal/services/attestation/attestation_provider_resource_test.go b/azurerm/internal/services/attestation/attestation_provider_resource_test.go new file mode 100644 index 000000000000..31bcf983262e --- /dev/null +++ b/azurerm/internal/services/attestation/attestation_provider_resource_test.go @@ -0,0 +1,275 @@ +package attestation_test + +import ( + "bytes" + "context" + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "crypto/x509" + "crypto/x509/pkix" + "encoding/pem" + "fmt" + "math/big" + "strings" + "testing" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/acctest" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/attestation/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type AttestationProviderResource struct { +} + +func TestAccAttestationProvider_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_attestation_provider", "test") + r := AttestationProviderResource{} + randStr := strings.ToLower(acctest.RandString(10)) + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, randStr), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAttestationProvider_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_attestation_provider", "test") + r := AttestationProviderResource{} + randStr := strings.ToLower(acctest.RandString(10)) + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, randStr), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(AttestationProviderResource{}.requiresImport), + }) +} + +func TestAccAttestationProvider_completeString(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_attestation_provider", "test") + r := AttestationProviderResource{} + randStr := strings.ToLower(acctest.RandString(10)) + testCertificate, err := testGenerateTestCertificate("ENCOM") + if err != nil { + t.Fatalf("Test case failed: '%+v'", err) + } + + data.ResourceTest(t, r, 
[]resource.TestStep{ + { + Config: r.completeString(data, randStr, testCertificate), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + // must ignore policy_signing_certificate since the API does not return these values + data.ImportStep("policy_signing_certificate"), + }) +} + +func TestAccAttestationProvider_completeFile(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_attestation_provider", "test") + r := AttestationProviderResource{} + randStr := strings.ToLower(acctest.RandString(10)) + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.completeFile(data, randStr), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + // must ignore policy_signing_certificate since the API does not return these values + data.ImportStep("policy_signing_certificate"), + }) +} + +func TestAccAttestationProvider_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_attestation_provider", "test") + r := AttestationProviderResource{} + randStr := strings.ToLower(acctest.RandString(10)) + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, randStr), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.update(data, randStr), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basic(data, randStr), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t AttestationProviderResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ProviderID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Attestation.ProviderClient.Get(ctx, id.ResourceGroup, id.AttestationProviderName) + if err != nil { + return nil, fmt.Errorf("retrieving Attestation Provider %q (resource group: %q): %+v", id.AttestationProviderName, id.ResourceGroup, err) + } + + return utils.Bool(resp.StatusResult != nil), nil +} + +func testGenerateTestCertificate(organization string) (string, error) { + privateKey, err := ecdsa.GenerateKey(elliptic.P521(), rand.Reader) + if err != nil { + return "", err + } + + rawCert := x509.Certificate{ + SerialNumber: big.NewInt(1), + Subject: pkix.Name{ + Organization: []string{organization}, + }, + NotBefore: time.Now(), + NotAfter: time.Now().Add(time.Hour * 24 * 180), + + KeyUsage: x509.KeyUsageDigitalSignature, + ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth}, + BasicConstraintsValid: true, + } + + certBytes, err := x509.CreateCertificate(rand.Reader, &rawCert, &rawCert, &privateKey.PublicKey, privateKey) + if err != nil { + return "", fmt.Errorf("unable to create test certificate: %+v", err) + } + + encoded := &bytes.Buffer{} + if err := pem.Encode(encoded, &pem.Block{Type: "CERTIFICATE", Bytes: certBytes}); err != nil { + return "", fmt.Errorf("unable to pem encode test certificate: %+v", err) + } + + return encoded.String(), nil +} + +// currently only supported in "East US 2", "West Central US" & "UK South" +func (AttestationProviderResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +// TODO: switch to using regular regions when this is supported +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-attestation-%d" 
+ location = "%s" +} +`, data.RandomInteger, "uksouth") +} + +func (AttestationProviderResource) basic(data acceptance.TestData, randStr string) string { + template := AttestationProviderResource{}.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_attestation_provider" "test" { + name = "acctestap%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location +} +`, template, randStr) +} + +func (AttestationProviderResource) update(data acceptance.TestData, randStr string) string { + template := AttestationProviderResource{}.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_attestation_provider" "test" { + name = "acctestap%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + tags = { + ENV = "Test" + } +} +`, template, randStr) +} + +func (AttestationProviderResource) requiresImport(data acceptance.TestData) string { + randStr := strings.ToLower(acctest.RandString(10)) + config := AttestationProviderResource{}.basic(data, randStr) + + return fmt.Sprintf(` +%s + +resource "azurerm_attestation_provider" "import" { + name = azurerm_attestation_provider.test.name + resource_group_name = azurerm_attestation_provider.test.resource_group_name + location = azurerm_attestation_provider.test.location +} +`, config) +} + +func (AttestationProviderResource) completeString(data acceptance.TestData, randStr string, testCertificate string) string { + template := AttestationProviderResource{}.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_attestation_provider" "test" { + name = "acctestap%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + policy_signing_certificate_data = <*%&:\\?.+/]{0,127}[^<>*%&:\\?.+/\s]$`), - `The name length must be from 1 to 128 characters. The name cannot contain special characters < > * % & : \ ? . + / and cannot end with a whitespace character.`, - ) -} diff --git a/azurerm/internal/services/automation/validate/runbook_name.go b/azurerm/internal/services/automation/validate/runbook_name.go new file mode 100644 index 000000000000..5d466d0fccc6 --- /dev/null +++ b/azurerm/internal/services/automation/validate/runbook_name.go @@ -0,0 +1,16 @@ +package validate + +import ( + "regexp" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" +) + +// RunbookName validates Automation Account Runbook names +func RunbookName() schema.SchemaValidateFunc { + return validation.StringMatch( + regexp.MustCompile(`^[0-9a-zA-Z][-_0-9a-zA-Z]{0,62}$`), + `The name can contain only letters, numbers, underscores and dashes. The name must begin with a letter. 
The name must be less than 64 characters.`, + ) +} diff --git a/azurerm/internal/services/automation/validate/schedule_name.go b/azurerm/internal/services/automation/validate/schedule_name.go new file mode 100644 index 000000000000..c0e9d82391f3 --- /dev/null +++ b/azurerm/internal/services/automation/validate/schedule_name.go @@ -0,0 +1,16 @@ +package validate + +import ( + "regexp" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" +) + +// ScheduleName validates Automation Account Schedule names +func ScheduleName() schema.SchemaValidateFunc { + return validation.StringMatch( + regexp.MustCompile(`^[^<>*%&:\\?.+/]{0,127}[^<>*%&:\\?.+/\s]$`), + `The name length must be from 1 to 128 characters. The name cannot contain special characters < > * % & : \ ? . + / and cannot end with a whitespace character.`, + ) +} diff --git a/azurerm/internal/services/azurestackhci/client/client.go b/azurerm/internal/services/azurestackhci/client/client.go new file mode 100644 index 000000000000..4fef8bbeee29 --- /dev/null +++ b/azurerm/internal/services/azurestackhci/client/client.go @@ -0,0 +1,19 @@ +package client + +import ( + "github.com/Azure/azure-sdk-for-go/services/azurestackhci/mgmt/2020-10-01/azurestackhci" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/common" +) + +type Client struct { + ClusterClient *azurestackhci.ClustersClient +} + +func NewClient(o *common.ClientOptions) *Client { + clusterClient := azurestackhci.NewClustersClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + o.ConfigureClient(&clusterClient.Client, o.ResourceManagerAuthorizer) + + return &Client{ + ClusterClient: &clusterClient, + } +} diff --git a/azurerm/internal/services/azurestackhci/parse/cluster.go b/azurerm/internal/services/azurestackhci/parse/cluster.go new file mode 100644 index 000000000000..8d3d7ad6c62a --- /dev/null +++ b/azurerm/internal/services/azurestackhci/parse/cluster.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ClusterId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewClusterID(subscriptionId, resourceGroup, name string) ClusterId { + return ClusterId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id ClusterId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Cluster", segmentsStr) +} + +func (id ClusterId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.AzureStackHCI/clusters/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// ClusterID parses a Cluster ID into an ClusterId struct +func ClusterID(input string) (*ClusterId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ClusterId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if 
resourceId.Name, err = id.PopSegment("clusters"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/azurestackhci/parse/cluster_test.go b/azurerm/internal/services/azurestackhci/parse/cluster_test.go new file mode 100644 index 000000000000..607ff5f46b2f --- /dev/null +++ b/azurerm/internal/services/azurestackhci/parse/cluster_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ClusterId{} + +func TestClusterIDFormatter(t *testing.T) { + actual := NewClusterID("12345678-1234-9876-4563-123456789012", "resGroup1", "cluster1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.AzureStackHCI/clusters/cluster1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestClusterID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ClusterId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.AzureStackHCI/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.AzureStackHCI/clusters/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.AzureStackHCI/clusters/cluster1", + Expected: &ClusterId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "cluster1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.AZURESTACKHCI/CLUSTERS/CLUSTER1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ClusterID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/azurestackhci/registration.go b/azurerm/internal/services/azurestackhci/registration.go new file mode 100644 index 000000000000..8cecdf01e527 --- /dev/null +++ 
b/azurerm/internal/services/azurestackhci/registration.go @@ -0,0 +1,29 @@ +package azurestackhci + +import "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + +type Registration struct{} + +// Name is the name of this Service +func (r Registration) Name() string { + return "Azure Stack HCI" +} + +// WebsiteCategories returns a list of categories which can be used for the sidebar +func (r Registration) WebsiteCategories() []string { + return []string{ + "Azure Stack HCI", + } +} + +// SupportedDataSources returns the supported Data Sources supported by this Service +func (r Registration) SupportedDataSources() map[string]*schema.Resource { + return map[string]*schema.Resource{} +} + +// SupportedResources returns the supported Resources supported by this Service +func (r Registration) SupportedResources() map[string]*schema.Resource { + return map[string]*schema.Resource{ + "azurerm_stack_hci_cluster": resourceArmStackHCICluster(), + } +} diff --git a/azurerm/internal/services/azurestackhci/resourceids.go b/azurerm/internal/services/azurestackhci/resourceids.go new file mode 100644 index 000000000000..ec451ad1ecc4 --- /dev/null +++ b/azurerm/internal/services/azurestackhci/resourceids.go @@ -0,0 +1,3 @@ +package azurestackhci + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Cluster -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.AzureStackHCI/clusters/cluster1 diff --git a/azurerm/internal/services/azurestackhci/stack_hci_cluster_resource.go b/azurerm/internal/services/azurestackhci/stack_hci_cluster_resource.go new file mode 100644 index 000000000000..b16fafbb98be --- /dev/null +++ b/azurerm/internal/services/azurestackhci/stack_hci_cluster_resource.go @@ -0,0 +1,191 @@ +package azurestackhci + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/azurestackhci/mgmt/2020-10-01/azurestackhci" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/azurestackhci/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/azurestackhci/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmStackHCICluster() *schema.Resource { + return &schema.Resource{ + Create: resourceArmStackHCIClusterCreate, + Read: resourceArmStackHCIClusterRead, + Update: resourceArmStackHCIClusterUpdate, + Delete: resourceArmStackHCIClusterDelete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error 
{ + _, err := parse.ClusterID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.ClusterName, + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "location": azure.SchemaLocation(), + + "client_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.IsUUID, + }, + + "tenant_id": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + ValidateFunc: validation.IsUUID, + }, + + "tags": tags.Schema(), + }, + } +} + +func resourceArmStackHCIClusterCreate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).AzureStackHCI.ClusterClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + tenantId := meta.(*clients.Client).Account.TenantId + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + + id := parse.NewClusterID(subscriptionId, resourceGroup, name) + + existing, err := client.Get(ctx, resourceGroup, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for present of existing Azure Stack HCI Cluster %q (Resource Group %q): %+v", name, resourceGroup, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_stack_hci_cluster", id.ID()) + } + + cluster := azurestackhci.Cluster{ + Location: utils.String(location.Normalize(d.Get("location").(string))), + ClusterProperties: &azurestackhci.ClusterProperties{ + AadClientID: utils.String(d.Get("client_id").(string)), + }, + Tags: tags.Expand(d.Get("tags").(map[string]interface{})), + } + + if v, ok := d.GetOk("tenant_id"); ok { + cluster.ClusterProperties.AadTenantID = utils.String(v.(string)) + } else { + cluster.ClusterProperties.AadTenantID = utils.String(tenantId) + } + + if _, err := client.Create(ctx, resourceGroup, name, cluster); err != nil { + return fmt.Errorf("creating Azure Stack HCI Cluster %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + d.SetId(id.ID()) + + return resourceArmStackHCIClusterRead(d, meta) +} + +func resourceArmStackHCIClusterRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).AzureStackHCI.ClusterClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.ClusterID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[INFO] Azure Stack HCI Cluster %q does not exist - removing from state", d.Id()) + d.SetId("") + return nil + } + + return fmt.Errorf("retrieving Azure Stack HCI Cluster %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + d.Set("name", id.Name) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("location", location.NormalizeNilable(resp.Location)) + + if props := resp.ClusterProperties; props != nil { + d.Set("client_id", props.AadClientID) + d.Set("tenant_id", props.AadTenantID) + } + + return tags.FlattenAndSet(d, resp.Tags) +} + +func resourceArmStackHCIClusterUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).AzureStackHCI.ClusterClient + ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, 
err := parse.ClusterID(d.Id()) + if err != nil { + return err + } + + cluster := azurestackhci.ClusterUpdate{} + + if d.HasChange("tags") { + cluster.Tags = tags.Expand(d.Get("tags").(map[string]interface{})) + } + + if _, err := client.Update(ctx, id.ResourceGroup, id.Name, cluster); err != nil { + return fmt.Errorf("updating Azure Stack HCI Cluster %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + return resourceArmStackHCIClusterRead(d, meta) +} + +func resourceArmStackHCIClusterDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).AzureStackHCI.ClusterClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.ClusterID(d.Id()) + if err != nil { + return err + } + + if _, err := client.Delete(ctx, id.ResourceGroup, id.Name); err != nil { + return fmt.Errorf("deleting Azure Stack HCI Cluster %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + return nil +} diff --git a/azurerm/internal/services/azurestackhci/stack_hci_cluster_resource_test.go b/azurerm/internal/services/azurestackhci/stack_hci_cluster_resource_test.go new file mode 100644 index 000000000000..1dd579b3767c --- /dev/null +++ b/azurerm/internal/services/azurestackhci/stack_hci_cluster_resource_test.go @@ -0,0 +1,190 @@ +package azurestackhci_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/azurestackhci/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type StackHCIClusterResource struct{} + +func TestAccStackHCICluster_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_stack_hci_cluster", "test") + r := StackHCIClusterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccStackHCICluster_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_stack_hci_cluster", "test") + r := StackHCIClusterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccStackHCICluster_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_stack_hci_cluster", "test") + r := StackHCIClusterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccStackHCICluster_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_stack_hci_cluster", "test") + r := StackHCIClusterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + 
Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (r StackHCIClusterResource) Exists(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error) { + clusterClient := client.AzureStackHCI.ClusterClient + id, err := parse.ClusterID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clusterClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return utils.Bool(false), nil + } + + return nil, fmt.Errorf("retrieving Azure Stack HCI Cluster %q: %+v", state.ID, err) + } + + return utils.Bool(resp.ClusterProperties != nil), nil +} + +func (r StackHCIClusterResource) basic(data acceptance.TestData) string { + template := r.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_stack_hci_cluster" "test" { + name = "acctest-StackHCICluster-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + client_id = azuread_application.test.application_id + tenant_id = data.azurerm_client_config.current.tenant_id +} +`, template, data.RandomInteger) +} + +func (r StackHCIClusterResource) requiresImport(data acceptance.TestData) string { + config := r.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_stack_hci_cluster" "import" { + name = azurerm_stack_hci_cluster.test.name + resource_group_name = azurerm_stack_hci_cluster.test.resource_group_name + location = azurerm_stack_hci_cluster.test.location + client_id = azurerm_stack_hci_cluster.test.client_id + tenant_id = azurerm_stack_hci_cluster.test.tenant_id +} +`, config) +} + +func (r StackHCIClusterResource) complete(data acceptance.TestData) string { + template := r.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_stack_hci_cluster" "test" { + name = "acctest-StackHCICluster-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + client_id = azuread_application.test.application_id + tenant_id = data.azurerm_client_config.current.tenant_id + + tags = { + ENV = "Test" + } +} +`, template, data.RandomInteger) +} + +func (r StackHCIClusterResource) update(data acceptance.TestData) string { + template := r.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_stack_hci_cluster" "test" { + name = "acctest-StackHCICluster-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + client_id = azuread_application.test.application_id + tenant_id = data.azurerm_client_config.current.tenant_id + + tags = { + ENv = "Test2" + } +} +`, template, data.RandomInteger) +} + +func (r StackHCIClusterResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" {} + +resource "azuread_application" "test" { + name = "acctestspa-%d" +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-hci-%d" + location = "%s" +} +`, data.RandomInteger, data.RandomInteger, data.Locations.Primary) +} diff --git a/azurerm/internal/services/azurestackhci/validate/cluster_id.go b/azurerm/internal/services/azurestackhci/validate/cluster_id.go new file mode 100644 index 000000000000..4eb34cf49cac --- /dev/null +++ b/azurerm/internal/services/azurestackhci/validate/cluster_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual 
changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/azurestackhci/parse" +) + +func ClusterID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ClusterID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/azurestackhci/validate/cluster_id_test.go b/azurerm/internal/services/azurestackhci/validate/cluster_id_test.go new file mode 100644 index 000000000000..4c268391dd50 --- /dev/null +++ b/azurerm/internal/services/azurestackhci/validate/cluster_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestClusterID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.AzureStackHCI/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.AzureStackHCI/clusters/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.AzureStackHCI/clusters/cluster1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.AZURESTACKHCI/CLUSTERS/CLUSTER1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ClusterID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/azurestackhci/validate/cluster_name.go b/azurerm/internal/services/azurestackhci/validate/cluster_name.go new file mode 100644 index 000000000000..f1083360df3f --- /dev/null +++ b/azurerm/internal/services/azurestackhci/validate/cluster_name.go @@ -0,0 +1,18 @@ +package validate + +import "fmt" + +func ClusterName(i interface{}, k string) (warnings []string, errors []error) { + v, ok := i.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) + return warnings, errors + } + + if len(v) == 0 || len(v) > 260 { + errors = append(errors, fmt.Errorf("%s cannot be empty and must not exceed 260 characters", k)) + return warnings, errors + } + + return warnings, errors +} diff --git a/azurerm/internal/services/azurestackhci/validate/cluster_name_test.go b/azurerm/internal/services/azurestackhci/validate/cluster_name_test.go new file mode 100644 index 000000000000..993cb5137578 --- /dev/null +++ b/azurerm/internal/services/azurestackhci/validate/cluster_name_test.go @@ -0,0 +1,48 @@ +package validate + +import ( + "strings" + 
"testing" +) + +func TestClusterName(t *testing.T) { + testData := []struct { + input string + expected bool + }{ + { + input: "", + expected: false, + }, + { + input: "test", + expected: true, + }, + { + input: "test-abc", + expected: true, + }, + { + input: strings.Repeat("s", 259), + expected: true, + }, + { + input: strings.Repeat("s", 260), + expected: true, + }, + { + input: strings.Repeat("s", 261), + expected: false, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q..", v.input) + + _, errors := ClusterName(v.input, "name") + actual := len(errors) == 0 + if v.expected != actual { + t.Fatalf("Expected %t but got %t", v.expected, actual) + } + } +} diff --git a/azurerm/internal/services/batch/batch_account.go b/azurerm/internal/services/batch/batch_account.go index 03275b65d138..d0bcac4a57ca 100644 --- a/azurerm/internal/services/batch/batch_account.go +++ b/azurerm/internal/services/batch/batch_account.go @@ -3,7 +3,7 @@ package batch import ( "fmt" - "github.com/Azure/azure-sdk-for-go/services/batch/mgmt/2019-08-01/batch" + "github.com/Azure/azure-sdk-for-go/services/batch/mgmt/2020-03-01/batch" ) // expandBatchAccountKeyVaultReference expands Batch account KeyVault reference diff --git a/azurerm/internal/services/batch/batch_account_data_source.go b/azurerm/internal/services/batch/batch_account_data_source.go index 7b49c70b63e6..c63600b057fd 100644 --- a/azurerm/internal/services/batch/batch_account_data_source.go +++ b/azurerm/internal/services/batch/batch_account_data_source.go @@ -4,7 +4,7 @@ import ( "fmt" "time" - "github.com/Azure/azure-sdk-for-go/services/batch/mgmt/2019-08-01/batch" + "github.com/Azure/azure-sdk-for-go/services/batch/mgmt/2020-03-01/batch" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" @@ -13,9 +13,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmBatchAccount() *schema.Resource { +func dataSourceBatchAccount() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmBatchAccountRead, + Read: dataSourceBatchAccountRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -72,7 +72,7 @@ func dataSourceArmBatchAccount() *schema.Resource { } } -func dataSourceArmBatchAccountRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceBatchAccountRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Batch.AccountClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -107,7 +107,6 @@ func dataSourceArmBatchAccountRead(d *schema.ResourceData, meta interface{}) err if poolAllocationMode == string(batch.BatchService) { keys, err := client.GetKeys(ctx, resourceGroup, name) - if err != nil { return fmt.Errorf("Cannot read keys for Batch account %q (resource group %q): %v", name, resourceGroup, err) } diff --git a/azurerm/internal/services/batch/batch_account_data_source_test.go b/azurerm/internal/services/batch/batch_account_data_source_test.go index fd22145aeee4..05b63e1daa27 100644 --- a/azurerm/internal/services/batch/batch_account_data_source_test.go +++ b/azurerm/internal/services/batch/batch_account_data_source_test.go @@ -8,79 +8,71 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" ) +type BatchAccountDataSource struct { +} + func TestAccBatchAccountDataSource_basic(t *testing.T) { data := acceptance.BuildTestData(t, "data.azurerm_batch_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccBatchAccountDataSource_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "name", fmt.Sprintf("testaccbatch%s", data.RandomString)), - resource.TestCheckResourceAttr(data.ResourceName, "location", azure.NormalizeLocation(data.Locations.Primary)), - resource.TestCheckResourceAttr(data.ResourceName, "pool_allocation_mode", "BatchService"), - ), - }, + r := BatchAccountDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("location").HasValue(azure.NormalizeLocation(data.Locations.Primary)), + check.That(data.ResourceName).Key("pool_allocation_mode").HasValue("BatchService"), + ), }, }) } func TestAccBatchAccountDataSource_complete(t *testing.T) { data := acceptance.BuildTestData(t, "data.azurerm_batch_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccBatchAccountDataSource_complete(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "name", fmt.Sprintf("testaccbatch%s", data.RandomString)), - resource.TestCheckResourceAttr(data.ResourceName, "location", azure.NormalizeLocation(data.Locations.Primary)), - resource.TestCheckResourceAttr(data.ResourceName, "pool_allocation_mode", "BatchService"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.env", "test"), - ), - }, + r := BatchAccountDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("location").HasValue(azure.NormalizeLocation(data.Locations.Primary)), + check.That(data.ResourceName).Key("pool_allocation_mode").HasValue("BatchService"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.env").HasValue("test"), + ), }, }) } func TestAccBatchAccountDataSource_userSubscription(t *testing.T) { data := acceptance.BuildTestData(t, "data.azurerm_batch_account", "test") + r := BatchAccountDataSource{} tenantID := os.Getenv("ARM_TENANT_ID") subscriptionID := os.Getenv("ARM_SUBSCRIPTION_ID") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccBatchAccountDataSource_userSubscription(data, tenantID, subscriptionID), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "name", fmt.Sprintf("testaccbatch%s", data.RandomString)), - resource.TestCheckResourceAttr(data.ResourceName, "location", azure.NormalizeLocation(data.Locations.Primary)), - resource.TestCheckResourceAttr(data.ResourceName, "pool_allocation_mode", "UserSubscription"), - 
resource.TestCheckResourceAttr(data.ResourceName, "key_vault_reference.#", "1"), - ), - }, + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.userSubscription(data, tenantID, subscriptionID), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("location").HasValue(azure.NormalizeLocation(data.Locations.Primary)), + check.That(data.ResourceName).Key("pool_allocation_mode").HasValue("UserSubscription"), + check.That(data.ResourceName).Key("key_vault_reference.#").HasValue("1"), + ), }, }) } -func testAccBatchAccountDataSource_basic(data acceptance.TestData) string { +func (BatchAccountDataSource) basic(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} } resource "azurerm_resource_group" "test" { - name = "testaccRG-%d-batch" + name = "testaccRG-batch-%d" location = "%s" } @@ -98,14 +90,14 @@ data "azurerm_batch_account" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomString) } -func testAccBatchAccountDataSource_complete(data acceptance.TestData) string { +func (BatchAccountDataSource) complete(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} } resource "azurerm_resource_group" "test" { - name = "testaccRG-%d-batch" + name = "testaccRG-batch-%d" location = "%s" } @@ -136,7 +128,7 @@ data "azurerm_batch_account" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) } -func testAccBatchAccountDataSource_userSubscription(data acceptance.TestData, tenantID string, subscriptionID string) string { +func (BatchAccountDataSource) userSubscription(data acceptance.TestData, tenantID string, subscriptionID string) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -147,7 +139,7 @@ data "azuread_service_principal" "test" { } resource "azurerm_resource_group" "test" { - name = "testaccRG-%d-batchaccount" + name = "testaccRG-batch-%d" location = "%s" } diff --git a/azurerm/internal/services/batch/batch_account_resource.go b/azurerm/internal/services/batch/batch_account_resource.go index b06c935e3df6..17abe1627123 100644 --- a/azurerm/internal/services/batch/batch_account_resource.go +++ b/azurerm/internal/services/batch/batch_account_resource.go @@ -6,7 +6,7 @@ import ( "regexp" "time" - "github.com/Azure/azure-sdk-for-go/services/batch/mgmt/2019-08-01/batch" + "github.com/Azure/azure-sdk-for-go/services/batch/mgmt/2020-03-01/batch" "github.com/hashicorp/go-azure-helpers/response" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" @@ -20,12 +20,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmBatchAccount() *schema.Resource { +func resourceBatchAccount() *schema.Resource { return &schema.Resource{ - Create: resourceArmBatchAccountCreate, - Read: resourceArmBatchAccountRead, - Update: resourceArmBatchAccountUpdate, - Delete: resourceArmBatchAccountDelete, + Create: resourceBatchAccountCreate, + Read: resourceBatchAccountRead, + Update: resourceBatchAccountUpdate, + Delete: resourceBatchAccountDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -35,7 +35,7 @@ func resourceArmBatchAccount() *schema.Resource { }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.BatchAccountID(id) + _, err := parse.AccountID(id) return err }), @@ -105,7 +105,7 @@ func resourceArmBatchAccount() *schema.Resource { } } -func 
resourceArmBatchAccountCreate(d *schema.ResourceData, meta interface{}) error { +func resourceBatchAccountCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Batch.AccountClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -181,30 +181,30 @@ func resourceArmBatchAccountCreate(d *schema.ResourceData, meta interface{}) err d.SetId(*read.ID) - return resourceArmBatchAccountRead(d, meta) + return resourceBatchAccountRead(d, meta) } -func resourceArmBatchAccountRead(d *schema.ResourceData, meta interface{}) error { +func resourceBatchAccountRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Batch.AccountClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.BatchAccountID(d.Id()) + id, err := parse.AccountID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.Name) + resp, err := client.Get(ctx, id.ResourceGroup, id.BatchAccountName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") - log.Printf("[DEBUG] Batch Account %q was not found in Resource Group %q - removing from state!", id.Name, id.ResourceGroup) + log.Printf("[DEBUG] Batch Account %q was not found in Resource Group %q - removing from state!", id.BatchAccountName, id.ResourceGroup) return nil } - return fmt.Errorf("Error reading the state of Batch account %q: %+v", id.Name, err) + return fmt.Errorf("Error reading the state of Batch account %q: %+v", id.BatchAccountName, err) } - d.Set("name", id.Name) + d.Set("name", id.BatchAccountName) d.Set("resource_group_name", id.ResourceGroup) d.Set("account_endpoint", resp.AccountEndpoint) @@ -220,10 +220,9 @@ func resourceArmBatchAccountRead(d *schema.ResourceData, meta interface{}) error } if d.Get("pool_allocation_mode").(string) == string(batch.BatchService) { - keys, err := client.GetKeys(ctx, id.ResourceGroup, id.Name) - + keys, err := client.GetKeys(ctx, id.ResourceGroup, id.BatchAccountName) if err != nil { - return fmt.Errorf("Cannot read keys for Batch account %q (resource group %q): %v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("Cannot read keys for Batch account %q (resource group %q): %v", id.BatchAccountName, id.ResourceGroup, err) } d.Set("primary_access_key", keys.Primary) @@ -233,14 +232,14 @@ func resourceArmBatchAccountRead(d *schema.ResourceData, meta interface{}) error return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmBatchAccountUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceBatchAccountUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Batch.AccountClient ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) defer cancel() log.Printf("[INFO] preparing arguments for Azure Batch account update.") - id, err := parse.BatchAccountID(d.Id()) + id, err := parse.AccountID(d.Id()) if err != nil { return err } @@ -257,42 +256,42 @@ func resourceArmBatchAccountUpdate(d *schema.ResourceData, meta interface{}) err Tags: tags.Expand(t), } - if _, err = client.Update(ctx, id.ResourceGroup, id.Name, parameters); err != nil { - return fmt.Errorf("Error updating Batch account %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + if _, err = client.Update(ctx, id.ResourceGroup, id.BatchAccountName, parameters); err != nil { + return fmt.Errorf("Error updating Batch account %q (Resource Group %q): %+v", id.BatchAccountName, id.ResourceGroup, err) 
} - read, err := client.Get(ctx, id.ResourceGroup, id.Name) + read, err := client.Get(ctx, id.ResourceGroup, id.BatchAccountName) if err != nil { - return fmt.Errorf("Error retrieving Batch account %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("Error retrieving Batch account %q (Resource Group %q): %+v", id.BatchAccountName, id.ResourceGroup, err) } if read.ID == nil { - return fmt.Errorf("Cannot read Batch account %q (resource group %q) ID", id.Name, id.ResourceGroup) + return fmt.Errorf("Cannot read Batch account %q (resource group %q) ID", id.BatchAccountName, id.ResourceGroup) } d.SetId(*read.ID) - return resourceArmBatchAccountRead(d, meta) + return resourceBatchAccountRead(d, meta) } -func resourceArmBatchAccountDelete(d *schema.ResourceData, meta interface{}) error { +func resourceBatchAccountDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Batch.AccountClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.BatchAccountID(d.Id()) + id, err := parse.AccountID(d.Id()) if err != nil { return err } - future, err := client.Delete(ctx, id.ResourceGroup, id.Name) + future, err := client.Delete(ctx, id.ResourceGroup, id.BatchAccountName) if err != nil { - return fmt.Errorf("Error deleting Batch account %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("Error deleting Batch account %q (Resource Group %q): %+v", id.BatchAccountName, id.ResourceGroup, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { if !response.WasNotFound(future.Response()) { - return fmt.Errorf("Error waiting for deletion of Batch account %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("Error waiting for deletion of Batch account %q (Resource Group %q): %+v", id.BatchAccountName, id.ResourceGroup, err) } } diff --git a/azurerm/internal/services/batch/batch_account_resource_test.go b/azurerm/internal/services/batch/batch_account_resource_test.go index 1996b325a903..9ef433cd0279 100644 --- a/azurerm/internal/services/batch/batch_account_resource_test.go +++ b/azurerm/internal/services/batch/batch_account_resource_test.go @@ -1,8 +1,8 @@ package batch_test import ( + "context" "fmt" - "net/http" "os" "testing" @@ -12,10 +12,14 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/terraform" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) +type BatchAccountResource struct { +} + func TestValidateBatchAccountName(t *testing.T) { testCases := []struct { input string @@ -44,162 +48,102 @@ func TestValidateBatchAccountName(t *testing.T) { func TestAccBatchAccount_basic(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_batch_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckBatchAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccBatchAccount_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckBatchAccountExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, 
"pool_allocation_mode", "BatchService"), - ), - }, + r := BatchAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("pool_allocation_mode").HasValue("BatchService"), + ), }, }) } func TestAccBatchAccount_requiresImport(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_batch_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckBatchAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccBatchAccount_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckBatchAccountExists(data.ResourceName), - ), - }, - { - Config: testAccBatchAccount_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_batch_account"), - }, + r := BatchAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_batch_account"), }, }) } func TestAccBatchAccount_complete(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_batch_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckBatchAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccBatchAccount_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckBatchAccountExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "pool_allocation_mode", "BatchService"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.env", "test"), - ), - }, - { - Config: testAccBatchAccount_completeUpdated(data), - Check: resource.ComposeTestCheckFunc( - testCheckBatchAccountExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "pool_allocation_mode", "BatchService"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.env", "test"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.version", "2"), - ), - }, + r := BatchAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("pool_allocation_mode").HasValue("BatchService"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.env").HasValue("test"), + ), + }, + { + Config: r.completeUpdated(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("pool_allocation_mode").HasValue("BatchService"), + check.That(data.ResourceName).Key("tags.%").HasValue("2"), + check.That(data.ResourceName).Key("tags.env").HasValue("test"), + check.That(data.ResourceName).Key("tags.version").HasValue("2"), + ), }, }) } func TestAccBatchAccount_userSubscription(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_batch_account", "test") + r := BatchAccountResource{} tenantID := os.Getenv("ARM_TENANT_ID") - resource.ParallelTest(t, 
resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckBatchAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccBatchAccount_userSubscription(data, tenantID), - Check: resource.ComposeTestCheckFunc( - testCheckBatchAccountExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "pool_allocation_mode", "UserSubscription"), - ), - }, + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.userSubscription(data, tenantID), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("pool_allocation_mode").HasValue("UserSubscription"), + ), }, }) } -func testCheckBatchAccountExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - conn := acceptance.AzureProvider.Meta().(*clients.Client).Batch.AccountClient - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.BatchAccountID(rs.Primary.ID) - if err != nil { - return err - } - - // Ensure resource group exists in API - - resp, err := conn.Get(ctx, id.ResourceGroup, id.Name) - if err != nil { - return fmt.Errorf("Bad: Get on batchAccountClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Batch account %q (resource group: %q) does not exist", id.Name, id.ResourceGroup) - } - - return nil +func (t BatchAccountResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.AccountID(state.ID) + if err != nil { + return nil, err } -} - -func testCheckBatchAccountDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Batch.AccountClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_batch_account" { - continue - } - - id, err := parse.BatchAccountID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.Name) - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return err - } - } - - return nil + resp, err := clients.Batch.AccountClient.Get(ctx, id.ResourceGroup, id.BatchAccountName) + if err != nil { + return nil, fmt.Errorf("retrieving Batch Application %q (Resource Group %q) does not exist", id.BatchAccountName, id.ResourceGroup) } - return nil + return utils.Bool(resp.AccountProperties != nil), nil } -func testAccBatchAccount_basic(data acceptance.TestData) string { +func (BatchAccountResource) basic(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} } resource "azurerm_resource_group" "test" { - name = "testaccRG-%d-batchaccount" + name = "testaccRG-batch-%d" location = "%s" } @@ -212,27 +156,27 @@ resource "azurerm_batch_account" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomString) } -func testAccBatchAccount_requiresImport(data acceptance.TestData) string { - template := testAccBatchAccount_basic(data) +func (BatchAccountResource) requiresImport(data acceptance.TestData) string { return fmt.Sprintf(` %s + resource "azurerm_batch_account" "import" { name = azurerm_batch_account.test.name resource_group_name = 
azurerm_batch_account.test.resource_group_name location = azurerm_batch_account.test.location pool_allocation_mode = azurerm_batch_account.test.pool_allocation_mode } -`, template) +`, BatchAccountResource{}.basic(data)) } -func testAccBatchAccount_complete(data acceptance.TestData) string { +func (BatchAccountResource) complete(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} } resource "azurerm_resource_group" "test" { - name = "testaccRG-%d-batchaccount" + name = "testaccRG-batch-%d" location = "%s" } @@ -258,14 +202,14 @@ resource "azurerm_batch_account" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) } -func testAccBatchAccount_completeUpdated(data acceptance.TestData) string { +func (BatchAccountResource) completeUpdated(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} } resource "azurerm_resource_group" "test" { - name = "testaccRG-%d-batchaccount" + name = "testaccRG-batch-%d" location = "%s" } @@ -292,7 +236,7 @@ resource "azurerm_batch_account" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) } -func testAccBatchAccount_userSubscription(data acceptance.TestData, tenantID string) string { +func (BatchAccountResource) userSubscription(data acceptance.TestData, tenantID string) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -303,7 +247,7 @@ data "azuread_service_principal" "test" { } resource "azurerm_resource_group" "test" { - name = "testaccRG-%d-batchaccount" + name = "testaccRG-batch-%d" location = "%s" } diff --git a/azurerm/internal/services/batch/batch_application_resource.go b/azurerm/internal/services/batch/batch_application_resource.go index 284c9d39843a..c63173b9e017 100644 --- a/azurerm/internal/services/batch/batch_application_resource.go +++ b/azurerm/internal/services/batch/batch_application_resource.go @@ -6,7 +6,7 @@ import ( "regexp" "time" - "github.com/Azure/azure-sdk-for-go/services/batch/mgmt/2019-08-01/batch" + "github.com/Azure/azure-sdk-for-go/services/batch/mgmt/2020-03-01/batch" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" @@ -17,12 +17,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmBatchApplication() *schema.Resource { +func resourceBatchApplication() *schema.Resource { return &schema.Resource{ - Create: resourceArmBatchApplicationCreate, - Read: resourceArmBatchApplicationRead, - Update: resourceArmBatchApplicationUpdate, - Delete: resourceArmBatchApplicationDelete, + Create: resourceBatchApplicationCreate, + Read: resourceBatchApplicationRead, + Update: resourceBatchApplicationUpdate, + Delete: resourceBatchApplicationDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -32,7 +32,7 @@ func resourceArmBatchApplication() *schema.Resource { }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.BatchApplicationID(id) + _, err := parse.ApplicationID(id) return err }), @@ -74,7 +74,7 @@ func resourceArmBatchApplication() *schema.Resource { } } -func resourceArmBatchApplicationCreate(d *schema.ResourceData, meta interface{}) error { +func resourceBatchApplicationCreate(d *schema.ResourceData, meta interface{}) error { client := 
meta.(*clients.Client).Batch.ApplicationClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -118,32 +118,32 @@ func resourceArmBatchApplicationCreate(d *schema.ResourceData, meta interface{}) } d.SetId(*resp.ID) - return resourceArmBatchApplicationRead(d, meta) + return resourceBatchApplicationRead(d, meta) } -func resourceArmBatchApplicationRead(d *schema.ResourceData, meta interface{}) error { +func resourceBatchApplicationRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Batch.ApplicationClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.BatchApplicationID(d.Id()) + id, err := parse.ApplicationID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.Name) + resp, err := client.Get(ctx, id.ResourceGroup, id.BatchAccountName, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { log.Printf("[INFO] Batch Application %q does not exist - removing from state", d.Id()) d.SetId("") return nil } - return fmt.Errorf("Error reading Batch Application %q (Account Name %q / Resource Group %q): %+v", id.Name, id.AccountName, id.ResourceGroup, err) + return fmt.Errorf("Error reading Batch Application %q (Account Name %q / Resource Group %q): %+v", id.Name, id.BatchAccountName, id.ResourceGroup, err) } d.Set("name", id.Name) d.Set("resource_group_name", id.ResourceGroup) - d.Set("account_name", id.AccountName) + d.Set("account_name", id.BatchAccountName) if applicationProperties := resp.ApplicationProperties; applicationProperties != nil { d.Set("allow_updates", applicationProperties.AllowUpdates) d.Set("default_version", applicationProperties.DefaultVersion) @@ -153,12 +153,12 @@ func resourceArmBatchApplicationRead(d *schema.ResourceData, meta interface{}) e return nil } -func resourceArmBatchApplicationUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceBatchApplicationUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Batch.ApplicationClient ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.BatchApplicationID(d.Id()) + id, err := parse.ApplicationID(d.Id()) if err != nil { return err } @@ -175,25 +175,25 @@ func resourceArmBatchApplicationUpdate(d *schema.ResourceData, meta interface{}) }, } - if _, err := client.Update(ctx, id.ResourceGroup, id.AccountName, id.Name, parameters); err != nil { - return fmt.Errorf("Error updating Batch Application %q (Account Name %q / Resource Group %q): %+v", id.Name, id.AccountName, id.ResourceGroup, err) + if _, err := client.Update(ctx, id.ResourceGroup, id.BatchAccountName, id.Name, parameters); err != nil { + return fmt.Errorf("Error updating Batch Application %q (Account Name %q / Resource Group %q): %+v", id.Name, id.BatchAccountName, id.ResourceGroup, err) } - return resourceArmBatchApplicationRead(d, meta) + return resourceBatchApplicationRead(d, meta) } -func resourceArmBatchApplicationDelete(d *schema.ResourceData, meta interface{}) error { +func resourceBatchApplicationDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Batch.ApplicationClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.BatchApplicationID(d.Id()) + id, err := parse.ApplicationID(d.Id()) if err != nil { return err } - if _, err := client.Delete(ctx, id.ResourceGroup, 
id.AccountName, id.Name); err != nil { - return fmt.Errorf("Error deleting Batch Application %q (Account Name %q / Resource Group %q): %+v", id.Name, id.AccountName, id.ResourceGroup, err) + if _, err := client.Delete(ctx, id.ResourceGroup, id.BatchAccountName, id.Name); err != nil { + return fmt.Errorf("Error deleting Batch Application %q (Account Name %q / Resource Group %q): %+v", id.Name, id.BatchAccountName, id.ResourceGroup, err) } return nil diff --git a/azurerm/internal/services/batch/batch_application_resource_test.go b/azurerm/internal/services/batch/batch_application_resource_test.go index 3952d46a52a7..4c9229d0771b 100644 --- a/azurerm/internal/services/batch/batch_application_resource_test.go +++ b/azurerm/internal/services/batch/batch_application_resource_test.go @@ -1,115 +1,74 @@ package batch_test import ( + "context" "fmt" "testing" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/terraform" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/batch/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) +type BatchApplicationResource struct { +} + func TestAccBatchApplication_basic(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_batch_application", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckBatchApplicationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccBatchApplication_template(data, ""), - Check: resource.ComposeTestCheckFunc( - testCheckBatchApplicationExists(data.ResourceName), - ), - }, - data.ImportStep(), + r := BatchApplicationResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.template(data, ""), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), }, + data.ImportStep(), }) } func TestAccBatchApplication_update(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_batch_application", "test") + r := BatchApplicationResource{} displayName := fmt.Sprintf("TestAccDisplayName-%d", data.RandomInteger) - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckBatchApplicationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccBatchApplication_template(data, ""), - Check: resource.ComposeTestCheckFunc( - testCheckBatchApplicationExists(data.ResourceName), - ), - }, - { - Config: testAccBatchApplication_template(data, fmt.Sprintf(`display_name = "%s"`, displayName)), - Check: resource.ComposeTestCheckFunc( - testCheckBatchApplicationExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "display_name", displayName), - ), - }, + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.template(data, ""), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.template(data, fmt.Sprintf(`display_name = "%s"`, displayName)), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + 
check.That(data.ResourceName).Key("display_name").HasValue(displayName), + ), }, }) } -func testCheckBatchApplicationExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Batch.ApplicationClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Batch Application not found: %s", resourceName) - } - - id, err := parse.BatchApplicationID(rs.Primary.ID) - if err != nil { - return err - } - - if resp, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.Name); err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Batch Application %q (Account Name %q / Resource Group %q) does not exist", id.Name, id.AccountName, id.ResourceGroup) - } - return fmt.Errorf("Bad: Get on batchApplicationClient: %+v", err) - } - - return nil +func (t BatchApplicationResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ApplicationID(state.ID) + if err != nil { + return nil, err } -} - -func testCheckBatchApplicationDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Batch.ApplicationClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_batch_application" { - continue - } - - id, err := parse.BatchApplicationID(rs.Primary.ID) - if err != nil { - return err - } - - if resp, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.Name); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Get on batchApplicationClient: %+v", err) - } - } - return nil + resp, err := clients.Batch.ApplicationClient.Get(ctx, id.ResourceGroup, id.BatchAccountName, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Batch Application %q (Account Name %q / Resource Group %q) does not exist", id.Name, id.BatchAccountName, id.ResourceGroup) } - return nil + return utils.Bool(resp.ApplicationProperties != nil), nil } -func testAccBatchApplication_template(data acceptance.TestData, displayName string) string { +func (BatchApplicationResource) template(data acceptance.TestData, displayName string) string { return fmt.Sprintf(` provider "azurerm" { features {} diff --git a/azurerm/internal/services/batch/batch_certificate_data_source.go b/azurerm/internal/services/batch/batch_certificate_data_source.go index 9957d8f37e7d..6d79d8481e43 100644 --- a/azurerm/internal/services/batch/batch_certificate_data_source.go +++ b/azurerm/internal/services/batch/batch_certificate_data_source.go @@ -12,9 +12,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmBatchCertificate() *schema.Resource { +func dataSourceBatchCertificate() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmBatchCertificateRead, + Read: dataSourceBatchCertificateRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -55,7 +55,7 @@ func dataSourceArmBatchCertificate() *schema.Resource { } } -func dataSourceArmBatchCertificateRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceBatchCertificateRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Batch.CertificateClient ctx, cancel := 
timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -87,6 +87,7 @@ func dataSourceArmBatchCertificateRead(d *schema.ResourceData, meta interface{}) return nil } + func validateAzureRMBatchCertificateName(v interface{}, k string) (warnings []string, errors []error) { value := v.(string) if !regexp.MustCompile(`^[\w]+-[\w]+$`).MatchString(value) { diff --git a/azurerm/internal/services/batch/batch_certificate_data_source_test.go b/azurerm/internal/services/batch/batch_certificate_data_source_test.go index b857602519bb..116562ba2b69 100644 --- a/azurerm/internal/services/batch/batch_certificate_data_source_test.go +++ b/azurerm/internal/services/batch/batch_certificate_data_source_test.go @@ -6,38 +6,38 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" ) +type BatchCertificateDataSource struct { +} + func TestAccBatchCertificateDataSource_basic(t *testing.T) { data := acceptance.BuildTestData(t, "data.azurerm_batch_certificate", "test") + r := BatchCertificateDataSource{} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccBatchCertificateDataSource_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "name", "sha1-42c107874fd0e4a9583292a2f1098e8fe4b2edda"), - resource.TestCheckResourceAttr(data.ResourceName, "account_name", fmt.Sprintf("testaccbatch%s", data.RandomString)), - resource.TestCheckResourceAttr(data.ResourceName, "format", "Pfx"), - resource.TestCheckResourceAttr(data.ResourceName, "public_data", 
"MIIFqzCCA5OgAwIBAgIJAMs4jwMPq7T1MA0GCSqGSIb3DQEBCwUAMGwxCzAJBgNVBAYTAlVTMRMwEQYDVQQIDApTb21lLVN0YXRlMRgwFgYDVQQKDA9UZXJyYWZvcm0gVGVzdHMxDjAMBgNVBAsMBUF6dXJlMR4wHAYDVQQDDBVUZXJyYWZvcm0gQXBwIEdhdGV3YXkwHhcNMTYxMTAxMTcxOTEyWhcNMjYxMDMwMTcxOTEyWjBsMQswCQYDVQQGEwJVUzETMBEGA1UECAwKU29tZS1TdGF0ZTEYMBYGA1UECgwPVGVycmFmb3JtIFRlc3RzMQ4wDAYDVQQLDAVBenVyZTEeMBwGA1UEAwwVVGVycmFmb3JtIEFwcCBHYXRld2F5MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA49HW2pYIlW/mlaadLA1AsXiV48xVhXAvGVk3DEl1ffjp5bN8rap5WV1D83uMg1Ii7CJM8yNHkRkvN8n5WXFng4R5V1jPxGOTAj+xLybvEASi++GZelWdpOuMk8/nAoKPMbQ5NyKFy5WzlOduMldR7Awt2pwdId3akqm1i9ITG9Js+4P4nYXM8vfJCajILqi4YfhEoCNvS1EUgvlpSFE7pfNhc2W+zsfUWxWmB2SpWwX9MgQ1D4OmdKp+Eo+b6vzst3XArKMHMadPTUAk8H+ZgAnlX9yO+3vQ6z86vma/WgrG2LH6GCGXBjmKlhxVCPMLA5LeRUwEGc/Q7X/ClitGWY9umPN1XVj5e5Di1K2M082Y14mgbTTRTpv/nx7Xlph+MHnVhEWvaGMpqCHuM1W1y7wIS1IREYQ2q+K54xxZSPKYJMSnmj6A0hR/LBV0rL1uVhedEpdviduuO76qCyZrGG4HwBlW4hnIaahLzgqlvlmbDUQonAVPDgi3brVdXJgLv2zi7/ZHFW3IHgDylUVIdig0ccbzxKymlkGQ0RsLBjWOyxak2J8bN5JNVyxSwX43NZqxJ8yOv5xjB+rVMri9SX3Dl5NbFzOjynov601Pmwvb7zYnyttG2Hl5EKrkahjijGRjGy3EWEiBiArLkdTKCDHBlHxykTEvY6ZH5B9waP0CAwEAAaNQME4wHQYDVR0OBBYEFD2/Hq3IivZ5RMOKrPsM7ijIFHmMMB8GA1UdIwQYMBaAFD2/Hq3IivZ5RMOKrPsM7ijIFHmMMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQELBQADggIBAKxHWO/Q4labjnCVxYi+kaMRCPJUdHj7lga8yi8EGHaL+CbwynkaiyTfPvtmcqiuaZM9BaXsuNMRcHMtXM0EHBsjViwAHk6SrqLXd/opFvMI2QbG93koFUCpczrpyO9GvnRN4iOIYbSPXAdGOB6bkpMbm/XajORoDrua+/ET/X/1FP0GZBTmEFwojuCfOI/VuJXj0OW8XzkLmsXiLpOiakjU1obBup/1lz9DtOEBsiB9Ury+f5gZ+FnZuqhgQxeDxlZ69P6YYAfkzhcfbf7HO+nMKhppAj1BFeR4SBb+F/fLchCGO5yohwkxWz3i2q9gTDhBgo31416viyCKFWSVW3Vn7jbsjZ+Q9MK1jVSOSxC7qoQkRoNy9SKpqylunXZb+K6F3HfBkDQvn3OwsxYiSOcX9JaWpQAInNIZVg+WrJ1PXm8PFIaVPJfMgP3GOdm9vRAMjOM5Bc9iqGr2spimFd5h0GmgLvh35B3jHHWF4i3NupJQ6hUvHQZtYZOxfwxnY0/LVBTyLTVlniFA7dGSI+5Uexm+Pjh7IMGI532jTONlfNm9Bz/jdf1o0FlOclzG6Eif22gml3GM3xCUVlaElylYNAjO2lfvZuRVo5GKdMwtV9acNl0OwSx+0zbMYY2Ni3jQCI4kOL5Csctryf0rHXTlCCvnzBYVDPKmFJPna61T"), - resource.TestCheckResourceAttr(data.ResourceName, "thumbprint", "42c107874fd0e4a9583292a2f1098e8fe4b2edda"), - resource.TestCheckResourceAttr(data.ResourceName, "thumbprint_algorithm", "sha1"), // api now always returns this as lowercase - ), - }, + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("name").HasValue("sha1-42c107874fd0e4a9583292a2f1098e8fe4b2edda"), + check.That(data.ResourceName).Key("format").HasValue("Pfx"), + 
check.That(data.ResourceName).Key("public_data").HasValue("MIIFqzCCA5OgAwIBAgIJAMs4jwMPq7T1MA0GCSqGSIb3DQEBCwUAMGwxCzAJBgNVBAYTAlVTMRMwEQYDVQQIDApTb21lLVN0YXRlMRgwFgYDVQQKDA9UZXJyYWZvcm0gVGVzdHMxDjAMBgNVBAsMBUF6dXJlMR4wHAYDVQQDDBVUZXJyYWZvcm0gQXBwIEdhdGV3YXkwHhcNMTYxMTAxMTcxOTEyWhcNMjYxMDMwMTcxOTEyWjBsMQswCQYDVQQGEwJVUzETMBEGA1UECAwKU29tZS1TdGF0ZTEYMBYGA1UECgwPVGVycmFmb3JtIFRlc3RzMQ4wDAYDVQQLDAVBenVyZTEeMBwGA1UEAwwVVGVycmFmb3JtIEFwcCBHYXRld2F5MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA49HW2pYIlW/mlaadLA1AsXiV48xVhXAvGVk3DEl1ffjp5bN8rap5WV1D83uMg1Ii7CJM8yNHkRkvN8n5WXFng4R5V1jPxGOTAj+xLybvEASi++GZelWdpOuMk8/nAoKPMbQ5NyKFy5WzlOduMldR7Awt2pwdId3akqm1i9ITG9Js+4P4nYXM8vfJCajILqi4YfhEoCNvS1EUgvlpSFE7pfNhc2W+zsfUWxWmB2SpWwX9MgQ1D4OmdKp+Eo+b6vzst3XArKMHMadPTUAk8H+ZgAnlX9yO+3vQ6z86vma/WgrG2LH6GCGXBjmKlhxVCPMLA5LeRUwEGc/Q7X/ClitGWY9umPN1XVj5e5Di1K2M082Y14mgbTTRTpv/nx7Xlph+MHnVhEWvaGMpqCHuM1W1y7wIS1IREYQ2q+K54xxZSPKYJMSnmj6A0hR/LBV0rL1uVhedEpdviduuO76qCyZrGG4HwBlW4hnIaahLzgqlvlmbDUQonAVPDgi3brVdXJgLv2zi7/ZHFW3IHgDylUVIdig0ccbzxKymlkGQ0RsLBjWOyxak2J8bN5JNVyxSwX43NZqxJ8yOv5xjB+rVMri9SX3Dl5NbFzOjynov601Pmwvb7zYnyttG2Hl5EKrkahjijGRjGy3EWEiBiArLkdTKCDHBlHxykTEvY6ZH5B9waP0CAwEAAaNQME4wHQYDVR0OBBYEFD2/Hq3IivZ5RMOKrPsM7ijIFHmMMB8GA1UdIwQYMBaAFD2/Hq3IivZ5RMOKrPsM7ijIFHmMMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQELBQADggIBAKxHWO/Q4labjnCVxYi+kaMRCPJUdHj7lga8yi8EGHaL+CbwynkaiyTfPvtmcqiuaZM9BaXsuNMRcHMtXM0EHBsjViwAHk6SrqLXd/opFvMI2QbG93koFUCpczrpyO9GvnRN4iOIYbSPXAdGOB6bkpMbm/XajORoDrua+/ET/X/1FP0GZBTmEFwojuCfOI/VuJXj0OW8XzkLmsXiLpOiakjU1obBup/1lz9DtOEBsiB9Ury+f5gZ+FnZuqhgQxeDxlZ69P6YYAfkzhcfbf7HO+nMKhppAj1BFeR4SBb+F/fLchCGO5yohwkxWz3i2q9gTDhBgo31416viyCKFWSVW3Vn7jbsjZ+Q9MK1jVSOSxC7qoQkRoNy9SKpqylunXZb+K6F3HfBkDQvn3OwsxYiSOcX9JaWpQAInNIZVg+WrJ1PXm8PFIaVPJfMgP3GOdm9vRAMjOM5Bc9iqGr2spimFd5h0GmgLvh35B3jHHWF4i3NupJQ6hUvHQZtYZOxfwxnY0/LVBTyLTVlniFA7dGSI+5Uexm+Pjh7IMGI532jTONlfNm9Bz/jdf1o0FlOclzG6Eif22gml3GM3xCUVlaElylYNAjO2lfvZuRVo5GKdMwtV9acNl0OwSx+0zbMYY2Ni3jQCI4kOL5Csctryf0rHXTlCCvnzBYVDPKmFJPna61T"), + check.That(data.ResourceName).Key("thumbprint").HasValue("42c107874fd0e4a9583292a2f1098e8fe4b2edda"), + check.That(data.ResourceName).Key("thumbprint_algorithm").HasValue("sha1"), + ), }, }) } -func testAccBatchCertificateDataSource_basic(data acceptance.TestData) string { +func (BatchCertificateDataSource) basic(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} } resource "azurerm_resource_group" "test" { - name = "testaccbatch%d" + name = "testaccRG-batch-%d" location = "%s" } diff --git a/azurerm/internal/services/batch/batch_certificate_resource.go b/azurerm/internal/services/batch/batch_certificate_resource.go index 0fb7e1409834..15cd626a8fbc 100644 --- a/azurerm/internal/services/batch/batch_certificate_resource.go +++ b/azurerm/internal/services/batch/batch_certificate_resource.go @@ -5,7 +5,7 @@ import ( "log" "time" - "github.com/Azure/azure-sdk-for-go/services/batch/mgmt/2019-08-01/batch" + "github.com/Azure/azure-sdk-for-go/services/batch/mgmt/2020-03-01/batch" "github.com/hashicorp/go-azure-helpers/response" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" @@ -19,12 +19,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmBatchCertificate() *schema.Resource { +func resourceBatchCertificate() *schema.Resource { return &schema.Resource{ - Create: resourceArmBatchCertificateCreate, - Read: resourceArmBatchCertificateRead, - Update: 
resourceArmBatchCertificateUpdate, - Delete: resourceArmBatchCertificateDelete, + Create: resourceBatchCertificateCreate, + Read: resourceBatchCertificateRead, + Update: resourceBatchCertificateUpdate, + Delete: resourceBatchCertificateDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -34,7 +34,7 @@ func resourceArmBatchCertificate() *schema.Resource { }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.BatchCertificateID(id) + _, err := parse.CertificateID(id) return err }), @@ -100,7 +100,7 @@ func resourceArmBatchCertificate() *schema.Resource { } } -func resourceArmBatchCertificateCreate(d *schema.ResourceData, meta interface{}) error { +func resourceBatchCertificateCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Batch.CertificateClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -160,31 +160,31 @@ func resourceArmBatchCertificateCreate(d *schema.ResourceData, meta interface{}) return fmt.Errorf("Error retrieving Batch certificate %q (Account %q / Resource Group %q): %+v", name, accountName, resourceGroupName, err) } d.SetId(*read.ID) - return resourceArmBatchCertificateRead(d, meta) + return resourceBatchCertificateRead(d, meta) } -func resourceArmBatchCertificateRead(d *schema.ResourceData, meta interface{}) error { +func resourceBatchCertificateRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Batch.CertificateClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.BatchCertificateID(d.Id()) + id, err := parse.CertificateID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.Name) + resp, err := client.Get(ctx, id.ResourceGroup, id.BatchAccountName, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") - log.Printf("[DEBUG] Batch certificate %q was not found in Account %q / Resource Group %q - removing from state!", id.Name, id.AccountName, id.ResourceGroup) + log.Printf("[DEBUG] Batch certificate %q was not found in Account %q / Resource Group %q - removing from state!", id.Name, id.BatchAccountName, id.ResourceGroup) return nil } - return fmt.Errorf("Error retrieving Batch Certificate %q (Account %q / Resource Group %q): %+v", id.Name, id.AccountName, id.ResourceGroup, err) + return fmt.Errorf("Error retrieving Batch Certificate %q (Account %q / Resource Group %q): %+v", id.Name, id.BatchAccountName, id.ResourceGroup, err) } d.Set("name", id.Name) - d.Set("account_name", id.AccountName) + d.Set("account_name", id.BatchAccountName) d.Set("resource_group_name", id.ResourceGroup) if props := resp.CertificateProperties; props != nil { @@ -197,14 +197,14 @@ func resourceArmBatchCertificateRead(d *schema.ResourceData, meta interface{}) e return nil } -func resourceArmBatchCertificateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceBatchCertificateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Batch.CertificateClient ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) defer cancel() log.Printf("[INFO] preparing arguments for Azure Batch certificate update.") - id, err := parse.BatchCertificateID(d.Id()) + id, err := parse.CertificateID(d.Id()) if err != nil { return err } @@ -230,40 +230,40 @@ func resourceArmBatchCertificateUpdate(d *schema.ResourceData, 
meta interface{}) }, } - if _, err = client.Update(ctx, id.ResourceGroup, id.AccountName, id.Name, parameters, ""); err != nil { - return fmt.Errorf("Error updating Batch certificate %q (Account %q / Resource Group %q): %+v", id.Name, id.AccountName, id.ResourceGroup, err) + if _, err = client.Update(ctx, id.ResourceGroup, id.BatchAccountName, id.Name, parameters, ""); err != nil { + return fmt.Errorf("Error updating Batch certificate %q (Account %q / Resource Group %q): %+v", id.Name, id.BatchAccountName, id.ResourceGroup, err) } - read, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.Name) + read, err := client.Get(ctx, id.ResourceGroup, id.BatchAccountName, id.Name) if err != nil { - return fmt.Errorf("Error retrieving Batch Certificate %q (Account %q / Resource Group %q): %+v", id.Name, id.AccountName, id.ResourceGroup, err) + return fmt.Errorf("Error retrieving Batch Certificate %q (Account %q / Resource Group %q): %+v", id.Name, id.BatchAccountName, id.ResourceGroup, err) } if read.ID == nil { - return fmt.Errorf("Cannot read ID for Batch certificate %q (Account: %q, Resource Group %q) ID", id.Name, id.AccountName, id.ResourceGroup) + return fmt.Errorf("Cannot read ID for Batch certificate %q (Account: %q, Resource Group %q) ID", id.Name, id.BatchAccountName, id.ResourceGroup) } - return resourceArmBatchCertificateRead(d, meta) + return resourceBatchCertificateRead(d, meta) } -func resourceArmBatchCertificateDelete(d *schema.ResourceData, meta interface{}) error { +func resourceBatchCertificateDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Batch.CertificateClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.BatchCertificateID(d.Id()) + id, err := parse.CertificateID(d.Id()) if err != nil { return err } - future, err := client.Delete(ctx, id.ResourceGroup, id.AccountName, id.Name) + future, err := client.Delete(ctx, id.ResourceGroup, id.BatchAccountName, id.Name) if err != nil { - return fmt.Errorf("Error deleting Batch Certificate %q (Account %q / Resource Group %q): %+v", id.Name, id.AccountName, id.ResourceGroup, err) + return fmt.Errorf("Error deleting Batch Certificate %q (Account %q / Resource Group %q): %+v", id.Name, id.BatchAccountName, id.ResourceGroup, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { if !response.WasNotFound(future.Response()) { - return fmt.Errorf("Error waiting for deletion of Batch Certificate %q (Account %q / Resource Group %q): %+v", id.Name, id.AccountName, id.ResourceGroup, err) + return fmt.Errorf("Error waiting for deletion of Batch Certificate %q (Account %q / Resource Group %q): %+v", id.Name, id.BatchAccountName, id.ResourceGroup, err) } } diff --git a/azurerm/internal/services/batch/batch_certificate_resource_test.go b/azurerm/internal/services/batch/batch_certificate_resource_test.go index 9e3c3e32ce61..70ed88b08e1b 100644 --- a/azurerm/internal/services/batch/batch_certificate_resource_test.go +++ b/azurerm/internal/services/batch/batch_certificate_resource_test.go @@ -1,6 +1,7 @@ package batch_test import ( + "context" "fmt" "os" "regexp" @@ -9,105 +10,103 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/terraform" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" 
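An aside on the test migration that runs through these hunks: each Batch resource gains a typed struct (BatchApplicationResource, BatchCertificateResource, BatchPoolResource below) which is handed both to data.ResourceTest and to check.That(...).ExistsInAzure(...). The Exists methods added in this patch suggest the contract those helpers rely on is roughly the interface sketched here; this is an illustrative sketch, not the definition from the acceptance package, and the package and interface names are placeholders.

package acceptance_sketch // hypothetical package name, for illustration only

import (
	"context"

	"github.com/hashicorp/terraform-plugin-sdk/terraform"

	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
)

// TestResource is the shape that data.ResourceTest and check.That(...).ExistsInAzure
// appear to expect: given the provider's API clients and the state of a single resource
// instance, report whether that resource still exists in Azure.
type TestResource interface {
	Exists(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error)
}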
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/batch/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) +type BatchCertificateResource struct { +} + func TestAccBatchCertificate_Pfx(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_batch_certificate", "test") + r := BatchCertificateResource{} subscriptionID := os.Getenv("ARM_SUBSCRIPTION_ID") - certificateID := fmt.Sprintf("/subscriptions/%s/resourceGroups/testaccbatch%d/providers/Microsoft.Batch/batchAccounts/testaccbatch%s/certificates/sha1-42c107874fd0e4a9583292a2f1098e8fe4b2edda", subscriptionID, data.RandomInteger, data.RandomString) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckBatchCertificateDestroy, - Steps: []resource.TestStep{ - { - Config: testAccBatchCertificatePfx(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "id", certificateID), - resource.TestCheckResourceAttr(data.ResourceName, "format", "Pfx"), - resource.TestCheckResourceAttr(data.ResourceName, "thumbprint", "42c107874fd0e4a9583292a2f1098e8fe4b2edda"), - resource.TestCheckResourceAttr(data.ResourceName, "thumbprint_algorithm", "sha1"), - ), - }, - { - ResourceName: data.ResourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"certificate", "password"}, - }, + certificateID := fmt.Sprintf("/subscriptions/%s/resourceGroups/testacc-batch-%d/providers/Microsoft.Batch/batchAccounts/testaccbatch%s/certificates/sha1-42c107874fd0e4a9583292a2f1098e8fe4b2edda", subscriptionID, data.RandomInteger, data.RandomString) + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.pfx(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("id").HasValue(certificateID), + check.That(data.ResourceName).Key("format").HasValue("Pfx"), + check.That(data.ResourceName).Key("thumbprint").HasValue("42c107874fd0e4a9583292a2f1098e8fe4b2edda"), + check.That(data.ResourceName).Key("thumbprint_algorithm").HasValue("sha1"), + ), }, + data.ImportStep("certificate", "password"), }) } func TestAccBatchCertificate_PfxWithoutPassword(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_batch_certificate", "test") + r := BatchCertificateResource{} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckBatchCertificateDestroy, - Steps: []resource.TestStep{ - { - Config: testAccBatchCertificatePfxWithoutPassword(data), - ExpectError: regexp.MustCompile("Password is required"), - }, + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.pfxWithoutPassword(data), + ExpectError: regexp.MustCompile("Password is required"), }, }) } func TestAccBatchCertificate_Cer(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_batch_certificate", "test") + r := BatchCertificateResource{} subscriptionID := os.Getenv("ARM_SUBSCRIPTION_ID") - certificateID := fmt.Sprintf("/subscriptions/%s/resourceGroups/testaccbatch%d/providers/Microsoft.Batch/batchAccounts/testaccbatch%s/certificates/sha1-312d31a79fa0cef49c00f769afc2b73e9f4edf34", subscriptionID, data.RandomInteger, data.RandomString) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: 
func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckBatchCertificateDestroy, - Steps: []resource.TestStep{ - { - Config: testAccBatchCertificateCer(data), - Check: resource.ComposeTestCheckFunc( - - resource.TestCheckResourceAttr(data.ResourceName, "id", certificateID), - resource.TestCheckResourceAttr(data.ResourceName, "format", "Cer"), - resource.TestCheckResourceAttr(data.ResourceName, "thumbprint", "312d31a79fa0cef49c00f769afc2b73e9f4edf34"), - resource.TestCheckResourceAttr(data.ResourceName, "thumbprint_algorithm", "sha1"), - ), - }, - data.ImportStep("certificate"), + certificateID := fmt.Sprintf("/subscriptions/%s/resourceGroups/testacc-batch-%d/providers/Microsoft.Batch/batchAccounts/testaccbatch%s/certificates/sha1-312d31a79fa0cef49c00f769afc2b73e9f4edf34", subscriptionID, data.RandomInteger, data.RandomString) + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.cer(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("id").HasValue(certificateID), + check.That(data.ResourceName).Key("format").HasValue("Cer"), + check.That(data.ResourceName).Key("thumbprint").HasValue("312d31a79fa0cef49c00f769afc2b73e9f4edf34"), + check.That(data.ResourceName).Key("thumbprint_algorithm").HasValue("sha1"), + ), }, + data.ImportStep("certificate"), }) } func TestAccBatchCertificate_CerWithPassword(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_batch_certificate", "test") + r := BatchCertificateResource{} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckBatchCertificateDestroy, - Steps: []resource.TestStep{ - { - Config: testAccBatchCertificateCerWithPassword(data), - ExpectError: regexp.MustCompile("Password must not be specified"), - }, + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.cerwithPassword(data), + ExpectError: regexp.MustCompile("Password must not be specified"), }, }) } -func testAccBatchCertificatePfx(data acceptance.TestData) string { +func (t BatchCertificateResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.CertificateID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Batch.CertificateClient.Get(ctx, id.ResourceGroup, id.BatchAccountName, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Batch Certificate %q (Account Name %q / Resource Group %q) does not exist", id.Name, id.BatchAccountName, id.ResourceGroup) + } + + return utils.Bool(resp.CertificateProperties != nil), nil +} + +func (BatchCertificateResource) pfx(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} } resource "azurerm_resource_group" "test" { - name = "testaccbatch%d" + name = "testaccRG-batch-%d" location = "%s" } @@ -130,14 +129,14 @@ resource "azurerm_batch_certificate" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomString) } -func testAccBatchCertificatePfxWithoutPassword(data acceptance.TestData) string { +func (BatchCertificateResource) pfxWithoutPassword(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} } resource "azurerm_resource_group" "test" { - name = "testaccbatch%d" + name = "testaccRG-batch-%d" location = "%s" } @@ -158,14 +157,15 @@ resource "azurerm_batch_certificate" "test" { } `, data.RandomInteger, 
data.Locations.Primary, data.RandomString) } -func testAccBatchCertificateCer(data acceptance.TestData) string { + +func (BatchCertificateResource) cer(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} } resource "azurerm_resource_group" "test" { - name = "testaccbatch%d" + name = "testaccRG-batch-%d" location = "%s" } @@ -186,14 +186,15 @@ resource "azurerm_batch_certificate" "test" { } `, data.RandomInteger, data.Locations.Primary, data.RandomString) } -func testAccBatchCertificateCerWithPassword(data acceptance.TestData) string { + +func (BatchCertificateResource) cerwithPassword(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} } resource "azurerm_resource_group" "test" { - name = "testaccbatch%d" + name = "testaccRG-batch-%d" location = "%s" } @@ -215,30 +216,3 @@ resource "azurerm_batch_certificate" "test" { } `, data.RandomInteger, data.Locations.Primary, data.RandomString) } - -func testCheckBatchCertificateDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Batch.CertificateClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_batch_certificate" { - continue - } - - id, err := parse.BatchCertificateID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.AccountName, id.Name) - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return err - } - } - - return nil - } - - return nil -} diff --git a/azurerm/internal/services/batch/batch_pool.go b/azurerm/internal/services/batch/batch_pool.go index 025889e8f515..bb6003c6dc8c 100644 --- a/azurerm/internal/services/batch/batch_pool.go +++ b/azurerm/internal/services/batch/batch_pool.go @@ -7,7 +7,7 @@ import ( "strconv" "strings" - "github.com/Azure/azure-sdk-for-go/services/batch/mgmt/2019-08-01/batch" + "github.com/Azure/azure-sdk-for-go/services/batch/mgmt/2020-03-01/batch" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) @@ -583,7 +583,7 @@ func ExpandBatchPoolNetworkConfiguration(list []interface{}) (*batch.NetworkConf if v, ok := networkConfigValue["public_ips"]; ok { publicIPsRaw := v.(*schema.Set).List() - networkConfiguration.PublicIPs = utils.ExpandStringSlice(publicIPsRaw) + networkConfiguration.PublicIPAddressConfiguration.IPAddressIds = utils.ExpandStringSlice(publicIPsRaw) } if v, ok := networkConfigValue["endpoint_configuration"]; ok { @@ -594,6 +594,12 @@ func ExpandBatchPoolNetworkConfiguration(list []interface{}) (*batch.NetworkConf networkConfiguration.EndpointConfiguration = endpoint } + if v, ok := networkConfigValue["public_address_provisioning_type"]; ok { + if value := v.(string); value != "" { + networkConfiguration.PublicIPAddressConfiguration.Provision = batch.IPAddressProvisioningType(value) + } + } + return networkConfiguration, nil } @@ -676,8 +682,9 @@ func FlattenBatchPoolNetworkConfiguration(networkConfig *batch.NetworkConfigurat result["subnet_id"] = *networkConfig.SubnetID } - if networkConfig.PublicIPs != nil { - result["public_ips"] = schema.NewSet(schema.HashString, utils.FlattenStringSlice(networkConfig.PublicIPs)) + if networkConfig.PublicIPAddressConfiguration != nil { + result["public_ips"] = schema.NewSet(schema.HashString, utils.FlattenStringSlice(networkConfig.PublicIPAddressConfiguration.IPAddressIds)) + 
result["public_address_provisioning_type"] = string(networkConfig.PublicIPAddressConfiguration.Provision) } if cfg := networkConfig.EndpointConfiguration; cfg != nil && cfg.InboundNatPools != nil && len(*cfg.InboundNatPools) != 0 { diff --git a/azurerm/internal/services/batch/batch_pool_data_source.go b/azurerm/internal/services/batch/batch_pool_data_source.go index eb86aaa5d397..11ba12d29c08 100644 --- a/azurerm/internal/services/batch/batch_pool_data_source.go +++ b/azurerm/internal/services/batch/batch_pool_data_source.go @@ -12,9 +12,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmBatchPool() *schema.Resource { +func dataSourceBatchPool() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmBatchPoolRead, + Read: dataSourceBatchPoolRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -359,7 +359,7 @@ func dataSourceArmBatchPool() *schema.Resource { } } -func dataSourceArmBatchPoolRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceBatchPoolRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Batch.PoolClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/batch/batch_pool_data_source_test.go b/azurerm/internal/services/batch/batch_pool_data_source_test.go index 65332a3dd83f..37144cf45645 100644 --- a/azurerm/internal/services/batch/batch_pool_data_source_test.go +++ b/azurerm/internal/services/batch/batch_pool_data_source_test.go @@ -5,67 +5,66 @@ import ( "testing" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" ) +type BatchPoolDataSource struct { +} + func TestAccBatchPoolDataSource_complete(t *testing.T) { data := acceptance.BuildTestData(t, "data.azurerm_batch_pool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccBatchPoolDataSource_complete(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "name", fmt.Sprintf("testaccpool%s", data.RandomString)), - resource.TestCheckResourceAttr(data.ResourceName, "account_name", fmt.Sprintf("testaccbatch%s", data.RandomString)), - resource.TestCheckResourceAttr(data.ResourceName, "vm_size", "STANDARD_A1"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.0.publisher", "microsoft-azure-batch"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.0.sku", "16-04-lts"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.0.offer", "ubuntu-server-container"), - resource.TestCheckResourceAttr(data.ResourceName, "fixed_scale.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "fixed_scale.0.target_dedicated_nodes", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "fixed_scale.0.resize_timeout", "PT15M"), - resource.TestCheckResourceAttr(data.ResourceName, "fixed_scale.0.target_low_priority_nodes", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "node_agent_sku_id", "batch.node.ubuntu 16.04"), - 
resource.TestCheckResourceAttr(data.ResourceName, "max_tasks_per_node", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "start_task.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "start_task.0.max_task_retry_count", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "start_task.0.environment.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "start_task.0.environment.env", "TEST"), - resource.TestCheckResourceAttr(data.ResourceName, "start_task.0.user_identity.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "start_task.0.user_identity.0.auto_user.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "start_task.0.user_identity.0.auto_user.0.scope", "Task"), - resource.TestCheckResourceAttr(data.ResourceName, "start_task.0.user_identity.0.auto_user.0.elevation_level", "NonAdmin"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate.#", "1"), - resource.TestCheckResourceAttrSet(data.ResourceName, "certificate.0.id"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate.0.store_location", "CurrentUser"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate.0.store_name", ""), - resource.TestCheckResourceAttr(data.ResourceName, "certificate.0.visibility.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate.0.visibility.3294600504", "StartTask"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate.0.visibility.4077195354", "RemoteUser"), - resource.TestCheckResourceAttr(data.ResourceName, "container_configuration.0.type", "DockerCompatible"), - resource.TestCheckResourceAttr(data.ResourceName, "container_configuration.0.container_registries.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "container_configuration.0.container_registries.0.registry_server", "myContainerRegistry.azurecr.io"), - resource.TestCheckResourceAttr(data.ResourceName, "container_configuration.0.container_registries.0.user_name", "myUserName"), - resource.TestCheckResourceAttr(data.ResourceName, "metadata.tagName", "Example tag"), - ), - }, + r := BatchPoolDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("vm_size").HasValue("STANDARD_A1"), + check.That(data.ResourceName).Key("storage_image_reference.#").HasValue("1"), + check.That(data.ResourceName).Key("storage_image_reference.0.publisher").HasValue("microsoft-azure-batch"), + check.That(data.ResourceName).Key("storage_image_reference.0.sku").HasValue("16-04-lts"), + check.That(data.ResourceName).Key("storage_image_reference.0.offer").HasValue("ubuntu-server-container"), + check.That(data.ResourceName).Key("fixed_scale.#").HasValue("1"), + check.That(data.ResourceName).Key("fixed_scale.0.target_dedicated_nodes").HasValue("2"), + check.That(data.ResourceName).Key("fixed_scale.0.resize_timeout").HasValue("PT15M"), + check.That(data.ResourceName).Key("fixed_scale.0.target_low_priority_nodes").HasValue("0"), + check.That(data.ResourceName).Key("node_agent_sku_id").HasValue("batch.node.ubuntu 16.04"), + check.That(data.ResourceName).Key("max_tasks_per_node").HasValue("2"), + check.That(data.ResourceName).Key("start_task.#").HasValue("1"), + check.That(data.ResourceName).Key("start_task.0.max_task_retry_count").HasValue("1"), + check.That(data.ResourceName).Key("start_task.0.environment.%").HasValue("1"), + check.That(data.ResourceName).Key("start_task.0.environment.env").HasValue("TEST"), + 
check.That(data.ResourceName).Key("start_task.0.user_identity.#").HasValue("1"), + check.That(data.ResourceName).Key("start_task.0.user_identity.0.auto_user.#").HasValue("1"), + check.That(data.ResourceName).Key("start_task.0.user_identity.0.auto_user.0.scope").HasValue("Task"), + check.That(data.ResourceName).Key("start_task.0.user_identity.0.auto_user.0.elevation_level").HasValue("NonAdmin"), + check.That(data.ResourceName).Key("certificate.#").HasValue("1"), + check.That(data.ResourceName).Key("certificate.0.id").Exists(), + check.That(data.ResourceName).Key("certificate.0.store_location").HasValue("CurrentUser"), + check.That(data.ResourceName).Key("certificate.0.store_name").HasValue(""), + check.That(data.ResourceName).Key("certificate.0.visibility.#").HasValue("2"), + check.That(data.ResourceName).Key("certificate.0.visibility.3294600504").HasValue("StartTask"), + check.That(data.ResourceName).Key("certificate.0.visibility.4077195354").HasValue("RemoteUser"), + check.That(data.ResourceName).Key("container_configuration.0.type").HasValue("DockerCompatible"), + check.That(data.ResourceName).Key("container_configuration.0.container_registries.#").HasValue("1"), + check.That(data.ResourceName).Key("container_configuration.0.container_registries.0.registry_server").HasValue("myContainerRegistry.azurecr.io"), + check.That(data.ResourceName).Key("container_configuration.0.container_registries.0.user_name").HasValue("myUserName"), + check.That(data.ResourceName).Key("metadata.tagName").HasValue("Example tag"), + ), }, }) } -func testAccBatchPoolDataSource_complete(data acceptance.TestData) string { +func (BatchPoolDataSource) complete(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} } resource "azurerm_resource_group" "test" { - name = "testaccRG-%d-batch" + name = "testaccRG-batch-%d" location = "%s" } diff --git a/azurerm/internal/services/batch/batch_pool_resource.go b/azurerm/internal/services/batch/batch_pool_resource.go index 3d9312df989b..1aefea6b975f 100644 --- a/azurerm/internal/services/batch/batch_pool_resource.go +++ b/azurerm/internal/services/batch/batch_pool_resource.go @@ -9,7 +9,7 @@ import ( "strings" "time" - "github.com/Azure/azure-sdk-for-go/services/batch/mgmt/2019-08-01/batch" + "github.com/Azure/azure-sdk-for-go/services/batch/mgmt/2020-03-01/batch" "github.com/hashicorp/go-azure-helpers/response" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" @@ -23,12 +23,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmBatchPool() *schema.Resource { +func resourceBatchPool() *schema.Resource { return &schema.Resource{ - Create: resourceArmBatchPoolCreate, - Read: resourceArmBatchPoolRead, - Update: resourceArmBatchPoolUpdate, - Delete: resourceArmBatchPoolDelete, + Create: resourceBatchPoolCreate, + Read: resourceBatchPoolRead, + Update: resourceBatchPoolUpdate, + Delete: resourceBatchPoolDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -38,7 +38,7 @@ func resourceArmBatchPool() *schema.Resource { }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.BatchPoolID(id) + _, err := parse.PoolID(id) return err }), Schema: map[string]*schema.Schema{ @@ -413,6 +413,15 @@ func resourceArmBatchPool() *schema.Resource { }, Set: schema.HashString, }, + "public_address_provisioning_type": { + Type: schema.TypeString, + Optional: true, + 
ValidateFunc: validation.StringInSlice([]string{ + string(batch.BatchManaged), + string(batch.UserManaged), + string(batch.NoPublicIPAddresses), + }, false), + }, "endpoint_configuration": { Type: schema.TypeList, Optional: true, @@ -489,7 +498,7 @@ func resourceArmBatchPool() *schema.Resource { } } -func resourceArmBatchPoolCreate(d *schema.ResourceData, meta interface{}) error { +func resourceBatchPoolCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Batch.PoolClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -627,20 +636,20 @@ func resourceArmBatchPoolCreate(d *schema.ResourceData, meta interface{}) error } } - return resourceArmBatchPoolRead(d, meta) + return resourceBatchPoolRead(d, meta) } -func resourceArmBatchPoolUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceBatchPoolUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Batch.PoolClient ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.BatchPoolID(d.Id()) + id, err := parse.PoolID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.Name) + resp, err := client.Get(ctx, id.ResourceGroup, id.BatchAccountName, id.Name) if err != nil { return fmt.Errorf("Error retrieving the Batch pool %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } @@ -653,12 +662,12 @@ func resourceArmBatchPoolUpdate(d *schema.ResourceData, meta interface{}) error } log.Printf("[INFO] stopping the pending resize operation on this pool...") - if _, err = client.StopResize(ctx, id.ResourceGroup, id.AccountName, id.Name); err != nil { + if _, err = client.StopResize(ctx, id.ResourceGroup, id.BatchAccountName, id.Name); err != nil { return fmt.Errorf("Error stopping resize operation for Batch pool %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } // waiting for the pool to be in steady state - if err = waitForBatchPoolPendingResizeOperation(ctx, client, id.ResourceGroup, id.AccountName, id.Name); err != nil { + if err = waitForBatchPoolPendingResizeOperation(ctx, client, id.ResourceGroup, id.BatchAccountName, id.Name); err != nil { return fmt.Errorf("Error waiting for Batch pool %q (resource group %q) being ready", id.Name, id.ResourceGroup) } } @@ -702,47 +711,47 @@ func resourceArmBatchPoolUpdate(d *schema.ResourceData, meta interface{}) error } if d.HasChange("metadata") { - log.Printf("[DEBUG] Updating the MetaData for Batch pool %q (Account name %q / Resource Group %q)..", id.Name, id.AccountName, id.ResourceGroup) + log.Printf("[DEBUG] Updating the MetaData for Batch pool %q (Account name %q / Resource Group %q)..", id.Name, id.BatchAccountName, id.ResourceGroup) metaDataRaw := d.Get("metadata").(map[string]interface{}) parameters.PoolProperties.Metadata = ExpandBatchMetaData(metaDataRaw) } - result, err := client.Update(ctx, id.ResourceGroup, id.AccountName, id.Name, parameters, "") + result, err := client.Update(ctx, id.ResourceGroup, id.BatchAccountName, id.Name, parameters, "") if err != nil { return fmt.Errorf("Error updating Batch pool %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } // if the pool is not Steady after the update, wait for it to be Steady if props := result.PoolProperties; props != nil && props.AllocationState != batch.Steady { - if err := waitForBatchPoolPendingResizeOperation(ctx, client, id.ResourceGroup, id.AccountName, id.Name); err != nil { 
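The batch_pool.go hunks above move `public_ips` under the SDK's PublicIPAddressConfiguration block and introduce the new public_address_provisioning_type value alongside it. Below is a minimal sketch of that expansion, assuming the 2020-03-01 SDK nests these fields in a struct named PublicIPAddressConfiguration (the field access in the hunks implies this, but the type itself is not shown); the function name is a placeholder, and note that the nested struct has to be allocated before Provision or IPAddressIds can be assigned, an allocation the visible hunks do not show.

package batch

import (
	"github.com/Azure/azure-sdk-for-go/services/batch/mgmt/2020-03-01/batch"

	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
)

// expandPublicIPConfigSketch mirrors the public IP handling added to
// ExpandBatchPoolNetworkConfiguration: it copies the configured public IP resource IDs and
// the provisioning type ("BatchManaged", "UserManaged" or "NoPublicIPAddresses") into the
// SDK's nested public IP address configuration.
func expandPublicIPConfigSketch(publicIPs []interface{}, provisioningType string) *batch.NetworkConfiguration {
	config := &batch.NetworkConfiguration{
		// the nested struct must exist before its fields are set
		PublicIPAddressConfiguration: &batch.PublicIPAddressConfiguration{},
	}

	if len(publicIPs) > 0 {
		config.PublicIPAddressConfiguration.IPAddressIds = utils.ExpandStringSlice(publicIPs)
	}

	if provisioningType != "" {
		config.PublicIPAddressConfiguration.Provision = batch.IPAddressProvisioningType(provisioningType)
	}

	return config
}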
+ if err := waitForBatchPoolPendingResizeOperation(ctx, client, id.ResourceGroup, id.BatchAccountName, id.Name); err != nil { return fmt.Errorf("Error waiting for Batch pool %q (resource group %q) being ready", id.Name, id.ResourceGroup) } } - return resourceArmBatchPoolRead(d, meta) + return resourceBatchPoolRead(d, meta) } -func resourceArmBatchPoolRead(d *schema.ResourceData, meta interface{}) error { +func resourceBatchPoolRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Batch.PoolClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.BatchPoolID(d.Id()) + id, err := parse.PoolID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.Name) + resp, err := client.Get(ctx, id.ResourceGroup, id.BatchAccountName, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Error: Batch pool %q in account %q (Resource Group %q) was not found", id.Name, id.AccountName, id.ResourceGroup) + return fmt.Errorf("Error: Batch pool %q in account %q (Resource Group %q) was not found", id.Name, id.BatchAccountName, id.ResourceGroup) } return fmt.Errorf("Error making Read request on AzureRM Batch pool %q: %+v", id.Name, err) } d.Set("name", id.Name) - d.Set("account_name", id.AccountName) + d.Set("account_name", id.BatchAccountName) d.Set("resource_group_name", id.ResourceGroup) if props := resp.PoolProperties; props != nil { @@ -792,17 +801,17 @@ func resourceArmBatchPoolRead(d *schema.ResourceData, meta interface{}) error { return nil } -func resourceArmBatchPoolDelete(d *schema.ResourceData, meta interface{}) error { +func resourceBatchPoolDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Batch.PoolClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.BatchPoolID(d.Id()) + id, err := parse.PoolID(d.Id()) if err != nil { return err } - future, err := client.Delete(ctx, id.ResourceGroup, id.AccountName, id.Name) + future, err := client.Delete(ctx, id.ResourceGroup, id.BatchAccountName, id.Name) if err != nil { return fmt.Errorf("Error deleting Batch pool %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } diff --git a/azurerm/internal/services/batch/batch_pool_resource_test.go b/azurerm/internal/services/batch/batch_pool_resource_test.go index d682750beed7..48e71fe20dd2 100644 --- a/azurerm/internal/services/batch/batch_pool_resource_test.go +++ b/azurerm/internal/services/batch/batch_pool_resource_test.go @@ -1,8 +1,8 @@ package batch_test import ( + "context" "fmt" - "net/http" "os" "regexp" "testing" @@ -10,506 +10,399 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/terraform" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/batch/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) +type BatchPoolResource struct { +} + func TestAccBatchPool_basic(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_batch_pool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, 
- Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckBatchPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccBatchPool_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckBatchPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "vm_size", "STANDARD_A1"), - resource.TestCheckResourceAttr(data.ResourceName, "node_agent_sku_id", "batch.node.ubuntu 16.04"), - resource.TestCheckResourceAttr(data.ResourceName, "account_name", fmt.Sprintf("testaccbatch%s", data.RandomString)), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.0.publisher", "Canonical"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.0.sku", "16.04.0-LTS"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.0.offer", "UbuntuServer"), - resource.TestCheckResourceAttr(data.ResourceName, "fixed_scale.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "fixed_scale.0.target_dedicated_nodes", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "start_task.#", "0"), - ), - }, - data.ImportStep("stop_pending_resize_operation"), + r := BatchPoolResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("vm_size").HasValue("STANDARD_A1"), + check.That(data.ResourceName).Key("node_agent_sku_id").HasValue("batch.node.ubuntu 16.04"), + check.That(data.ResourceName).Key("storage_image_reference.#").HasValue("1"), + check.That(data.ResourceName).Key("storage_image_reference.0.publisher").HasValue("Canonical"), + check.That(data.ResourceName).Key("storage_image_reference.0.sku").HasValue("16.04.0-LTS"), + check.That(data.ResourceName).Key("storage_image_reference.0.offer").HasValue("UbuntuServer"), + check.That(data.ResourceName).Key("fixed_scale.#").HasValue("1"), + check.That(data.ResourceName).Key("fixed_scale.0.target_dedicated_nodes").HasValue("1"), + check.That(data.ResourceName).Key("start_task.#").HasValue("0"), + ), }, + data.ImportStep("stop_pending_resize_operation"), }) } func TestAccBatchPool_requiresImport(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_batch_pool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckBatchPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccBatchPool_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckBatchPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "vm_size", "STANDARD_A1"), - resource.TestCheckResourceAttr(data.ResourceName, "node_agent_sku_id", "batch.node.ubuntu 16.04"), - resource.TestCheckResourceAttr(data.ResourceName, "account_name", fmt.Sprintf("testaccbatch%s", data.RandomString)), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.0.publisher", "Canonical"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.0.sku", "16.04.0-LTS"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.0.offer", "UbuntuServer"), - resource.TestCheckResourceAttr(data.ResourceName, "fixed_scale.#", "1"), - 
resource.TestCheckResourceAttr(data.ResourceName, "fixed_scale.0.target_dedicated_nodes", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "start_task.#", "0"), - ), - }, - { - Config: testAccBatchPool_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_batch_pool"), - }, + r := BatchPoolResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("vm_size").HasValue("STANDARD_A1"), + check.That(data.ResourceName).Key("node_agent_sku_id").HasValue("batch.node.ubuntu 16.04"), + check.That(data.ResourceName).Key("storage_image_reference.#").HasValue("1"), + check.That(data.ResourceName).Key("storage_image_reference.0.publisher").HasValue("Canonical"), + check.That(data.ResourceName).Key("storage_image_reference.0.sku").HasValue("16.04.0-LTS"), + check.That(data.ResourceName).Key("storage_image_reference.0.offer").HasValue("UbuntuServer"), + check.That(data.ResourceName).Key("fixed_scale.#").HasValue("1"), + check.That(data.ResourceName).Key("fixed_scale.0.target_dedicated_nodes").HasValue("1"), + check.That(data.ResourceName).Key("start_task.#").HasValue("0"), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_batch_pool"), }, }) } func TestAccBatchPool_fixedScale_complete(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_batch_pool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckBatchPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccBatchPool_fixedScale_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckBatchPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "vm_size", "STANDARD_A1"), - resource.TestCheckResourceAttr(data.ResourceName, "max_tasks_per_node", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "node_agent_sku_id", "batch.node.ubuntu 16.04"), - resource.TestCheckResourceAttr(data.ResourceName, "account_name", fmt.Sprintf("testaccbatch%s", data.RandomString)), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.0.publisher", "Canonical"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.0.sku", "16.04.0-LTS"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.0.offer", "UbuntuServer"), - resource.TestCheckResourceAttr(data.ResourceName, "auto_scale.#", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "fixed_scale.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "fixed_scale.0.target_dedicated_nodes", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "fixed_scale.0.resize_timeout", "PT15M"), - resource.TestCheckResourceAttr(data.ResourceName, "fixed_scale.0.target_low_priority_nodes", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "start_task.#", "0"), - ), - }, - data.ImportStep("stop_pending_resize_operation"), + r := BatchPoolResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.fixedScale_complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("vm_size").HasValue("STANDARD_A1"), + 
check.That(data.ResourceName).Key("max_tasks_per_node").HasValue("2"), + check.That(data.ResourceName).Key("node_agent_sku_id").HasValue("batch.node.ubuntu 16.04"), + check.That(data.ResourceName).Key("storage_image_reference.#").HasValue("1"), + check.That(data.ResourceName).Key("storage_image_reference.0.publisher").HasValue("Canonical"), + check.That(data.ResourceName).Key("storage_image_reference.0.sku").HasValue("16.04.0-LTS"), + check.That(data.ResourceName).Key("storage_image_reference.0.offer").HasValue("UbuntuServer"), + check.That(data.ResourceName).Key("auto_scale.#").HasValue("0"), + check.That(data.ResourceName).Key("fixed_scale.#").HasValue("1"), + check.That(data.ResourceName).Key("fixed_scale.0.target_dedicated_nodes").HasValue("2"), + check.That(data.ResourceName).Key("fixed_scale.0.resize_timeout").HasValue("PT15M"), + check.That(data.ResourceName).Key("fixed_scale.0.target_low_priority_nodes").HasValue("0"), + check.That(data.ResourceName).Key("start_task.#").HasValue("0"), + ), }, + data.ImportStep("stop_pending_resize_operation"), }) } func TestAccBatchPool_autoScale_complete(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_batch_pool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckBatchPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccBatchPool_autoScale_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckBatchPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "vm_size", "STANDARD_A1"), - resource.TestCheckResourceAttr(data.ResourceName, "node_agent_sku_id", "batch.node.ubuntu 16.04"), - resource.TestCheckResourceAttr(data.ResourceName, "account_name", fmt.Sprintf("testaccbatch%s", data.RandomString)), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.0.publisher", "Canonical"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.0.sku", "16.04.0-LTS"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.0.offer", "UbuntuServer"), - resource.TestCheckResourceAttr(data.ResourceName, "fixed_scale.#", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "auto_scale.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "auto_scale.0.evaluation_interval", "PT15M"), - resource.TestCheckResourceAttr(data.ResourceName, "start_task.#", "0"), - ), - }, - data.ImportStep("stop_pending_resize_operation"), + r := BatchPoolResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.autoScale_complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("vm_size").HasValue("STANDARD_A1"), + check.That(data.ResourceName).Key("node_agent_sku_id").HasValue("batch.node.ubuntu 16.04"), + check.That(data.ResourceName).Key("storage_image_reference.#").HasValue("1"), + check.That(data.ResourceName).Key("storage_image_reference.0.publisher").HasValue("Canonical"), + check.That(data.ResourceName).Key("storage_image_reference.0.sku").HasValue("16.04.0-LTS"), + check.That(data.ResourceName).Key("storage_image_reference.0.offer").HasValue("UbuntuServer"), + check.That(data.ResourceName).Key("fixed_scale.#").HasValue("0"), + check.That(data.ResourceName).Key("auto_scale.#").HasValue("1"), + 
check.That(data.ResourceName).Key("auto_scale.0.evaluation_interval").HasValue("PT15M"), + check.That(data.ResourceName).Key("start_task.#").HasValue("0"), + ), }, + data.ImportStep("stop_pending_resize_operation"), }) } func TestAccBatchPool_completeUpdated(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_batch_pool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckBatchPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccBatchPool_fixedScale_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckBatchPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "vm_size", "STANDARD_A1"), - resource.TestCheckResourceAttr(data.ResourceName, "node_agent_sku_id", "batch.node.ubuntu 16.04"), - resource.TestCheckResourceAttr(data.ResourceName, "account_name", fmt.Sprintf("testaccbatch%s", data.RandomString)), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.0.publisher", "Canonical"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.0.sku", "16.04.0-LTS"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.0.offer", "UbuntuServer"), - resource.TestCheckResourceAttr(data.ResourceName, "auto_scale.#", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "fixed_scale.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "fixed_scale.0.target_dedicated_nodes", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "fixed_scale.0.resize_timeout", "PT15M"), - resource.TestCheckResourceAttr(data.ResourceName, "fixed_scale.0.target_low_priority_nodes", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "start_task.#", "0"), - ), - }, - data.ImportStep("stop_pending_resize_operation"), - { - Config: testAccBatchPool_autoScale_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckBatchPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "vm_size", "STANDARD_A1"), - resource.TestCheckResourceAttr(data.ResourceName, "node_agent_sku_id", "batch.node.ubuntu 16.04"), - resource.TestCheckResourceAttr(data.ResourceName, "account_name", fmt.Sprintf("testaccbatch%s", data.RandomString)), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.0.publisher", "Canonical"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.0.sku", "16.04.0-LTS"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.0.offer", "UbuntuServer"), - resource.TestCheckResourceAttr(data.ResourceName, "fixed_scale.#", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "auto_scale.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "auto_scale.0.evaluation_interval", "PT15M"), - resource.TestCheckResourceAttr(data.ResourceName, "start_task.#", "0"), - ), - }, - data.ImportStep("stop_pending_resize_operation"), + r := BatchPoolResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.fixedScale_complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("vm_size").HasValue("STANDARD_A1"), + 
check.That(data.ResourceName).Key("node_agent_sku_id").HasValue("batch.node.ubuntu 16.04"), + check.That(data.ResourceName).Key("storage_image_reference.#").HasValue("1"), + check.That(data.ResourceName).Key("storage_image_reference.0.publisher").HasValue("Canonical"), + check.That(data.ResourceName).Key("storage_image_reference.0.sku").HasValue("16.04.0-LTS"), + check.That(data.ResourceName).Key("storage_image_reference.0.offer").HasValue("UbuntuServer"), + check.That(data.ResourceName).Key("auto_scale.#").HasValue("0"), + check.That(data.ResourceName).Key("fixed_scale.#").HasValue("1"), + check.That(data.ResourceName).Key("fixed_scale.0.target_dedicated_nodes").HasValue("2"), + check.That(data.ResourceName).Key("fixed_scale.0.resize_timeout").HasValue("PT15M"), + check.That(data.ResourceName).Key("fixed_scale.0.target_low_priority_nodes").HasValue("0"), + check.That(data.ResourceName).Key("start_task.#").HasValue("0"), + ), }, + data.ImportStep("stop_pending_resize_operation"), + { + Config: r.autoScale_complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("vm_size").HasValue("STANDARD_A1"), + check.That(data.ResourceName).Key("node_agent_sku_id").HasValue("batch.node.ubuntu 16.04"), + check.That(data.ResourceName).Key("storage_image_reference.#").HasValue("1"), + check.That(data.ResourceName).Key("storage_image_reference.0.publisher").HasValue("Canonical"), + check.That(data.ResourceName).Key("storage_image_reference.0.sku").HasValue("16.04.0-LTS"), + check.That(data.ResourceName).Key("storage_image_reference.0.offer").HasValue("UbuntuServer"), + check.That(data.ResourceName).Key("fixed_scale.#").HasValue("0"), + check.That(data.ResourceName).Key("auto_scale.#").HasValue("1"), + check.That(data.ResourceName).Key("auto_scale.0.evaluation_interval").HasValue("PT15M"), + check.That(data.ResourceName).Key("start_task.#").HasValue("0"), + ), + }, + data.ImportStep("stop_pending_resize_operation"), }) } -func TestAccBatchPoolStartTask_basic(t *testing.T) { +func TestAccBatchPool_startTask_basic(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_batch_pool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckBatchPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccBatchPoolStartTask_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckBatchPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "vm_size", "STANDARD_A1"), - resource.TestCheckResourceAttr(data.ResourceName, "node_agent_sku_id", "batch.node.ubuntu 16.04"), - resource.TestCheckResourceAttr(data.ResourceName, "account_name", fmt.Sprintf("testaccbatch%s", data.RandomString)), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.0.publisher", "Canonical"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.0.sku", "16.04.0-LTS"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.0.offer", "UbuntuServer"), - resource.TestCheckResourceAttr(data.ResourceName, "auto_scale.#", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "fixed_scale.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "fixed_scale.0.target_dedicated_nodes", "1"), - resource.TestCheckResourceAttr(data.ResourceName, 
"fixed_scale.0.resize_timeout", "PT15M"), - resource.TestCheckResourceAttr(data.ResourceName, "fixed_scale.0.target_low_priority_nodes", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "start_task.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "start_task.0.max_task_retry_count", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "start_task.0.environment.%", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "start_task.0.environment.env", "TEST"), - resource.TestCheckResourceAttr(data.ResourceName, "start_task.0.environment.bu", "Research&Dev"), - resource.TestCheckResourceAttr(data.ResourceName, "start_task.0.user_identity.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "start_task.0.user_identity.0.auto_user.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "start_task.0.user_identity.0.auto_user.0.scope", "Task"), - resource.TestCheckResourceAttr(data.ResourceName, "start_task.0.user_identity.0.auto_user.0.elevation_level", "NonAdmin"), - ), - }, - data.ImportStep("stop_pending_resize_operation"), + r := BatchPoolResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.startTask_basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("vm_size").HasValue("STANDARD_A1"), + check.That(data.ResourceName).Key("node_agent_sku_id").HasValue("batch.node.ubuntu 16.04"), + check.That(data.ResourceName).Key("storage_image_reference.#").HasValue("1"), + check.That(data.ResourceName).Key("storage_image_reference.0.publisher").HasValue("Canonical"), + check.That(data.ResourceName).Key("storage_image_reference.0.sku").HasValue("16.04.0-LTS"), + check.That(data.ResourceName).Key("storage_image_reference.0.offer").HasValue("UbuntuServer"), + check.That(data.ResourceName).Key("auto_scale.#").HasValue("0"), + check.That(data.ResourceName).Key("fixed_scale.#").HasValue("1"), + check.That(data.ResourceName).Key("fixed_scale.0.target_dedicated_nodes").HasValue("1"), + check.That(data.ResourceName).Key("fixed_scale.0.resize_timeout").HasValue("PT15M"), + check.That(data.ResourceName).Key("fixed_scale.0.target_low_priority_nodes").HasValue("0"), + check.That(data.ResourceName).Key("start_task.#").HasValue("1"), + check.That(data.ResourceName).Key("start_task.0.max_task_retry_count").HasValue("1"), + check.That(data.ResourceName).Key("start_task.0.environment.%").HasValue("2"), + check.That(data.ResourceName).Key("start_task.0.environment.env").HasValue("TEST"), + check.That(data.ResourceName).Key("start_task.0.environment.bu").HasValue("Research&Dev"), + check.That(data.ResourceName).Key("start_task.0.user_identity.#").HasValue("1"), + check.That(data.ResourceName).Key("start_task.0.user_identity.0.auto_user.#").HasValue("1"), + check.That(data.ResourceName).Key("start_task.0.user_identity.0.auto_user.0.scope").HasValue("Task"), + check.That(data.ResourceName).Key("start_task.0.user_identity.0.auto_user.0.elevation_level").HasValue("NonAdmin"), + ), }, + data.ImportStep("stop_pending_resize_operation"), }) } func TestAccBatchPool_certificates(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_batch_pool", "test") + r := BatchPoolResource{} subscriptionID := os.Getenv("ARM_SUBSCRIPTION_ID") certificate0ID := fmt.Sprintf("/subscriptions/%s/resourceGroups/testaccbatch%d/providers/Microsoft.Batch/batchAccounts/testaccbatch%s/certificates/sha1-312d31a79fa0cef49c00f769afc2b73e9f4edf34", subscriptionID, data.RandomInteger, data.RandomString) 
certificate1ID := fmt.Sprintf("/subscriptions/%s/resourceGroups/testaccbatch%d/providers/Microsoft.Batch/batchAccounts/testaccbatch%s/certificates/sha1-42c107874fd0e4a9583292a2f1098e8fe4b2edda", subscriptionID, data.RandomInteger, data.RandomString) - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckBatchPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccBatchPoolCertificates(data), - Check: resource.ComposeTestCheckFunc( - testCheckBatchPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "vm_size", "STANDARD_A1"), - resource.TestCheckResourceAttr(data.ResourceName, "node_agent_sku_id", "batch.node.ubuntu 16.04"), - resource.TestCheckResourceAttr(data.ResourceName, "account_name", fmt.Sprintf("testaccbatch%s", data.RandomString)), - resource.TestCheckResourceAttr(data.ResourceName, "certificate.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate.0.id", certificate0ID), - resource.TestCheckResourceAttr(data.ResourceName, "certificate.0.store_location", "CurrentUser"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate.0.store_name", ""), - resource.TestCheckResourceAttr(data.ResourceName, "certificate.0.visibility.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate.0.visibility.3294600504", "StartTask"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate.1.id", certificate1ID), - resource.TestCheckResourceAttr(data.ResourceName, "certificate.1.store_location", "CurrentUser"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate.1.store_name", ""), - resource.TestCheckResourceAttr(data.ResourceName, "certificate.1.visibility.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate.1.visibility.3294600504", "StartTask"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate.1.visibility.4077195354", "RemoteUser"), - ), - }, - data.ImportStep("stop_pending_resize_operation"), + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.certificates(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("vm_size").HasValue("STANDARD_A1"), + check.That(data.ResourceName).Key("node_agent_sku_id").HasValue("batch.node.ubuntu 16.04"), + check.That(data.ResourceName).Key("certificate.#").HasValue("2"), + check.That(data.ResourceName).Key("certificate.0.id").HasValue(certificate0ID), + check.That(data.ResourceName).Key("certificate.0.store_location").HasValue("CurrentUser"), + check.That(data.ResourceName).Key("certificate.0.store_name").HasValue(""), + check.That(data.ResourceName).Key("certificate.0.visibility.#").HasValue("1"), + check.That(data.ResourceName).Key("certificate.0.visibility.3294600504").HasValue("StartTask"), + check.That(data.ResourceName).Key("certificate.1.id").HasValue(certificate1ID), + check.That(data.ResourceName).Key("certificate.1.store_location").HasValue("CurrentUser"), + check.That(data.ResourceName).Key("certificate.1.store_name").HasValue(""), + check.That(data.ResourceName).Key("certificate.1.visibility.#").HasValue("2"), + check.That(data.ResourceName).Key("certificate.1.visibility.3294600504").HasValue("StartTask"), + check.That(data.ResourceName).Key("certificate.1.visibility.4077195354").HasValue("RemoteUser"), + ), }, + data.ImportStep("stop_pending_resize_operation"), }) } func 
TestAccBatchPool_validateResourceFileWithoutSource(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_batch_pool", "test") + r := BatchPoolResource{} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckBatchPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccBatchPoolValidateResourceFileWithoutSource(data), - ExpectError: regexp.MustCompile("Exactly one of auto_storage_container_name, storage_container_url and http_url must be specified"), - }, + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.validateResourceFileWithoutSource(data), + ExpectError: regexp.MustCompile("Exactly one of auto_storage_container_name, storage_container_url and http_url must be specified"), }, }) } func TestAccBatchPool_container(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_batch_pool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckBatchPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccBatchPoolContainerConfiguration(data), - Check: resource.ComposeTestCheckFunc( - testCheckBatchPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "container_configuration.0.type", "DockerCompatible"), - resource.TestCheckResourceAttr(data.ResourceName, "container_configuration.0.container_image_names.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "container_configuration.0.container_registries.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "container_configuration.0.container_registries.0.registry_server", "myContainerRegistry.azurecr.io"), - resource.TestCheckResourceAttr(data.ResourceName, "container_configuration.0.container_registries.0.user_name", "myUserName"), - resource.TestCheckResourceAttr(data.ResourceName, "container_configuration.0.container_registries.0.password", "myPassword"), - ), - }, - data.ImportStep( - "stop_pending_resize_operation", - "container_configuration.0.container_registries.0.password", + r := BatchPoolResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.containerConfiguration(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("container_configuration.0.type").HasValue("DockerCompatible"), + check.That(data.ResourceName).Key("container_configuration.0.container_image_names.#").HasValue("1"), + check.That(data.ResourceName).Key("container_configuration.0.container_registries.#").HasValue("1"), + check.That(data.ResourceName).Key("container_configuration.0.container_registries.0.registry_server").HasValue("myContainerRegistry.azurecr.io"), + check.That(data.ResourceName).Key("container_configuration.0.container_registries.0.user_name").HasValue("myUserName"), + check.That(data.ResourceName).Key("container_configuration.0.container_registries.0.password").HasValue("myPassword"), ), }, + data.ImportStep( + "stop_pending_resize_operation", + "container_configuration.0.container_registries.0.password", + ), }) } func TestAccBatchPool_validateResourceFileWithMultipleSources(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_batch_pool", "test") + r := BatchPoolResource{} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: 
testCheckBatchPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccBatchPoolValidateResourceFileWithMultipleSources(data), - ExpectError: regexp.MustCompile("Exactly one of auto_storage_container_name, storage_container_url and http_url must be specified"), - }, + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.validateResourceFileWithMultipleSources(data), + ExpectError: regexp.MustCompile("Exactly one of auto_storage_container_name, storage_container_url and http_url must be specified"), }, }) } func TestAccBatchPool_validateResourceFileBlobPrefixWithoutAutoStorageContainerUrl(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_batch_pool", "test") + r := BatchPoolResource{} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckBatchPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccBatchPoolValidateResourceFileBlobPrefixWithoutAutoStorageContainerName(data), - ExpectError: regexp.MustCompile("auto_storage_container_name or storage_container_url must be specified when using blob_prefix"), - }, + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.validateResourceFileBlobPrefixWithoutAutoStorageContainerName(data), + ExpectError: regexp.MustCompile("auto_storage_container_name or storage_container_url must be specified when using blob_prefix"), }, }) } func TestAccBatchPool_validateResourceFileHttpURLWithoutFilePath(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_batch_pool", "test") + r := BatchPoolResource{} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckBatchPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccBatchPoolValidateResourceFileHttpURLWithoutFilePath(data), - ExpectError: regexp.MustCompile("file_path must be specified when using http_url"), - }, + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.validateResourceFileHttpURLWithoutFilePath(data), + ExpectError: regexp.MustCompile("file_path must be specified when using http_url"), }, }) } func TestAccBatchPool_customImage(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_batch_pool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckBatchPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccBatchPoolCustomImageConfiguration(data), - Check: resource.ComposeTestCheckFunc( - testCheckBatchPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "vm_size", "STANDARD_A1"), - resource.TestCheckResourceAttr(data.ResourceName, "max_tasks_per_node", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "node_agent_sku_id", "batch.node.ubuntu 16.04"), - resource.TestCheckResourceAttr(data.ResourceName, "account_name", fmt.Sprintf("testaccbatch%s", data.RandomString)), - resource.TestCheckResourceAttr(data.ResourceName, "auto_scale.#", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "fixed_scale.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "fixed_scale.0.target_dedicated_nodes", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "fixed_scale.0.resize_timeout", "PT15M"), - resource.TestCheckResourceAttr(data.ResourceName, "fixed_scale.0.target_low_priority_nodes", "0"), - resource.TestCheckResourceAttr(data.ResourceName, 
"start_task.#", "0"), - ), - }, - data.ImportStep("stop_pending_resize_operation"), + r := BatchPoolResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.customImageConfiguration(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("vm_size").HasValue("STANDARD_A1"), + check.That(data.ResourceName).Key("max_tasks_per_node").HasValue("2"), + check.That(data.ResourceName).Key("node_agent_sku_id").HasValue("batch.node.ubuntu 16.04"), + check.That(data.ResourceName).Key("auto_scale.#").HasValue("0"), + check.That(data.ResourceName).Key("fixed_scale.#").HasValue("1"), + check.That(data.ResourceName).Key("fixed_scale.0.target_dedicated_nodes").HasValue("2"), + check.That(data.ResourceName).Key("fixed_scale.0.resize_timeout").HasValue("PT15M"), + check.That(data.ResourceName).Key("fixed_scale.0.target_low_priority_nodes").HasValue("0"), + check.That(data.ResourceName).Key("start_task.#").HasValue("0"), + ), }, + data.ImportStep("stop_pending_resize_operation"), }) } func TestAccBatchPool_frontEndPortRanges(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_batch_pool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckBatchPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccBatchPool_networkConfiguration(data), - Check: resource.ComposeTestCheckFunc( - testCheckBatchPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "vm_size", "STANDARD_A1"), - resource.TestCheckResourceAttr(data.ResourceName, "node_agent_sku_id", "batch.node.ubuntu 16.04"), - resource.TestCheckResourceAttr(data.ResourceName, "account_name", fmt.Sprintf("testaccbatch%s", data.RandomString)), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.0.publisher", "Canonical"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.0.sku", "16.04.0-LTS"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_image_reference.0.offer", "UbuntuServer"), - resource.TestCheckResourceAttr(data.ResourceName, "auto_scale.#", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "fixed_scale.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "fixed_scale.0.target_dedicated_nodes", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "start_task.#", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "network_configuration.#", "1"), - resource.TestCheckResourceAttrSet(data.ResourceName, "network_configuration.0.subnet_id"), - resource.TestCheckResourceAttr(data.ResourceName, "network_configuration.0.public_ips.#", "1"), - ), - }, - data.ImportStep("stop_pending_resize_operation"), + r := BatchPoolResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.networkConfiguration(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("vm_size").HasValue("STANDARD_A1"), + check.That(data.ResourceName).Key("node_agent_sku_id").HasValue("batch.node.ubuntu 16.04"), + check.That(data.ResourceName).Key("storage_image_reference.#").HasValue("1"), + check.That(data.ResourceName).Key("storage_image_reference.0.publisher").HasValue("Canonical"), + 
check.That(data.ResourceName).Key("storage_image_reference.0.sku").HasValue("16.04.0-LTS"), + check.That(data.ResourceName).Key("storage_image_reference.0.offer").HasValue("UbuntuServer"), + check.That(data.ResourceName).Key("auto_scale.#").HasValue("0"), + check.That(data.ResourceName).Key("fixed_scale.#").HasValue("1"), + check.That(data.ResourceName).Key("fixed_scale.0.target_dedicated_nodes").HasValue("1"), + check.That(data.ResourceName).Key("start_task.#").HasValue("0"), + check.That(data.ResourceName).Key("network_configuration.#").HasValue("1"), + check.That(data.ResourceName).Key("network_configuration.0.subnet_id").Exists(), + check.That(data.ResourceName).Key("network_configuration.0.public_ips.#").HasValue("1"), + ), }, + data.ImportStep("stop_pending_resize_operation"), }) } func TestAccBatchPool_fixedScaleUpdate(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_batch_pool", "test") + r := BatchPoolResource{} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckBatchPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccBatchPool_fixedScale_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckBatchPoolExists(data.ResourceName), - ), - }, - data.ImportStep("stop_pending_resize_operation"), - { - Config: testAccBatchPool_fixedScale_completeUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckBatchPoolExists(data.ResourceName), - ), - }, - data.ImportStep("stop_pending_resize_operation"), + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.fixedScale_complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("stop_pending_resize_operation"), + { + Config: r.fixedScale_completeUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), }, + data.ImportStep("stop_pending_resize_operation"), }) } -func testCheckBatchPoolExists(name string) resource.TestCheckFunc { - return func(s *terraform.State) error { - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - conn := acceptance.AzureProvider.Meta().(*clients.Client).Batch.PoolClient - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[name] - if !ok { - return fmt.Errorf("Not found: %s", name) - } - - id, err := parse.BatchPoolID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.AccountName, id.Name) - if err != nil { - return fmt.Errorf("Bad: Get on batchPoolClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Batch pool %q (account: %q, resource group: %q) does not exist", id.Name, id.AccountName, id.ResourceGroup) - } - - return nil +func (t BatchPoolResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.PoolID(state.ID) + if err != nil { + return nil, err } -} -func testCheckBatchPoolDestroy(s *terraform.State) error { - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - conn := acceptance.AzureProvider.Meta().(*clients.Client).Batch.PoolClient - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_batch_pool" { - continue - } - - id, err := parse.BatchPoolID(rs.Primary.ID) - if err != nil { - return err - } - resp, err := conn.Get(ctx, id.ResourceGroup, 
id.AccountName, id.Name) - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return err - } - } - - return nil + resp, err := clients.Batch.PoolClient.Get(ctx, id.ResourceGroup, id.BatchAccountName, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Batch Pool %q (Account Name %q / Resource Group %q): %+v", id.Name, id.BatchAccountName, id.ResourceGroup, err) } - return nil + return utils.Bool(resp.PoolProperties != nil), nil } -func testAccBatchPool_fixedScale_complete(data acceptance.TestData) string { +func (BatchPoolResource) fixedScale_complete(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} } resource "azurerm_resource_group" "test" { - name = "testaccRG-%d-batchpool" + name = "testaccRG-batch-%d" location = "%s" } @@ -560,14 +453,14 @@ resource "azurerm_batch_pool" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString, data.RandomString) } -func testAccBatchPool_fixedScale_completeUpdate(data acceptance.TestData) string { +func (BatchPoolResource) fixedScale_completeUpdate(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} } resource "azurerm_resource_group" "test" { - name = "testaccRG-%d-batchpool" + name = "testaccRG-batch-%d" location = "%s" } @@ -618,14 +511,14 @@ resource "azurerm_batch_pool" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString, data.RandomString) } -func testAccBatchPool_autoScale_complete(data acceptance.TestData) string { +func (BatchPoolResource) autoScale_complete(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} } resource "azurerm_resource_group" "test" { - name = "testaccRG-%d-batchpool" + name = "testaccRG-batch-%d" location = "%s" } @@ -681,14 +574,14 @@ EOF `, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString, data.RandomString) } -func testAccBatchPool_basic(data acceptance.TestData) string { +func (BatchPoolResource) basic(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} } resource "azurerm_resource_group" "test" { - name = "testaccRG-%d-batchpool" + name = "testaccRG-batch-%d" location = "%s" } @@ -719,7 +612,7 @@ resource "azurerm_batch_pool" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) } -func testAccBatchPool_requiresImport(data acceptance.TestData) string { +func (BatchPoolResource) requiresImport(data acceptance.TestData) string { return fmt.Sprintf(` %s @@ -741,17 +634,17 @@ resource "azurerm_batch_pool" "import" { version = "latest" } } -`, testAccBatchPool_basic(data)) +`, BatchPoolResource{}.basic(data)) } -func testAccBatchPoolStartTask_basic(data acceptance.TestData) string { +func (BatchPoolResource) startTask_basic(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} } resource "azurerm_resource_group" "test" { - name = "testaccRG-%d-batchpool" + name = "testaccRG-batch-%d" location = "%s" } @@ -805,7 +698,7 @@ resource "azurerm_batch_pool" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) } -func testAccBatchPoolValidateResourceFileWithoutSource(data acceptance.TestData) string { +func (BatchPoolResource) validateResourceFileWithoutSource(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -867,7 +760,7 @@ resource "azurerm_batch_pool" "test" { `, data.RandomInteger,
data.Locations.Primary, data.RandomString, data.RandomString) } -func testAccBatchPoolValidateResourceFileWithMultipleSources(data acceptance.TestData) string { +func (BatchPoolResource) validateResourceFileWithMultipleSources(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -929,7 +822,7 @@ resource "azurerm_batch_pool" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) } -func testAccBatchPoolValidateResourceFileBlobPrefixWithoutAutoStorageContainerName(data acceptance.TestData) string { +func (BatchPoolResource) validateResourceFileBlobPrefixWithoutAutoStorageContainerName(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -991,7 +884,7 @@ resource "azurerm_batch_pool" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) } -func testAccBatchPoolValidateResourceFileHttpURLWithoutFilePath(data acceptance.TestData) string { +func (BatchPoolResource) validateResourceFileHttpURLWithoutFilePath(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -1052,7 +945,7 @@ resource "azurerm_batch_pool" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) } -func testAccBatchPoolCertificates(data acceptance.TestData) string { +func (BatchPoolResource) certificates(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -1121,7 +1014,7 @@ resource "azurerm_batch_pool" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) } -func testAccBatchPoolContainerConfiguration(data acceptance.TestData) string { +func (BatchPoolResource) containerConfiguration(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -1176,14 +1069,14 @@ resource "azurerm_batch_pool" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString, data.RandomString) } -func testAccBatchPoolCustomImageConfiguration(data acceptance.TestData) string { +func (BatchPoolResource) customImageConfiguration(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} } resource "azurerm_resource_group" "test" { - name = "testaccRG-%d-batchpool" + name = "testaccRG-batch-%d" location = "%s" } @@ -1229,6 +1122,8 @@ resource "azurerm_storage_account" "test" { account_tier = "Standard" account_replication_type = "LRS" + allow_blob_public_access = true + tags = { environment = "Dev" } @@ -1328,7 +1223,7 @@ resource "azurerm_batch_pool" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomString, data.RandomString) } -func testAccBatchPool_networkConfiguration(data acceptance.TestData) string { +func (BatchPoolResource) networkConfiguration(data acceptance.TestData) string { return fmt.Sprintf(` resource "azurerm_resource_group" "test" { name = "testaccRG-%[1]d-batchpool" diff --git a/azurerm/internal/services/batch/client/client.go b/azurerm/internal/services/batch/client/client.go index b808313522d9..5d790ff482a5 100644 --- a/azurerm/internal/services/batch/client/client.go +++ b/azurerm/internal/services/batch/client/client.go @@ -1,7 +1,7 @@ package client import ( - "github.com/Azure/azure-sdk-for-go/services/batch/mgmt/2019-08-01/batch" + 
"github.com/Azure/azure-sdk-for-go/services/batch/mgmt/2020-03-01/batch" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/common" ) diff --git a/azurerm/internal/services/batch/parse/account.go b/azurerm/internal/services/batch/parse/account.go new file mode 100644 index 000000000000..68b6e48a1b79 --- /dev/null +++ b/azurerm/internal/services/batch/parse/account.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type AccountId struct { + SubscriptionId string + ResourceGroup string + BatchAccountName string +} + +func NewAccountID(subscriptionId, resourceGroup, batchAccountName string) AccountId { + return AccountId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + BatchAccountName: batchAccountName, + } +} + +func (id AccountId) String() string { + segments := []string{ + fmt.Sprintf("Batch Account Name %q", id.BatchAccountName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Account", segmentsStr) +} + +func (id AccountId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Batch/batchAccounts/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.BatchAccountName) +} + +// AccountID parses a Account ID into an AccountId struct +func AccountID(input string) (*AccountId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := AccountId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.BatchAccountName, err = id.PopSegment("batchAccounts"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/batch/parse/account_test.go b/azurerm/internal/services/batch/parse/account_test.go new file mode 100644 index 000000000000..dfa7d7668eb6 --- /dev/null +++ b/azurerm/internal/services/batch/parse/account_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = AccountId{} + +func TestAccountIDFormatter(t *testing.T) { + actual := NewAccountID("12345678-1234-9876-4563-123456789012", "resGroup1", "account1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/account1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestAccountID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *AccountId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing BatchAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/", + Error: true, + }, + + { + // missing value for BatchAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/account1", + Expected: &AccountId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + BatchAccountName: "account1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.BATCH/BATCHACCOUNTS/ACCOUNT1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := AccountID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.BatchAccountName != v.Expected.BatchAccountName { + t.Fatalf("Expected %q but got %q for BatchAccountName", v.Expected.BatchAccountName, actual.BatchAccountName) + } + } +} diff --git a/azurerm/internal/services/batch/parse/application.go b/azurerm/internal/services/batch/parse/application.go new file mode 100644 index 000000000000..0b861deb5337 --- /dev/null +++ b/azurerm/internal/services/batch/parse/application.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ApplicationId struct { + SubscriptionId string + ResourceGroup string + BatchAccountName string + Name string +} + +func NewApplicationID(subscriptionId, resourceGroup, batchAccountName, name string) ApplicationId { + return ApplicationId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + BatchAccountName: batchAccountName, + Name: name, + } +} + +func (id ApplicationId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Batch Account Name %q", id.BatchAccountName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Application", segmentsStr) +} + +func (id ApplicationId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Batch/batchAccounts/%s/applications/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.BatchAccountName, id.Name) +} + +// ApplicationID parses a Application ID into an ApplicationId struct +func ApplicationID(input string) (*ApplicationId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err 
+ } + + resourceId := ApplicationId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.BatchAccountName, err = id.PopSegment("batchAccounts"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("applications"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/batch/parse/application_test.go b/azurerm/internal/services/batch/parse/application_test.go new file mode 100644 index 000000000000..c5d60af5c7f2 --- /dev/null +++ b/azurerm/internal/services/batch/parse/application_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ApplicationId{} + +func TestApplicationIDFormatter(t *testing.T) { + actual := NewApplicationID("12345678-1234-9876-4563-123456789012", "resGroup1", "account1", "application1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/account1/applications/application1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestApplicationID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ApplicationId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing BatchAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/", + Error: true, + }, + + { + // missing value for BatchAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/account1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/account1/applications/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/account1/applications/application1", + Expected: &ApplicationId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + BatchAccountName: "account1", + Name: "application1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.BATCH/BATCHACCOUNTS/ACCOUNT1/APPLICATIONS/APPLICATION1", + Error: 
true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ApplicationID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.BatchAccountName != v.Expected.BatchAccountName { + t.Fatalf("Expected %q but got %q for BatchAccountName", v.Expected.BatchAccountName, actual.BatchAccountName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/batch/parse/batch_account.go b/azurerm/internal/services/batch/parse/batch_account.go deleted file mode 100644 index 6c08d1700b13..000000000000 --- a/azurerm/internal/services/batch/parse/batch_account.go +++ /dev/null @@ -1,33 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type BatchAccountId struct { - ResourceGroup string - Name string -} - -func BatchAccountID(input string) (*BatchAccountId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Batch Account ID %q: %+v", input, err) - } - - account := BatchAccountId{ - ResourceGroup: id.ResourceGroup, - } - - if account.Name, err = id.PopSegment("batchAccounts"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &account, nil -} diff --git a/azurerm/internal/services/batch/parse/batch_account_test.go b/azurerm/internal/services/batch/parse/batch_account_test.go deleted file mode 100644 index c787e00ca697..000000000000 --- a/azurerm/internal/services/batch/parse/batch_account_test.go +++ /dev/null @@ -1,73 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestBatchAccountId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *BatchAccountId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Accounts Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/", - Expected: nil, - }, - { - Name: "Batch Account ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/Account1", - Expected: &BatchAccountId{ - Name: "Account1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Batch/BatchAccounts/Account1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - 
actual, err := BatchAccountID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/batch/parse/batch_application.go b/azurerm/internal/services/batch/parse/batch_application.go deleted file mode 100644 index fbf06f9ce0e1..000000000000 --- a/azurerm/internal/services/batch/parse/batch_application.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type BatchApplicationId struct { - ResourceGroup string - AccountName string - Name string -} - -func BatchApplicationID(input string) (*BatchApplicationId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Batch Application ID %q: %+v", input, err) - } - - application := BatchApplicationId{ - ResourceGroup: id.ResourceGroup, - } - - if application.AccountName, err = id.PopSegment("batchAccounts"); err != nil { - return nil, err - } - - if application.Name, err = id.PopSegment("applications"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &application, nil -} diff --git a/azurerm/internal/services/batch/parse/batch_application_test.go b/azurerm/internal/services/batch/parse/batch_application_test.go deleted file mode 100644 index 0fe17533d23b..000000000000 --- a/azurerm/internal/services/batch/parse/batch_application_test.go +++ /dev/null @@ -1,86 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestBatchApplicationId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *BatchApplicationId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Accounts Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/", - Expected: nil, - }, - { - Name: "Batch Account ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/acctName1", - Expected: nil, - }, - { - Name: "Missing Applications Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/acctName1/applications/", - Expected: nil, - }, - { - Name: "Batch Application ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/acctName1/applications/Application1", - Expected: &BatchApplicationId{ - ResourceGroup: "resGroup1", - AccountName: "acctName1", - Name: "Application1", - }, - }, - { - Name: "Wrong Casing", - Input: 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/acctName1/Applications/Application1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := BatchApplicationID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - if actual.AccountName != v.Expected.AccountName { - t.Fatalf("Expected %q but got %q for AccountName", v.Expected.AccountName, actual.AccountName) - } - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/batch/parse/batch_certificate.go b/azurerm/internal/services/batch/parse/batch_certificate.go deleted file mode 100644 index a449c328e47c..000000000000 --- a/azurerm/internal/services/batch/parse/batch_certificate.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type BatchCertificateId struct { - ResourceGroup string - AccountName string - Name string -} - -func BatchCertificateID(input string) (*BatchCertificateId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Batch Certificate ID %q: %+v", input, err) - } - - certificate := BatchCertificateId{ - ResourceGroup: id.ResourceGroup, - } - - if certificate.AccountName, err = id.PopSegment("batchAccounts"); err != nil { - return nil, err - } - - if certificate.Name, err = id.PopSegment("certificates"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &certificate, nil -} diff --git a/azurerm/internal/services/batch/parse/batch_certificate_test.go b/azurerm/internal/services/batch/parse/batch_certificate_test.go deleted file mode 100644 index d156ddb8438e..000000000000 --- a/azurerm/internal/services/batch/parse/batch_certificate_test.go +++ /dev/null @@ -1,86 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestBatchCertificateId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *BatchCertificateId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Accounts Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/", - Expected: nil, - }, - { - Name: "Batch Account ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/acctName1", - Expected: nil, - }, - { - Name: "Missing Certificates Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/acctName1/certificates/", - Expected: nil, - }, - { 
- Name: "Batch Certificate ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/acctName1/certificates/Certificate1", - Expected: &BatchCertificateId{ - ResourceGroup: "resGroup1", - AccountName: "acctName1", - Name: "Certificate1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/acctName1/Certificates/Certificate1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := BatchCertificateID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - if actual.AccountName != v.Expected.AccountName { - t.Fatalf("Expected %q but got %q for AccountName", v.Expected.AccountName, actual.AccountName) - } - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/batch/parse/batch_pool.go b/azurerm/internal/services/batch/parse/batch_pool.go deleted file mode 100644 index 7bb826428881..000000000000 --- a/azurerm/internal/services/batch/parse/batch_pool.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type BatchPoolId struct { - ResourceGroup string - AccountName string - Name string -} - -func BatchPoolID(input string) (*BatchPoolId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Batch Pool ID %q: %+v", input, err) - } - - pool := BatchPoolId{ - ResourceGroup: id.ResourceGroup, - } - - if pool.AccountName, err = id.PopSegment("batchAccounts"); err != nil { - return nil, err - } - - if pool.Name, err = id.PopSegment("pools"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &pool, nil -} diff --git a/azurerm/internal/services/batch/parse/batch_pool_test.go b/azurerm/internal/services/batch/parse/batch_pool_test.go deleted file mode 100644 index 49959b926788..000000000000 --- a/azurerm/internal/services/batch/parse/batch_pool_test.go +++ /dev/null @@ -1,86 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestBatchPoolId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *BatchPoolId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Accounts Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/", - Expected: nil, - }, - { - Name: "Batch Account ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/acctName1", - 
Expected: nil, - }, - { - Name: "Missing Pools Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/acctName1/pools/", - Expected: nil, - }, - { - Name: "Batch Pool ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/acctName1/pools/Pool1", - Expected: &BatchPoolId{ - ResourceGroup: "resGroup1", - AccountName: "acctName1", - Name: "Pool1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/acctName1/Pools/Pool1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := BatchPoolID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - if actual.AccountName != v.Expected.AccountName { - t.Fatalf("Expected %q but got %q for AccountName", v.Expected.AccountName, actual.AccountName) - } - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/batch/parse/certificate.go b/azurerm/internal/services/batch/parse/certificate.go new file mode 100644 index 000000000000..e3fa50d19fc2 --- /dev/null +++ b/azurerm/internal/services/batch/parse/certificate.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type CertificateId struct { + SubscriptionId string + ResourceGroup string + BatchAccountName string + Name string +} + +func NewCertificateID(subscriptionId, resourceGroup, batchAccountName, name string) CertificateId { + return CertificateId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + BatchAccountName: batchAccountName, + Name: name, + } +} + +func (id CertificateId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Batch Account Name %q", id.BatchAccountName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Certificate", segmentsStr) +} + +func (id CertificateId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Batch/batchAccounts/%s/certificates/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.BatchAccountName, id.Name) +} + +// CertificateID parses a Certificate ID into an CertificateId struct +func CertificateID(input string) (*CertificateId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := CertificateId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.BatchAccountName, err = id.PopSegment("batchAccounts"); err != nil { + return nil, err + } + if resourceId.Name, err = 
id.PopSegment("certificates"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/batch/parse/certificate_test.go b/azurerm/internal/services/batch/parse/certificate_test.go new file mode 100644 index 000000000000..2340556c5004 --- /dev/null +++ b/azurerm/internal/services/batch/parse/certificate_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = CertificateId{} + +func TestCertificateIDFormatter(t *testing.T) { + actual := NewCertificateID("12345678-1234-9876-4563-123456789012", "resGroup1", "account1", "certificate1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/account1/certificates/certificate1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestCertificateID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *CertificateId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing BatchAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/", + Error: true, + }, + + { + // missing value for BatchAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/account1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/account1/certificates/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/account1/certificates/certificate1", + Expected: &CertificateId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + BatchAccountName: "account1", + Name: "certificate1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.BATCH/BATCHACCOUNTS/ACCOUNT1/CERTIFICATES/CERTIFICATE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := CertificateID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if 
actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.BatchAccountName != v.Expected.BatchAccountName { + t.Fatalf("Expected %q but got %q for BatchAccountName", v.Expected.BatchAccountName, actual.BatchAccountName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/batch/parse/pool.go b/azurerm/internal/services/batch/parse/pool.go new file mode 100644 index 000000000000..5e781f9b61c1 --- /dev/null +++ b/azurerm/internal/services/batch/parse/pool.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type PoolId struct { + SubscriptionId string + ResourceGroup string + BatchAccountName string + Name string +} + +func NewPoolID(subscriptionId, resourceGroup, batchAccountName, name string) PoolId { + return PoolId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + BatchAccountName: batchAccountName, + Name: name, + } +} + +func (id PoolId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Batch Account Name %q", id.BatchAccountName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Pool", segmentsStr) +} + +func (id PoolId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Batch/batchAccounts/%s/pools/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.BatchAccountName, id.Name) +} + +// PoolID parses a Pool ID into an PoolId struct +func PoolID(input string) (*PoolId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := PoolId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.BatchAccountName, err = id.PopSegment("batchAccounts"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("pools"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/batch/parse/pool_test.go b/azurerm/internal/services/batch/parse/pool_test.go new file mode 100644 index 000000000000..94bdd32d5304 --- /dev/null +++ b/azurerm/internal/services/batch/parse/pool_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = PoolId{} + +func TestPoolIDFormatter(t *testing.T) { + actual := NewPoolID("12345678-1234-9876-4563-123456789012", "resGroup1", "account1", "pool1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/account1/pools/pool1" + if actual != expected { + t.Fatalf("Expected %q but got %q", 
expected, actual) + } +} + +func TestPoolID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *PoolId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing BatchAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/", + Error: true, + }, + + { + // missing value for BatchAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/account1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/account1/pools/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/account1/pools/pool1", + Expected: &PoolId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + BatchAccountName: "account1", + Name: "pool1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.BATCH/BATCHACCOUNTS/ACCOUNT1/POOLS/POOL1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := PoolID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.BatchAccountName != v.Expected.BatchAccountName { + t.Fatalf("Expected %q but got %q for BatchAccountName", v.Expected.BatchAccountName, actual.BatchAccountName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/batch/registration.go b/azurerm/internal/services/batch/registration.go index 64b95dd4fbd1..babe51b808d3 100644 --- a/azurerm/internal/services/batch/registration.go +++ b/azurerm/internal/services/batch/registration.go @@ -21,18 +21,18 @@ func (r Registration) WebsiteCategories() []string { // SupportedDataSources returns the supported Data Sources supported by this Service func (r Registration) SupportedDataSources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_batch_account": dataSourceArmBatchAccount(), - "azurerm_batch_certificate": dataSourceArmBatchCertificate(), - "azurerm_batch_pool": dataSourceArmBatchPool(), + "azurerm_batch_account": 
dataSourceBatchAccount(), + "azurerm_batch_certificate": dataSourceBatchCertificate(), + "azurerm_batch_pool": dataSourceBatchPool(), } } // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_batch_account": resourceArmBatchAccount(), - "azurerm_batch_application": resourceArmBatchApplication(), - "azurerm_batch_certificate": resourceArmBatchCertificate(), - "azurerm_batch_pool": resourceArmBatchPool(), + "azurerm_batch_account": resourceBatchAccount(), + "azurerm_batch_application": resourceBatchApplication(), + "azurerm_batch_certificate": resourceBatchCertificate(), + "azurerm_batch_pool": resourceBatchPool(), } } diff --git a/azurerm/internal/services/batch/resourceids.go b/azurerm/internal/services/batch/resourceids.go new file mode 100644 index 000000000000..c25fa5d50bfe --- /dev/null +++ b/azurerm/internal/services/batch/resourceids.go @@ -0,0 +1,6 @@ +package batch + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Account -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/account1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Application -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/account1/applications/application1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Certificate -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/account1/certificates/certificate1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Pool -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/account1/pools/pool1 diff --git a/azurerm/internal/services/batch/validate/account_id.go b/azurerm/internal/services/batch/validate/account_id.go new file mode 100644 index 000000000000..62b35f69cba9 --- /dev/null +++ b/azurerm/internal/services/batch/validate/account_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/batch/parse" +) + +func AccountID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.AccountID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/batch/validate/account_id_test.go b/azurerm/internal/services/batch/validate/account_id_test.go new file mode 100644 index 000000000000..315c4c0c2825 --- /dev/null +++ b/azurerm/internal/services/batch/validate/account_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestAccountID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing BatchAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/", + Valid: false, + }, + + { + // missing value for BatchAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/account1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.BATCH/BATCHACCOUNTS/ACCOUNT1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := AccountID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/batch/validate/application_id.go b/azurerm/internal/services/batch/validate/application_id.go new file mode 100644 index 000000000000..efa8e0adae70 --- /dev/null +++ b/azurerm/internal/services/batch/validate/application_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/batch/parse" +) + +func ApplicationID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ApplicationID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/batch/validate/application_id_test.go b/azurerm/internal/services/batch/validate/application_id_test.go new file mode 100644 index 000000000000..0f1c9f716c3c --- /dev/null +++ b/azurerm/internal/services/batch/validate/application_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestApplicationID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing BatchAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/", + Valid: false, + }, + + { + // missing value for BatchAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/", + Valid: false, + }, + + { + // missing Name + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/account1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/account1/applications/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/account1/applications/application1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.BATCH/BATCHACCOUNTS/ACCOUNT1/APPLICATIONS/APPLICATION1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ApplicationID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/batch/validate/certificate_id.go b/azurerm/internal/services/batch/validate/certificate_id.go new file mode 100644 index 000000000000..acfdaf5afe12 --- /dev/null +++ b/azurerm/internal/services/batch/validate/certificate_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/batch/parse" +) + +func CertificateID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.CertificateID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/batch/validate/certificate_id_test.go b/azurerm/internal/services/batch/validate/certificate_id_test.go new file mode 100644 index 000000000000..b96194787fcd --- /dev/null +++ b/azurerm/internal/services/batch/validate/certificate_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestCertificateID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing BatchAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/", + Valid: false, + }, + + { + // missing value for BatchAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/account1/", + Valid: false, + }, + + { + // missing value for Name + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/account1/certificates/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/account1/certificates/certificate1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.BATCH/BATCHACCOUNTS/ACCOUNT1/CERTIFICATES/CERTIFICATE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := CertificateID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/batch/validate/pool_id.go b/azurerm/internal/services/batch/validate/pool_id.go new file mode 100644 index 000000000000..a6ae8831989e --- /dev/null +++ b/azurerm/internal/services/batch/validate/pool_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/batch/parse" +) + +func PoolID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.PoolID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/batch/validate/pool_id_test.go b/azurerm/internal/services/batch/validate/pool_id_test.go new file mode 100644 index 000000000000..bcdb1865436b --- /dev/null +++ b/azurerm/internal/services/batch/validate/pool_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestPoolID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing BatchAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/", + Valid: false, + }, + + { + // missing value for BatchAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/account1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/account1/pools/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Batch/batchAccounts/account1/pools/pool1", + Valid: true, + }, + + { + // upper-cased + 
Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.BATCH/BATCHACCOUNTS/ACCOUNT1/POOLS/POOL1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := PoolID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/blueprints/blueprint.go b/azurerm/internal/services/blueprints/blueprint.go index 64ffaddcdbd1..dd3e0f501239 100644 --- a/azurerm/internal/services/blueprints/blueprint.go +++ b/azurerm/internal/services/blueprints/blueprint.go @@ -42,7 +42,7 @@ func ManagedIdentitySchema() *schema.Schema { MinItems: 1, Elem: &schema.Schema{ Type: schema.TypeString, - ValidateFunc: validate.UserAssignedIdentityId, + ValidateFunc: validate.UserAssignedIdentityID, }, }, diff --git a/azurerm/internal/services/blueprints/blueprint_assignment_resource.go b/azurerm/internal/services/blueprints/blueprint_assignment_resource.go index 277cf25b8dce..41c3d4f908ed 100644 --- a/azurerm/internal/services/blueprints/blueprint_assignment_resource.go +++ b/azurerm/internal/services/blueprints/blueprint_assignment_resource.go @@ -21,12 +21,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmBlueprintAssignment() *schema.Resource { +func resourceBlueprintAssignment() *schema.Resource { return &schema.Resource{ - Create: resourceArmBlueprintAssignmentCreateUpdate, - Update: resourceArmBlueprintAssignmentCreateUpdate, - Read: resourceArmBlueprintAssignmentRead, - Delete: resourceArmBlueprintAssignmentDelete, + Create: resourceBlueprintAssignmentCreateUpdate, + Update: resourceBlueprintAssignmentCreateUpdate, + Read: resourceBlueprintAssignmentRead, + Delete: resourceBlueprintAssignmentDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -61,7 +61,7 @@ func resourceArmBlueprintAssignment() *schema.Resource { "version_id": { Type: schema.TypeString, Required: true, - ValidateFunc: validate.BlueprintVersionID, + ValidateFunc: validate.VersionID, }, "parameter_values": { @@ -126,7 +126,7 @@ func resourceArmBlueprintAssignment() *schema.Resource { } } -func resourceArmBlueprintAssignmentCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceBlueprintAssignmentCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Blueprints.AssignmentsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -214,10 +214,10 @@ func resourceArmBlueprintAssignmentCreateUpdate(d *schema.ResourceData, meta int d.SetId(*resp.ID) - return resourceArmBlueprintAssignmentRead(d, meta) + return resourceBlueprintAssignmentRead(d, meta) } -func resourceArmBlueprintAssignmentRead(d *schema.ResourceData, meta interface{}) error { +func resourceBlueprintAssignmentRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Blueprints.AssignmentsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -227,18 +227,15 @@ func resourceArmBlueprintAssignmentRead(d *schema.ResourceData, meta interface{} return err } - resourceScope := id.Scope - assignmentName := id.Name - - resp, err := client.Get(ctx, resourceScope, assignmentName) + resp, err := client.Get(ctx, id.Scope, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[INFO] the 
Blueprint Assignment %q does not exist - removing from state", assignmentName) + log.Printf("[INFO] the Blueprint Assignment %q does not exist - removing from state", id.Name) d.SetId("") return nil } - return fmt.Errorf("Read failed for Blueprint Assignment (%q): %+v", assignmentName, err) + return fmt.Errorf("Read failed for Blueprint Assignment (%q): %+v", id.Name, err) } if resp.Name != nil { @@ -298,27 +295,24 @@ func resourceArmBlueprintAssignmentRead(d *schema.ResourceData, meta interface{} return nil } -func resourceArmBlueprintAssignmentDelete(d *schema.ResourceData, meta interface{}) error { +func resourceBlueprintAssignmentDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Blueprints.AssignmentsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - assignmentID, err := parse.AssignmentID(d.Id()) + id, err := parse.AssignmentID(d.Id()) if err != nil { return err } - name := assignmentID.Name - targetScope := assignmentID.Scope - // We use none here to align the previous behaviour of the blueprint resource // TODO: we could add a features flag for the blueprint to empower terraform when deleting the blueprint to delete all the generated resources as well - resp, err := client.Delete(ctx, targetScope, name, blueprint.None) + resp, err := client.Delete(ctx, id.Scope, id.Name, blueprint.None) if err != nil { if utils.ResponseWasNotFound(resp.Response) { return nil } - return fmt.Errorf("failed to delete Blueprint Assignment %q from scope %q: %+v", name, targetScope, err) + return fmt.Errorf("failed to delete Blueprint Assignment %q from scope %q: %+v", id.Name, id.Scope, err) } stateConf := &resource.StateChangeConf{ @@ -330,12 +324,12 @@ func resourceArmBlueprintAssignmentDelete(d *schema.ResourceData, meta interface string(blueprint.Failed), }, Target: []string{"NotFound"}, - Refresh: blueprintAssignmentDeleteStateRefreshFunc(ctx, client, targetScope, name), + Refresh: blueprintAssignmentDeleteStateRefreshFunc(ctx, client, id.Scope, id.Name), Timeout: d.Timeout(schema.TimeoutDelete), } if _, err := stateConf.WaitForState(); err != nil { - return fmt.Errorf("Failed waiting for Blueprint Assignment %q (Scope %q): %+v", name, targetScope, err) + return fmt.Errorf("Failed waiting for Blueprint Assignment %q (Scope %q): %+v", id.Name, id.Scope, err) } return nil diff --git a/azurerm/internal/services/blueprints/blueprint_assignment_resource_test.go b/azurerm/internal/services/blueprints/blueprint_assignment_resource_test.go new file mode 100644 index 000000000000..cda428ee1185 --- /dev/null +++ b/azurerm/internal/services/blueprints/blueprint_assignment_resource_test.go @@ -0,0 +1,370 @@ +package blueprints_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/blueprints/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type BlueprintAssignmentResource struct { +} + +// Scenario: Basic BP, no artefacts etc. Stored and applied at Subscription. 
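+// Note: the referenced blueprint definition and published version are looked up via data sources in the test configuration, so they must already exist in the target subscription before these tests run.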
+func TestAccBlueprintAssignment_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_blueprint_assignment", "test") + r := BlueprintAssignmentResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "testAcc_basicSubscription", "v0.1_testAcc"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccBlueprintAssignment_basicUpdated(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_blueprint_assignment", "test") + r := BlueprintAssignmentResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "testAcc_basicSubscription", "v0.1_testAcc"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basic(data, "testAcc_basicSubscription", "v0.2_testAcc"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccBlueprintAssignment_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_blueprint_assignment", "test") + r := BlueprintAssignmentResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "testAcc_basicSubscription", "v0.1_testAcc"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data, "testAcc_basicSubscription", "v0.1_testAcc"), + ExpectError: acceptance.RequiresImportError("azurerm_blueprint_assignment"), + }, + }) +} + +// Scenario: BP with RG's, locking and parameters/policies stored at Subscription, applied to subscription +func TestAccBlueprintAssignment_subscriptionComplete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_blueprint_assignment", "test") + r := BlueprintAssignmentResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.subscriptionComplete(data, "testAcc_subscriptionComplete", "v0.1_testAcc"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +// Scenario: BP stored at Root Management Group, applied to Subscription +func TestAccBlueprintAssignment_managementGroup(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_blueprint_assignment", "test") + r := BlueprintAssignmentResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.rootManagementGroup(data, "testAcc_basicRootManagementGroup", "v0.1_testAcc"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t BlueprintAssignmentResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.AssignmentID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Blueprints.AssignmentsClient.Get(ctx, id.Scope, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Blueprint Assignment %q (scope %q) was not found", id.Name, id.Scope) + } + + return utils.Bool(resp.AssignmentProperties != nil), nil +} + +func (BlueprintAssignmentResource) basic(data acceptance.TestData, bpName string, version string) string { + subscription := data.Client().SubscriptionIDAlt + return fmt.Sprintf(` +provider "azurerm" { + subscription_id = "%s" + features {} +} + +data "azurerm_client_config" "current" {} + +data "azurerm_subscription" "test" {} + 
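+# Look up the existing blueprint definition and the requested published version at the subscription scope.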
+data "azurerm_blueprint_definition" "test" { + name = "%s" + scope_id = data.azurerm_subscription.test.id +} + +data "azurerm_blueprint_published_version" "test" { + scope_id = data.azurerm_blueprint_definition.test.scope_id + blueprint_name = data.azurerm_blueprint_definition.test.name + version = "%s" +} + +resource "azurerm_resource_group" "test" { + name = "accTestRG-bp-%d" + location = "westeurope" +} + +resource "azurerm_user_assigned_identity" "test" { + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + name = "bp-user-%d" +} + +resource "azurerm_role_assignment" "test" { + scope = data.azurerm_subscription.test.id + role_definition_name = "Blueprint Operator" + principal_id = azurerm_user_assigned_identity.test.principal_id +} + +resource "azurerm_blueprint_assignment" "test" { + name = "testAccBPAssignment" + target_subscription_id = data.azurerm_subscription.test.id + version_id = data.azurerm_blueprint_published_version.test.id + location = "%s" + + identity { + type = "UserAssigned" + identity_ids = [azurerm_user_assigned_identity.test.id] + } + + depends_on = [ + azurerm_role_assignment.test + ] +} +`, subscription, bpName, version, data.RandomInteger, data.RandomInteger, data.Locations.Primary) +} + +// This test config creates a UM-MSI and assigns Owner to the target subscription. This is necessary due to the changes +// the referenced Blueprint Version needs to make to successfully apply. If the test does not exit cleanly, "dangling" +// resources can include the Role Assignment(s) at the Subscription, which will need to be removed +func (BlueprintAssignmentResource) subscriptionComplete(data acceptance.TestData, bpName string, version string) string { + subscription := data.Client().SubscriptionIDAlt + + return fmt.Sprintf(` +provider "azurerm" { + subscription_id = "%s" + features {} +} + +data "azurerm_client_config" "current" {} + +data "azurerm_subscription" "test" {} + +data "azurerm_blueprint_definition" "test" { + name = "%s" + scope_id = data.azurerm_subscription.test.id +} + +data "azurerm_blueprint_published_version" "test" { + scope_id = data.azurerm_blueprint_definition.test.scope_id + blueprint_name = data.azurerm_blueprint_definition.test.name + version = "%s" +} + +resource "azurerm_resource_group" "test" { + name = "accTestRG-bp-%d" + location = "%s" + + tags = { + testAcc = "true" + } +} + +resource "azurerm_user_assigned_identity" "test" { + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + name = "bp-user-%d" +} + +resource "azurerm_role_assignment" "operator" { + scope = data.azurerm_subscription.test.id + role_definition_name = "Blueprint Operator" + principal_id = azurerm_user_assigned_identity.test.principal_id +} + +resource "azurerm_role_assignment" "owner" { + scope = data.azurerm_subscription.test.id + role_definition_name = "Owner" + principal_id = azurerm_user_assigned_identity.test.principal_id +} + +resource "azurerm_blueprint_assignment" "test" { + name = "testAccBPAssignment" + target_subscription_id = data.azurerm_subscription.test.id + version_id = data.azurerm_blueprint_published_version.test.id + location = "%s" + + lock_mode = "AllResourcesDoNotDelete" + + lock_exclude_principals = [ + data.azurerm_client_config.current.object_id, + ] + + identity { + type = "UserAssigned" + identity_ids = [azurerm_user_assigned_identity.test.id] + } + + resource_groups = < 0 { - return fmt.Errorf("`global_delivery_policy` and 
`delivery_rule` are only allowed when `Standard_Microsoft` sku is used. Profile sku: %s", profile.Sku.Name) + return fmt.Errorf("`global_delivery_rule` and `delivery_rule` are only allowed when `Standard_Microsoft` sku is used. Profile sku: %s", profile.Sku.Name) } - endpoint.EndpointProperties.DeliveryPolicy = deliveryPolicy + if profile.Sku.Name == cdn.StandardMicrosoft { + endpoint.EndpointProperties.DeliveryPolicy = deliveryPolicy + } } future, err := endpointsClient.Create(ctx, resourceGroup, profileName, name, endpoint) @@ -316,22 +323,22 @@ func resourceArmCdnEndpointCreate(d *schema.ResourceData, meta interface{}) erro return fmt.Errorf("Error retrieving CDN Endpoint %q (Profile %q / Resource Group %q): %+v", name, profileName, resourceGroup, err) } - id, err := parse.CdnEndpointID(*read.ID) + id, err := parse.EndpointID(*read.ID) if err != nil { return err } - d.SetId(id.ID(subscriptionId)) + d.SetId(id.ID()) - return resourceArmCdnEndpointRead(d, meta) + return resourceCdnEndpointRead(d, meta) } -func resourceArmCdnEndpointUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceCdnEndpointUpdate(d *schema.ResourceData, meta interface{}) error { endpointsClient := meta.(*clients.Client).Cdn.EndpointsClient ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.CdnEndpointID(d.Id()) + id, err := parse.EndpointID(d.Id()) if err != nil { return err } @@ -343,14 +350,10 @@ func resourceArmCdnEndpointUpdate(d *schema.ResourceData, meta interface{}) erro originPath := d.Get("origin_path").(string) probePath := d.Get("probe_path").(string) optimizationType := d.Get("optimization_type").(string) - contentTypes := expandArmCdnEndpointContentTypesToCompress(d) - geoFilters := expandCdnEndpointGeoFilters(d) t := d.Get("tags").(map[string]interface{}) endpoint := cdn.EndpointUpdateParameters{ EndpointPropertiesUpdateParameters: &cdn.EndpointPropertiesUpdateParameters{ - ContentTypesToCompress: &contentTypes, - GeoFilters: geoFilters, IsHTTPAllowed: utils.Bool(httpAllowed), IsHTTPSAllowed: utils.Bool(httpsAllowed), QueryStringCachingBehavior: cdn.QueryStringCachingBehavior(cachingBehaviour), @@ -359,6 +362,16 @@ func resourceArmCdnEndpointUpdate(d *schema.ResourceData, meta interface{}) erro Tags: tags.Expand(t), } + if _, ok := d.GetOk("content_types_to_compress"); ok { + contentTypes := expandArmCdnEndpointContentTypesToCompress(d) + endpoint.EndpointPropertiesUpdateParameters.ContentTypesToCompress = &contentTypes + } + + if _, ok := d.GetOk("geo_filter"); ok { + geoFilters := expandCdnEndpointGeoFilters(d) + endpoint.EndpointPropertiesUpdateParameters.GeoFilters = geoFilters + } + if v, ok := d.GetOk("is_compression_enabled"); ok { endpoint.EndpointPropertiesUpdateParameters.IsCompressionEnabled = utils.Bool(v.(bool)) } @@ -390,10 +403,12 @@ func resourceArmCdnEndpointUpdate(d *schema.ResourceData, meta interface{}) erro } if profile.Sku.Name != cdn.StandardMicrosoft && len(*deliveryPolicy.Rules) > 0 { - return fmt.Errorf("`global_delivery_policy` and `delivery_rule` are only allowed when `Standard_Microsoft` sku is used. Profile sku: %s", profile.Sku.Name) + return fmt.Errorf("`global_delivery_rule` and `delivery_rule` are only allowed when `Standard_Microsoft` sku is used. 
Profile sku: %s", profile.Sku.Name) } - endpoint.EndpointPropertiesUpdateParameters.DeliveryPolicy = deliveryPolicy + if profile.Sku.Name == cdn.StandardMicrosoft { + endpoint.EndpointPropertiesUpdateParameters.DeliveryPolicy = deliveryPolicy + } } future, err := endpointsClient.Update(ctx, id.ResourceGroup, id.ProfileName, id.Name, endpoint) @@ -405,15 +420,15 @@ func resourceArmCdnEndpointUpdate(d *schema.ResourceData, meta interface{}) erro return fmt.Errorf("Error waiting for the CDN Endpoint %q (Profile %q / Resource Group %q) to finish updating: %+v", id.Name, id.ProfileName, id.ResourceGroup, err) } - return resourceArmCdnEndpointRead(d, meta) + return resourceCdnEndpointRead(d, meta) } -func resourceArmCdnEndpointRead(d *schema.ResourceData, meta interface{}) error { +func resourceCdnEndpointRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cdn.EndpointsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.CdnEndpointID(d.Id()) + id, err := parse.EndpointID(d.Id()) if err != nil { return err } @@ -450,7 +465,7 @@ func resourceArmCdnEndpointRead(d *schema.ResourceData, meta interface{}) error if _, ok := d.GetOk("is_compression_enabled"); ok { if compressionEnabled := props.IsCompressionEnabled; compressionEnabled != nil { - d.Set("is_compression_enabled", *compressionEnabled) + d.Set("is_compression_enabled", compressionEnabled) } } @@ -484,12 +499,12 @@ func resourceArmCdnEndpointRead(d *schema.ResourceData, meta interface{}) error return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmCdnEndpointDelete(d *schema.ResourceData, meta interface{}) error { +func resourceCdnEndpointDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cdn.EndpointsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.CdnEndpointID(d.Id()) + id, err := parse.EndpointID(d.Id()) if err != nil { return err } diff --git a/azurerm/internal/services/cdn/cdn_endpoint_resource_test.go b/azurerm/internal/services/cdn/cdn_endpoint_resource_test.go new file mode 100644 index 000000000000..4550792e53ea --- /dev/null +++ b/azurerm/internal/services/cdn/cdn_endpoint_resource_test.go @@ -0,0 +1,1325 @@ +package cdn_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cdn/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type CdnEndpointResource struct{} + +func TestAccCdnEndpoint_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cdn_endpoint", "test") + r := CdnEndpointResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccCdnEndpoint_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cdn_endpoint", "test") + r := CdnEndpointResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: 
r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccCdnEndpoint_disappears(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cdn_endpoint", "test") + r := CdnEndpointResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + data.DisappearsStep(acceptance.DisappearsStepData{ + Config: r.basic, + TestResource: r, + }), + }) +} + +func TestAccCdnEndpoint_updateHostHeader(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cdn_endpoint", "test") + r := CdnEndpointResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.hostHeader(data, "www.contoso.com"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.hostHeader(data, ""), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.hostHeader(data, "www.example2.com"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccCdnEndpoint_withTags(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cdn_endpoint", "test") + r := CdnEndpointResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.withTagsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccCdnEndpoint_withoutCompression(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cdn_endpoint", "test") + r := CdnEndpointResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withoutCompression(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccCdnEndpoint_optimized(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cdn_endpoint", "test") + r := CdnEndpointResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.optimized(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccCdnEndpoint_withGeoFilters(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cdn_endpoint", "test") + r := CdnEndpointResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.geoFilters(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccCdnEndpoint_fullFields(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cdn_endpoint", "test") + r := CdnEndpointResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.fullFields(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("is_http_allowed").HasValue("true"), + check.That(data.ResourceName).Key("is_https_allowed").HasValue("true"), + check.That(data.ResourceName).Key("origin_path").HasValue("/origin-path"), + check.That(data.ResourceName).Key("probe_path").HasValue("/origin-path/probe"), + 
check.That(data.ResourceName).Key("origin_host_header").HasValue("www.contoso.com"), + check.That(data.ResourceName).Key("optimization_type").HasValue("GeneralWebDelivery"), + check.That(data.ResourceName).Key("querystring_caching_behaviour").HasValue("UseQueryString"), + check.That(data.ResourceName).Key("content_types_to_compress.#").HasValue("1"), + check.That(data.ResourceName).Key("is_compression_enabled").HasValue("true"), + check.That(data.ResourceName).Key("geo_filter.#").HasValue("1"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.environment").HasValue("Production"), + ), + }, + // TODO -- add import step. Import step now gives us an error complaining that `is_compression_enabled` is not imported + }) +} + +func TestAccCdnEndpoint_isHttpAndHttpsAllowedUpdate(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cdn_endpoint", "test") + r := CdnEndpointResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.isHttpAndHttpsAllowed(data, "true", "false"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.isHttpAndHttpsAllowed(data, "false", "true"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccCdnEndpoint_globalDeliveryRule(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cdn_endpoint", "test") + r := CdnEndpointResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.globalDeliveryRule(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.globalDeliveryRuleUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.globalDeliveryRuleRemove(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccCdnEndpoint_deliveryRule(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cdn_endpoint", "test") + r := CdnEndpointResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.deliveryRule(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.deliveryRuleUpdate1(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.deliveryRuleUpdate2(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.deliveryRuleRemove(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccCdnEndpoint_dnsAlias(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cdn_endpoint", "test") + r := CdnEndpointResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.testAccAzureRMCdnEndpoint_dnsAlias(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccCdnEndpoint_PremiumVerizon(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cdn_endpoint", "test") + r := CdnEndpointResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + 
Config: r.PremiumVerizon(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccCdnEndpoint_deliveryRuleOptionalMatchValue(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cdn_endpoint", "test") + r := CdnEndpointResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.deliveryRuleOptionalMatchValue(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (r CdnEndpointResource) Exists(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.EndpointID(state.ID) + if err != nil { + return nil, err + } + resp, err := client.Cdn.EndpointsClient.Get(ctx, id.ResourceGroup, id.ProfileName, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return utils.Bool(false), nil + } + return nil, fmt.Errorf("retrieving CDN Endpoint %q (Resource Group %q / Profile Name %q): %+v", id.Name, id.ResourceGroup, id.ProfileName, err) + } + return utils.Bool(true), nil +} + +func (r CdnEndpointResource) Destroy(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.EndpointID(state.ID) + if err != nil { + return nil, err + } + + endpointsClient := client.Cdn.EndpointsClient + future, err := endpointsClient.Delete(ctx, id.ResourceGroup, id.ProfileName, id.Name) + if err != nil { + return nil, fmt.Errorf("deleting CDN Endpoint %q (Resource Group %q / Profile %q): %+v", id.Name, id.ResourceGroup, id.ProfileName, err) + } + if err := future.WaitForCompletionRef(ctx, endpointsClient.Client); err != nil { + return nil, fmt.Errorf("waiting for deletion of CDN Endpoint %q (Resource Group %q / Profile %q): %+v", id.Name, id.ResourceGroup, id.ProfileName, err) + } + + return utils.Bool(true), nil +} + +func (r CdnEndpointResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_cdn_profile" "test" { + name = "acctestcdnprof%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard_Verizon" +} + +resource "azurerm_cdn_endpoint" "test" { + name = "acctestcdnend%d" + profile_name = azurerm_cdn_profile.test.name + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + origin { + name = "acceptanceTestCdnOrigin1" + host_name = "www.contoso.com" + https_port = 443 + http_port = 80 + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r CdnEndpointResource) requiresImport(data acceptance.TestData) string { + template := r.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_cdn_endpoint" "import" { + name = azurerm_cdn_endpoint.test.name + profile_name = azurerm_cdn_endpoint.test.profile_name + location = azurerm_cdn_endpoint.test.location + resource_group_name = azurerm_cdn_endpoint.test.resource_group_name + + origin { + name = "acceptanceTestCdnOrigin1" + host_name = "www.contoso.com" + https_port = 443 + http_port = 80 + } +} +`, template) +} + +func (r CdnEndpointResource) hostHeader(data acceptance.TestData, domain string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" 
"test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_cdn_profile" "test" { + name = "acctestcdnprof%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard_Verizon" +} + +resource "azurerm_cdn_endpoint" "test" { + name = "acctestcdnend%d" + profile_name = azurerm_cdn_profile.test.name + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + origin_host_header = "%s" + + origin { + name = "acceptanceTestCdnOrigin2" + host_name = "www.contoso.com" + https_port = 443 + http_port = 80 + } + + tags = { + environment = "Production" + cost_center = "MSFT" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, domain) +} + +func (r CdnEndpointResource) withTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_cdn_profile" "test" { + name = "acctestcdnprof%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard_Verizon" +} + +resource "azurerm_cdn_endpoint" "test" { + name = "acctestcdnend%d" + profile_name = azurerm_cdn_profile.test.name + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + origin { + name = "acceptanceTestCdnOrigin2" + host_name = "www.contoso.com" + https_port = 443 + http_port = 80 + } + + tags = { + environment = "Production" + cost_center = "MSFT" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r CdnEndpointResource) withTagsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_cdn_profile" "test" { + name = "acctestcdnprof%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard_Verizon" +} + +resource "azurerm_cdn_endpoint" "test" { + name = "acctestcdnend%d" + profile_name = azurerm_cdn_profile.test.name + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + origin { + name = "acceptanceTestCdnOrigin2" + host_name = "www.contoso.com" + https_port = 443 + http_port = 80 + } + + tags = { + environment = "staging" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r CdnEndpointResource) geoFilters(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_cdn_profile" "test" { + name = "acctestcdnprof%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard_Verizon" +} + +resource "azurerm_cdn_endpoint" "test" { + name = "acctestcdnend%d" + profile_name = azurerm_cdn_profile.test.name + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + is_http_allowed = false + is_https_allowed = true + origin_path = "/origin-path" + probe_path = "/origin-path/probe" + + origin { + name = "acceptanceTestCdnOrigin1" + host_name = "www.contoso.com" + 
https_port = 443 + http_port = 80 + } + + geo_filter { + relative_path = "/some-example-endpoint" + action = "Allow" + country_codes = ["GB"] + } + + geo_filter { + relative_path = "/some-other-endpoint" + action = "Block" + country_codes = ["US"] + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r CdnEndpointResource) optimized(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_cdn_profile" "test" { + name = "acctestcdnprof%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard_Verizon" +} + +resource "azurerm_cdn_endpoint" "test" { + name = "acctestcdnend%d" + profile_name = azurerm_cdn_profile.test.name + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + is_http_allowed = false + is_https_allowed = true + optimization_type = "GeneralWebDelivery" + + origin { + name = "acceptanceTestCdnOrigin1" + host_name = "www.contoso.com" + https_port = 443 + http_port = 80 + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r CdnEndpointResource) withoutCompression(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_cdn_profile" "test" { + name = "acctestcdnprof%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard_Verizon" +} + +resource "azurerm_cdn_endpoint" "test" { + name = "acctestcdnend%d" + profile_name = azurerm_cdn_profile.test.name + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + is_http_allowed = false + is_https_allowed = true + optimization_type = "GeneralWebDelivery" + + origin { + name = "acceptanceTestCdnOrigin1" + host_name = "www.contoso.com" + https_port = 443 + http_port = 80 + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r CdnEndpointResource) fullFields(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_cdn_profile" "test" { + name = "acctestcdnprof%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard_Verizon" +} + +resource "azurerm_cdn_endpoint" "test" { + name = "acctestcdnend%d" + profile_name = azurerm_cdn_profile.test.name + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + is_http_allowed = true + is_https_allowed = true + content_types_to_compress = ["text/html"] + is_compression_enabled = true + querystring_caching_behaviour = "UseQueryString" + origin_host_header = "www.contoso.com" + optimization_type = "GeneralWebDelivery" + origin_path = "/origin-path" + probe_path = "/origin-path/probe" + + origin { + name = "acceptanceTestCdnOrigin1" + host_name = "www.contoso.com" + https_port = 443 + http_port = 80 + } + + geo_filter { + relative_path = "/some-example-endpoint" + action = "Allow" + country_codes = ["GB"] + } + + tags = { + environment = 
"Production" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r CdnEndpointResource) isHttpAndHttpsAllowed(data acceptance.TestData, isHttpAllowed string, isHttpsAllowed string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_cdn_profile" "test" { + name = "acctestcdnprof%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard_Verizon" +} + +resource "azurerm_cdn_endpoint" "test" { + name = "acctestcdnend%d" + profile_name = azurerm_cdn_profile.test.name + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + is_http_allowed = %s + is_https_allowed = %s + + origin { + name = "acceptanceTestCdnOrigin1" + host_name = "www.contoso.com" + https_port = 443 + http_port = 80 + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, isHttpAllowed, isHttpsAllowed) +} + +func (r CdnEndpointResource) globalDeliveryRule(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_cdn_profile" "test" { + name = "acctestcdnprof%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard_Microsoft" +} + +resource "azurerm_cdn_endpoint" "test" { + name = "acctestcdnend%d" + profile_name = azurerm_cdn_profile.test.name + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + origin_host_header = "www.contoso.com" + + origin { + name = "acceptanceTestCdnOrigin1" + host_name = "www.contoso.com" + https_port = 443 + http_port = 80 + } + + global_delivery_rule { + cache_expiration_action { + behavior = "Override" + duration = "5.04:44:23" + } + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r CdnEndpointResource) globalDeliveryRuleUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_cdn_profile" "test" { + name = "acctestcdnprof%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard_Microsoft" +} + +resource "azurerm_cdn_endpoint" "test" { + name = "acctestcdnend%d" + profile_name = azurerm_cdn_profile.test.name + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + origin_host_header = "www.contoso.com" + + origin { + name = "acceptanceTestCdnOrigin1" + host_name = "www.contoso.com" + https_port = 443 + http_port = 80 + } + + global_delivery_rule { + cache_expiration_action { + behavior = "SetIfMissing" + duration = "12.04:11:22" + } + + modify_response_header_action { + action = "Overwrite" + name = "Content-Type" + value = "application/json" + } + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r CdnEndpointResource) globalDeliveryRuleRemove(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = 
"acctestRG-%d" + location = "%s" +} + +resource "azurerm_cdn_profile" "test" { + name = "acctestcdnprof%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard_Microsoft" +} + +resource "azurerm_cdn_endpoint" "test" { + name = "acctestcdnend%d" + profile_name = azurerm_cdn_profile.test.name + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + origin_host_header = "www.contoso.com" + + origin { + name = "acceptanceTestCdnOrigin1" + host_name = "www.contoso.com" + https_port = 443 + http_port = 80 + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r CdnEndpointResource) deliveryRule(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_cdn_profile" "test" { + name = "acctestcdnprof%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard_Microsoft" +} + +resource "azurerm_cdn_endpoint" "test" { + name = "acctestcdnend%d" + profile_name = azurerm_cdn_profile.test.name + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + origin_host_header = "www.contoso.com" + + origin { + name = "acceptanceTestCdnOrigin1" + host_name = "www.contoso.com" + https_port = 443 + http_port = 80 + } + + delivery_rule { + name = "http2https" + order = 1 + + request_scheme_condition { + match_values = ["HTTP"] + } + + url_redirect_action { + redirect_type = "Found" + protocol = "Https" + } + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r CdnEndpointResource) deliveryRuleUpdate1(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_cdn_profile" "test" { + name = "acctestcdnprof%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard_Microsoft" +} + +resource "azurerm_cdn_endpoint" "test" { + name = "acctestcdnend%d" + profile_name = azurerm_cdn_profile.test.name + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + origin_host_header = "www.contoso.com" + + origin { + name = "acceptanceTestCdnOrigin1" + host_name = "www.contoso.com" + https_port = 443 + http_port = 80 + } + + delivery_rule { + name = "http2https" + order = 1 + + request_scheme_condition { + negate_condition = true + match_values = ["HTTPS"] + } + + url_redirect_action { + redirect_type = "Found" + protocol = "Https" + } + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r CdnEndpointResource) deliveryRuleUpdate2(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_cdn_profile" "test" { + name = "acctestcdnprof%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard_Microsoft" +} + +resource "azurerm_cdn_endpoint" "test" { + name = "acctestcdnend%d" + profile_name = 
azurerm_cdn_profile.test.name + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + origin_host_header = "www.contoso.com" + + origin { + name = "acceptanceTestCdnOrigin1" + host_name = "www.contoso.com" + https_port = 443 + http_port = 80 + } + + delivery_rule { + name = "http2https" + order = 1 + + request_scheme_condition { + negate_condition = true + match_values = ["HTTPS"] + } + + url_redirect_action { + redirect_type = "Found" + protocol = "Https" + } + } + + delivery_rule { + name = "test" + order = 2 + + device_condition { + match_values = ["Mobile"] + } + + modify_response_header_action { + action = "Delete" + name = "Content-Language" + } + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r CdnEndpointResource) deliveryRuleRemove(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_cdn_profile" "test" { + name = "acctestcdnprof%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard_Microsoft" +} + +resource "azurerm_cdn_endpoint" "test" { + name = "acctestcdnend%d" + profile_name = azurerm_cdn_profile.test.name + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + origin_host_header = "www.contoso.com" + + origin { + name = "acceptanceTestCdnOrigin1" + host_name = "www.contoso.com" + https_port = 443 + http_port = 80 + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r CdnEndpointResource) testAccAzureRMCdnEndpoint_dnsAlias(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestcdnep%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_cdn_profile" "test" { + name = "acctestcdnep%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard_Verizon" +} + +resource "azurerm_cdn_endpoint" "test" { + name = "acctestcdnep%d" + profile_name = azurerm_cdn_profile.test.name + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + origin { + name = "acceptanceTestCdnOrigin1" + host_name = "www.contoso.com" + https_port = 443 + http_port = 80 + } +} + +resource "azurerm_dns_a_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + target_resource_id = azurerm_cdn_endpoint.test.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r CdnEndpointResource) PremiumVerizon(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_cdn_profile" "test" { + name = "acctestcdnprof%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Premium_Verizon" +} + +resource "azurerm_cdn_endpoint" "test" { + name = 
"acctestcdnend%d" + profile_name = azurerm_cdn_profile.test.name + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + is_http_allowed = false + is_https_allowed = true + querystring_caching_behaviour = "NotSet" + + origin { + name = "acceptanceTestCdnOrigin1" + host_name = "www.contoso.com" + https_port = 443 + http_port = 80 + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r CdnEndpointResource) deliveryRuleOptionalMatchValue(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_cdn_profile" "test" { + name = "acctestcdnprof%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard_Microsoft" +} + +resource "azurerm_cdn_endpoint" "test" { + name = "acctestcdnend%d" + profile_name = azurerm_cdn_profile.test.name + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + origin_host_header = "www.contoso.com" + + origin { + name = "acceptanceTestCdnOrigin1" + host_name = "www.contoso.com" + https_port = 443 + http_port = 80 + } + + delivery_rule { + name = "cookieCondition" + order = 1 + + cookies_condition { + selector = "abc" + operator = "Any" + } + + modify_response_header_action { + action = "Delete" + name = "Content-Language" + } + } + + delivery_rule { + name = "postArg" + order = 2 + + post_arg_condition { + selector = "abc" + operator = "Any" + } + + modify_response_header_action { + action = "Delete" + name = "Content-Language" + } + } + + delivery_rule { + name = "queryString" + order = 3 + + query_string_condition { + operator = "Any" + } + + modify_response_header_action { + action = "Delete" + name = "Content-Language" + } + } + + delivery_rule { + name = "remoteAddress" + order = 4 + + remote_address_condition { + operator = "Any" + } + + modify_response_header_action { + action = "Delete" + name = "Content-Language" + } + } + + delivery_rule { + name = "requestBody" + order = 5 + + request_body_condition { + operator = "Any" + } + + modify_response_header_action { + action = "Delete" + name = "Content-Language" + } + } + + delivery_rule { + name = "requestHeader" + order = 6 + + request_header_condition { + selector = "abc" + operator = "Any" + } + + modify_response_header_action { + action = "Delete" + name = "Content-Language" + } + } + + delivery_rule { + name = "requestUri" + order = 7 + + request_uri_condition { + operator = "Any" + } + + modify_response_header_action { + action = "Delete" + name = "Content-Language" + } + } + + delivery_rule { + name = "uriFileExtension" + order = 8 + + url_file_extension_condition { + operator = "Any" + } + + modify_response_header_action { + action = "Delete" + name = "Content-Language" + } + } + + delivery_rule { + name = "uriFileName" + order = 9 + + url_file_name_condition { + operator = "Any" + } + + modify_response_header_action { + action = "Delete" + name = "Content-Language" + } + } + + delivery_rule { + name = "uriPath" + order = 10 + + url_path_condition { + operator = "Any" + } + + modify_response_header_action { + action = "Delete" + name = "Content-Language" + } + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/cdn/cdn_profile_data_source.go 
b/azurerm/internal/services/cdn/cdn_profile_data_source.go index cf239b1a730a..bbb44b822312 100644 --- a/azurerm/internal/services/cdn/cdn_profile_data_source.go +++ b/azurerm/internal/services/cdn/cdn_profile_data_source.go @@ -14,9 +14,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmCdnProfile() *schema.Resource { +func dataSourceCdnProfile() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmCdnProfileRead, + Read: dataSourceCdnProfileRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -42,8 +42,7 @@ func dataSourceArmCdnProfile() *schema.Resource { } } -func dataSourceArmCdnProfileRead(d *schema.ResourceData, meta interface{}) error { - subscriptionId := meta.(*clients.Client).Account.SubscriptionId +func dataSourceCdnProfileRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cdn.ProfilesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -59,12 +58,12 @@ func dataSourceArmCdnProfileRead(d *schema.ResourceData, meta interface{}) error return fmt.Errorf("Error making Read request on Azure CDN Profile %q (Resource Group %q): %+v", name, resourceGroup, err) } - id, err := parse.CdnProfileID(*resp.ID) + id, err := parse.ProfileID(*resp.ID) if err != nil { return err } - d.SetId(id.ID(subscriptionId)) + d.SetId(id.ID()) d.Set("name", name) d.Set("resource_group_name", resourceGroup) diff --git a/azurerm/internal/services/cdn/cdn_profile_data_source_test.go b/azurerm/internal/services/cdn/cdn_profile_data_source_test.go new file mode 100644 index 000000000000..473f0ba77ff7 --- /dev/null +++ b/azurerm/internal/services/cdn/cdn_profile_data_source_test.go @@ -0,0 +1,98 @@ +package cdn_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type CdnProfileDataSource struct{} + +func TestAccCdnProfileDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_cdn_profile", "test") + d := CdnProfileDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: d.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("sku").HasValue("Standard_Verizon"), + ), + }, + }) +} + +func TestAccCdnProfileDataSource_withTags(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_cdn_profile", "test") + d := CdnProfileDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: d.withTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("sku").HasValue("Standard_Verizon"), + check.That(data.ResourceName).Key("tags.%").HasValue("2"), + check.That(data.ResourceName).Key("tags.environment").HasValue("Production"), + check.That(data.ResourceName).Key("tags.cost_center").HasValue("MSFT"), + ), + }, + }) +} + +func (d CdnProfileDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_cdn_profile" "test" { + name = "acctestcdnprof%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard_Verizon" +} + +data "azurerm_cdn_profile" 
"test" { + name = azurerm_cdn_profile.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (d CdnProfileDataSource) withTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_cdn_profile" "test" { + name = "acctestcdnprof%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard_Verizon" + + tags = { + environment = "Production" + cost_center = "MSFT" + } +} + +data "azurerm_cdn_profile" "test" { + name = azurerm_cdn_profile.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/cdn/cdn_profile_resource.go b/azurerm/internal/services/cdn/cdn_profile_resource.go index 703a26772202..fe9df849031e 100644 --- a/azurerm/internal/services/cdn/cdn_profile_resource.go +++ b/azurerm/internal/services/cdn/cdn_profile_resource.go @@ -22,12 +22,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmCdnProfile() *schema.Resource { +func resourceCdnProfile() *schema.Resource { return &schema.Resource{ - Create: resourceArmCdnProfileCreate, - Read: resourceArmCdnProfileRead, - Update: resourceArmCdnProfileUpdate, - Delete: resourceArmCdnProfileDelete, + Create: resourceCdnProfileCreate, + Read: resourceCdnProfileRead, + Update: resourceCdnProfileUpdate, + Delete: resourceCdnProfileDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -37,7 +37,7 @@ func resourceArmCdnProfile() *schema.Resource { }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.CdnProfileID(id) + _, err := parse.ProfileID(id) return err }), @@ -80,8 +80,7 @@ func resourceArmCdnProfile() *schema.Resource { } } -func resourceArmCdnProfileCreate(d *schema.ResourceData, meta interface{}) error { - subscriptionId := meta.(*clients.Client).Account.SubscriptionId +func resourceCdnProfileCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cdn.ProfilesClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -133,17 +132,17 @@ func resourceArmCdnProfileCreate(d *schema.ResourceData, meta interface{}) error return fmt.Errorf("Cannot read CDN Profile %s (resource group %s) ID", name, resGroup) } - id, err := parse.CdnProfileID(*read.ID) + id, err := parse.ProfileID(*read.ID) if err != nil { return err } - d.SetId(id.ID(subscriptionId)) + d.SetId(id.ID()) - return resourceArmCdnProfileRead(d, meta) + return resourceCdnProfileRead(d, meta) } -func resourceArmCdnProfileUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceCdnProfileUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cdn.ProfilesClient ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -152,7 +151,7 @@ func resourceArmCdnProfileUpdate(d *schema.ResourceData, meta interface{}) error return nil } - id, err := parse.CdnProfileID(d.Id()) + id, err := parse.ProfileID(d.Id()) if err != nil { return err } @@ -174,15 +173,15 @@ func resourceArmCdnProfileUpdate(d *schema.ResourceData, meta interface{}) error return fmt.Errorf("Error waiting for 
the update of CDN Profile %q (Resource Group %q) to commplete: %+v", id.Name, id.ResourceGroup, err) } - return resourceArmCdnProfileRead(d, meta) + return resourceCdnProfileRead(d, meta) } -func resourceArmCdnProfileRead(d *schema.ResourceData, meta interface{}) error { +func resourceCdnProfileRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cdn.ProfilesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.CdnProfileID(d.Id()) + id, err := parse.ProfileID(d.Id()) if err != nil { return err } @@ -209,31 +208,29 @@ func resourceArmCdnProfileRead(d *schema.ResourceData, meta interface{}) error { return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmCdnProfileDelete(d *schema.ResourceData, meta interface{}) error { +func resourceCdnProfileDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cdn.ProfilesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.ProfileID(d.Id()) if err != nil { return err } - resourceGroup := id.ResourceGroup - name := id.Path["profiles"] - future, err := client.Delete(ctx, resourceGroup, name) + future, err := client.Delete(ctx, id.ResourceGroup, id.Name) if err != nil { if response.WasNotFound(future.Response()) { return nil } - return fmt.Errorf("Error issuing delete request for CDN Profile %q (Resource Group %q): %+v", name, resourceGroup, err) + return fmt.Errorf("deleting CDN Profile %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { if response.WasNotFound(future.Response()) { return nil } - return fmt.Errorf("Error waiting for CDN Profile %q (Resource Group %q) to be deleted: %+v", name, resourceGroup, err) + return fmt.Errorf("waiting for deletion of CDN Profile %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } return err diff --git a/azurerm/internal/services/cdn/cdn_profile_resource_test.go b/azurerm/internal/services/cdn/cdn_profile_resource_test.go new file mode 100644 index 000000000000..c07ec1992434 --- /dev/null +++ b/azurerm/internal/services/cdn/cdn_profile_resource_test.go @@ -0,0 +1,301 @@ +package cdn_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cdn/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type CdnProfileResource struct{} + +func TestAccCdnProfile_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cdn_profile", "test") + r := CdnProfileResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccCdnProfile_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cdn_profile", "test") + r := CdnProfileResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: 
r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccCdnProfile_withTags(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cdn_profile", "test") + r := CdnProfileResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.withTagsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccCdnProfile_NonStandardCasing(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cdn_profile", "test") + r := CdnProfileResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.nonStandardCasing(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.nonStandardCasing(data), + PlanOnly: true, + ExpectNonEmptyPlan: false, + }, + }) +} + +func TestAccCdnProfile_basicToStandardAkamai(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cdn_profile", "test") + r := CdnProfileResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.standardAkamai(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccCdnProfile_standardAkamai(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cdn_profile", "test") + r := CdnProfileResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.standardAkamai(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + resource.TestCheckResourceAttr(data.ResourceName, "sku", "Standard_Akamai"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccCdnProfile_standardMicrosoft(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cdn_profile", "test") + r := CdnProfileResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.standardMicrosoft(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + resource.TestCheckResourceAttr(data.ResourceName, "sku", "Standard_Microsoft"), + ), + }, + data.ImportStep(), + }) +} + +func (r CdnProfileResource) Exists(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ProfileID(state.ID) + if err != nil { + return nil, err + } + + resp, err := client.Cdn.ProfilesClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return utils.Bool(false), nil + } + return nil, fmt.Errorf("retrieving Cdn Profile %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + return utils.Bool(true), nil +} + +func (r CdnProfileResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_cdn_profile" "test" { + name = "acctestcdnprof%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard_Verizon" +} +`, 
data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r CdnProfileResource) requiresImport(data acceptance.TestData) string { + template := r.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_cdn_profile" "import" { + name = azurerm_cdn_profile.test.name + location = azurerm_cdn_profile.test.location + resource_group_name = azurerm_cdn_profile.test.resource_group_name + sku = azurerm_cdn_profile.test.sku +} +`, template) +} + +func (r CdnProfileResource) withTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_cdn_profile" "test" { + name = "acctestcdnprof%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard_Verizon" + + tags = { + environment = "Production" + cost_center = "MSFT" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r CdnProfileResource) withTagsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_cdn_profile" "test" { + name = "acctestcdnprof%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard_Verizon" + + tags = { + environment = "staging" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r CdnProfileResource) nonStandardCasing(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_cdn_profile" "test" { + name = "acctestcdnprof%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "standard_verizon" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r CdnProfileResource) standardAkamai(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_cdn_profile" "test" { + name = "acctestcdnprof%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard_Akamai" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r CdnProfileResource) standardMicrosoft(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_cdn_profile" "test" { + name = "acctestcdnprof%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard_Microsoft" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/cdn/deliveryruleactions/cache_key_query_string.go b/azurerm/internal/services/cdn/deliveryruleactions/cache_key_query_string.go index 2c8479559612..2693a924743a 100644 --- a/azurerm/internal/services/cdn/deliveryruleactions/cache_key_query_string.go +++ b/azurerm/internal/services/cdn/deliveryruleactions/cache_key_query_string.go @@ -47,7 +47,7 @@ func 
ExpandArmCdnEndpointActionCacheKeyQueryString(input []interface{}) (*[]cdn. if parameters := item["parameters"].(string); parameters == "" { if behavior := cacheKeyQueryStringAction.Parameters.QueryStringBehavior; behavior == cdn.Include || behavior == cdn.Exclude { - return nil, fmt.Errorf("Parameters can not be empty if the behavior is either Include or Exclude.") + return nil, fmt.Errorf("Parameters can not be empty if the behaviour is either Include or Exclude.") } } else { cacheKeyQueryStringAction.Parameters.QueryParameters = utils.String(parameters) diff --git a/azurerm/internal/services/cdn/deliveryruleconditions/cookies.go b/azurerm/internal/services/cdn/deliveryruleconditions/cookies.go index ca9f7e3f585b..85eeac388057 100644 --- a/azurerm/internal/services/cdn/deliveryruleconditions/cookies.go +++ b/azurerm/internal/services/cdn/deliveryruleconditions/cookies.go @@ -42,7 +42,7 @@ func Cookies() *schema.Resource { "match_values": { Type: schema.TypeSet, - Required: true, + Optional: true, MinItems: 1, Elem: &schema.Schema{ Type: schema.TypeString, diff --git a/azurerm/internal/services/cdn/deliveryruleconditions/post_arg.go b/azurerm/internal/services/cdn/deliveryruleconditions/post_arg.go index 8f0d90bed37e..0ceaf8c5385f 100644 --- a/azurerm/internal/services/cdn/deliveryruleconditions/post_arg.go +++ b/azurerm/internal/services/cdn/deliveryruleconditions/post_arg.go @@ -42,7 +42,7 @@ func PostArg() *schema.Resource { "match_values": { Type: schema.TypeSet, - Required: true, + Optional: true, MinItems: 1, Elem: &schema.Schema{ Type: schema.TypeString, diff --git a/azurerm/internal/services/cdn/deliveryruleconditions/query_string.go b/azurerm/internal/services/cdn/deliveryruleconditions/query_string.go index af6733a67157..227d22802ca1 100644 --- a/azurerm/internal/services/cdn/deliveryruleconditions/query_string.go +++ b/azurerm/internal/services/cdn/deliveryruleconditions/query_string.go @@ -36,7 +36,7 @@ func QueryString() *schema.Resource { "match_values": { Type: schema.TypeSet, - Required: true, + Optional: true, MinItems: 1, Elem: &schema.Schema{ Type: schema.TypeString, diff --git a/azurerm/internal/services/cdn/deliveryruleconditions/remote_address.go b/azurerm/internal/services/cdn/deliveryruleconditions/remote_address.go index 1bd77cc458f2..6daf75fa4ae0 100644 --- a/azurerm/internal/services/cdn/deliveryruleconditions/remote_address.go +++ b/azurerm/internal/services/cdn/deliveryruleconditions/remote_address.go @@ -30,7 +30,7 @@ func RemoteAddress() *schema.Resource { "match_values": { Type: schema.TypeSet, - Required: true, + Optional: true, MinItems: 1, Elem: &schema.Schema{ Type: schema.TypeString, diff --git a/azurerm/internal/services/cdn/deliveryruleconditions/request_body.go b/azurerm/internal/services/cdn/deliveryruleconditions/request_body.go index 49dfcfabb5ff..2378c9536d31 100644 --- a/azurerm/internal/services/cdn/deliveryruleconditions/request_body.go +++ b/azurerm/internal/services/cdn/deliveryruleconditions/request_body.go @@ -36,7 +36,7 @@ func RequestBody() *schema.Resource { "match_values": { Type: schema.TypeSet, - Required: true, + Optional: true, MinItems: 1, Elem: &schema.Schema{ Type: schema.TypeString, diff --git a/azurerm/internal/services/cdn/deliveryruleconditions/request_header.go b/azurerm/internal/services/cdn/deliveryruleconditions/request_header.go index aceff57471fc..f2fb7c978bdf 100644 --- a/azurerm/internal/services/cdn/deliveryruleconditions/request_header.go +++ b/azurerm/internal/services/cdn/deliveryruleconditions/request_header.go 
@@ -42,7 +42,7 @@ func RequestHeader() *schema.Resource { "match_values": { Type: schema.TypeSet, - Required: true, + Optional: true, MinItems: 1, Elem: &schema.Schema{ Type: schema.TypeString, diff --git a/azurerm/internal/services/cdn/deliveryruleconditions/request_uri.go b/azurerm/internal/services/cdn/deliveryruleconditions/request_uri.go index 2bcc17f7ad26..b41197100aa5 100644 --- a/azurerm/internal/services/cdn/deliveryruleconditions/request_uri.go +++ b/azurerm/internal/services/cdn/deliveryruleconditions/request_uri.go @@ -36,7 +36,7 @@ func RequestURI() *schema.Resource { "match_values": { Type: schema.TypeSet, - Required: true, + Optional: true, MinItems: 1, Elem: &schema.Schema{ Type: schema.TypeString, diff --git a/azurerm/internal/services/cdn/deliveryruleconditions/url_file_extension.go b/azurerm/internal/services/cdn/deliveryruleconditions/url_file_extension.go index 99e811f472e6..529ab116f93a 100644 --- a/azurerm/internal/services/cdn/deliveryruleconditions/url_file_extension.go +++ b/azurerm/internal/services/cdn/deliveryruleconditions/url_file_extension.go @@ -36,7 +36,7 @@ func URLFileExtension() *schema.Resource { "match_values": { Type: schema.TypeSet, - Required: true, + Optional: true, MinItems: 1, Elem: &schema.Schema{ Type: schema.TypeString, diff --git a/azurerm/internal/services/cdn/deliveryruleconditions/url_file_name.go b/azurerm/internal/services/cdn/deliveryruleconditions/url_file_name.go index ca4cd257b55f..d7786533afc2 100644 --- a/azurerm/internal/services/cdn/deliveryruleconditions/url_file_name.go +++ b/azurerm/internal/services/cdn/deliveryruleconditions/url_file_name.go @@ -36,7 +36,7 @@ func URLFileName() *schema.Resource { "match_values": { Type: schema.TypeSet, - Required: true, + Optional: true, MinItems: 1, Elem: &schema.Schema{ Type: schema.TypeString, diff --git a/azurerm/internal/services/cdn/deliveryruleconditions/url_path.go b/azurerm/internal/services/cdn/deliveryruleconditions/url_path.go index cfd1cab88f17..f3e465b26886 100644 --- a/azurerm/internal/services/cdn/deliveryruleconditions/url_path.go +++ b/azurerm/internal/services/cdn/deliveryruleconditions/url_path.go @@ -36,7 +36,7 @@ func URLPath() *schema.Resource { "match_values": { Type: schema.TypeSet, - Required: true, + Optional: true, MinItems: 1, Elem: &schema.Schema{ Type: schema.TypeString, diff --git a/azurerm/internal/services/cdn/migration/cdn_endpoint.go b/azurerm/internal/services/cdn/migration/cdn_endpoint.go index 34d8878139ef..a9b4c4d39119 100644 --- a/azurerm/internal/services/cdn/migration/cdn_endpoint.go +++ b/azurerm/internal/services/cdn/migration/cdn_endpoint.go @@ -1007,8 +1007,8 @@ func CdnEndpointV0ToV1(rawState map[string]interface{}, _ interface{}) (map[stri return rawState, err } - newId := parse.NewCdnEndpointID(parse.NewCdnProfileID(resourceGroup, profileName), name) - newIdStr := newId.ID(oldParsedId.SubscriptionID) + newId := parse.NewEndpointID(oldParsedId.SubscriptionID, resourceGroup, profileName, name) + newIdStr := newId.ID() log.Printf("[DEBUG] Updating ID from %q to %q", oldId, newIdStr) diff --git a/azurerm/internal/services/cdn/migration/cdn_profile.go b/azurerm/internal/services/cdn/migration/cdn_profile.go index 58d208968551..1d3f48dc04b5 100644 --- a/azurerm/internal/services/cdn/migration/cdn_profile.go +++ b/azurerm/internal/services/cdn/migration/cdn_profile.go @@ -71,8 +71,8 @@ func CdnProfileV0ToV1(rawState map[string]interface{}, _ interface{}) (map[strin return rawState, err } - newId := parse.NewCdnProfileID(resourceGroup, name) - 
newIdStr := newId.ID(oldParsedId.SubscriptionID) + newId := parse.NewProfileID(oldParsedId.SubscriptionID, resourceGroup, name) + newIdStr := newId.ID() log.Printf("[DEBUG] Updating ID from %q to %q", oldId, newIdStr) diff --git a/azurerm/internal/services/cdn/parse/cdn_endpoint.go b/azurerm/internal/services/cdn/parse/cdn_endpoint.go deleted file mode 100644 index 225066302df9..000000000000 --- a/azurerm/internal/services/cdn/parse/cdn_endpoint.go +++ /dev/null @@ -1,51 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type CdnEndpointId struct { - ResourceGroup string - ProfileName string - Name string -} - -func NewCdnEndpointID(id CdnProfileId, name string) CdnEndpointId { - return CdnEndpointId{ - ResourceGroup: id.ResourceGroup, - ProfileName: id.Name, - Name: name, - } -} - -func CdnEndpointID(input string) (*CdnEndpointId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse CDN Endpoint ID %q: %+v", input, err) - } - - endpoint := CdnEndpointId{ - ResourceGroup: id.ResourceGroup, - } - - if endpoint.ProfileName, err = id.PopSegment("profiles"); err != nil { - return nil, err - } - - if endpoint.Name, err = id.PopSegment("endpoints"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &endpoint, nil -} - -func (id CdnEndpointId) ID(subscriptionId string) string { - base := NewCdnProfileID(id.ResourceGroup, id.ProfileName).ID(subscriptionId) - return fmt.Sprintf("%s/endpoints/%s", base, id.Name) -} diff --git a/azurerm/internal/services/cdn/parse/cdn_endpoint_test.go b/azurerm/internal/services/cdn/parse/cdn_endpoint_test.go deleted file mode 100644 index 8147bcfdb57b..000000000000 --- a/azurerm/internal/services/cdn/parse/cdn_endpoint_test.go +++ /dev/null @@ -1,99 +0,0 @@ -package parse - -import ( - "testing" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" -) - -var _ resourceid.Formatter = CdnEndpointId{} - -func TestCdnEndpointIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - actual := NewCdnEndpointID(NewCdnProfileID("group1", "profile1"), "endpoint1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Cdn/profiles/profile1/endpoints/endpoint1" - if actual != expected { - t.Fatalf("Expected %q but got %q", expected, actual) - } -} - -func TestCdnEndpointId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *CdnEndpointId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Profiles Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Cdn/profiles/", - Expected: nil, - }, - { - Name: "CDN Profile ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Cdn/profiles/profName1", - Expected: nil, - }, - { - Name: 
"Missing Endpoints Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Cdn/profiles/profName1/endpoints/", - Expected: nil, - }, - { - Name: "CDN Endpoint ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Cdn/profiles/profName1/endpoints/Endpoint1", - Expected: &CdnEndpointId{ - ResourceGroup: "resGroup1", - ProfileName: "profName1", - Name: "Endpoint1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Cdn/profiles/profName1/Endpoints/Endpoint1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := CdnEndpointID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - if actual.ProfileName != v.Expected.ProfileName { - t.Fatalf("Expected %q but got %q for ProfileName", v.Expected.ProfileName, actual.ProfileName) - } - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/cdn/parse/cdn_profile.go b/azurerm/internal/services/cdn/parse/cdn_profile.go deleted file mode 100644 index 479a57e1e9cf..000000000000 --- a/azurerm/internal/services/cdn/parse/cdn_profile.go +++ /dev/null @@ -1,45 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type CdnProfileId struct { - ResourceGroup string - Name string -} - -func NewCdnProfileID(resourceGroup, name string) CdnProfileId { - return CdnProfileId{ - ResourceGroup: resourceGroup, - Name: name, - } -} - -func CdnProfileID(input string) (*CdnProfileId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse CDN Profile ID %q: %+v", input, err) - } - - profile := CdnProfileId{ - ResourceGroup: id.ResourceGroup, - } - - if profile.Name, err = id.PopSegment("profiles"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &profile, nil -} - -func (id CdnProfileId) ID(subscriptionId string) string { - return fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Cdn/profiles/%s", - subscriptionId, id.ResourceGroup, id.Name) -} diff --git a/azurerm/internal/services/cdn/parse/cdn_profile_test.go b/azurerm/internal/services/cdn/parse/cdn_profile_test.go deleted file mode 100644 index 2112828f9456..000000000000 --- a/azurerm/internal/services/cdn/parse/cdn_profile_test.go +++ /dev/null @@ -1,86 +0,0 @@ -package parse - -import ( - "testing" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" -) - -var _ resourceid.Formatter = CdnProfileId{} - -func TestCdnProfileIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - actual := NewCdnProfileID("group1", "profile1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Cdn/profiles/profile1" - if actual != expected { - t.Fatalf("Expected %q but got %q", expected, actual) - } -} - -func TestCdnProfileId(t 
*testing.T) { - testData := []struct { - Name string - Input string - Expected *CdnProfileId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Profiles Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Cdn/profiles/", - Expected: nil, - }, - { - Name: "CDN Profile ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Cdn/profiles/Profile1", - Expected: &CdnProfileId{ - Name: "Profile1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Cdn/Profiles/", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := CdnProfileID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/cdn/parse/endpoint.go b/azurerm/internal/services/cdn/parse/endpoint.go new file mode 100644 index 000000000000..fdc861d2f8d0 --- /dev/null +++ b/azurerm/internal/services/cdn/parse/endpoint.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type EndpointId struct { + SubscriptionId string + ResourceGroup string + ProfileName string + Name string +} + +func NewEndpointID(subscriptionId, resourceGroup, profileName, name string) EndpointId { + return EndpointId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ProfileName: profileName, + Name: name, + } +} + +func (id EndpointId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Profile Name %q", id.ProfileName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Endpoint", segmentsStr) +} + +func (id EndpointId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Cdn/profiles/%s/endpoints/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ProfileName, id.Name) +} + +// EndpointID parses a Endpoint ID into an EndpointId struct +func EndpointID(input string) (*EndpointId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := EndpointId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, 
fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ProfileName, err = id.PopSegment("profiles"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("endpoints"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/cdn/parse/endpoint_test.go b/azurerm/internal/services/cdn/parse/endpoint_test.go new file mode 100644 index 000000000000..efd289cd47fc --- /dev/null +++ b/azurerm/internal/services/cdn/parse/endpoint_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = EndpointId{} + +func TestEndpointIDFormatter(t *testing.T) { + actual := NewEndpointID("12345678-1234-9876-4563-123456789012", "resGroup1", "profile1", "endpoint1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cdn/profiles/profile1/endpoints/endpoint1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestEndpointID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *EndpointId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ProfileName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cdn/", + Error: true, + }, + + { + // missing value for ProfileName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cdn/profiles/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cdn/profiles/profile1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cdn/profiles/profile1/endpoints/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cdn/profiles/profile1/endpoints/endpoint1", + Expected: &EndpointId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + ProfileName: "profile1", + Name: "endpoint1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.CDN/PROFILES/PROFILE1/ENDPOINTS/ENDPOINT1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := EndpointID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", 
v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ProfileName != v.Expected.ProfileName { + t.Fatalf("Expected %q but got %q for ProfileName", v.Expected.ProfileName, actual.ProfileName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/cdn/parse/profile.go b/azurerm/internal/services/cdn/parse/profile.go new file mode 100644 index 000000000000..cb8a688dbc1c --- /dev/null +++ b/azurerm/internal/services/cdn/parse/profile.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ProfileId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewProfileID(subscriptionId, resourceGroup, name string) ProfileId { + return ProfileId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id ProfileId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Profile", segmentsStr) +} + +func (id ProfileId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Cdn/profiles/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// ProfileID parses a Profile ID into an ProfileId struct +func ProfileID(input string) (*ProfileId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ProfileId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("profiles"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/cdn/parse/profile_test.go b/azurerm/internal/services/cdn/parse/profile_test.go new file mode 100644 index 000000000000..2348267c89a1 --- /dev/null +++ b/azurerm/internal/services/cdn/parse/profile_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ProfileId{} + +func TestProfileIDFormatter(t *testing.T) { + actual := NewProfileID("12345678-1234-9876-4563-123456789012", "resGroup1", "profile1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cdn/profiles/profile1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestProfileID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ProfileId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing 
SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cdn/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cdn/profiles/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cdn/profiles/profile1", + Expected: &ProfileId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "profile1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.CDN/PROFILES/PROFILE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ProfileID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/cdn/registration.go b/azurerm/internal/services/cdn/registration.go index 109e642f731f..39d3b50584ff 100644 --- a/azurerm/internal/services/cdn/registration.go +++ b/azurerm/internal/services/cdn/registration.go @@ -21,14 +21,14 @@ func (r Registration) WebsiteCategories() []string { // SupportedDataSources returns the supported Data Sources supported by this Service func (r Registration) SupportedDataSources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_cdn_profile": dataSourceArmCdnProfile(), + "azurerm_cdn_profile": dataSourceCdnProfile(), } } // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_cdn_endpoint": resourceArmCdnEndpoint(), - "azurerm_cdn_profile": resourceArmCdnProfile(), + "azurerm_cdn_endpoint": resourceCdnEndpoint(), + "azurerm_cdn_profile": resourceCdnProfile(), } } diff --git a/azurerm/internal/services/cdn/resourceids.go b/azurerm/internal/services/cdn/resourceids.go new file mode 100644 index 000000000000..f6f8b9dd8f54 --- /dev/null +++ b/azurerm/internal/services/cdn/resourceids.go @@ -0,0 +1,4 @@ +package cdn + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Endpoint -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cdn/profiles/profile1/endpoints/endpoint1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Profile 
-id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cdn/profiles/profile1 diff --git a/azurerm/internal/services/cdn/tests/cdn_endpoint_resource_test.go b/azurerm/internal/services/cdn/tests/cdn_endpoint_resource_test.go deleted file mode 100644 index d33abfdcdaba..000000000000 --- a/azurerm/internal/services/cdn/tests/cdn_endpoint_resource_test.go +++ /dev/null @@ -1,1232 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cdn/parse" -) - -func TestAccAzureRMCdnEndpoint_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cdn_endpoint", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCdnEndpointDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCdnEndpoint_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnEndpointExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMCdnEndpoint_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cdn_endpoint", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCdnEndpointDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCdnEndpoint_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnEndpointExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMCdnEndpoint_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_cdn_endpoint"), - }, - }, - }) -} - -func TestAccAzureRMCdnEndpoint_disappears(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cdn_endpoint", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCdnEndpointDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCdnEndpoint_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnEndpointExists(data.ResourceName), - testCheckAzureRMCdnEndpointDisappears(data.ResourceName), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func TestAccAzureRMCdnEndpoint_updateHostHeader(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cdn_endpoint", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCdnEndpointDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCdnEndpoint_hostHeader(data, "www.contoso.com"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnEndpointExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "origin_host_header", "www.contoso.com"), - ), - }, - { - Config: testAccAzureRMCdnEndpoint_hostHeader(data, ""), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnEndpointExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, 
"origin_host_header", ""), - ), - }, - { - Config: testAccAzureRMCdnEndpoint_hostHeader(data, "www.example2.com"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnEndpointExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "origin_host_header", "www.example2.com"), - ), - }, - }, - }) -} - -func TestAccAzureRMCdnEndpoint_withTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cdn_endpoint", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCdnEndpointDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCdnEndpoint_withTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnEndpointExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.environment", "Production"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.cost_center", "MSFT"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCdnEndpoint_withTagsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnEndpointExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.environment", "staging"), - ), - }, data.ImportStep(), - }, - }) -} - -func TestAccAzureRMCdnEndpoint_withoutCompression(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cdn_endpoint", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCdnEndpointDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCdnEndpoint_withoutCompression(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnEndpointExists(data.ResourceName), - resource.TestCheckNoResourceAttr(data.ResourceName, "is_compression_enabled"), - ), - }, data.ImportStep(), - }, - }) -} - -func TestAccAzureRMCdnEndpoint_optimized(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cdn_endpoint", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCdnEndpointDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCdnEndpoint_optimized(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnEndpointExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "optimization_type", "GeneralWebDelivery"), - ), - }, - }, - }) -} - -func TestAccAzureRMCdnEndpoint_withGeoFilters(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cdn_endpoint", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCdnEndpointDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCdnEndpoint_geoFilters(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnEndpointExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "geo_filter.#", "2"), - ), - }, - }, - }) -} - -func TestAccAzureRMCdnEndpoint_fullFields(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cdn_endpoint", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - 
Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCdnEndpointDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCdnEndpoint_fullFields(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnEndpointExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "is_http_allowed", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "is_https_allowed", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "origin_path", "/origin-path"), - resource.TestCheckResourceAttr(data.ResourceName, "probe_path", "/origin-path/probe"), - resource.TestCheckResourceAttr(data.ResourceName, "origin_host_header", "www.contoso.com"), - resource.TestCheckResourceAttr(data.ResourceName, "optimization_type", "GeneralWebDelivery"), - resource.TestCheckResourceAttr(data.ResourceName, "querystring_caching_behaviour", "UseQueryString"), - resource.TestCheckResourceAttr(data.ResourceName, "content_types_to_compress.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "is_compression_enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "geo_filter.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.environment", "Production"), - ), - }, - }, - }) -} - -func TestAccAzureRMCdnEndpoint_isHttpAndHttpsAllowedUpdate(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cdn_endpoint", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCdnEndpointDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCdnEndpoint_isHttpAndHttpsAllowed(data, "true", "false"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnEndpointExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "is_http_allowed", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "is_https_allowed", "false"), - ), - }, - { - Config: testAccAzureRMCdnEndpoint_isHttpAndHttpsAllowed(data, "false", "true"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnEndpointExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "is_http_allowed", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "is_https_allowed", "true"), - ), - }, - }, - }) -} - -func TestAccAzureRMCdnEndpoint_globalDeliveryRule(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cdn_endpoint", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCdnEndpointDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCdnEndpoint_globalDeliveryRule(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnEndpointExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "global_delivery_rule.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "global_delivery_rule.0.cache_expiration_action.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "global_delivery_rule.0.cache_expiration_action.0.behavior", "Override"), - resource.TestCheckResourceAttr(data.ResourceName, "global_delivery_rule.0.cache_expiration_action.0.duration", "5.04:44:23"), - ), - }, - { - Config: testAccAzureRMCdnEndpoint_globalDeliveryRuleUpdate(data), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMCdnEndpointExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "global_delivery_rule.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "global_delivery_rule.0.cache_expiration_action.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "global_delivery_rule.0.cache_expiration_action.0.behavior", "SetIfMissing"), - resource.TestCheckResourceAttr(data.ResourceName, "global_delivery_rule.0.cache_expiration_action.0.duration", "12.04:11:22"), - resource.TestCheckResourceAttr(data.ResourceName, "global_delivery_rule.0.modify_response_header_action.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "global_delivery_rule.0.modify_response_header_action.0.action", "Overwrite"), - resource.TestCheckResourceAttr(data.ResourceName, "global_delivery_rule.0.modify_response_header_action.0.name", "Content-Type"), - resource.TestCheckResourceAttr(data.ResourceName, "global_delivery_rule.0.modify_response_header_action.0.value", "application/json"), - ), - }, - { - Config: testAccAzureRMCdnEndpoint_globalDeliveryRuleRemove(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnEndpointExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "global_delivery_rule.#", "0"), - ), - }, - }, - }) -} - -func TestAccAzureRMCdnEndpoint_deliveryRule(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cdn_endpoint", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCdnEndpointDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCdnEndpoint_deliveryRule(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnEndpointExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.0.name", "http2https"), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.0.order", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.0.request_scheme_condition.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.0.request_scheme_condition.0.match_values.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.0.url_redirect_action.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.0.url_redirect_action.0.redirect_type", "Found"), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.0.url_redirect_action.0.protocol", "Https"), - ), - }, - { - Config: testAccAzureRMCdnEndpoint_deliveryRuleUpdate1(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnEndpointExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.0.name", "http2https"), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.0.order", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.0.request_scheme_condition.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.0.request_scheme_condition.0.negate_condition", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.0.request_scheme_condition.0.match_values.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.0.url_redirect_action.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, 
"delivery_rule.0.url_redirect_action.0.redirect_type", "Found"), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.0.url_redirect_action.0.protocol", "Https"), - ), - }, - { - Config: testAccAzureRMCdnEndpoint_deliveryRuleUpdate2(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnEndpointExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.0.name", "http2https"), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.0.order", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.0.request_scheme_condition.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.0.request_scheme_condition.0.negate_condition", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.0.request_scheme_condition.0.match_values.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.0.url_redirect_action.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.0.url_redirect_action.0.redirect_type", "Found"), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.0.url_redirect_action.0.protocol", "Https"), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.1.name", "test"), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.1.order", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.1.device_condition.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.1.device_condition.0.match_values.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.1.modify_response_header_action.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.1.modify_response_header_action.0.action", "Delete"), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.1.modify_response_header_action.0.name", "Content-Language"), - ), - }, - { - Config: testAccAzureRMCdnEndpoint_deliveryRuleRemove(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnEndpointExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "delivery_rule.#", "0"), - ), - }, - }, - }) -} - -func TestAccAzureRMCdnEndpoint_dnsAlias(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cdn_endpoint", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCdnEndpointDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCdnEndpoint_dnsAlias(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnEndpointExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMCdnEndpointExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Cdn.EndpointsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.CdnEndpointID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.ProfileName, id.Name) - if err != nil { - return fmt.Errorf("Bad: Get on cdnEndpointsClient: %+v", err) - } - - if 
resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: CDN Endpoint %q (resource group: %q) does not exist", id.Name, id.ResourceGroup) - } - - return nil - } -} - -func testCheckAzureRMCdnEndpointDisappears(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Cdn.EndpointsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - profileName := rs.Primary.Attributes["profile_name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for cdn endpoint: %s", name) - } - - future, err := conn.Delete(ctx, resourceGroup, profileName, name) - if err != nil { - return fmt.Errorf("Bad: Delete on cdnEndpointsClient: %+v", err) - } - - if err = future.WaitForCompletionRef(ctx, conn.Client); err != nil { - return fmt.Errorf("Bad: Delete on cdnEndpointsClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMCdnEndpointDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Cdn.EndpointsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_cdn_endpoint" { - continue - } - - id, err := parse.CdnEndpointID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.ProfileName, id.Name) - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("CDN Endpoint still exists:\n%#v", resp.EndpointProperties) - } - } - - return nil -} - -func testAccAzureRMCdnEndpoint_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_cdn_profile" "test" { - name = "acctestcdnprof%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard_Verizon" -} - -resource "azurerm_cdn_endpoint" "test" { - name = "acctestcdnend%d" - profile_name = azurerm_cdn_profile.test.name - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - origin { - name = "acceptanceTestCdnOrigin1" - host_name = "www.contoso.com" - https_port = 443 - http_port = 80 - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMCdnEndpoint_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMCdnEndpoint_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_cdn_endpoint" "import" { - name = azurerm_cdn_endpoint.test.name - profile_name = azurerm_cdn_endpoint.test.profile_name - location = azurerm_cdn_endpoint.test.location - resource_group_name = azurerm_cdn_endpoint.test.resource_group_name - - origin { - name = "acceptanceTestCdnOrigin1" - host_name = "www.contoso.com" - https_port = 443 - http_port = 80 - } -} -`, template) -} - -func testAccAzureRMCdnEndpoint_hostHeader(data acceptance.TestData, domain string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource 
"azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_cdn_profile" "test" { - name = "acctestcdnprof%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard_Verizon" -} - -resource "azurerm_cdn_endpoint" "test" { - name = "acctestcdnend%d" - profile_name = azurerm_cdn_profile.test.name - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - origin_host_header = "%s" - - origin { - name = "acceptanceTestCdnOrigin2" - host_name = "www.contoso.com" - https_port = 443 - http_port = 80 - } - - tags = { - environment = "Production" - cost_center = "MSFT" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, domain) -} - -func testAccAzureRMCdnEndpoint_withTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_cdn_profile" "test" { - name = "acctestcdnprof%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard_Verizon" -} - -resource "azurerm_cdn_endpoint" "test" { - name = "acctestcdnend%d" - profile_name = azurerm_cdn_profile.test.name - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - origin { - name = "acceptanceTestCdnOrigin2" - host_name = "www.contoso.com" - https_port = 443 - http_port = 80 - } - - tags = { - environment = "Production" - cost_center = "MSFT" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMCdnEndpoint_withTagsUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_cdn_profile" "test" { - name = "acctestcdnprof%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard_Verizon" -} - -resource "azurerm_cdn_endpoint" "test" { - name = "acctestcdnend%d" - profile_name = azurerm_cdn_profile.test.name - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - origin { - name = "acceptanceTestCdnOrigin2" - host_name = "www.contoso.com" - https_port = 443 - http_port = 80 - } - - tags = { - environment = "staging" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMCdnEndpoint_geoFilters(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_cdn_profile" "test" { - name = "acctestcdnprof%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard_Verizon" -} - -resource "azurerm_cdn_endpoint" "test" { - name = "acctestcdnend%d" - profile_name = azurerm_cdn_profile.test.name - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - is_http_allowed = false - is_https_allowed = true - origin_path = "/origin-path" - probe_path = "/origin-path/probe" - - origin { - name = "acceptanceTestCdnOrigin1" - host_name 
= "www.contoso.com" - https_port = 443 - http_port = 80 - } - - geo_filter { - relative_path = "/some-example-endpoint" - action = "Allow" - country_codes = ["GB"] - } - - geo_filter { - relative_path = "/some-other-endpoint" - action = "Block" - country_codes = ["US"] - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMCdnEndpoint_optimized(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_cdn_profile" "test" { - name = "acctestcdnprof%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard_Verizon" -} - -resource "azurerm_cdn_endpoint" "test" { - name = "acctestcdnend%d" - profile_name = azurerm_cdn_profile.test.name - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - is_http_allowed = false - is_https_allowed = true - optimization_type = "GeneralWebDelivery" - - origin { - name = "acceptanceTestCdnOrigin1" - host_name = "www.contoso.com" - https_port = 443 - http_port = 80 - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMCdnEndpoint_withoutCompression(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_cdn_profile" "test" { - name = "acctestcdnprof%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard_Verizon" -} - -resource "azurerm_cdn_endpoint" "test" { - name = "acctestcdnend%d" - profile_name = azurerm_cdn_profile.test.name - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - is_http_allowed = false - is_https_allowed = true - optimization_type = "GeneralWebDelivery" - - origin { - name = "acceptanceTestCdnOrigin1" - host_name = "www.contoso.com" - https_port = 443 - http_port = 80 - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMCdnEndpoint_fullFields(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_cdn_profile" "test" { - name = "acctestcdnprof%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard_Verizon" -} - -resource "azurerm_cdn_endpoint" "test" { - name = "acctestcdnend%d" - profile_name = azurerm_cdn_profile.test.name - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - is_http_allowed = true - is_https_allowed = true - content_types_to_compress = ["text/html"] - is_compression_enabled = true - querystring_caching_behaviour = "UseQueryString" - origin_host_header = "www.contoso.com" - optimization_type = "GeneralWebDelivery" - origin_path = "/origin-path" - probe_path = "/origin-path/probe" - - origin { - name = "acceptanceTestCdnOrigin1" - host_name = "www.contoso.com" - https_port = 443 - http_port = 80 - } - - geo_filter { - relative_path = "/some-example-endpoint" - action = "Allow" - country_codes = ["GB"] - } - - 
tags = { - environment = "Production" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMCdnEndpoint_isHttpAndHttpsAllowed(data acceptance.TestData, isHttpAllowed string, isHttpsAllowed string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_cdn_profile" "test" { - name = "acctestcdnprof%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard_Verizon" -} - -resource "azurerm_cdn_endpoint" "test" { - name = "acctestcdnend%d" - profile_name = azurerm_cdn_profile.test.name - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - is_http_allowed = %s - is_https_allowed = %s - - origin { - name = "acceptanceTestCdnOrigin1" - host_name = "www.contoso.com" - https_port = 443 - http_port = 80 - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, isHttpAllowed, isHttpsAllowed) -} - -func testAccAzureRMCdnEndpoint_globalDeliveryRule(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_cdn_profile" "test" { - name = "acctestcdnprof%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard_Microsoft" -} - -resource "azurerm_cdn_endpoint" "test" { - name = "acctestcdnend%d" - profile_name = azurerm_cdn_profile.test.name - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - origin_host_header = "www.contoso.com" - - origin { - name = "acceptanceTestCdnOrigin1" - host_name = "www.contoso.com" - https_port = 443 - http_port = 80 - } - - global_delivery_rule { - cache_expiration_action { - behavior = "Override" - duration = "5.04:44:23" - } - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMCdnEndpoint_globalDeliveryRuleUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_cdn_profile" "test" { - name = "acctestcdnprof%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard_Microsoft" -} - -resource "azurerm_cdn_endpoint" "test" { - name = "acctestcdnend%d" - profile_name = azurerm_cdn_profile.test.name - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - origin_host_header = "www.contoso.com" - - origin { - name = "acceptanceTestCdnOrigin1" - host_name = "www.contoso.com" - https_port = 443 - http_port = 80 - } - - global_delivery_rule { - cache_expiration_action { - behavior = "SetIfMissing" - duration = "12.04:11:22" - } - - modify_response_header_action { - action = "Overwrite" - name = "Content-Type" - value = "application/json" - } - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMCdnEndpoint_globalDeliveryRuleRemove(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource 
"azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_cdn_profile" "test" { - name = "acctestcdnprof%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard_Microsoft" -} - -resource "azurerm_cdn_endpoint" "test" { - name = "acctestcdnend%d" - profile_name = azurerm_cdn_profile.test.name - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - origin_host_header = "www.contoso.com" - - origin { - name = "acceptanceTestCdnOrigin1" - host_name = "www.contoso.com" - https_port = 443 - http_port = 80 - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMCdnEndpoint_deliveryRule(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_cdn_profile" "test" { - name = "acctestcdnprof%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard_Microsoft" -} - -resource "azurerm_cdn_endpoint" "test" { - name = "acctestcdnend%d" - profile_name = azurerm_cdn_profile.test.name - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - origin_host_header = "www.contoso.com" - - origin { - name = "acceptanceTestCdnOrigin1" - host_name = "www.contoso.com" - https_port = 443 - http_port = 80 - } - - delivery_rule { - name = "http2https" - order = 1 - - request_scheme_condition { - match_values = ["HTTP"] - } - - url_redirect_action { - redirect_type = "Found" - protocol = "Https" - } - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMCdnEndpoint_deliveryRuleUpdate1(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_cdn_profile" "test" { - name = "acctestcdnprof%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard_Microsoft" -} - -resource "azurerm_cdn_endpoint" "test" { - name = "acctestcdnend%d" - profile_name = azurerm_cdn_profile.test.name - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - origin_host_header = "www.contoso.com" - - origin { - name = "acceptanceTestCdnOrigin1" - host_name = "www.contoso.com" - https_port = 443 - http_port = 80 - } - - delivery_rule { - name = "http2https" - order = 1 - - request_scheme_condition { - negate_condition = true - match_values = ["HTTPS"] - } - - url_redirect_action { - redirect_type = "Found" - protocol = "Https" - } - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMCdnEndpoint_deliveryRuleUpdate2(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_cdn_profile" "test" { - name = "acctestcdnprof%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard_Microsoft" -} - -resource "azurerm_cdn_endpoint" "test" { - 
name = "acctestcdnend%d" - profile_name = azurerm_cdn_profile.test.name - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - origin_host_header = "www.contoso.com" - - origin { - name = "acceptanceTestCdnOrigin1" - host_name = "www.contoso.com" - https_port = 443 - http_port = 80 - } - - delivery_rule { - name = "http2https" - order = 1 - - request_scheme_condition { - negate_condition = true - match_values = ["HTTPS"] - } - - url_redirect_action { - redirect_type = "Found" - protocol = "Https" - } - } - - delivery_rule { - name = "test" - order = 2 - - device_condition { - match_values = ["Mobile"] - } - - modify_response_header_action { - action = "Delete" - name = "Content-Language" - } - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMCdnEndpoint_deliveryRuleRemove(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_cdn_profile" "test" { - name = "acctestcdnprof%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard_Microsoft" -} - -resource "azurerm_cdn_endpoint" "test" { - name = "acctestcdnend%d" - profile_name = azurerm_cdn_profile.test.name - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - origin_host_header = "www.contoso.com" - - origin { - name = "acceptanceTestCdnOrigin1" - host_name = "www.contoso.com" - https_port = 443 - http_port = 80 - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMCdnEndpoint_dnsAlias(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestcdnep%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_cdn_profile" "test" { - name = "acctestcdnep%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard_Verizon" -} - -resource "azurerm_cdn_endpoint" "test" { - name = "acctestcdnep%d" - profile_name = azurerm_cdn_profile.test.name - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - origin { - name = "acceptanceTestCdnOrigin1" - host_name = "www.contoso.com" - https_port = 443 - http_port = 80 - } -} - -resource "azurerm_dns_a_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - target_resource_id = azurerm_cdn_endpoint.test.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/cdn/tests/cdn_profile_data_source_test.go b/azurerm/internal/services/cdn/tests/cdn_profile_data_source_test.go deleted file mode 100644 index cfc2a67b2479..000000000000 --- a/azurerm/internal/services/cdn/tests/cdn_profile_data_source_test.go +++ /dev/null @@ -1,103 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMCdnProfile_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_cdn_profile", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCdnProfileDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMCdnProfile_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnProfileExists("data.azurerm_cdn_profile.test"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMCdnProfile_withTags(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_cdn_profile", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCdnProfileDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMCdnProfile_withTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnProfileExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.environment", "Production"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.cost_center", "MSFT"), - ), - }, - }, - }) -} - -func testAccDataSourceAzureRMCdnProfile_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_cdn_profile" "test" { - name = "acctestcdnprof%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard_Verizon" -} - -data "azurerm_cdn_profile" "test" { - name = azurerm_cdn_profile.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccDataSourceAzureRMCdnProfile_withTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_cdn_profile" "test" { - name = "acctestcdnprof%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard_Verizon" - - tags = { - environment = "Production" - cost_center = "MSFT" - } -} - -data "azurerm_cdn_profile" "test" { - name = azurerm_cdn_profile.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/cdn/tests/cdn_profile_resource_test.go b/azurerm/internal/services/cdn/tests/cdn_profile_resource_test.go deleted file mode 100644 index 953d00eea4f7..000000000000 --- a/azurerm/internal/services/cdn/tests/cdn_profile_resource_test.go +++ /dev/null @@ -1,373 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cdn/parse" -) - -func TestAccAzureRMCdnProfile_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cdn_profile", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCdnProfileDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCdnProfile_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnProfileExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMCdnProfile_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cdn_profile", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCdnProfileDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCdnProfile_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnProfileExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMCdnProfile_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_cdn_profile"), - }, - }, - }) -} - -func TestAccAzureRMCdnProfile_withTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cdn_profile", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCdnProfileDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCdnProfile_withTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnProfileExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.environment", "Production"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.cost_center", "MSFT"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCdnProfile_withTagsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnProfileExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.environment", "staging"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMCdnProfile_NonStandardCasing(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cdn_profile", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCdnProfileDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCdnProfileNonStandardCasing(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnProfileExists("azurerm_cdn_profile.test"), - ), - }, - { - Config: testAccAzureRMCdnProfileNonStandardCasing(data), - PlanOnly: true, - ExpectNonEmptyPlan: false, - }, - }, - }) -} - -func TestAccAzureRMCdnProfile_basicToStandardAkamai(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cdn_profile", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCdnProfileDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCdnProfile_basic(data), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMCdnProfileExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku", "Standard_Verizon"), - ), - }, - { - Config: testAccAzureRMCdnProfile_standardAkamai(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnProfileExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku", "Standard_Akamai"), - ), - }, - }, - }) -} - -func TestAccAzureRMCdnProfile_standardAkamai(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cdn_profile", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCdnProfileDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCdnProfile_standardAkamai(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnProfileExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku", "Standard_Akamai"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMCdnProfile_standardMicrosoft(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cdn_profile", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCdnProfileDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCdnProfile_standardMicrosoft(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCdnProfileExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku", "Standard_Microsoft"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMCdnProfileExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Cdn.ProfilesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.CdnProfileID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.Name) - if err != nil { - return fmt.Errorf("Bad: Get on cdnProfilesClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: CDN Profile %q (resource group: %q) does not exist", id.Name, id.ResourceGroup) - } - - return nil - } -} - -func testCheckAzureRMCdnProfileDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Cdn.ProfilesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_cdn_profile" { - continue - } - - id, err := parse.CdnProfileID(rs.Primary.ID) - if err != nil { - return err - } - resp, err := conn.Get(ctx, id.ResourceGroup, id.Name) - - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("CDN Profile still exists:\n%#v", resp.ProfileProperties) - } - } - - return nil -} - -func testAccAzureRMCdnProfile_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_cdn_profile" "test" { - name = "acctestcdnprof%d" - location = azurerm_resource_group.test.location - 
resource_group_name = azurerm_resource_group.test.name - sku = "Standard_Verizon" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMCdnProfile_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMCdnProfile_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_cdn_profile" "import" { - name = azurerm_cdn_profile.test.name - location = azurerm_cdn_profile.test.location - resource_group_name = azurerm_cdn_profile.test.resource_group_name - sku = azurerm_cdn_profile.test.sku -} -`, template) -} - -func testAccAzureRMCdnProfile_withTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_cdn_profile" "test" { - name = "acctestcdnprof%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard_Verizon" - - tags = { - environment = "Production" - cost_center = "MSFT" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMCdnProfile_withTagsUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_cdn_profile" "test" { - name = "acctestcdnprof%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard_Verizon" - - tags = { - environment = "staging" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMCdnProfileNonStandardCasing(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_cdn_profile" "test" { - name = "acctestcdnprof%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "standard_verizon" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMCdnProfile_standardAkamai(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_cdn_profile" "test" { - name = "acctestcdnprof%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard_Akamai" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMCdnProfile_standardMicrosoft(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_cdn_profile" "test" { - name = "acctestcdnprof%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard_Microsoft" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/cdn/validate/endpoint_id.go b/azurerm/internal/services/cdn/validate/endpoint_id.go new file mode 100644 index 000000000000..21759709a316 --- /dev/null +++ b/azurerm/internal/services/cdn/validate/endpoint_id.go @@ -0,0 +1,23 @@ +package validate + +// 
NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cdn/parse" +) + +func EndpointID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.EndpointID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/cdn/validate/endpoint_id_test.go b/azurerm/internal/services/cdn/validate/endpoint_id_test.go new file mode 100644 index 000000000000..63e017b1e804 --- /dev/null +++ b/azurerm/internal/services/cdn/validate/endpoint_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestEndpointID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ProfileName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cdn/", + Valid: false, + }, + + { + // missing value for ProfileName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cdn/profiles/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cdn/profiles/profile1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cdn/profiles/profile1/endpoints/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cdn/profiles/profile1/endpoints/endpoint1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.CDN/PROFILES/PROFILE1/ENDPOINTS/ENDPOINT1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := EndpointID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/cdn/validate/profile_id.go b/azurerm/internal/services/cdn/validate/profile_id.go new file mode 100644 index 000000000000..73d2dfe34424 --- /dev/null +++ b/azurerm/internal/services/cdn/validate/profile_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cdn/parse" +) + +func ProfileID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + 
} + + if _, err := parse.ProfileID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/cdn/validate/profile_id_test.go b/azurerm/internal/services/cdn/validate/profile_id_test.go new file mode 100644 index 000000000000..45dde7f24afd --- /dev/null +++ b/azurerm/internal/services/cdn/validate/profile_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestProfileID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cdn/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cdn/profiles/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cdn/profiles/profile1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.CDN/PROFILES/PROFILE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ProfileID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/cognitive/cognitive_account_data_source.go b/azurerm/internal/services/cognitive/cognitive_account_data_source.go new file mode 100644 index 000000000000..5cb6744e7098 --- /dev/null +++ b/azurerm/internal/services/cognitive/cognitive_account_data_source.go @@ -0,0 +1,116 @@ +package cognitive + +import ( + "fmt" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func dataSourceCognitiveAccount() *schema.Resource { + return &schema.Resource{ + Read: dataSourceCognitiveAccountRead, + + Timeouts: &schema.ResourceTimeout{ + Read: schema.DefaultTimeout(5 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + }, + + "resource_group_name": azure.SchemaResourceGroupNameForDataSource(), + + "location": azure.SchemaLocationForDataSource(), + + "kind": { + Type: schema.TypeString, + Computed: true, + }, + + "sku_name": { + Type: schema.TypeString, + Computed: true, + }, + + "qna_runtime_endpoint": { + Type: schema.TypeString, + Computed: true, + }, + + "endpoint": { + Type: schema.TypeString, + 
Computed: true,
+      },
+
+      "primary_access_key": {
+        Type:      schema.TypeString,
+        Computed:  true,
+        Sensitive: true,
+      },
+
+      "secondary_access_key": {
+        Type:      schema.TypeString,
+        Computed:  true,
+        Sensitive: true,
+      },
+
+      "tags": tags.SchemaDataSource(),
+    },
+  }
+}
+
+func dataSourceCognitiveAccountRead(d *schema.ResourceData, meta interface{}) error {
+  client := meta.(*clients.Client).Cognitive.AccountsClient
+  ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d)
+  defer cancel()
+
+  name := d.Get("name").(string)
+  resourceGroup := d.Get("resource_group_name").(string)
+
+  resp, err := client.GetProperties(ctx, resourceGroup, name)
+  if err != nil {
+    if utils.ResponseWasNotFound(resp.Response) {
+      return fmt.Errorf("Error: Cognitive Services Account %q (Resource Group %q) was not found", name, resourceGroup)
+    }
+    return fmt.Errorf("Error reading the state of AzureRM Cognitive Services Account %q: %+v", name, err)
+  }
+
+  keys, err := client.ListKeys(ctx, resourceGroup, name)
+  if err != nil {
+    if utils.ResponseWasNotFound(resp.Response) {
+      return fmt.Errorf("Error: Keys for Cognitive Services Account %q (Resource Group %q) were not found", name, resourceGroup)
+    }
+    return fmt.Errorf("Error obtaining keys for Cognitive Services Account %q in Resource Group %q: %v", name, resourceGroup, err)
+  }
+
+  d.SetId(*resp.ID)
+
+  if location := resp.Location; location != nil {
+    d.Set("location", azure.NormalizeLocation(*location))
+  }
+  d.Set("kind", resp.Kind)
+
+  if sku := resp.Sku; sku != nil {
+    d.Set("sku_name", sku.Name)
+  }
+
+  if props := resp.Properties; props != nil {
+    if apiProps := props.APIProperties; apiProps != nil {
+      d.Set("qna_runtime_endpoint", apiProps.QnaRuntimeEndpoint)
+    }
+    d.Set("endpoint", props.Endpoint)
+  }
+
+  d.Set("primary_access_key", keys.Key1)
+  d.Set("secondary_access_key", keys.Key2)
+
+  return tags.FlattenAndSet(d, resp.Tags)
+}
diff --git a/azurerm/internal/services/cognitive/cognitive_account_data_source_test.go b/azurerm/internal/services/cognitive/cognitive_account_data_source_test.go
new file mode 100644
index 000000000000..5d078450b53b
--- /dev/null
+++ b/azurerm/internal/services/cognitive/cognitive_account_data_source_test.go
@@ -0,0 +1,66 @@
+package cognitive_test
+
+import (
+  "fmt"
+  "testing"
+
+  "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+  "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance"
+  "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check"
+)
+
+type CognitiveAccountDataSource struct {
+}
+
+func TestAccCognitiveAccountDataSource_basic(t *testing.T) {
+  data := acceptance.BuildTestData(t, "data.azurerm_cognitive_account", "test")
+  r := CognitiveAccountDataSource{}
+
+  data.DataSourceTest(t, []resource.TestStep{
+    {
+      Config: r.basic(data),
+      Check: resource.ComposeTestCheckFunc(
+        check.That(data.ResourceName).Key("kind").HasValue("Face"),
+        check.That(data.ResourceName).Key("tags.%").HasValue("1"),
+        check.That(data.ResourceName).Key("primary_access_key").Exists(),
+        check.That(data.ResourceName).Key("secondary_access_key").Exists(),
+      ),
+    },
+  })
+}
+
+func (CognitiveAccountDataSource) basic(data acceptance.TestData) string {
+  return fmt.Sprintf(`
+%s
+
+resource "azurerm_cognitive_account" "test" {
+  name                = "acctestcogacc-%d"
+  location            = azurerm_resource_group.test.location
+  resource_group_name = azurerm_resource_group.test.name
+  kind                = "Face"
+  sku_name            = "S0"
+
+  tags = {
+    Acceptance = "Test"
+  }
+}
+
+data
"azurerm_cognitive_account" "test" { + name = azurerm_cognitive_account.test.name + resource_group_name = azurerm_cognitive_account.test.resource_group_name +} +`, CognitiveAccountDataSource{}.template(data), data.RandomInteger) +} + +func (CognitiveAccountDataSource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} +`, data.RandomInteger, data.Locations.Primary) +} diff --git a/azurerm/internal/services/cognitive/cognitive_account_resource.go b/azurerm/internal/services/cognitive/cognitive_account_resource.go index 1fbfd831c3e7..49dc173c8986 100644 --- a/azurerm/internal/services/cognitive/cognitive_account_resource.go +++ b/azurerm/internal/services/cognitive/cognitive_account_resource.go @@ -22,12 +22,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmCognitiveAccount() *schema.Resource { +func resourceCognitiveAccount() *schema.Resource { return &schema.Resource{ - Create: resourceArmCognitiveAccountCreate, - Read: resourceArmCognitiveAccountRead, - Update: resourceArmCognitiveAccountUpdate, - Delete: resourceArmCognitiveAccountDelete, + Create: resourceCognitiveAccountCreate, + Read: resourceCognitiveAccountRead, + Update: resourceCognitiveAccountUpdate, + Delete: resourceCognitiveAccountDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -37,7 +37,7 @@ func resourceArmCognitiveAccount() *schema.Resource { }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.CognitiveAccountID(id) + _, err := parse.AccountID(id) return err }), @@ -129,7 +129,7 @@ func resourceArmCognitiveAccount() *schema.Resource { } } -func resourceArmCognitiveAccountCreate(d *schema.ResourceData, meta interface{}) error { +func resourceCognitiveAccountCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cognitive.AccountsClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -197,15 +197,15 @@ func resourceArmCognitiveAccountCreate(d *schema.ResourceData, meta interface{}) d.SetId(*read.ID) - return resourceArmCognitiveAccountRead(d, meta) + return resourceCognitiveAccountRead(d, meta) } -func resourceArmCognitiveAccountUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceCognitiveAccountUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cognitive.AccountsClient ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.CognitiveAccountID(d.Id()) + id, err := parse.AccountID(d.Id()) if err != nil { return err } @@ -235,21 +235,20 @@ func resourceArmCognitiveAccountUpdate(d *schema.ResourceData, meta interface{}) return fmt.Errorf("Error updating Cognitive Services Account %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } - return resourceArmCognitiveAccountRead(d, meta) + return resourceCognitiveAccountRead(d, meta) } -func resourceArmCognitiveAccountRead(d *schema.ResourceData, meta interface{}) error { +func resourceCognitiveAccountRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cognitive.AccountsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.CognitiveAccountID(d.Id()) + id, err := parse.AccountID(d.Id()) if err != nil { return 
err } resp, err := client.GetProperties(ctx, id.ResourceGroup, id.Name) - if err != nil { if utils.ResponseWasNotFound(resp.Response) { log.Printf("[DEBUG] Cognitive Services Account %q was not found in Resource Group %q - removing from state!", id.Name, id.ResourceGroup) @@ -279,7 +278,6 @@ func resourceArmCognitiveAccountRead(d *schema.ResourceData, meta interface{}) e } keys, err := client.ListKeys(ctx, id.ResourceGroup, id.Name) - if err != nil { if utils.ResponseWasNotFound(resp.Response) { log.Printf("[DEBUG] Not able to obtain keys for Cognitive Services Account %q in Resource Group %q - removing from state!", id.Name, id.ResourceGroup) @@ -295,12 +293,12 @@ func resourceArmCognitiveAccountRead(d *schema.ResourceData, meta interface{}) e return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmCognitiveAccountDelete(d *schema.ResourceData, meta interface{}) error { +func resourceCognitiveAccountDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cognitive.AccountsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.CognitiveAccountID(d.Id()) + id, err := parse.AccountID(d.Id()) if err != nil { return err } diff --git a/azurerm/internal/services/cognitive/cognitive_account_resource_test.go b/azurerm/internal/services/cognitive/cognitive_account_resource_test.go new file mode 100644 index 000000000000..bb0f9c766fe7 --- /dev/null +++ b/azurerm/internal/services/cognitive/cognitive_account_resource_test.go @@ -0,0 +1,387 @@ +package cognitive_test + +import ( + "context" + "fmt" + "regexp" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cognitive/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type CognitiveAccountResource struct { +} + +func TestAccCognitiveAccount_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cognitive_account", "test") + r := CognitiveAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("kind").HasValue("Face"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + check.That(data.ResourceName).Key("primary_access_key").Exists(), + check.That(data.ResourceName).Key("secondary_access_key").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccCognitiveAccount_speechServices(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cognitive_account", "test") + r := CognitiveAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.speechServices(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("kind").HasValue("SpeechServices"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + check.That(data.ResourceName).Key("primary_access_key").Exists(), + check.That(data.ResourceName).Key("secondary_access_key").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func 
TestAccCognitiveAccount_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cognitive_account", "test") + r := CognitiveAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_cognitive_account"), + }, + }) +} + +func TestAccCognitiveAccount_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cognitive_account", "test") + r := CognitiveAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("kind").HasValue("Face"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.Acceptance").HasValue("Test"), + check.That(data.ResourceName).Key("primary_access_key").Exists(), + check.That(data.ResourceName).Key("secondary_access_key").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccCognitiveAccount_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cognitive_account", "test") + r := CognitiveAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("kind").HasValue("Face"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + check.That(data.ResourceName).Key("primary_access_key").Exists(), + check.That(data.ResourceName).Key("secondary_access_key").Exists(), + ), + }, + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("kind").HasValue("Face"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.Acceptance").HasValue("Test"), + check.That(data.ResourceName).Key("primary_access_key").Exists(), + check.That(data.ResourceName).Key("secondary_access_key").Exists(), + ), + }, + }) +} + +func TestAccCognitiveAccount_qnaRuntimeEndpoint(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cognitive_account", "test") + r := CognitiveAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.qnaRuntimeEndpoint(data, "https://localhost:8080/"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("kind").HasValue("QnAMaker"), + check.That(data.ResourceName).Key("qna_runtime_endpoint").HasValue("https://localhost:8080/"), + check.That(data.ResourceName).Key("endpoint").Exists(), + check.That(data.ResourceName).Key("primary_access_key").Exists(), + check.That(data.ResourceName).Key("secondary_access_key").Exists(), + ), + }, + data.ImportStep(), + { + Config: r.qnaRuntimeEndpoint(data, "https://localhost:9000/"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("kind").HasValue("QnAMaker"), + check.That(data.ResourceName).Key("qna_runtime_endpoint").HasValue("https://localhost:9000/"), + check.That(data.ResourceName).Key("endpoint").Exists(), + check.That(data.ResourceName).Key("primary_access_key").Exists(), + check.That(data.ResourceName).Key("secondary_access_key").Exists(), + ), + 
},
+    data.ImportStep(),
+  })
+}
+
+func TestAccCognitiveAccount_qnaRuntimeEndpointUnspecified(t *testing.T) {
+  data := acceptance.BuildTestData(t, "azurerm_cognitive_account", "test")
+  r := CognitiveAccountResource{}
+
+  data.ResourceTest(t, r, []resource.TestStep{
+    {
+      Config:      r.qnaRuntimeEndpointUnspecified(data),
+      ExpectError: regexp.MustCompile("the QnAMaker runtime endpoint `qna_runtime_endpoint` is required when kind is set to `QnAMaker`"),
+    },
+  })
+}
+
+func TestAccCognitiveAccount_cognitiveServices(t *testing.T) {
+  data := acceptance.BuildTestData(t, "azurerm_cognitive_account", "test")
+  r := CognitiveAccountResource{}
+
+  data.ResourceTest(t, r, []resource.TestStep{
+    {
+      Config: r.cognitiveServices(data),
+      Check: resource.ComposeTestCheckFunc(
+        check.That(data.ResourceName).ExistsInAzure(r),
+      ),
+    },
+    data.ImportStep(),
+  })
+}
+
+func TestAccCognitiveAccount_withMultipleCognitiveAccounts(t *testing.T) {
+  data := acceptance.BuildTestData(t, "azurerm_cognitive_account", "test")
+  r := CognitiveAccountResource{}
+
+  data.ResourceTest(t, r, []resource.TestStep{
+    {
+      Config: r.withMultipleCognitiveAccounts(data),
+      Check: resource.ComposeTestCheckFunc(
+        check.That(data.ResourceName).ExistsInAzure(r),
+      ),
+    },
+    data.ImportStep(),
+  })
+}
+
+func (t CognitiveAccountResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) {
+  id, err := parse.AccountID(state.ID)
+  if err != nil {
+    return nil, err
+  }
+
+  resp, err := clients.Cognitive.AccountsClient.GetProperties(ctx, id.ResourceGroup, id.Name)
+  if err != nil {
+    return nil, fmt.Errorf("retrieving Cognitive Services Account %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err)
+  }
+
+  return utils.Bool(resp.Properties != nil), nil
+}
+
+func (CognitiveAccountResource) basic(data acceptance.TestData) string {
+  return fmt.Sprintf(`
+provider "azurerm" {
+  features {}
+}
+
+resource "azurerm_resource_group" "test" {
+  name     = "acctestRG-cognitive-%d"
+  location = "%s"
+}
+
+resource "azurerm_cognitive_account" "test" {
+  name                = "acctestcogacc-%d"
+  location            = azurerm_resource_group.test.location
+  resource_group_name = azurerm_resource_group.test.name
+  kind                = "Face"
+  sku_name            = "S0"
+}
+`, data.RandomInteger, data.Locations.Primary, data.RandomInteger)
+}
+
+func (CognitiveAccountResource) speechServices(data acceptance.TestData) string {
+  return fmt.Sprintf(`
+provider "azurerm" {
+  features {}
+}
+
+resource "azurerm_resource_group" "test" {
+  name     = "acctestRG-cognitive-%d"
+  location = "%s"
+}
+
+resource "azurerm_cognitive_account" "test" {
+  name                = "acctestcogacc-%d"
+  location            = azurerm_resource_group.test.location
+  resource_group_name = azurerm_resource_group.test.name
+  kind                = "SpeechServices"
+  sku_name            = "S0"
+}
+`, data.RandomInteger, data.Locations.Primary, data.RandomInteger)
+}
+
+func (CognitiveAccountResource) requiresImport(data acceptance.TestData) string {
+  template := CognitiveAccountResource{}.basic(data)
+  return fmt.Sprintf(`
+%s
+
+resource "azurerm_cognitive_account" "import" {
+  name                = azurerm_cognitive_account.test.name
+  location            = azurerm_cognitive_account.test.location
+  resource_group_name = azurerm_cognitive_account.test.resource_group_name
+  kind                = azurerm_cognitive_account.test.kind
+  sku_name            = "S0"
+}
+`, template)
+}
+
+func (CognitiveAccountResource) complete(data acceptance.TestData) string {
+  return fmt.Sprintf(`
+provider "azurerm" {
+  features {}
+}
+
+resource "azurerm_resource_group" "test" {
+  name     =
"acctestRG-cognitive-%d" + location = "%s" +} + +resource "azurerm_cognitive_account" "test" { + name = "acctestcogacc-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + kind = "Face" + sku_name = "S0" + + tags = { + Acceptance = "Test" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (CognitiveAccountResource) qnaRuntimeEndpoint(data acceptance.TestData, url string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-cognitive-%d" + location = "%s" +} + +resource "azurerm_cognitive_account" "test" { + name = "acctestcogacc-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + kind = "QnAMaker" + qna_runtime_endpoint = "%s" + sku_name = "S0" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, url) +} + +func (CognitiveAccountResource) qnaRuntimeEndpointUnspecified(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-cognitive-%d" + location = "%s" +} + +resource "azurerm_cognitive_account" "test" { + name = "acctestcogacc-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + kind = "QnAMaker" + sku_name = "S0" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (CognitiveAccountResource) cognitiveServices(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-cognitive-%d" + location = "%s" +} + +resource "azurerm_cognitive_account" "test" { + name = "acctestcogacc-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + kind = "CognitiveServices" + sku_name = "S0" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (CognitiveAccountResource) withMultipleCognitiveAccounts(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-cognitive-%d" + location = "%s" +} + +resource "azurerm_cognitive_account" "test" { + name = "acctestcogacc-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + kind = "CustomVision.Prediction" + sku_name = "S0" +} + +resource "azurerm_cognitive_account" "test2" { + name = "acctestcogacc2-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + kind = "CustomVision.Training" + sku_name = "S0" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/cognitive/parse/account.go b/azurerm/internal/services/cognitive/parse/account.go new file mode 100644 index 000000000000..9e87e693feb0 --- /dev/null +++ b/azurerm/internal/services/cognitive/parse/account.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type AccountId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewAccountID(subscriptionId, resourceGroup, 
name string) AccountId { + return AccountId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id AccountId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Account", segmentsStr) +} + +func (id AccountId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.CognitiveServices/accounts/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// AccountID parses a Account ID into an AccountId struct +func AccountID(input string) (*AccountId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := AccountId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("accounts"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/cognitive/parse/account_test.go b/azurerm/internal/services/cognitive/parse/account_test.go new file mode 100644 index 000000000000..b360116b2355 --- /dev/null +++ b/azurerm/internal/services/cognitive/parse/account_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = AccountId{} + +func TestAccountIDFormatter(t *testing.T) { + actual := NewAccountID("12345678-1234-9876-4563-123456789012", "resGroup1", "account1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.CognitiveServices/accounts/account1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestAccountID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *AccountId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.CognitiveServices/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.CognitiveServices/accounts/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.CognitiveServices/accounts/account1", + Expected: &AccountId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "account1", + }, + }, + + { + 
// upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COGNITIVESERVICES/ACCOUNTS/ACCOUNT1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := AccountID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/cognitive/parse/cognitive_account.go b/azurerm/internal/services/cognitive/parse/cognitive_account.go deleted file mode 100644 index b69ce148bbdf..000000000000 --- a/azurerm/internal/services/cognitive/parse/cognitive_account.go +++ /dev/null @@ -1,33 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type CognitiveAccountId struct { - ResourceGroup string - Name string -} - -func CognitiveAccountID(input string) (*CognitiveAccountId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Cognitive Account ID %q: %+v", input, err) - } - - account := CognitiveAccountId{ - ResourceGroup: id.ResourceGroup, - } - - if account.Name, err = id.PopSegment("accounts"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &account, nil -} diff --git a/azurerm/internal/services/cognitive/parse/cognitive_account_test.go b/azurerm/internal/services/cognitive/parse/cognitive_account_test.go deleted file mode 100644 index ab38a6585575..000000000000 --- a/azurerm/internal/services/cognitive/parse/cognitive_account_test.go +++ /dev/null @@ -1,73 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestCognitiveAccountId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *CognitiveAccountId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Accounts Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.CognitiveServices/accounts/", - Expected: nil, - }, - { - Name: "Cognitive Account ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.CognitiveServices/accounts/Account1", - Expected: &CognitiveAccountId{ - Name: "Account1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.CognitiveServices/Accounts/", - Expected: nil, - }, - 
} - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := CognitiveAccountID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/cognitive/registration.go b/azurerm/internal/services/cognitive/registration.go index 44651dc7584f..d38e88df8f56 100644 --- a/azurerm/internal/services/cognitive/registration.go +++ b/azurerm/internal/services/cognitive/registration.go @@ -20,12 +20,14 @@ func (r Registration) WebsiteCategories() []string { // SupportedDataSources returns the supported Data Sources supported by this Service func (r Registration) SupportedDataSources() map[string]*schema.Resource { - return map[string]*schema.Resource{} + return map[string]*schema.Resource{ + "azurerm_cognitive_account": dataSourceCognitiveAccount(), + } } // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_cognitive_account": resourceArmCognitiveAccount(), + "azurerm_cognitive_account": resourceCognitiveAccount(), } } diff --git a/azurerm/internal/services/cognitive/resourceids.go b/azurerm/internal/services/cognitive/resourceids.go new file mode 100644 index 000000000000..c25485ada7a3 --- /dev/null +++ b/azurerm/internal/services/cognitive/resourceids.go @@ -0,0 +1,3 @@ +package cognitive + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Account -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.CognitiveServices/accounts/account1 diff --git a/azurerm/internal/services/cognitive/tests/cognitive_account_resource_test.go b/azurerm/internal/services/cognitive/tests/cognitive_account_resource_test.go deleted file mode 100644 index 7b2d10ecc59a..000000000000 --- a/azurerm/internal/services/cognitive/tests/cognitive_account_resource_test.go +++ /dev/null @@ -1,461 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "regexp" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cognitive/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMCognitiveAccount_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cognitive_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAppCognitiveAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCognitiveAccount_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCognitiveAccountExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "kind", "Face"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", 
"0"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_access_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_access_key"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMCognitiveAccount_speechServices(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cognitive_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAppCognitiveAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCognitiveAccount_speechServices(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCognitiveAccountExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "kind", "SpeechServices"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_access_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_access_key"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMCognitiveAccount_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cognitive_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAppCognitiveAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCognitiveAccount_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCognitiveAccountExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMCognitiveAccount_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_cognitive_account"), - }, - }, - }) -} - -func TestAccAzureRMCognitiveAccount_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cognitive_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAppCognitiveAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCognitiveAccount_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCognitiveAccountExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "kind", "Face"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.Acceptance", "Test"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_access_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_access_key"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMCognitiveAccount_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cognitive_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAppCognitiveAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCognitiveAccount_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCognitiveAccountExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "kind", "Face"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_access_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, 
"secondary_access_key"), - ), - }, - { - Config: testAccAzureRMCognitiveAccount_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCognitiveAccountExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "kind", "Face"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.Acceptance", "Test"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_access_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_access_key"), - ), - }, - }, - }) -} - -func TestAccAzureRMCognitiveAccount_qnaRuntimeEndpoint(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cognitive_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAppCognitiveAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCognitiveAccount_qnaRuntimeEndpoint(data, "https://localhost:8080/"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCognitiveAccountExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "kind", "QnAMaker"), - resource.TestCheckResourceAttr(data.ResourceName, "qna_runtime_endpoint", "https://localhost:8080/"), - resource.TestCheckResourceAttrSet(data.ResourceName, "endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_access_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_access_key"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCognitiveAccount_qnaRuntimeEndpoint(data, "https://localhost:9000/"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCognitiveAccountExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "kind", "QnAMaker"), - resource.TestCheckResourceAttr(data.ResourceName, "qna_runtime_endpoint", "https://localhost:9000/"), - resource.TestCheckResourceAttrSet(data.ResourceName, "endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_access_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_access_key"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMCognitiveAccount_qnaRuntimeEndpointUnspecified(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cognitive_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAppCognitiveAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCognitiveAccount_qnaRuntimeEndpointUnspecified(data), - ExpectError: regexp.MustCompile("the QnAMaker runtime endpoint `qna_runtime_endpoint` is required when kind is set to `QnAMaker`"), - }, - }, - }) -} - -func TestAccAzureRMCognitiveAccount_cognitiveServices(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cognitive_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAppCognitiveAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCognitiveAccount_cognitiveServices(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCognitiveAccountExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMCognitiveAccount_withMultipleCognitiveAccounts(t *testing.T) { - 
data := acceptance.BuildTestData(t, "azurerm_cognitive_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMAppCognitiveAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCognitiveAccount_withMultipleCognitiveAccounts(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCognitiveAccountExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMAppCognitiveAccountDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Cognitive.AccountsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_cognitive_account" { - continue - } - - id, err := parse.CognitiveAccountID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := client.GetProperties(ctx, id.ResourceGroup, id.Name) - if err != nil { - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Cognitive Services Account still exists:\n%#v", resp) - } - - return nil - } - } - - return nil -} - -func testCheckAzureRMCognitiveAccountExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Cognitive.AccountsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.CognitiveAccountID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.GetProperties(ctx, id.ResourceGroup, id.Name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Cognitive Services Account %q (Resource Group: %q) does not exist", id.Name, id.ResourceGroup) - } - - return fmt.Errorf("Bad: Get on cognitiveAccountsClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMCognitiveAccount_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-cognitive-%d" - location = "%s" -} - -resource "azurerm_cognitive_account" "test" { - name = "acctestcogacc-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - kind = "Face" - sku_name = "S0" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMCognitiveAccount_speechServices(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-cognitive-%d" - location = "%s" -} - -resource "azurerm_cognitive_account" "test" { - name = "acctestcogacc-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - kind = "SpeechServices" - sku_name = "S0" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMCognitiveAccount_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMCognitiveAccount_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_cognitive_account" "import" { - name = azurerm_cognitive_account.test.name - location = azurerm_cognitive_account.test.location 
- resource_group_name = azurerm_cognitive_account.test.resource_group_name - kind = azurerm_cognitive_account.test.kind - sku_name = "S0" -} -`, template) -} - -func testAccAzureRMCognitiveAccount_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-cognitive-%d" - location = "%s" -} - -resource "azurerm_cognitive_account" "test" { - name = "acctestcogacc-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - kind = "Face" - sku_name = "S0" - - tags = { - Acceptance = "Test" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMCognitiveAccount_qnaRuntimeEndpoint(data acceptance.TestData, url string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-cognitive-%d" - location = "%s" -} - -resource "azurerm_cognitive_account" "test" { - name = "acctestcogacc-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - kind = "QnAMaker" - qna_runtime_endpoint = "%s" - sku_name = "S0" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, url) -} - -func testAccAzureRMCognitiveAccount_qnaRuntimeEndpointUnspecified(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-cognitive-%d" - location = "%s" -} - -resource "azurerm_cognitive_account" "test" { - name = "acctestcogacc-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - kind = "QnAMaker" - sku_name = "S0" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMCognitiveAccount_cognitiveServices(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-cognitive-%d" - location = "%s" -} - -resource "azurerm_cognitive_account" "test" { - name = "acctestcogacc-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - kind = "CognitiveServices" - sku_name = "S0" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMCognitiveAccount_withMultipleCognitiveAccounts(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-cognitive-%d" - location = "%s" -} - -resource "azurerm_cognitive_account" "test" { - name = "acctestcogacc-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - kind = "CustomVision.Prediction" - sku_name = "S0" -} - -resource "azurerm_cognitive_account" "test2" { - name = "acctestcogacc2-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - kind = "CustomVision.Training" - sku_name = "S0" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/cognitive/validate/account_id.go b/azurerm/internal/services/cognitive/validate/account_id.go new file mode 100644 index 000000000000..7c0ebbef7529 --- /dev/null +++ 
b/azurerm/internal/services/cognitive/validate/account_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cognitive/parse" +) + +func AccountID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.AccountID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/cognitive/validate/account_id_test.go b/azurerm/internal/services/cognitive/validate/account_id_test.go new file mode 100644 index 000000000000..dd3cedb7d0b9 --- /dev/null +++ b/azurerm/internal/services/cognitive/validate/account_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestAccountID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.CognitiveServices/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.CognitiveServices/accounts/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.CognitiveServices/accounts/account1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COGNITIVESERVICES/ACCOUNTS/ACCOUNT1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := AccountID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/common/service_registration.go b/azurerm/internal/services/common/service_registration.go deleted file mode 100644 index 193216bb88dd..000000000000 --- a/azurerm/internal/services/common/service_registration.go +++ /dev/null @@ -1,17 +0,0 @@ -package common - -import "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - -type ServiceRegistration interface { - // Name is the name of this Service - Name() string - - // WebsiteCategories returns a list of categories which can be used for the sidebar - WebsiteCategories() []string - - // SupportedDataSources returns the supported Data Sources supported by this Service - SupportedDataSources() map[string]*schema.Resource - - // SupportedResources returns the supported Resources supported by this Service - SupportedResources() map[string]*schema.Resource -} diff --git a/azurerm/internal/services/compute/availability_set_resource.go 
b/azurerm/internal/services/compute/availability_set_resource.go index d21220550e5f..aee5e2428c8c 100644
--- a/azurerm/internal/services/compute/availability_set_resource.go
+++ b/azurerm/internal/services/compute/availability_set_resource.go
@@ -3,6 +3,7 @@ package compute
 import (
   "fmt"
   "log"
+  "regexp"
   "strings"
   "time"
@@ -41,6 +42,10 @@ func resourceArmAvailabilitySet() *schema.Resource {
   Type:     schema.TypeString,
   Required: true,
   ForceNew: true,
+  ValidateFunc: validation.StringMatch(
+    regexp.MustCompile("^[a-zA-Z0-9]([-._a-zA-Z0-9]{0,78}[a-zA-Z0-9_])?$"),
+    "The Availability set name can contain only letters, numbers, periods (.), hyphens (-), and underscores (_), up to 80 characters, and it must begin with a letter or number and end with a letter, number, or underscore.",
+  ),
 },
 "resource_group_name": azure.SchemaResourceGroupName(),
diff --git a/azurerm/internal/services/compute/client/client.go b/azurerm/internal/services/compute/client/client.go
index 2a6cad24ea11..cfef5feed2e2 100644
--- a/azurerm/internal/services/compute/client/client.go
+++ b/azurerm/internal/services/compute/client/client.go
@@ -11,6 +11,7 @@ type Client struct {
 DedicatedHostsClient      *compute.DedicatedHostsClient
 DedicatedHostGroupsClient *compute.DedicatedHostGroupsClient
 DisksClient               *compute.DisksClient
+DiskAccessClient          *compute.DiskAccessesClient
 DiskEncryptionSetsClient  *compute.DiskEncryptionSetsClient
 GalleriesClient           *compute.GalleriesClient
 GalleryImagesClient       *compute.GalleryImagesClient
@@ -43,6 +44,9 @@ func NewClient(o *common.ClientOptions) *Client {
 disksClient := compute.NewDisksClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId)
 o.ConfigureClient(&disksClient.Client, o.ResourceManagerAuthorizer)
+diskAccessClient := compute.NewDiskAccessesClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId)
+o.ConfigureClient(&diskAccessClient.Client, o.ResourceManagerAuthorizer)
+
 diskEncryptionSetsClient := compute.NewDiskEncryptionSetsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId)
 o.ConfigureClient(&diskEncryptionSetsClient.Client, o.ResourceManagerAuthorizer)
@@ -99,6 +103,7 @@ func NewClient(o *common.ClientOptions) *Client {
 DedicatedHostsClient:      &dedicatedHostsClient,
 DedicatedHostGroupsClient: &dedicatedHostGroupsClient,
 DisksClient:               &disksClient,
+DiskAccessClient:          &diskAccessClient,
 DiskEncryptionSetsClient:  &diskEncryptionSetsClient,
 GalleriesClient:           &galleriesClient,
 GalleryImagesClient:       &galleryImagesClient,
diff --git a/azurerm/internal/services/compute/dedicated_host_resource.go b/azurerm/internal/services/compute/dedicated_host_resource.go
index ce35e199a33b..410bcd5f6700 100644
--- a/azurerm/internal/services/compute/dedicated_host_resource.go
+++ b/azurerm/internal/services/compute/dedicated_host_resource.go
@@ -72,6 +72,28 @@ func resourceArmDedicatedHost() *schema.Resource {
 "ESv3-Type1",
 "ESv3-Type2",
 "FSv2-Type2",
+"DASv4-Type1",
+"DCSv2-Type1",
+"DDSv4-Type1",
+"DSv3-Type1",
+"DSv3-Type2",
+"DSv3-Type3",
+"DSv4-Type1",
+"EASv4-Type1",
+"EDSv4-Type1",
+"ESv3-Type1",
+"ESv3-Type2",
+"ESv3-Type3",
+"ESv4-Type1",
+"FSv2-Type2",
+"FSv2-Type3",
+"LSv2-Type1",
+"MS-Type1",
+"MSm-Type1",
+"MSmv2-Type1",
+"MSv2-Type1",
+"NVASv4-Type1",
+"NVSv3-Type1",
 }, false),
 },
@@ -115,7 +137,7 @@ func resourceArmDedicatedHostCreate(d *schema.ResourceData, meta interface{}) er
 }
 resourceGroupName := dedicatedHostGroupId.ResourceGroup
-hostGroupName := dedicatedHostGroupId.Name
+hostGroupName := dedicatedHostGroupId.HostGroupName
 if d.IsNewResource() {
existing, err := client.Get(ctx, resourceGroupName, hostGroupName, name, "") @@ -173,7 +195,7 @@ func resourceArmDedicatedHostRead(d *schema.ResourceData, meta interface{}) erro return err } - group, err := groupsClient.Get(ctx, id.ResourceGroup, id.HostGroup, "") + group, err := groupsClient.Get(ctx, id.ResourceGroup, id.HostGroupName, "") if err != nil { if utils.ResponseWasNotFound(group.Response) { log.Printf("[INFO] Parent Dedicated Host Group %q does not exist - removing from state", d.Id()) @@ -181,10 +203,10 @@ func resourceArmDedicatedHostRead(d *schema.ResourceData, meta interface{}) erro return nil } - return fmt.Errorf("Error retrieving Dedicated Host Group %q (Resource Group %q): %+v", id.HostGroup, id.ResourceGroup, err) + return fmt.Errorf("Error retrieving Dedicated Host Group %q (Resource Group %q): %+v", id.HostGroupName, id.ResourceGroup, err) } - resp, err := hostsClient.Get(ctx, id.ResourceGroup, id.HostGroup, id.Name, "") + resp, err := hostsClient.Get(ctx, id.ResourceGroup, id.HostGroupName, id.HostName, "") if err != nil { if utils.ResponseWasNotFound(resp.Response) { log.Printf("[INFO] Dedicated Host %q does not exist - removing from state", d.Id()) @@ -192,7 +214,7 @@ func resourceArmDedicatedHostRead(d *schema.ResourceData, meta interface{}) erro return nil } - return fmt.Errorf("Error retrieving Dedicated Host %q (Host Group Name %q / Resource Group %q): %+v", id.Name, id.HostGroup, id.ResourceGroup, err) + return fmt.Errorf("Error retrieving Dedicated Host %q (Host Group Name %q / Resource Group %q): %+v", id.HostName, id.HostGroupName, id.ResourceGroup, err) } d.Set("name", resp.Name) @@ -234,12 +256,12 @@ func resourceArmDedicatedHostUpdate(d *schema.ResourceData, meta interface{}) er Tags: tags.Expand(d.Get("tags").(map[string]interface{})), } - future, err := client.Update(ctx, id.ResourceGroup, id.HostGroup, id.Name, parameters) + future, err := client.Update(ctx, id.ResourceGroup, id.HostGroupName, id.HostName, parameters) if err != nil { - return fmt.Errorf("Error updating Dedicated Host %q (Host Group Name %q / Resource Group %q): %+v", id.Name, id.HostGroup, id.ResourceGroup, err) + return fmt.Errorf("Error updating Dedicated Host %q (Host Group Name %q / Resource Group %q): %+v", id.HostName, id.HostGroupName, id.ResourceGroup, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for update of Dedicated Host %q (Host Group Name %q / Resource Group %q): %+v", id.Name, id.HostGroup, id.ResourceGroup, err) + return fmt.Errorf("Error waiting for update of Dedicated Host %q (Host Group Name %q / Resource Group %q): %+v", id.HostName, id.HostGroupName, id.ResourceGroup, err) } return resourceArmDedicatedHostRead(d, meta) @@ -255,19 +277,19 @@ func resourceArmDedicatedHostDelete(d *schema.ResourceData, meta interface{}) er return err } - future, err := client.Delete(ctx, id.ResourceGroup, id.HostGroup, id.Name) + future, err := client.Delete(ctx, id.ResourceGroup, id.HostGroupName, id.HostName) if err != nil { - return fmt.Errorf("Error deleting Dedicated Host %q (Host Group Name %q / Resource Group %q): %+v", id.Name, id.HostGroup, id.ResourceGroup, err) + return fmt.Errorf("Error deleting Dedicated Host %q (Host Group Name %q / Resource Group %q): %+v", id.HostName, id.HostGroupName, id.ResourceGroup, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { if !response.WasNotFound(future.Response()) { - return fmt.Errorf("Error waiting for deleting Dedicated Host %q (Host 
Group Name %q / Resource Group %q): %+v", id.Name, id.HostGroup, id.ResourceGroup, err) + return fmt.Errorf("Error waiting for deleting Dedicated Host %q (Host Group Name %q / Resource Group %q): %+v", id.HostName, id.HostGroupName, id.ResourceGroup, err) } } // API has bug, which appears to be eventually consistent. Tracked by this issue: https://github.com/Azure/azure-rest-api-specs/issues/8137 - log.Printf("[DEBUG] Waiting for Dedicated Host %q (Host Group Name %q / Resource Group %q) to disappear", id.Name, id.HostGroup, id.ResourceGroup) + log.Printf("[DEBUG] Waiting for Dedicated Host %q (Host Group Name %q / Resource Group %q) to disappear", id.HostName, id.HostGroupName, id.ResourceGroup) stateConf := &resource.StateChangeConf{ Pending: []string{"Exists"}, Target: []string{"NotFound"}, @@ -278,7 +300,7 @@ func resourceArmDedicatedHostDelete(d *schema.ResourceData, meta interface{}) er } if _, err = stateConf.WaitForState(); err != nil { - return fmt.Errorf("Error waiting for Dedicated Host %q (Host Group Name %q / Resource Group %q) to become available: %+v", id.Name, id.HostGroup, id.ResourceGroup, err) + return fmt.Errorf("Error waiting for Dedicated Host %q (Host Group Name %q / Resource Group %q) to become available: %+v", id.HostName, id.HostGroupName, id.ResourceGroup, err) } return nil @@ -286,7 +308,7 @@ func resourceArmDedicatedHostDelete(d *schema.ResourceData, meta interface{}) er func dedicatedHostDeletedRefreshFunc(ctx context.Context, client *compute.DedicatedHostsClient, id *parse.DedicatedHostId) resource.StateRefreshFunc { return func() (interface{}, string, error) { - res, err := client.Get(ctx, id.ResourceGroup, id.HostGroup, id.Name, "") + res, err := client.Get(ctx, id.ResourceGroup, id.HostGroupName, id.HostName, "") if err != nil { if utils.ResponseWasNotFound(res.Response) { return "NotFound", "NotFound", nil diff --git a/azurerm/internal/services/compute/disk_access_data_source.go b/azurerm/internal/services/compute/disk_access_data_source.go new file mode 100644 index 000000000000..1639e2c97b7e --- /dev/null +++ b/azurerm/internal/services/compute/disk_access_data_source.go @@ -0,0 +1,58 @@ +package compute + +import ( + "fmt" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func dataSourceArmDiskAccess() *schema.Resource { + return &schema.Resource{ + Read: dataSourceArmDiskAccessRead, + + Timeouts: &schema.ResourceTimeout{ + Read: schema.DefaultTimeout(5 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + }, + + "resource_group_name": azure.SchemaResourceGroupNameForDataSource(), + + "tags": tags.Schema(), + }, + } +} + +func dataSourceArmDiskAccessRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Compute.DiskAccessClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + resourceGroup := d.Get("resource_group_name").(string) + name := d.Get("name").(string) + + resp, err := client.Get(ctx, resourceGroup, name) + if err != nil { + if 
utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Error: Disk Access %q (Resource Group %q) was not found", name, resourceGroup) + } + return fmt.Errorf("Error: Error making Read request on Azure Disk Access %q (Resource Group %q): %s", name, resourceGroup, err) + } + + d.SetId(*resp.ID) + + d.Set("name", name) + d.Set("resource_group_name", resourceGroup) + + return tags.FlattenAndSet(d, resp.Tags) +} diff --git a/azurerm/internal/services/compute/disk_access_resource.go b/azurerm/internal/services/compute/disk_access_resource.go new file mode 100644 index 000000000000..7eeabb3166cc --- /dev/null +++ b/azurerm/internal/services/compute/disk_access_resource.go @@ -0,0 +1,158 @@ +package compute + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/compute/mgmt/2020-06-01/compute" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmDiskAccess() *schema.Resource { + return &schema.Resource{ + Create: resourceArmDiskAccessCreateUpdate, + Read: resourceArmDiskAccessRead, + Update: resourceArmDiskAccessCreateUpdate, + Delete: resourceArmDiskAccessDelete, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.DiskAccessID(id) + return err + }), + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + + "location": azure.SchemaLocation(), + + "resource_group_name": azure.SchemaResourceGroupName(), + + "tags": tags.Schema(), + }, + } +} + +func resourceArmDiskAccessCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Compute.DiskAccessClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + log.Printf("[INFO] preparing arguments for Azure ARM Disk Access creation.") + + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + t := d.Get("tags").(map[string]interface{}) + + if d.IsNewResource() { + existing, err := client.Get(ctx, resourceGroup, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("Error checking for presence of existing Disk Access %q (Resource Group %q): %s", name, resourceGroup, err) + } + } + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_disk_access", *existing.ID) + } + } + + location := azure.NormalizeLocation(d.Get("location").(string)) + + createDiskAccess := compute.DiskAccess{ + Name: &name, + Location: &location, + Tags: tags.Expand(t), + } + + future, err 
:= client.CreateOrUpdate(ctx, resourceGroup, name, createDiskAccess) + if err != nil { + return fmt.Errorf("Error creating/updating Disk Access %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error waiting for create/update of Disk Access %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + read, err := client.Get(ctx, resourceGroup, name) + if err != nil { + return fmt.Errorf("Error retrieving Disk Access %q (Resource Group %q): %+v", name, resourceGroup, err) + } + if read.ID == nil { + return fmt.Errorf("Error reading Disk Access %s (Resource Group %q): ID was nil", name, resourceGroup) + } + + d.SetId(*read.ID) + + return resourceArmDiskAccessRead(d, meta) +} + +func resourceArmDiskAccessRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Compute.DiskAccessClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DiskAccessID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[INFO] Disk Access %q does not exist - removing from state", d.Id()) + d.SetId("") + return nil + } + return fmt.Errorf("Error making Read request on Azure Disk Access %s (resource group %s): %s", id.Name, id.ResourceGroup, err) + } + + d.Set("name", resp.Name) + d.Set("resource_group_name", id.ResourceGroup) + + if location := resp.Location; location != nil { + d.Set("location", azure.NormalizeLocation(*location)) + } + + return tags.FlattenAndSet(d, resp.Tags) +} + +func resourceArmDiskAccessDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Compute.DiskAccessClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DiskAccessID(d.Id()) + if err != nil { + return err + } + + future, err := client.Delete(ctx, id.ResourceGroup, id.Name) + if err != nil { + return fmt.Errorf("Error deleting Disk Access %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error waiting for deletion of Disk Access %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + return nil +} diff --git a/azurerm/internal/services/compute/disk_encryption_set_resource.go b/azurerm/internal/services/compute/disk_encryption_set_resource.go index 4684a9711b70..c6072dfe63ea 100644 --- a/azurerm/internal/services/compute/disk_encryption_set_resource.go +++ b/azurerm/internal/services/compute/disk_encryption_set_resource.go @@ -22,9 +22,9 @@ import ( func resourceArmDiskEncryptionSet() *schema.Resource { return &schema.Resource{ - Create: resourceArmDiskEncryptionSetCreateUpdate, + Create: resourceArmDiskEncryptionSetCreate, Read: resourceArmDiskEncryptionSetRead, - Update: resourceArmDiskEncryptionSetCreateUpdate, + Update: resourceArmDiskEncryptionSetUpdate, Delete: resourceArmDiskEncryptionSetDelete, Timeouts: &schema.ResourceTimeout{ @@ -54,7 +54,6 @@ func resourceArmDiskEncryptionSet() *schema.Resource { "key_vault_key_id": { Type: schema.TypeString, Required: true, - ForceNew: true, ValidateFunc: azure.ValidateKeyVaultChildId, }, @@ -91,26 +90,24 @@ func resourceArmDiskEncryptionSet() *schema.Resource { } } -func resourceArmDiskEncryptionSetCreateUpdate(d *schema.ResourceData, 
meta interface{}) error { +func resourceArmDiskEncryptionSetCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Compute.DiskEncryptionSetsClient vaultClient := meta.(*clients.Client).KeyVault.VaultsClient - ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() name := d.Get("name").(string) resourceGroup := d.Get("resource_group_name").(string) - if d.IsNewResource() { - existing, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for present of existing Disk Encryption Set %q (Resource Group %q): %+v", name, resourceGroup, err) - } - } - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_disk_encryption_set", *existing.ID) + existing, err := client.Get(ctx, resourceGroup, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("Error checking for presence of existing Disk Encryption Set %q (Resource Group %q): %+v", name, resourceGroup, err) } } + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_disk_encryption_set", *existing.ID) + } keyVaultKeyId := d.Get("key_vault_key_id").(string) keyVaultDetails, err := diskEncryptionSetRetrieveKeyVault(ctx, vaultClient, keyVaultKeyId) @@ -203,6 +200,55 @@ func resourceArmDiskEncryptionSetRead(d *schema.ResourceData, meta interface{}) return tags.FlattenAndSet(d, resp.Tags) } +func resourceArmDiskEncryptionSetUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Compute.DiskEncryptionSetsClient + vaultClient := meta.(*clients.Client).KeyVault.VaultsClient + ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DiskEncryptionSetID(d.Id()) + if err != nil { + return err + } + + update := compute.DiskEncryptionSetUpdate{} + if d.HasChange("tags") { + update.Tags = tags.Expand(d.Get("tags").(map[string]interface{})) + } + + if d.HasChange("key_vault_key_id") { + keyVaultKeyId := d.Get("key_vault_key_id").(string) + keyVaultDetails, err := diskEncryptionSetRetrieveKeyVault(ctx, vaultClient, keyVaultKeyId) + if err != nil { + return fmt.Errorf("Error validating Key Vault Key %q for Disk Encryption Set: %+v", keyVaultKeyId, err) + } + if !keyVaultDetails.softDeleteEnabled { + return fmt.Errorf("Error validating Key Vault %q (Resource Group %q) for Disk Encryption Set: Soft Delete must be enabled but it isn't!", keyVaultDetails.keyVaultName, keyVaultDetails.resourceGroupName) + } + if !keyVaultDetails.purgeProtectionEnabled { + return fmt.Errorf("Error validating Key Vault %q (Resource Group %q) for Disk Encryption Set: Purge Protection must be enabled but it isn't!", keyVaultDetails.keyVaultName, keyVaultDetails.resourceGroupName) + } + update.DiskEncryptionSetUpdateProperties = &compute.DiskEncryptionSetUpdateProperties{ + ActiveKey: &compute.KeyVaultAndKeyReference{ + KeyURL: utils.String(keyVaultKeyId), + SourceVault: &compute.SourceVault{ + ID: utils.String(keyVaultDetails.keyVaultId), + }, + }, + } + } + + future, err := client.Update(ctx, id.ResourceGroup, id.Name, update) + if err != nil { + return fmt.Errorf("Error updating Disk Encryption Set %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return
fmt.Errorf("Error waiting for update of Disk Encryption Set %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + return resourceArmDiskEncryptionSetRead(d, meta) +} + func resourceArmDiskEncryptionSetDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Compute.DiskEncryptionSetsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) diff --git a/azurerm/internal/services/compute/images_data_source.go b/azurerm/internal/services/compute/images_data_source.go new file mode 100644 index 000000000000..98dfba74c638 --- /dev/null +++ b/azurerm/internal/services/compute/images_data_source.go @@ -0,0 +1,199 @@ +package compute + +import ( + "context" + "fmt" + "time" + + "github.com/Azure/azure-sdk-for-go/services/compute/mgmt/2020-06-01/compute" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func dataSourceArmImages() *schema.Resource { + return &schema.Resource{ + Read: dataSourceArmImagesRead, + + Timeouts: &schema.ResourceTimeout{ + Read: schema.DefaultTimeout(5 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "resource_group_name": azure.SchemaResourceGroupNameForDataSource(), + + "tags_filter": tags.Schema(), + + "images": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Computed: true, + }, + + "location": location.SchemaComputed(), + + "zone_resilient": { + Type: schema.TypeBool, + Computed: true, + }, + + "os_disk": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "blob_uri": { + Type: schema.TypeString, + Computed: true, + }, + "caching": { + Type: schema.TypeString, + Computed: true, + }, + "managed_disk_id": { + Type: schema.TypeString, + Computed: true, + }, + "os_state": { + Type: schema.TypeString, + Computed: true, + }, + "os_type": { + Type: schema.TypeString, + Computed: true, + }, + "size_gb": { + Type: schema.TypeInt, + Computed: true, + }, + }, + }, + }, + + "data_disk": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "blob_uri": { + Type: schema.TypeString, + Computed: true, + }, + "caching": { + Type: schema.TypeString, + Computed: true, + }, + "lun": { + Type: schema.TypeInt, + Computed: true, + }, + "managed_disk_id": { + Type: schema.TypeString, + Computed: true, + }, + "size_gb": { + Type: schema.TypeInt, + Computed: true, + }, + }, + }, + }, + + "tags": tags.SchemaDataSource(), + }, + }, + }, + }, + } +} + +func dataSourceArmImagesRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Compute.ImagesClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + resourceGroup := d.Get("resource_group_name").(string) + filterTags := tags.Expand(d.Get("tags_filter").(map[string]interface{})) + + resp, err := client.ListByResourceGroupComplete(ctx, resourceGroup) + if err != nil 
{ + if utils.ResponseWasNotFound(resp.Response().Response) { + return fmt.Errorf("no images were found in Resource Group %q", resourceGroup) + } + return fmt.Errorf("retrieving Images (Resource Group %q): %+v", resourceGroup, err) + } + + images, err := flattenImagesResult(ctx, resp, filterTags) + if err != nil { + return fmt.Errorf("parsing Images (Resource Group %q): %+v", resourceGroup, err) + } + if len(images) == 0 { + return fmt.Errorf("no images were found that match the specified tags") + } + + d.SetId(time.Now().UTC().String()) + + d.Set("resource_group_name", resourceGroup) + + if err := d.Set("images", images); err != nil { + return fmt.Errorf("setting `images`: %+v", err) + } + + return nil +} + +func flattenImagesResult(ctx context.Context, iterator compute.ImageListResultIterator, filterTags map[string]*string) ([]interface{}, error) { + results := make([]interface{}, 0) + + for iterator.NotDone() { + image := iterator.Value() + found := true + // Loop through our filter tags and see if they match + for k, v := range filterTags { + if v != nil { + // If the tags do not match return false + if image.Tags[k] == nil || *v != *image.Tags[k] { + found = false + } + } + } + + if found { + results = append(results, flattenImage(image)) + } + if err := iterator.NextWithContext(ctx); err != nil { + return nil, err + } + } + + return results, nil +} + +func flattenImage(input compute.Image) map[string]interface{} { + output := make(map[string]interface{}) + + output["name"] = input.Name + output["location"] = location.NormalizeNilable(input.Location) + + if input.ImageProperties != nil { + if storageProfile := input.ImageProperties.StorageProfile; storageProfile != nil { + output["zone_resilient"] = storageProfile.ZoneResilient + + output["os_disk"] = flattenAzureRmImageOSDisk(storageProfile.OsDisk) + + output["data_disk"] = flattenAzureRmImageDataDisks(storageProfile.DataDisks) + } + } + + output["tags"] = tags.Flatten(input.Tags) + + return output +} diff --git a/azurerm/internal/services/compute/linux_virtual_machine_resource.go b/azurerm/internal/services/compute/linux_virtual_machine_resource.go index d122dad75985..5c2eac456490 100644 --- a/azurerm/internal/services/compute/linux_virtual_machine_resource.go +++ b/azurerm/internal/services/compute/linux_virtual_machine_resource.go @@ -15,6 +15,7 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + azValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" @@ -137,7 +138,6 @@ func resourceLinuxVirtualMachine() *schema.Resource { "dedicated_host_id": { Type: schema.TypeString, Optional: true, - ForceNew: true, // TODO: investigate, looks like the Portal allows migration ValidateFunc: computeValidate.DedicatedHostID, // the Compute/VM API is broken and returns the Resource Group name in UPPERCASE :shrug: DiffSuppressFunc: suppress.CaseDifference, @@ -168,6 +168,13 @@ func resourceLinuxVirtualMachine() *schema.Resource { }, false), }, + "extensions_time_budget": { + Type: schema.TypeString, + Optional: true, + Default: "PT1H30M", + 
ValidateFunc: azValidate.ISO8601DurationBetween("PT15M", "PT2H"), + }, + "identity": virtualMachineIdentitySchema(), "max_bid_price": { @@ -304,8 +311,10 @@ func resourceLinuxVirtualMachineCreate(d *schema.ResourceData, meta interface{}) adminUsername := d.Get("admin_username").(string) allowExtensionOperations := d.Get("allow_extension_operations").(bool) + bootDiagnosticsRaw := d.Get("boot_diagnostics").([]interface{}) bootDiagnostics := expandBootDiagnostics(bootDiagnosticsRaw) + var computerName string if v, ok := d.GetOk("computer_name"); ok && len(v.(string)) > 0 { computerName = v.(string) @@ -387,6 +396,7 @@ func resourceLinuxVirtualMachineCreate(d *schema.ResourceData, meta interface{}) // Optional AdditionalCapabilities: additionalCapabilities, DiagnosticsProfile: bootDiagnostics, + ExtensionsTimeBudget: utils.String(d.Get("extensions_time_budget").(string)), }, Tags: tags.Expand(t), } @@ -551,6 +561,12 @@ func resourceLinuxVirtualMachineRead(d *schema.ResourceData, meta interface{}) e d.Set("size", string(profile.VMSize)) } + extensionsTimeBudget := "PT1H30M" + if props.ExtensionsTimeBudget != nil { + extensionsTimeBudget = *props.ExtensionsTimeBudget + } + d.Set("extensions_time_budget", extensionsTimeBudget) + // defaulted since BillingProfile isn't returned if it's unset maxBidPrice := float64(-1.0) if props.BillingProfile != nil && props.BillingProfile.MaxPrice != nil { @@ -589,7 +605,7 @@ func resourceLinuxVirtualMachineRead(d *schema.ResourceData, meta interface{}) e if err != nil { return fmt.Errorf("flattening `admin_ssh_key`: %+v", err) } - if err := d.Set("admin_ssh_key", flattenedSSHKeys); err != nil { + if err := d.Set("admin_ssh_key", schema.NewSet(SSHKeySchemaHash, *flattenedSSHKeys)); err != nil { return fmt.Errorf("setting `admin_ssh_key`: %+v", err) } } @@ -739,6 +755,26 @@ func resourceLinuxVirtualMachineUpdate(d *schema.ResourceData, meta interface{}) update.Identity = identity } + if d.HasChange("dedicated_host_id") { + shouldUpdate = true + + // Code="PropertyChangeNotAllowed" Message="Updating Host of VM 'VMNAME' is not allowed as the VM is currently allocated. Please Deallocate the VM and retry the operation." 
+ shouldDeallocate = true + + if v, ok := d.GetOk("dedicated_host_id"); ok { + update.Host = &compute.SubResource{ + ID: utils.String(v.(string)), + } + } else { + update.Host = &compute.SubResource{} + } + } + + if d.HasChange("extensions_time_budget") { + shouldUpdate = true + update.ExtensionsTimeBudget = utils.String(d.Get("extensions_time_budget").(string)) + } + if d.HasChange("max_bid_price") { shouldUpdate = true @@ -1052,10 +1088,8 @@ func resourceLinuxVirtualMachineDelete(d *schema.ResourceData, meta interface{}) // ISSUE: XXX // shutting down the Virtual Machine prior to removing it means users are no longer charged for the compute // thus this can be a large cost-saving when deleting larger instances - // in addition - since we're shutting down the machine to remove it, forcing a power-off is fine (as opposed - // to waiting for a graceful shut down) log.Printf("[DEBUG] Powering Off Linux Virtual Machine %q (Resource Group %q)..", id.Name, id.ResourceGroup) - skipShutdown := true + skipShutdown := !meta.(*clients.Client).Features.VirtualMachine.GracefulShutdown powerOffFuture, err := client.PowerOff(ctx, id.ResourceGroup, id.Name, utils.Bool(skipShutdown)) if err != nil { return fmt.Errorf("powering off Linux Virtual Machine %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) @@ -1092,19 +1126,19 @@ func resourceLinuxVirtualMachineDelete(d *schema.ResourceData, meta interface{}) return err } - diskDeleteFuture, err := disksClient.Delete(ctx, diskId.ResourceGroup, diskId.Name) + diskDeleteFuture, err := disksClient.Delete(ctx, diskId.ResourceGroup, diskId.DiskName) if err != nil { if !response.WasNotFound(diskDeleteFuture.Response()) { - return fmt.Errorf("deleting OS Disk %q (Resource Group %q) for Linux Virtual Machine %q (Resource Group %q): %+v", diskId.Name, diskId.ResourceGroup, id.Name, id.ResourceGroup, err) + return fmt.Errorf("deleting OS Disk %q (Resource Group %q) for Linux Virtual Machine %q (Resource Group %q): %+v", diskId.DiskName, diskId.ResourceGroup, id.Name, id.ResourceGroup, err) } } if !response.WasNotFound(diskDeleteFuture.Response()) { if err := diskDeleteFuture.WaitForCompletionRef(ctx, disksClient.Client); err != nil { - return fmt.Errorf("OS Disk %q (Resource Group %q) for Linux Virtual Machine %q (Resource Group %q): %+v", diskId.Name, diskId.ResourceGroup, id.Name, id.ResourceGroup, err) + return fmt.Errorf("OS Disk %q (Resource Group %q) for Linux Virtual Machine %q (Resource Group %q): %+v", diskId.DiskName, diskId.ResourceGroup, id.Name, id.ResourceGroup, err) } } - log.Printf("[DEBUG] Deleted OS Disk from Linux Virtual Machine %q (Resource Group %q).", diskId.Name, diskId.ResourceGroup) + log.Printf("[DEBUG] Deleted OS Disk from Linux Virtual Machine %q (Resource Group %q).", diskId.DiskName, diskId.ResourceGroup) } else { log.Printf("[DEBUG] Skipping Deleting OS Disk from Linux Virtual Machine %q (Resource Group %q) - cannot determine OS Disk ID.", id.Name, id.ResourceGroup) } @@ -1133,7 +1167,6 @@ func resourceLinuxVirtualMachineDelete(d *schema.ResourceData, meta interface{}) Refresh: func() (interface{}, string, error) { log.Printf("[INFO] checking on state of Linux Virtual Machine %q", id.Name) resp, err := client.Get(ctx, id.ResourceGroup, id.Name, "") - if err != nil { if utils.ResponseWasNotFound(resp.Response) { return resp, strconv.Itoa(resp.StatusCode), nil diff --git a/azurerm/internal/services/compute/linux_virtual_machine_scale_set_resource.go b/azurerm/internal/services/compute/linux_virtual_machine_scale_set_resource.go 
index af6d54b9db83..a5cbb7ca1b52 100644 --- a/azurerm/internal/services/compute/linux_virtual_machine_scale_set_resource.go +++ b/azurerm/internal/services/compute/linux_virtual_machine_scale_set_resource.go @@ -168,6 +168,13 @@ func resourceArmLinuxVirtualMachineScaleSet() *schema.Resource { "plan": planSchema(), + "platform_fault_domain_count": { + Type: schema.TypeInt, + Optional: true, + ForceNew: true, + Computed: true, + }, + "priority": { Type: schema.TypeString, Optional: true, @@ -291,7 +298,11 @@ func resourceArmLinuxVirtualMachineScaleSetCreate(d *schema.ResourceData, meta i bootDiagnostics := expandBootDiagnostics(bootDiagnosticsRaw) dataDisksRaw := d.Get("data_disk").([]interface{}) - dataDisks := ExpandVirtualMachineScaleSetDataDisk(dataDisksRaw) + ultraSSDEnabled := d.Get("additional_capabilities.0.ultra_ssd_enabled").(bool) + dataDisks, err := ExpandVirtualMachineScaleSetDataDisk(dataDisksRaw, ultraSSDEnabled) + if err != nil { + return fmt.Errorf("expanding `data_disk`: %+v", err) + } identityRaw := d.Get("identity").([]interface{}) identity, err := ExpandVirtualMachineScaleSetIdentity(identityRaw) @@ -484,6 +495,10 @@ func resourceArmLinuxVirtualMachineScaleSetCreate(d *schema.ResourceData, meta i Zones: zones, } + if v, ok := d.GetOk("platform_fault_domain_count"); ok { + props.VirtualMachineScaleSetProperties.PlatformFaultDomainCount = utils.Int32(int32(v.(int))) + } + if v, ok := d.GetOk("proximity_placement_group_id"); ok { props.VirtualMachineScaleSetProperties.ProximityPlacementGroup = &compute.SubResource{ ID: utils.String(v.(string)), @@ -662,8 +677,12 @@ func resourceArmLinuxVirtualMachineScaleSetUpdate(d *schema.ResourceData, meta i } if d.HasChange("data_disk") { - dataDisksRaw := d.Get("data_disk").([]interface{}) - updateProps.VirtualMachineProfile.StorageProfile.DataDisks = ExpandVirtualMachineScaleSetDataDisk(dataDisksRaw) + ultraSSDEnabled := d.Get("additional_capabilities.0.ultra_ssd_enabled").(bool) + dataDisks, err := ExpandVirtualMachineScaleSetDataDisk(d.Get("data_disk").([]interface{}), ultraSSDEnabled) + if err != nil { + return fmt.Errorf("expanding `data_disk`: %+v", err) + } + updateProps.VirtualMachineProfile.StorageProfile.DataDisks = dataDisks } if d.HasChange("os_disk") { @@ -870,6 +889,7 @@ func resourceArmLinuxVirtualMachineScaleSetRead(d *schema.ResourceData, meta int if props.ProximityPlacementGroup != nil && props.ProximityPlacementGroup.ID != nil { proximityPlacementGroupId = *props.ProximityPlacementGroup.ID } + d.Set("platform_fault_domain_count", props.PlatformFaultDomainCount) d.Set("proximity_placement_group_id", proximityPlacementGroupId) d.Set("single_placement_group", props.SinglePlacementGroup) d.Set("unique_id", props.UniqueID) @@ -931,7 +951,7 @@ func resourceArmLinuxVirtualMachineScaleSetRead(d *schema.ResourceData, meta int if err != nil { return fmt.Errorf("Error flattening `admin_ssh_key`: %+v", err) } - if err := d.Set("admin_ssh_key", flattenedSshKeys); err != nil { + if err := d.Set("admin_ssh_key", schema.NewSet(SSHKeySchemaHash, *flattenedSshKeys)); err != nil { return fmt.Errorf("Error setting `admin_ssh_key`: %+v", err) } } diff --git a/azurerm/internal/services/compute/managed_disk_resource.go b/azurerm/internal/services/compute/managed_disk_resource.go index 263508813033..8e218f2b9c12 100644 --- a/azurerm/internal/services/compute/managed_disk_resource.go +++ b/azurerm/internal/services/compute/managed_disk_resource.go @@ -503,14 +503,14 @@ func resourceArmManagedDiskRead(d *schema.ResourceData, meta interface{}) 
error return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.Name) + resp, err := client.Get(ctx, id.ResourceGroup, id.DiskName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { log.Printf("[INFO] Disk %q does not exist - removing from state", d.Id()) d.SetId("") return nil } - return fmt.Errorf("Error making Read request on Azure Managed Disk %s (resource group %s): %s", id.Name, id.ResourceGroup, err) + return fmt.Errorf("Error making Read request on Azure Managed Disk %s (resource group %s): %s", id.DiskName, id.ResourceGroup, err) } d.Set("name", resp.Name) @@ -569,13 +569,13 @@ func resourceArmManagedDiskDelete(d *schema.ResourceData, meta interface{}) erro return err } - future, err := client.Delete(ctx, id.ResourceGroup, id.Name) + future, err := client.Delete(ctx, id.ResourceGroup, id.DiskName) if err != nil { - return fmt.Errorf("Error deleting Managed Disk %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("Error deleting Managed Disk %q (Resource Group %q): %+v", id.DiskName, id.ResourceGroup, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for deletion of Managed Disk %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("Error waiting for deletion of Managed Disk %q (Resource Group %q): %+v", id.DiskName, id.ResourceGroup, err) } return nil diff --git a/azurerm/internal/services/compute/network_interface.go b/azurerm/internal/services/compute/network_interface.go index 329cf452a957..24ee4c0183ff 100644 --- a/azurerm/internal/services/compute/network_interface.go +++ b/azurerm/internal/services/compute/network_interface.go @@ -126,7 +126,7 @@ func retrieveIPAddressesForNIC(ctx context.Context, nicClient *network.Interface // retrievePublicIPAddress returns the Public IP Address associated with an Azure Public IP // nolint: deadcode unused func retrievePublicIPAddress(ctx context.Context, client *network.PublicIPAddressesClient, publicIPAddressID string) (*string, error) { - id, err := parse.PublicIPAddressID(publicIPAddressID) + id, err := parse.PublicIpAddressID(publicIPAddressID) if err != nil { return nil, err } diff --git a/azurerm/internal/services/compute/parse/availability_set.go b/azurerm/internal/services/compute/parse/availability_set.go index bd3a9c899827..ef5cf741f1e5 100644 --- a/azurerm/internal/services/compute/parse/availability_set.go +++ b/azurerm/internal/services/compute/parse/availability_set.go @@ -1,38 +1,63 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type AvailabilitySetId struct { - ResourceGroup string - Name string + SubscriptionId string + ResourceGroup string + Name string } -func NewAvailabilitySetId(resourceGroup, name string) AvailabilitySetId { +func NewAvailabilitySetID(subscriptionId, resourceGroup, name string) AvailabilitySetId { return AvailabilitySetId{ - ResourceGroup: resourceGroup, - Name: name, + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, } } -func (id AvailabilitySetId) ID(subscriptionId string) string { - return fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Compute/availabilitySets/%s", subscriptionId, id.ResourceGroup, id.Name) +func (id AvailabilitySetId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group 
%q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Availability Set", segmentsStr) } +func (id AvailabilitySetId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Compute/availabilitySets/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// AvailabilitySetID parses a AvailabilitySet ID into an AvailabilitySetId struct func AvailabilitySetID(input string) (*AvailabilitySetId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("unable to parse Availability Set ID %q: %+v", input, err) + return nil, err + } + + resourceId := AvailabilitySetId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - set := AvailabilitySetId{ - ResourceGroup: id.ResourceGroup, + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if set.Name, err = id.PopSegment("availabilitySets"); err != nil { + if resourceId.Name, err = id.PopSegment("availabilitySets"); err != nil { return nil, err } @@ -40,5 +65,5 @@ func AvailabilitySetID(input string) (*AvailabilitySetId, error) { return nil, err } - return &set, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/compute/parse/availability_set_test.go b/azurerm/internal/services/compute/parse/availability_set_test.go index c53636f41489..bbe630818b98 100644 --- a/azurerm/internal/services/compute/parse/availability_set_test.go +++ b/azurerm/internal/services/compute/parse/availability_set_test.go @@ -1,5 +1,7 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" @@ -9,9 +11,8 @@ import ( var _ resourceid.Formatter = AvailabilitySetId{} func TestAvailabilitySetIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - actual := NewAvailabilitySetId("group1", "set1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Compute/availabilitySets/set1" + actual := NewAvailabilitySetID("12345678-1234-9876-4563-123456789012", "resGroup1", "set1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/availabilitySets/set1" if actual != expected { t.Fatalf("Expected %q but got %q", expected, actual) } @@ -19,54 +20,72 @@ func TestAvailabilitySetIDFormatter(t *testing.T) { func TestAvailabilitySetID(t *testing.T) { testData := []struct { - Name string - Input string - Error bool - Expect *AvailabilitySetId + Input string + Error bool + Expected *AvailabilitySetId }{ + { - Name: "Empty", + // empty Input: "", Error: true, }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + // missing SubscriptionId + Input: "/", Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + // missing value for SubscriptionId + Input: "/subscriptions/", Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", Error: true, }, + { - Name: "Missing Availability Set Value", - 
Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/availabilitySets/", + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", Error: true, }, + { - Name: "Availability Set ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/availabilitySets/set1", - Error: false, - Expect: &AvailabilitySetId{ - ResourceGroup: "resGroup1", - Name: "set1", + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/availabilitySets/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/availabilitySets/set1", + Expected: &AvailabilitySetId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "set1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/AvailabilitySets/set1", + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/AVAILABILITYSETS/SET1", Error: true, }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := AvailabilitySetID(v.Input) if err != nil { @@ -74,15 +93,20 @@ func TestAvailabilitySetID(t *testing.T) { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) } } } diff --git a/azurerm/internal/services/compute/parse/dedicated_host.go b/azurerm/internal/services/compute/parse/dedicated_host.go index 0971ac42b4ff..7956fa5395b3 100644 --- a/azurerm/internal/services/compute/parse/dedicated_host.go +++ b/azurerm/internal/services/compute/parse/dedicated_host.go @@ -1,45 +1,69 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type DedicatedHostId struct { - ResourceGroup string - HostGroup string - Name string + SubscriptionId string + ResourceGroup string + HostGroupName string + HostName string } -func NewDedicatedHostId(id DedicatedHostGroupId, name string) DedicatedHostId { +func NewDedicatedHostID(subscriptionId, resourceGroup, hostGroupName, hostName string) DedicatedHostId { 
return DedicatedHostId{ - ResourceGroup: id.ResourceGroup, - HostGroup: id.Name, - Name: name, + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + HostGroupName: hostGroupName, + HostName: hostName, } } -func (id DedicatedHostId) ID(subscriptionId string) string { - base := NewDedicatedHostGroupId(id.ResourceGroup, id.HostGroup).ID(subscriptionId) - return fmt.Sprintf("%s/hosts/%s", base, id.Name) +func (id DedicatedHostId) String() string { + segments := []string{ + fmt.Sprintf("Host Name %q", id.HostName), + fmt.Sprintf("Host Group Name %q", id.HostGroupName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Dedicated Host", segmentsStr) } +func (id DedicatedHostId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Compute/hostGroups/%s/hosts/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.HostGroupName, id.HostName) +} + +// DedicatedHostID parses a DedicatedHost ID into an DedicatedHostId struct func DedicatedHostID(input string) (*DedicatedHostId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("unable to parse Dedicated Host ID %q: %+v", input, err) + return nil, err } - host := DedicatedHostId{ - ResourceGroup: id.ResourceGroup, + resourceId := DedicatedHostId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, } - if host.HostGroup, err = id.PopSegment("hostGroups"); err != nil { - return nil, err + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if host.Name, err = id.PopSegment("hosts"); err != nil { + if resourceId.HostGroupName, err = id.PopSegment("hostGroups"); err != nil { + return nil, err + } + if resourceId.HostName, err = id.PopSegment("hosts"); err != nil { return nil, err } @@ -47,5 +71,5 @@ func DedicatedHostID(input string) (*DedicatedHostId, error) { return nil, err } - return &host, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/compute/parse/dedicated_host_group.go b/azurerm/internal/services/compute/parse/dedicated_host_group.go index d43811ff2ab5..71fb4ed66be1 100644 --- a/azurerm/internal/services/compute/parse/dedicated_host_group.go +++ b/azurerm/internal/services/compute/parse/dedicated_host_group.go @@ -1,38 +1,63 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type DedicatedHostGroupId struct { - ResourceGroup string - Name string + SubscriptionId string + ResourceGroup string + HostGroupName string } -func NewDedicatedHostGroupId(resourceGroup, name string) DedicatedHostGroupId { +func NewDedicatedHostGroupID(subscriptionId, resourceGroup, hostGroupName string) DedicatedHostGroupId { return DedicatedHostGroupId{ - ResourceGroup: resourceGroup, - Name: name, + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + HostGroupName: hostGroupName, } } -func (id DedicatedHostGroupId) ID(subscriptionId string) string { - return fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Compute/hostGroups/%s", subscriptionId, id.ResourceGroup, id.Name) +func (id DedicatedHostGroupId) String() string { + segments := []string{ + 
fmt.Sprintf("Host Group Name %q", id.HostGroupName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Dedicated Host Group", segmentsStr) } +func (id DedicatedHostGroupId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Compute/hostGroups/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.HostGroupName) +} + +// DedicatedHostGroupID parses a DedicatedHostGroup ID into an DedicatedHostGroupId struct func DedicatedHostGroupID(input string) (*DedicatedHostGroupId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("unable to parse Dedicated Host Group ID %q: %+v", input, err) + return nil, err + } + + resourceId := DedicatedHostGroupId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - group := DedicatedHostGroupId{ - ResourceGroup: id.ResourceGroup, + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if group.Name, err = id.PopSegment("hostGroups"); err != nil { + if resourceId.HostGroupName, err = id.PopSegment("hostGroups"); err != nil { return nil, err } @@ -40,5 +65,5 @@ func DedicatedHostGroupID(input string) (*DedicatedHostGroupId, error) { return nil, err } - return &group, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/compute/parse/dedicated_host_group_test.go b/azurerm/internal/services/compute/parse/dedicated_host_group_test.go index 307e0c15bfbf..1f10ba4429e4 100644 --- a/azurerm/internal/services/compute/parse/dedicated_host_group_test.go +++ b/azurerm/internal/services/compute/parse/dedicated_host_group_test.go @@ -1,5 +1,7 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" @@ -9,9 +11,8 @@ import ( var _ resourceid.Formatter = DedicatedHostGroupId{} func TestDedicatedHostGroupIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - actual := NewDedicatedHostGroupId("group1", "hostGroup1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Compute/hostGroups/hostGroup1" + actual := NewDedicatedHostGroupID("12345678-1234-9876-4563-123456789012", "resGroup1", "hostGroup1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/hostGroups/hostGroup1" if actual != expected { t.Fatalf("Expected %q but got %q", expected, actual) } @@ -19,54 +20,72 @@ func TestDedicatedHostGroupIDFormatter(t *testing.T) { func TestDedicatedHostGroupID(t *testing.T) { testData := []struct { - Name string - Input string - Error bool - Expect *DedicatedHostGroupId + Input string + Error bool + Expected *DedicatedHostGroupId }{ + { - Name: "Empty", + // empty Input: "", Error: true, }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + // missing SubscriptionId + Input: "/", Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + // missing value for SubscriptionId + Input: "/subscriptions/", Error: true, }, + { - Name: "Resource Group ID", - Input: 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", Error: true, }, + { - Name: "Missing Host Group Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/hostGroups/", + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", Error: true, }, + { - Name: "Host Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/hostGroups/group1", - Error: false, - Expect: &DedicatedHostGroupId{ - ResourceGroup: "resGroup1", - Name: "group1", + // missing HostGroupName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", + Error: true, + }, + + { + // missing value for HostGroupName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/hostGroups/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/hostGroups/hostGroup1", + Expected: &DedicatedHostGroupId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + HostGroupName: "hostGroup1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/HostGroups/group1", + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/HOSTGROUPS/HOSTGROUP1", Error: true, }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := DedicatedHostGroupID(v.Input) if err != nil { @@ -74,15 +93,20 @@ func TestDedicatedHostGroupID(t *testing.T) { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.HostGroupName != v.Expected.HostGroupName { + t.Fatalf("Expected %q but got %q for HostGroupName", v.Expected.HostGroupName, actual.HostGroupName) } } } diff --git a/azurerm/internal/services/compute/parse/dedicated_host_test.go b/azurerm/internal/services/compute/parse/dedicated_host_test.go index aa628112836f..62b1156607c2 100644 --- a/azurerm/internal/services/compute/parse/dedicated_host_test.go +++ b/azurerm/internal/services/compute/parse/dedicated_host_test.go @@ -1,5 +1,7 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" @@ -9,10 +11,8 @@ import ( var _ resourceid.Formatter = DedicatedHostId{} func TestDedicatedHostIDFormatter(t *testing.T) { - subscriptionId := 
"12345678-1234-5678-1234-123456789012" - hostGroupId := NewDedicatedHostGroupId("group1", "hostGroup1") - actual := NewDedicatedHostId(hostGroupId, "host1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Compute/hostGroups/hostGroup1/hosts/host1" + actual := NewDedicatedHostID("12345678-1234-9876-4563-123456789012", "resGroup1", "hostGroup1", "host1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/hostGroups/hostGroup1/hosts/host1" if actual != expected { t.Fatalf("Expected %q but got %q", expected, actual) } @@ -20,65 +20,85 @@ func TestDedicatedHostIDFormatter(t *testing.T) { func TestDedicatedHostID(t *testing.T) { testData := []struct { - Name string - Input string - Error bool - Expect *DedicatedHostId + Input string + Error bool + Expected *DedicatedHostId }{ + { - Name: "Empty", + // empty Input: "", Error: true, }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", + // missing HostGroupName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", Error: true, }, + { - Name: "Missing Host Group Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/hostGroups/", + // missing value for HostGroupName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/hostGroups/", Error: true, }, + { - Name: "Host Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/hostGroups/group1/", + // missing HostName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/hostGroups/hostGroup1/", Error: true, }, + { - Name: "Missing Host Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/hostGroups/group1/hosts/", + // missing value for HostName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/hostGroups/hostGroup1/hosts/", Error: true, }, + { - Name: "Host ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/hostGroups/group1/hosts/host1", - Error: false, - Expect: &DedicatedHostId{ - ResourceGroup: "resGroup1", - HostGroup: "group1", - Name: "host1", + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/hostGroups/hostGroup1/hosts/host1", + Expected: &DedicatedHostId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + HostGroupName: 
"hostGroup1", + HostName: "host1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/hostGroups/group1/Hosts/host1", + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/HOSTGROUPS/HOSTGROUP1/HOSTS/HOST1", Error: true, }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := DedicatedHostID(v.Input) if err != nil { @@ -86,19 +106,23 @@ func TestDedicatedHostID(t *testing.T) { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.HostGroup != v.Expect.HostGroup { - t.Fatalf("Expected %q but got %q for HostGroup", v.Expect.HostGroup, actual.HostGroup) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) } - - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.HostGroupName != v.Expected.HostGroupName { + t.Fatalf("Expected %q but got %q for HostGroupName", v.Expected.HostGroupName, actual.HostGroupName) + } + if actual.HostName != v.Expected.HostName { + t.Fatalf("Expected %q but got %q for HostName", v.Expected.HostName, actual.HostName) } } } diff --git a/azurerm/internal/services/compute/parse/disk_access.go b/azurerm/internal/services/compute/parse/disk_access.go new file mode 100644 index 000000000000..64c59b32d0af --- /dev/null +++ b/azurerm/internal/services/compute/parse/disk_access.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type DiskAccessId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewDiskAccessID(subscriptionId, resourceGroup, name string) DiskAccessId { + return DiskAccessId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id DiskAccessId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Disk Access", segmentsStr) +} + +func (id DiskAccessId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Compute/diskAccesses/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// DiskAccessID parses a DiskAccess ID into an DiskAccessId struct +func DiskAccessID(input string) (*DiskAccessId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := DiskAccessId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' 
element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("diskAccesses"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/compute/parse/disk_access_test.go b/azurerm/internal/services/compute/parse/disk_access_test.go new file mode 100644 index 000000000000..0d9708d6a498 --- /dev/null +++ b/azurerm/internal/services/compute/parse/disk_access_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = DiskAccessId{} + +func TestDiskAccessIDFormatter(t *testing.T) { + actual := NewDiskAccessID("12345678-1234-9876-4563-123456789012", "resGroup1", "diskAccess1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/diskAccesses/diskAccess1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestDiskAccessID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *DiskAccessId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/diskAccesses/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/diskAccesses/diskAccess1", + Expected: &DiskAccessId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "diskAccess1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/DISKACCESSES/DISKACCESS1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := DiskAccessID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/compute/parse/disk_encryption_set.go 
b/azurerm/internal/services/compute/parse/disk_encryption_set.go index 71edc8ca4321..3bf1ecebaeda 100644 --- a/azurerm/internal/services/compute/parse/disk_encryption_set.go +++ b/azurerm/internal/services/compute/parse/disk_encryption_set.go @@ -1,38 +1,63 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type DiskEncryptionSetId struct { - ResourceGroup string - Name string + SubscriptionId string + ResourceGroup string + Name string } -func NewDiskEncryptionSetId(resourceGroup, name string) DiskEncryptionSetId { +func NewDiskEncryptionSetID(subscriptionId, resourceGroup, name string) DiskEncryptionSetId { return DiskEncryptionSetId{ - ResourceGroup: resourceGroup, - Name: name, + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, } } -func (id DiskEncryptionSetId) ID(subscriptionId string) string { - return fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Compute/diskEncryptionSets/%s", subscriptionId, id.ResourceGroup, id.Name) +func (id DiskEncryptionSetId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Disk Encryption Set", segmentsStr) } +func (id DiskEncryptionSetId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Compute/diskEncryptionSets/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// DiskEncryptionSetID parses a DiskEncryptionSet ID into an DiskEncryptionSetId struct func DiskEncryptionSetID(input string) (*DiskEncryptionSetId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("unable to parse Disk Encryption Set ID %q: %+v", input, err) + return nil, err + } + + resourceId := DiskEncryptionSetId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - encryptionSetId := DiskEncryptionSetId{ - ResourceGroup: id.ResourceGroup, + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if encryptionSetId.Name, err = id.PopSegment("diskEncryptionSets"); err != nil { + if resourceId.Name, err = id.PopSegment("diskEncryptionSets"); err != nil { return nil, err } @@ -40,5 +65,5 @@ func DiskEncryptionSetID(input string) (*DiskEncryptionSetId, error) { return nil, err } - return &encryptionSetId, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/compute/parse/disk_encryption_set_test.go b/azurerm/internal/services/compute/parse/disk_encryption_set_test.go index 3f4fbfe684c4..929244a6cba3 100644 --- a/azurerm/internal/services/compute/parse/disk_encryption_set_test.go +++ b/azurerm/internal/services/compute/parse/disk_encryption_set_test.go @@ -1,5 +1,7 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" @@ -9,9 +11,8 @@ import ( var _ resourceid.Formatter = DiskEncryptionSetId{} func TestDiskEncryptionSetIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - actual := NewDiskEncryptionSetId("group1", "set1").ID(subscriptionId) - expected := 
"/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Compute/diskEncryptionSets/set1" + actual := NewDiskEncryptionSetID("12345678-1234-9876-4563-123456789012", "resGroup1", "set1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/diskEncryptionSets/set1" if actual != expected { t.Fatalf("Expected %q but got %q", expected, actual) } @@ -19,54 +20,72 @@ func TestDiskEncryptionSetIDFormatter(t *testing.T) { func TestDiskEncryptionSetID(t *testing.T) { testData := []struct { - Name string - Input string - Error bool - Expect *DiskEncryptionSetId + Input string + Error bool + Expected *DiskEncryptionSetId }{ + { - Name: "Empty", + // empty Input: "", Error: true, }, + { - Name: "No Resource Group Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + // missing SubscriptionId + Input: "/", Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + // missing value for SubscriptionId + Input: "/subscriptions/", Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", Error: true, }, + { - Name: "Missing Disk Encryption Set Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/diskEncryptionSets/", + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", Error: true, }, + { - Name: "Disk Encryption Set ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/diskEncryptionSets/encryptionSet1", - Error: false, - Expect: &DiskEncryptionSetId{ - ResourceGroup: "resGroup1", - Name: "encryptionSet1", + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/diskEncryptionSets/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/diskEncryptionSets/set1", + Expected: &DiskEncryptionSetId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "set1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/DiskEncryptionSets/encryptionSet1", + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/DISKENCRYPTIONSETS/SET1", Error: true, }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := DiskEncryptionSetID(v.Input) if err != nil { @@ -74,15 +93,20 @@ func TestDiskEncryptionSetID(t *testing.T) { continue } - t.Fatalf("Expected a value but got an error: %+v", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q", v.Expect.Name, actual.Name) + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.ResourceGroup != 
v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q", v.Expect.ResourceGroup, actual.ResourceGroup) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) } } } diff --git a/azurerm/internal/services/compute/parse/image.go b/azurerm/internal/services/compute/parse/image.go index 3cf767717464..0decb14c6833 100644 --- a/azurerm/internal/services/compute/parse/image.go +++ b/azurerm/internal/services/compute/parse/image.go @@ -1,38 +1,63 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type ImageId struct { - ResourceGroup string - Name string + SubscriptionId string + ResourceGroup string + Name string } -func NewImageId(resourceGroup, name string) ImageId { +func NewImageID(subscriptionId, resourceGroup, name string) ImageId { return ImageId{ - ResourceGroup: resourceGroup, - Name: name, + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, } } -func (id ImageId) ID(subscriptionId string) string { - return fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Compute/images/%s", subscriptionId, id.ResourceGroup, id.Name) +func (id ImageId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Image", segmentsStr) } +func (id ImageId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Compute/images/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// ImageID parses a Image ID into an ImageId struct func ImageID(input string) (*ImageId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("unable to parse Image ID %q: %+v", input, err) + return nil, err + } + + resourceId := ImageId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - set := ImageId{ - ResourceGroup: id.ResourceGroup, + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if set.Name, err = id.PopSegment("images"); err != nil { + if resourceId.Name, err = id.PopSegment("images"); err != nil { return nil, err } @@ -40,5 +65,5 @@ func ImageID(input string) (*ImageId, error) { return nil, err } - return &set, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/compute/parse/image_test.go b/azurerm/internal/services/compute/parse/image_test.go index 86f6232bbd94..dfcd9691b97f 100644 --- a/azurerm/internal/services/compute/parse/image_test.go +++ b/azurerm/internal/services/compute/parse/image_test.go @@ -1,5 +1,7 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" @@ -9,9 +11,8 @@ import ( var _ resourceid.Formatter = ImageId{} func TestImageIDFormatter(t *testing.T) { - 
subscriptionId := "12345678-1234-5678-1234-123456789012" - actual := NewImageId("group1", "image1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Compute/images/image1" + actual := NewImageID("12345678-1234-9876-4563-123456789012", "resGroup1", "image1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/images/image1" if actual != expected { t.Fatalf("Expected %q but got %q", expected, actual) } @@ -19,54 +20,72 @@ func TestImageIDFormatter(t *testing.T) { func TestImageID(t *testing.T) { testData := []struct { - Name string - Input string - Error bool - Expect *ImageId + Input string + Error bool + Expected *ImageId }{ + { - Name: "Empty", + // empty Input: "", Error: true, }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + // missing SubscriptionId + Input: "/", Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + // missing value for SubscriptionId + Input: "/subscriptions/", Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", Error: true, }, + { - Name: "Missing Image Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/images/", + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", Error: true, }, + { - Name: "Image ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/images/image1", - Error: false, - Expect: &ImageId{ - ResourceGroup: "resGroup1", - Name: "image1", + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/images/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/images/image1", + Expected: &ImageId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "image1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/Images/image1", + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/IMAGES/IMAGE1", Error: true, }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := ImageID(v.Input) if err != nil { @@ -74,15 +93,20 @@ func TestImageID(t *testing.T) { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, 
actual.ResourceGroup) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) } } } diff --git a/azurerm/internal/services/compute/parse/managed_disk.go b/azurerm/internal/services/compute/parse/managed_disk.go index ec5d64d46dd3..e45f4264acea 100644 --- a/azurerm/internal/services/compute/parse/managed_disk.go +++ b/azurerm/internal/services/compute/parse/managed_disk.go @@ -1,38 +1,63 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type ManagedDiskId struct { - ResourceGroup string - Name string + SubscriptionId string + ResourceGroup string + DiskName string } -func NewManagedDiskId(resourceGroup, name string) ManagedDiskId { +func NewManagedDiskID(subscriptionId, resourceGroup, diskName string) ManagedDiskId { return ManagedDiskId{ - ResourceGroup: resourceGroup, - Name: name, + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + DiskName: diskName, } } -func (id ManagedDiskId) ID(subscriptionId string) string { - return fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Compute/disks/%s", subscriptionId, id.ResourceGroup, id.Name) +func (id ManagedDiskId) String() string { + segments := []string{ + fmt.Sprintf("Disk Name %q", id.DiskName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Managed Disk", segmentsStr) } +func (id ManagedDiskId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Compute/disks/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.DiskName) +} + +// ManagedDiskID parses a ManagedDisk ID into an ManagedDiskId struct func ManagedDiskID(input string) (*ManagedDiskId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("unable to parse Managed Disk ID %q: %+v", input, err) + return nil, err + } + + resourceId := ManagedDiskId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - disk := ManagedDiskId{ - ResourceGroup: id.ResourceGroup, + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if disk.Name, err = id.PopSegment("disks"); err != nil { + if resourceId.DiskName, err = id.PopSegment("disks"); err != nil { return nil, err } @@ -40,5 +65,5 @@ func ManagedDiskID(input string) (*ManagedDiskId, error) { return nil, err } - return &disk, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/compute/parse/managed_disk_test.go b/azurerm/internal/services/compute/parse/managed_disk_test.go index 4ca30d53caac..1759875eb659 100644 --- a/azurerm/internal/services/compute/parse/managed_disk_test.go +++ b/azurerm/internal/services/compute/parse/managed_disk_test.go @@ -1,5 +1,7 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import 
( "testing" @@ -9,9 +11,8 @@ import ( var _ resourceid.Formatter = ManagedDiskId{} func TestManagedDiskIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - actual := NewManagedDiskId("group1", "disk1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Compute/disks/disk1" + actual := NewManagedDiskID("12345678-1234-9876-4563-123456789012", "resGroup1", "disk1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/disks/disk1" if actual != expected { t.Fatalf("Expected %q but got %q", expected, actual) } @@ -19,54 +20,72 @@ func TestManagedDiskIDFormatter(t *testing.T) { func TestManagedDiskID(t *testing.T) { testData := []struct { - Name string - Input string - Error bool - Expect *ManagedDiskId + Input string + Error bool + Expected *ManagedDiskId }{ + { - Name: "Empty", + // empty Input: "", Error: true, }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + // missing SubscriptionId + Input: "/", Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + // missing value for SubscriptionId + Input: "/subscriptions/", Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", Error: true, }, + { - Name: "Missing Disk Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/disks/", + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", Error: true, }, + { - Name: "Managed Disk ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/disks/disk1", - Error: false, - Expect: &ManagedDiskId{ - ResourceGroup: "resGroup1", - Name: "disk1", + // missing DiskName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", + Error: true, + }, + + { + // missing value for DiskName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/disks/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/disks/disk1", + Expected: &ManagedDiskId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + DiskName: "disk1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/Disks/disk1", + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/DISKS/DISK1", Error: true, }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := ManagedDiskID(v.Input) if err != nil { @@ -74,15 +93,20 @@ func TestManagedDiskID(t *testing.T) { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) + if 
v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.DiskName != v.Expected.DiskName { + t.Fatalf("Expected %q but got %q for DiskName", v.Expected.DiskName, actual.DiskName) } } } diff --git a/azurerm/internal/services/compute/parse/proximity_placement_group.go b/azurerm/internal/services/compute/parse/proximity_placement_group.go index 33bef947cc28..f6794f0178b6 100644 --- a/azurerm/internal/services/compute/parse/proximity_placement_group.go +++ b/azurerm/internal/services/compute/parse/proximity_placement_group.go @@ -1,38 +1,63 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type ProximityPlacementGroupId struct { - ResourceGroup string - Name string + SubscriptionId string + ResourceGroup string + Name string } -func NewProximityPlacementGroupId(resourceGroup, name string) ProximityPlacementGroupId { +func NewProximityPlacementGroupID(subscriptionId, resourceGroup, name string) ProximityPlacementGroupId { return ProximityPlacementGroupId{ - ResourceGroup: resourceGroup, - Name: name, + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, } } -func (id ProximityPlacementGroupId) ID(subscriptionId string) string { - return fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Compute/proximityPlacementGroups/%s", subscriptionId, id.ResourceGroup, id.Name) +func (id ProximityPlacementGroupId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Proximity Placement Group", segmentsStr) } +func (id ProximityPlacementGroupId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Compute/proximityPlacementGroups/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// ProximityPlacementGroupID parses a ProximityPlacementGroup ID into an ProximityPlacementGroupId struct func ProximityPlacementGroupID(input string) (*ProximityPlacementGroupId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("unable to parse Proximity Placement Group ID %q: %+v", input, err) + return nil, err + } + + resourceId := ProximityPlacementGroupId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - server := ProximityPlacementGroupId{ - ResourceGroup: id.ResourceGroup, + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if server.Name, err = id.PopSegment("proximityPlacementGroups"); err != nil { + if resourceId.Name, err = id.PopSegment("proximityPlacementGroups"); err != nil { return nil, err } @@ -40,5 +65,5 @@ func 
ProximityPlacementGroupID(input string) (*ProximityPlacementGroupId, error) return nil, err } - return &server, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/compute/parse/proximity_placement_group_test.go b/azurerm/internal/services/compute/parse/proximity_placement_group_test.go index 51650734fd22..1cbdee93fe4f 100644 --- a/azurerm/internal/services/compute/parse/proximity_placement_group_test.go +++ b/azurerm/internal/services/compute/parse/proximity_placement_group_test.go @@ -1,5 +1,7 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" @@ -9,9 +11,8 @@ import ( var _ resourceid.Formatter = ProximityPlacementGroupId{} func TestProximityPlacementGroupIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - actual := NewProximityPlacementGroupId("group1", "ppg1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Compute/proximityPlacementGroups/ppg1" + actual := NewProximityPlacementGroupID("12345678-1234-9876-4563-123456789012", "resGroup1", "group1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/proximityPlacementGroups/group1" if actual != expected { t.Fatalf("Expected %q but got %q", expected, actual) } @@ -19,54 +20,72 @@ func TestProximityPlacementGroupIDFormatter(t *testing.T) { func TestProximityPlacementGroupID(t *testing.T) { testData := []struct { - Name string - Input string - Error bool - Expect *ProximityPlacementGroupId + Input string + Error bool + Expected *ProximityPlacementGroupId }{ + { - Name: "Empty", + // empty Input: "", Error: true, }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + // missing SubscriptionId + Input: "/", Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + // missing value for SubscriptionId + Input: "/subscriptions/", Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", Error: true, }, + { - Name: "Missing Proximity Placement Group Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/proximityPlacementGroups/", + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", Error: true, }, + { - Name: "Proximity Placement Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/proximityPlacementGroups/group1", - Error: false, - Expect: &ProximityPlacementGroupId{ - ResourceGroup: "resGroup1", - Name: "group1", + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/proximityPlacementGroups/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/proximityPlacementGroups/group1", + Expected: &ProximityPlacementGroupId{ + 
SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "group1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/ProximityPlacementGroups/group1", + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/PROXIMITYPLACEMENTGROUPS/GROUP1", Error: true, }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := ProximityPlacementGroupID(v.Input) if err != nil { @@ -74,15 +93,20 @@ func TestProximityPlacementGroupID(t *testing.T) { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) } } } diff --git a/azurerm/internal/services/compute/parse/shared_image.go b/azurerm/internal/services/compute/parse/shared_image.go index b7315c1b05a7..fbe274e2f472 100644 --- a/azurerm/internal/services/compute/parse/shared_image.go +++ b/azurerm/internal/services/compute/parse/shared_image.go @@ -1,45 +1,69 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type SharedImageId struct { - ResourceGroup string - Gallery string - Name string + SubscriptionId string + ResourceGroup string + GalleryName string + ImageName string } -func NewSharedImageId(id SharedImageGalleryId, name string) SharedImageId { +func NewSharedImageID(subscriptionId, resourceGroup, galleryName, imageName string) SharedImageId { return SharedImageId{ - ResourceGroup: id.ResourceGroup, - Gallery: id.Name, - Name: name, + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + GalleryName: galleryName, + ImageName: imageName, } } -func (id SharedImageId) ID(subscriptionId string) string { - base := NewSharedImageGalleryId(id.ResourceGroup, id.Gallery).ID(subscriptionId) - return fmt.Sprintf("%s/images/%s", base, id.Name) +func (id SharedImageId) String() string { + segments := []string{ + fmt.Sprintf("Image Name %q", id.ImageName), + fmt.Sprintf("Gallery Name %q", id.GalleryName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Shared Image", segmentsStr) } +func (id SharedImageId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Compute/galleries/%s/images/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.GalleryName, id.ImageName) +} + +// SharedImageID parses a SharedImage ID into an SharedImageId 
struct func SharedImageID(input string) (*SharedImageId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("unable to parse Shared Image ID %q: %+v", input, err) + return nil, err } - image := SharedImageId{ - ResourceGroup: id.ResourceGroup, + resourceId := SharedImageId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, } - if image.Gallery, err = id.PopSegment("galleries"); err != nil { - return nil, err + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if image.Name, err = id.PopSegment("images"); err != nil { + if resourceId.GalleryName, err = id.PopSegment("galleries"); err != nil { + return nil, err + } + if resourceId.ImageName, err = id.PopSegment("images"); err != nil { return nil, err } @@ -47,5 +71,5 @@ func SharedImageID(input string) (*SharedImageId, error) { return nil, err } - return &image, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/compute/parse/shared_image_gallery.go b/azurerm/internal/services/compute/parse/shared_image_gallery.go index 43c2d8eda002..8e93ed1e08e0 100644 --- a/azurerm/internal/services/compute/parse/shared_image_gallery.go +++ b/azurerm/internal/services/compute/parse/shared_image_gallery.go @@ -1,38 +1,63 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type SharedImageGalleryId struct { - ResourceGroup string - Name string + SubscriptionId string + ResourceGroup string + GalleryName string } -func NewSharedImageGalleryId(resourceGroup, name string) SharedImageGalleryId { +func NewSharedImageGalleryID(subscriptionId, resourceGroup, galleryName string) SharedImageGalleryId { return SharedImageGalleryId{ - ResourceGroup: resourceGroup, - Name: name, + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + GalleryName: galleryName, } } -func (id SharedImageGalleryId) ID(subscriptionId string) string { - return fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Compute/galleries/%s", subscriptionId, id.ResourceGroup, id.Name) +func (id SharedImageGalleryId) String() string { + segments := []string{ + fmt.Sprintf("Gallery Name %q", id.GalleryName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Shared Image Gallery", segmentsStr) } +func (id SharedImageGalleryId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Compute/galleries/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.GalleryName) +} + +// SharedImageGalleryID parses a SharedImageGallery ID into an SharedImageGalleryId struct func SharedImageGalleryID(input string) (*SharedImageGalleryId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("unable to parse Shared Image Gallery ID %q: %+v", input, err) + return nil, err + } + + resourceId := SharedImageGalleryId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - gallery := SharedImageGalleryId{ - ResourceGroup: id.ResourceGroup, + if 
resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if gallery.Name, err = id.PopSegment("galleries"); err != nil { + if resourceId.GalleryName, err = id.PopSegment("galleries"); err != nil { return nil, err } @@ -40,5 +65,5 @@ func SharedImageGalleryID(input string) (*SharedImageGalleryId, error) { return nil, err } - return &gallery, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/compute/parse/shared_image_gallery_test.go b/azurerm/internal/services/compute/parse/shared_image_gallery_test.go index fd2c7086e1ab..bda1006d55d6 100644 --- a/azurerm/internal/services/compute/parse/shared_image_gallery_test.go +++ b/azurerm/internal/services/compute/parse/shared_image_gallery_test.go @@ -1,5 +1,7 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" @@ -9,9 +11,8 @@ import ( var _ resourceid.Formatter = SharedImageGalleryId{} func TestSharedImageGalleryIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - actual := NewSharedImageGalleryId("group1", "gallery1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Compute/galleries/gallery1" + actual := NewSharedImageGalleryID("12345678-1234-9876-4563-123456789012", "resGroup1", "gallery1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/gallery1" if actual != expected { t.Fatalf("Expected %q but got %q", expected, actual) } @@ -19,59 +20,72 @@ func TestSharedImageGalleryIDFormatter(t *testing.T) { func TestSharedImageGalleryID(t *testing.T) { testData := []struct { - Name string - Input string - Error bool - Expect *SharedImageGalleryId + Input string + Error bool + Expected *SharedImageGalleryId }{ + { - Name: "Empty", + // empty Input: "", Error: true, }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + // missing value for SubscriptionId + Input: "/subscriptions/", Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", Error: true, }, + { - Name: "Missing galleries segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/", + // missing GalleryName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", Error: true, }, + { - Name: "Missing gallery Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/", + // missing value for GalleryName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/", Error: true, }, + { - Name: "Shared Image Gallery ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.Compute/galleries/gallery1", - 
Error: false, - Expect: &SharedImageGalleryId{ - ResourceGroup: "mygroup1", - Name: "gallery1", + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/gallery1", + Expected: &SharedImageGalleryId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + GalleryName: "gallery1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.Compute/Galleries/gallery1", + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/GALLERIES/GALLERY1", Error: true, }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := SharedImageGalleryID(v.Input) if err != nil { @@ -79,15 +93,20 @@ func TestSharedImageGalleryID(t *testing.T) { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.GalleryName != v.Expected.GalleryName { + t.Fatalf("Expected %q but got %q for GalleryName", v.Expected.GalleryName, actual.GalleryName) } } } diff --git a/azurerm/internal/services/compute/parse/shared_image_test.go b/azurerm/internal/services/compute/parse/shared_image_test.go index 186dc9251557..52ba49edc09d 100644 --- a/azurerm/internal/services/compute/parse/shared_image_test.go +++ b/azurerm/internal/services/compute/parse/shared_image_test.go @@ -1,5 +1,7 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" @@ -9,10 +11,8 @@ import ( var _ resourceid.Formatter = SharedImageId{} func TestSharedImageIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - galleryId := NewSharedImageGalleryId("group1", "gallery1") - actual := NewSharedImageId(galleryId, "image1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Compute/galleries/gallery1/images/image1" + actual := NewSharedImageID("12345678-1234-9876-4563-123456789012", "resGroup1", "gallery1", "image1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/gallery1/images/image1" if actual != expected { t.Fatalf("Expected %q but got %q", expected, actual) } @@ -20,60 +20,85 @@ func TestSharedImageIDFormatter(t *testing.T) { func TestSharedImageID(t *testing.T) { testData := []struct { - Name string - Input string - Error bool - Expect *SharedImageId + Input string + Error bool + Expected *SharedImageId }{ + { - Name: "Empty", + // empty Input: "", Error: true, }, + { - Name: "No Resource Groups Segment", - Input: 
"/subscriptions/00000000-0000-0000-0000-000000000000", + // missing SubscriptionId + Input: "/", Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + // missing value for SubscriptionId + Input: "/subscriptions/", Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", Error: true, }, + { - Name: "Missing galleries segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/images/image1", + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", Error: true, }, + { - Name: "Missing image Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/gallery1/images", + // missing GalleryName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", Error: true, }, + + { + // missing value for GalleryName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/", + Error: true, + }, + { - Name: "Image ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.Compute/galleries/gallery1/images/image1", - Error: false, - Expect: &SharedImageId{ - ResourceGroup: "mygroup1", - Gallery: "gallery1", - Name: "image1", + // missing ImageName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/gallery1/", + Error: true, + }, + + { + // missing value for ImageName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/gallery1/images/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/gallery1/images/image1", + Expected: &SharedImageId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + GalleryName: "gallery1", + ImageName: "image1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.Compute/galleries/gallery1/Images/image1", + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/GALLERIES/GALLERY1/IMAGES/IMAGE1", Error: true, }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := SharedImageID(v.Input) if err != nil { @@ -81,19 +106,23 @@ func TestSharedImageID(t *testing.T) { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.Gallery != v.Expect.Gallery { - t.Fatalf("Expected %q but got %q for Gallery", v.Expect.Gallery, actual.Gallery) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) } 
- - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.GalleryName != v.Expected.GalleryName { + t.Fatalf("Expected %q but got %q for GalleryName", v.Expected.GalleryName, actual.GalleryName) + } + if actual.ImageName != v.Expected.ImageName { + t.Fatalf("Expected %q but got %q for ImageName", v.Expected.ImageName, actual.ImageName) } } } diff --git a/azurerm/internal/services/compute/parse/shared_image_version.go b/azurerm/internal/services/compute/parse/shared_image_version.go index 380ae1c9f07c..476d7ce8d0b0 100644 --- a/azurerm/internal/services/compute/parse/shared_image_version.go +++ b/azurerm/internal/services/compute/parse/shared_image_version.go @@ -1,52 +1,75 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type SharedImageVersionId struct { - ResourceGroup string - Gallery string - ImageName string - Version string + SubscriptionId string + ResourceGroup string + GalleryName string + ImageName string + VersionName string } -func NewSharedImageVersionId(id SharedImageId, name string) SharedImageVersionId { +func NewSharedImageVersionID(subscriptionId, resourceGroup, galleryName, imageName, versionName string) SharedImageVersionId { return SharedImageVersionId{ - ResourceGroup: id.ResourceGroup, - Gallery: id.Gallery, - ImageName: id.Name, - Version: name, + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + GalleryName: galleryName, + ImageName: imageName, + VersionName: versionName, + } +} + +func (id SharedImageVersionId) String() string { + segments := []string{ + fmt.Sprintf("Version Name %q", id.VersionName), + fmt.Sprintf("Image Name %q", id.ImageName), + fmt.Sprintf("Gallery Name %q", id.GalleryName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Shared Image Version", segmentsStr) } -func (id SharedImageVersionId) ID(subscriptionId string) string { - galleryId := NewSharedImageGalleryId(id.ResourceGroup, id.Gallery) - base := NewSharedImageId(galleryId, id.ImageName).ID(subscriptionId) - return fmt.Sprintf("%s/versions/%s", base, id.Version) +func (id SharedImageVersionId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Compute/galleries/%s/images/%s/versions/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.GalleryName, id.ImageName, id.VersionName) } +// SharedImageVersionID parses a SharedImageVersion ID into an SharedImageVersionId struct func SharedImageVersionID(input string) (*SharedImageVersionId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("unable to parse Shared Image Version ID %q: %+v", input, err) + return nil, err } - set := SharedImageVersionId{ - ResourceGroup: id.ResourceGroup, + resourceId := SharedImageVersionId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, } - if set.Gallery, err = id.PopSegment("galleries"); err != nil { - return nil, err + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - if set.ImageName, 
err = id.PopSegment("images"); err != nil { - return nil, err + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if set.Version, err = id.PopSegment("versions"); err != nil { + if resourceId.GalleryName, err = id.PopSegment("galleries"); err != nil { + return nil, err + } + if resourceId.ImageName, err = id.PopSegment("images"); err != nil { + return nil, err + } + if resourceId.VersionName, err = id.PopSegment("versions"); err != nil { return nil, err } @@ -54,5 +77,5 @@ func SharedImageVersionID(input string) (*SharedImageVersionId, error) { return nil, err } - return &set, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/compute/parse/shared_image_version_test.go b/azurerm/internal/services/compute/parse/shared_image_version_test.go index e87b78294285..044f341cfa28 100644 --- a/azurerm/internal/services/compute/parse/shared_image_version_test.go +++ b/azurerm/internal/services/compute/parse/shared_image_version_test.go @@ -1,5 +1,7 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" @@ -9,11 +11,8 @@ import ( var _ resourceid.Formatter = SharedImageVersionId{} func TestSharedImageVersionIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - galleryId := NewSharedImageGalleryId("group1", "gallery1") - imageId := NewSharedImageId(galleryId, "image1") - actual := NewSharedImageVersionId(imageId, "version1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Compute/galleries/gallery1/images/image1/versions/version1" + actual := NewSharedImageVersionID("12345678-1234-9876-4563-123456789012", "resGroup1", "gallery1", "image1", "version1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/gallery1/images/image1/versions/version1" if actual != expected { t.Fatalf("Expected %q but got %q", expected, actual) } @@ -21,61 +20,98 @@ func TestSharedImageVersionIDFormatter(t *testing.T) { func TestSharedImageVersionID(t *testing.T) { testData := []struct { - Name string - Input string - Error bool - Expect *SharedImageVersionId + Input string + Error bool + Expected *SharedImageVersionId }{ + { - Name: "Empty", + // empty Input: "", Error: true, }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + // missing GalleryName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + // missing value for GalleryName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/", Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", + // missing ImageName 
+ Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/gallery1/", Error: true, }, + { - Name: "Missing galleries segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/images/image1/versions/1.0.0", + // missing value for ImageName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/gallery1/images/", Error: true, }, + { - Name: "Missing image segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/gallery1/versions/1.0.0", + // missing VersionName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/gallery1/images/image1/", Error: true, }, + { - Name: "Image ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.Compute/galleries/gallery1/images/image1/versions/1.0.0", - Error: false, - Expect: &SharedImageVersionId{ - ResourceGroup: "mygroup1", - Gallery: "gallery1", - ImageName: "image1", - Version: "1.0.0", + // missing value for VersionName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/gallery1/images/image1/versions/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/gallery1/images/image1/versions/version1", + Expected: &SharedImageVersionId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + GalleryName: "gallery1", + ImageName: "image1", + VersionName: "version1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.Compute/galleries/gallery1/images/image1/Versions/1.0.0", + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/GALLERIES/GALLERY1/IMAGES/IMAGE1/VERSIONS/VERSION1", Error: true, }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := SharedImageVersionID(v.Input) if err != nil { @@ -83,15 +119,26 @@ func TestSharedImageVersionID(t *testing.T) { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.ImageName != v.Expect.ImageName { - t.Fatalf("Expected %q but got %q for Name", v.Expect.ImageName, actual.ImageName) + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.GalleryName != v.Expected.GalleryName { + t.Fatalf("Expected %q but got %q for GalleryName", v.Expected.GalleryName, actual.GalleryName) + } + if actual.ImageName != v.Expected.ImageName { + t.Fatalf("Expected %q but got %q for ImageName", 
v.Expected.ImageName, actual.ImageName) + } + if actual.VersionName != v.Expected.VersionName { + t.Fatalf("Expected %q but got %q for VersionName", v.Expected.VersionName, actual.VersionName) } } } diff --git a/azurerm/internal/services/compute/parse/virtual_machine.go b/azurerm/internal/services/compute/parse/virtual_machine.go index deddffc83946..e8b5e7488871 100644 --- a/azurerm/internal/services/compute/parse/virtual_machine.go +++ b/azurerm/internal/services/compute/parse/virtual_machine.go @@ -1,38 +1,63 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type VirtualMachineId struct { - ResourceGroup string - Name string + SubscriptionId string + ResourceGroup string + Name string } -func NewVirtualMachineId(resourceGroup, name string) VirtualMachineId { +func NewVirtualMachineID(subscriptionId, resourceGroup, name string) VirtualMachineId { return VirtualMachineId{ - ResourceGroup: resourceGroup, - Name: name, + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, } } -func (id VirtualMachineId) ID(subscriptionId string) string { - return fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Compute/virtualMachines/%s", subscriptionId, id.ResourceGroup, id.Name) +func (id VirtualMachineId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Virtual Machine", segmentsStr) } +func (id VirtualMachineId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Compute/virtualMachines/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// VirtualMachineID parses a VirtualMachine ID into an VirtualMachineId struct func VirtualMachineID(input string) (*VirtualMachineId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("unable to parse Virtual Machine ID %q: %+v", input, err) + return nil, err + } + + resourceId := VirtualMachineId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - vm := VirtualMachineId{ - ResourceGroup: id.ResourceGroup, + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if vm.Name, err = id.PopSegment("virtualMachines"); err != nil { + if resourceId.Name, err = id.PopSegment("virtualMachines"); err != nil { return nil, err } @@ -40,5 +65,5 @@ func VirtualMachineID(input string) (*VirtualMachineId, error) { return nil, err } - return &vm, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/compute/parse/virtual_machine_extension.go b/azurerm/internal/services/compute/parse/virtual_machine_extension.go index adf4697e4c8f..dda19ddefa76 100644 --- a/azurerm/internal/services/compute/parse/virtual_machine_extension.go +++ b/azurerm/internal/services/compute/parse/virtual_machine_extension.go @@ -1,47 +1,75 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type VirtualMachineExtensionId struct { - ResourceGroup string 
- Name string - VirtualMachine string + SubscriptionId string + ResourceGroup string + VirtualMachineName string + ExtensionName string } -func NewVirtualMachineExtensionId(id VirtualMachineId, name string) VirtualMachineExtensionId { +func NewVirtualMachineExtensionID(subscriptionId, resourceGroup, virtualMachineName, extensionName string) VirtualMachineExtensionId { return VirtualMachineExtensionId{ - ResourceGroup: id.ResourceGroup, - VirtualMachine: id.Name, - Name: name, + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + VirtualMachineName: virtualMachineName, + ExtensionName: extensionName, } } -func (id VirtualMachineExtensionId) ID(subscriptionId string) string { - base := NewVirtualMachineId(id.ResourceGroup, id.VirtualMachine).ID(subscriptionId) - return fmt.Sprintf("%s/extensions/%s", base, id.Name) +func (id VirtualMachineExtensionId) String() string { + segments := []string{ + fmt.Sprintf("Extension Name %q", id.ExtensionName), + fmt.Sprintf("Virtual Machine Name %q", id.VirtualMachineName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Virtual Machine Extension", segmentsStr) } +func (id VirtualMachineExtensionId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Compute/virtualMachines/%s/extensions/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.VirtualMachineName, id.ExtensionName) +} + +// VirtualMachineExtensionID parses a VirtualMachineExtension ID into an VirtualMachineExtensionId struct func VirtualMachineExtensionID(input string) (*VirtualMachineExtensionId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("unable to parse Virtual Machine Extension ID %q: %+v", input, err) + return nil, err } - virtualMachineExtension := VirtualMachineExtensionId{ - ResourceGroup: id.ResourceGroup, + resourceId := VirtualMachineExtensionId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, } - if virtualMachineExtension.VirtualMachine, err = id.PopSegment("virtualMachines"); err != nil { + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.VirtualMachineName, err = id.PopSegment("virtualMachines"); err != nil { + return nil, err + } + if resourceId.ExtensionName, err = id.PopSegment("extensions"); err != nil { return nil, err } - if virtualMachineExtension.Name, err = id.PopSegment("extensions"); err != nil { + if err := id.ValidateNoEmptySegments(input); err != nil { return nil, err } - return &virtualMachineExtension, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/compute/parse/virtual_machine_extension_test.go b/azurerm/internal/services/compute/parse/virtual_machine_extension_test.go index a442a655dd78..f3dd56005a4a 100644 --- a/azurerm/internal/services/compute/parse/virtual_machine_extension_test.go +++ b/azurerm/internal/services/compute/parse/virtual_machine_extension_test.go @@ -1,5 +1,7 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" @@ -9,69 +11,118 @@ import ( var _ resourceid.Formatter = VirtualMachineExtensionId{} func TestVirtualMachineExtensionIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - vmId := 
NewVirtualMachineId("group1", "vm1") - actual := NewVirtualMachineExtensionId(vmId, "extension1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Compute/virtualMachines/vm1/extensions/extension1" + actual := NewVirtualMachineExtensionID("12345678-1234-9876-4563-123456789012", "resGroup1", "machine1", "extension1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachines/machine1/extensions/extension1" if actual != expected { t.Fatalf("Expected %q but got %q", expected, actual) } } -func TestParseVirtualMachineExtensionID(t *testing.T) { +func TestVirtualMachineExtensionID(t *testing.T) { testData := []struct { - Name string Input string + Error bool Expected *VirtualMachineExtensionId }{ + + { + // empty + Input: "", + Error: true, + }, + { - Name: "Empty", - Input: "", - Expected: nil, + // missing SubscriptionId + Input: "/", + Error: true, }, + { - Name: "No virtual machine segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myGroup1/", - Expected: nil, + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, }, + { - Name: "No extension name", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myGroup1/providers/microsoft.compute/virtualMachines/machine1/extension/", - Expected: nil, + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, }, + { - Name: "Case incorrect in path element", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myGroup1/providers/microsoft.compute/virtualMachines/machine1/Extensions/extName", - Expected: nil, + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, }, + + { + // missing VirtualMachineName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", + Error: true, + }, + + { + // missing value for VirtualMachineName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachines/", + Error: true, + }, + { - Name: "Valid", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myGroup1/providers/Microsoft.Compute/virtualMachines/machine1/extensions/extName", + // missing ExtensionName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachines/machine1/", + Error: true, + }, + + { + // missing value for ExtensionName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachines/machine1/extensions/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachines/machine1/extensions/extension1", Expected: &VirtualMachineExtensionId{ - ResourceGroup: "myGroup1", - Name: "extName", - VirtualMachine: "machine1", + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + VirtualMachineName: "machine1", + ExtensionName: "extension1", }, }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/VIRTUALMACHINES/MACHINE1/EXTENSIONS/EXTENSION1", + Error: true, + }, 
} + for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := VirtualMachineExtensionID(v.Input) if err != nil { - if v.Expected == nil { + if v.Error { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + if v.Error { + t.Fatal("Expect an error but didn't get one") } + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } if actual.ResourceGroup != v.Expected.ResourceGroup { t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) } + if actual.VirtualMachineName != v.Expected.VirtualMachineName { + t.Fatalf("Expected %q but got %q for VirtualMachineName", v.Expected.VirtualMachineName, actual.VirtualMachineName) + } + if actual.ExtensionName != v.Expected.ExtensionName { + t.Fatalf("Expected %q but got %q for ExtensionName", v.Expected.ExtensionName, actual.ExtensionName) + } } } diff --git a/azurerm/internal/services/compute/parse/virtual_machine_scale_set.go b/azurerm/internal/services/compute/parse/virtual_machine_scale_set.go index 753b03fd2471..52e3f193be20 100644 --- a/azurerm/internal/services/compute/parse/virtual_machine_scale_set.go +++ b/azurerm/internal/services/compute/parse/virtual_machine_scale_set.go @@ -1,38 +1,63 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type VirtualMachineScaleSetId struct { - ResourceGroup string - Name string + SubscriptionId string + ResourceGroup string + Name string } -func NewVirtualMachineScaleSetId(resourceGroup, name string) VirtualMachineScaleSetId { +func NewVirtualMachineScaleSetID(subscriptionId, resourceGroup, name string) VirtualMachineScaleSetId { return VirtualMachineScaleSetId{ - ResourceGroup: resourceGroup, - Name: name, + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, } } -func (id VirtualMachineScaleSetId) ID(subscriptionId string) string { - return fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Compute/virtualMachineScaleSets/%s", subscriptionId, id.ResourceGroup, id.Name) +func (id VirtualMachineScaleSetId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Virtual Machine Scale Set", segmentsStr) } +func (id VirtualMachineScaleSetId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Compute/virtualMachineScaleSets/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// VirtualMachineScaleSetID parses a VirtualMachineScaleSet ID into an VirtualMachineScaleSetId struct func VirtualMachineScaleSetID(input string) (*VirtualMachineScaleSetId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("unable to parse Virtual Machine Scale Set ID %q: %+v", input, err) + return nil, err + } + + resourceId := VirtualMachineScaleSetId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId 
== "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - vmScaleSet := VirtualMachineScaleSetId{ - ResourceGroup: id.ResourceGroup, + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if vmScaleSet.Name, err = id.PopSegment("virtualMachineScaleSets"); err != nil { + if resourceId.Name, err = id.PopSegment("virtualMachineScaleSets"); err != nil { return nil, err } @@ -40,5 +65,5 @@ func VirtualMachineScaleSetID(input string) (*VirtualMachineScaleSetId, error) { return nil, err } - return &vmScaleSet, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/compute/parse/virtual_machine_scale_set_extension.go b/azurerm/internal/services/compute/parse/virtual_machine_scale_set_extension.go index 6d18ba867192..58c95a080a4d 100644 --- a/azurerm/internal/services/compute/parse/virtual_machine_scale_set_extension.go +++ b/azurerm/internal/services/compute/parse/virtual_machine_scale_set_extension.go @@ -1,45 +1,69 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type VirtualMachineScaleSetExtensionId struct { + SubscriptionId string ResourceGroup string VirtualMachineScaleSetName string - Name string + ExtensionName string } -func NewVirtualMachineScaleSetExtensionId(id VirtualMachineScaleSetId, name string) VirtualMachineScaleSetExtensionId { +func NewVirtualMachineScaleSetExtensionID(subscriptionId, resourceGroup, virtualMachineScaleSetName, extensionName string) VirtualMachineScaleSetExtensionId { return VirtualMachineScaleSetExtensionId{ - ResourceGroup: id.ResourceGroup, - VirtualMachineScaleSetName: id.Name, - Name: name, + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + VirtualMachineScaleSetName: virtualMachineScaleSetName, + ExtensionName: extensionName, } } -func (id VirtualMachineScaleSetExtensionId) ID(subscriptionId string) string { - base := NewVirtualMachineScaleSetId(id.ResourceGroup, id.VirtualMachineScaleSetName).ID(subscriptionId) - return fmt.Sprintf("%s/extensions/%s", base, id.Name) +func (id VirtualMachineScaleSetExtensionId) String() string { + segments := []string{ + fmt.Sprintf("Extension Name %q", id.ExtensionName), + fmt.Sprintf("Virtual Machine Scale Set Name %q", id.VirtualMachineScaleSetName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Virtual Machine Scale Set Extension", segmentsStr) } +func (id VirtualMachineScaleSetExtensionId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Compute/virtualMachineScaleSets/%s/extensions/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.VirtualMachineScaleSetName, id.ExtensionName) +} + +// VirtualMachineScaleSetExtensionID parses a VirtualMachineScaleSetExtension ID into an VirtualMachineScaleSetExtensionId struct func VirtualMachineScaleSetExtensionID(input string) (*VirtualMachineScaleSetExtensionId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("unable to parse Virtual Machine Scale Set Extension ID %q: %+v", input, err) + return nil, err } - extension := VirtualMachineScaleSetExtensionId{ - ResourceGroup: id.ResourceGroup, + resourceId := VirtualMachineScaleSetExtensionId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: 
id.ResourceGroup, } - if extension.VirtualMachineScaleSetName, err = id.PopSegment("virtualMachineScaleSets"); err != nil { - return nil, err + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if extension.Name, err = id.PopSegment("extensions"); err != nil { + if resourceId.VirtualMachineScaleSetName, err = id.PopSegment("virtualMachineScaleSets"); err != nil { + return nil, err + } + if resourceId.ExtensionName, err = id.PopSegment("extensions"); err != nil { return nil, err } @@ -47,5 +71,5 @@ func VirtualMachineScaleSetExtensionID(input string) (*VirtualMachineScaleSetExt return nil, err } - return &extension, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/compute/parse/virtual_machine_scale_set_extension_test.go b/azurerm/internal/services/compute/parse/virtual_machine_scale_set_extension_test.go index e7dde2028ad9..fda96ca7d33e 100644 --- a/azurerm/internal/services/compute/parse/virtual_machine_scale_set_extension_test.go +++ b/azurerm/internal/services/compute/parse/virtual_machine_scale_set_extension_test.go @@ -1,5 +1,7 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" @@ -9,75 +11,118 @@ import ( var _ resourceid.Formatter = VirtualMachineScaleSetExtensionId{} func TestVirtualMachineScaleSetExtensionIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - vmssId := NewVirtualMachineScaleSetId("group1", "vmss1") - actual := NewVirtualMachineScaleSetExtensionId(vmssId, "extension1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Compute/virtualMachineScaleSets/vmss1/extensions/extension1" + actual := NewVirtualMachineScaleSetExtensionID("12345678-1234-9876-4563-123456789012", "resGroup1", "scaleSet1", "extension1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachineScaleSets/scaleSet1/extensions/extension1" if actual != expected { t.Fatalf("Expected %q but got %q", expected, actual) } } -func TestParseVirtualMachineScaleSetExtensionID(t *testing.T) { +func TestVirtualMachineScaleSetExtensionID(t *testing.T) { testData := []struct { - Name string Input string + Error bool Expected *VirtualMachineScaleSetExtensionId }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + { - Name: "Empty", - Input: "", - Expected: nil, + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, }, + { - Name: "No Virtual Machine Scale Set Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo", - Expected: nil, + // missing VirtualMachineScaleSetName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", + Error: true, }, + { - Name: "No Virtual Machine Scale Set Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/virtualMachineScaleSets/", - Expected: 
nil, + // missing value for VirtualMachineScaleSetName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachineScaleSets/", + Error: true, }, + { - Name: "No Extensions Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/virtualMachineScaleSets/machine1", - Expected: nil, + // missing ExtensionName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachineScaleSets/scaleSet1/", + Error: true, }, + { - Name: "No Extensions Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/virtualMachineScaleSets/machine1/extensions/", - Expected: nil, + // missing value for ExtensionName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachineScaleSets/scaleSet1/extensions/", + Error: true, }, + { - Name: "Completed", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/virtualMachineScaleSets/machine1/extensions/extension1", + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachineScaleSets/scaleSet1/extensions/extension1", Expected: &VirtualMachineScaleSetExtensionId{ - Name: "extension1", - VirtualMachineScaleSetName: "machine1", - ResourceGroup: "foo", + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + VirtualMachineScaleSetName: "scaleSet1", + ExtensionName: "extension1", }, }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/VIRTUALMACHINESCALESETS/SCALESET1/EXTENSIONS/EXTENSION1", + Error: true, + }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := VirtualMachineScaleSetExtensionID(v.Input) if err != nil { - if v.Expected == nil { + if v.Error { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + if v.Error { + t.Fatal("Expect an error but didn't get one") } + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } if actual.ResourceGroup != v.Expected.ResourceGroup { t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) } + if actual.VirtualMachineScaleSetName != v.Expected.VirtualMachineScaleSetName { + t.Fatalf("Expected %q but got %q for VirtualMachineScaleSetName", v.Expected.VirtualMachineScaleSetName, actual.VirtualMachineScaleSetName) + } + if actual.ExtensionName != v.Expected.ExtensionName { + t.Fatalf("Expected %q but got %q for ExtensionName", v.Expected.ExtensionName, actual.ExtensionName) + } } } diff --git a/azurerm/internal/services/compute/parse/virtual_machine_scale_set_test.go b/azurerm/internal/services/compute/parse/virtual_machine_scale_set_test.go new file mode 100644 index 000000000000..f4233c8b74f0 --- /dev/null +++ b/azurerm/internal/services/compute/parse/virtual_machine_scale_set_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + 
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = VirtualMachineScaleSetId{} + +func TestVirtualMachineScaleSetIDFormatter(t *testing.T) { + actual := NewVirtualMachineScaleSetID("12345678-1234-9876-4563-123456789012", "resGroup1", "scaleSet1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachineScaleSets/scaleSet1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestVirtualMachineScaleSetID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *VirtualMachineScaleSetId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachineScaleSets/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachineScaleSets/scaleSet1", + Expected: &VirtualMachineScaleSetId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "scaleSet1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/VIRTUALMACHINESCALESETS/SCALESET1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := VirtualMachineScaleSetID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/compute/parse/virtual_machine_test.go b/azurerm/internal/services/compute/parse/virtual_machine_test.go index aab444d5f0bc..16a44fafb578 100644 --- a/azurerm/internal/services/compute/parse/virtual_machine_test.go +++ b/azurerm/internal/services/compute/parse/virtual_machine_test.go @@ -1,5 +1,7 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" @@ -9,77 +11,102 @@ import ( var _ resourceid.Formatter = VirtualMachineId{} func TestVirtualMachineIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - actual := NewVirtualMachineId("group1", "vm1").ID(subscriptionId) - 
expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Compute/virtualMachines/vm1" + actual := NewVirtualMachineID("12345678-1234-9876-4563-123456789012", "resGroup1", "machine1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachines/machine1" if actual != expected { t.Fatalf("Expected %q but got %q", expected, actual) } } func TestVirtualMachineID(t *testing.T) { - var testData = []struct { - Name string + testData := []struct { Input string + Error bool Expected *VirtualMachineId }{ + { - Name: "Empty", - Input: "", - Expected: nil, + // empty + Input: "", + Error: true, }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, + // missing SubscriptionId + Input: "/", + Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", + Error: true, }, + { - Name: "Missing Stores Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachines/", - Expected: nil, + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachines/", + Error: true, }, + { - Name: "App Configuration ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachines/vm1", + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachines/machine1", Expected: &VirtualMachineId{ - Name: "vm1", - ResourceGroup: "resGroup1", + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "machine1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Compute/VirtualMachines/vm1", - Expected: nil, + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/VIRTUALMACHINES/MACHINE1", + Error: true, }, } + for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := VirtualMachineID(v.Input) if err != nil { - if v.Expected == nil { + if v.Error { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + if v.Error { + t.Fatal("Expect an error but didn't get one") } + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", 
v.Expected.SubscriptionId, actual.SubscriptionId) + } if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) } } } diff --git a/azurerm/internal/services/compute/parse/vritual_machine_scale_set_test.go b/azurerm/internal/services/compute/parse/vritual_machine_scale_set_test.go deleted file mode 100644 index 4645eebaeada..000000000000 --- a/azurerm/internal/services/compute/parse/vritual_machine_scale_set_test.go +++ /dev/null @@ -1,71 +0,0 @@ -package parse - -import ( - "testing" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" -) - -var _ resourceid.Formatter = VirtualMachineScaleSetId{} - -func TestVirtualMachineScaleSetIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - actual := NewVirtualMachineScaleSetId("group1", "vmss1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Compute/virtualMachineScaleSets/vmss1" - if actual != expected { - t.Fatalf("Expected %q but got %q", expected, actual) - } -} - -func TestVirtualMachineScaleSetID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *VirtualMachineScaleSetId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Virtual Machine Scale Set Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo", - Expected: nil, - }, - { - Name: "No Virtual Machine Scale Set Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/virtualMachineScaleSets/", - Expected: nil, - }, - { - Name: "Completed", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/virtualMachineScaleSets/example", - Expected: &VirtualMachineScaleSetId{ - Name: "example", - ResourceGroup: "foo", - }, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := VirtualMachineScaleSetID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/compute/registration.go b/azurerm/internal/services/compute/registration.go index 31c32398ac68..0a2dd816fe9b 100644 --- a/azurerm/internal/services/compute/registration.go +++ b/azurerm/internal/services/compute/registration.go @@ -26,7 +26,9 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource { "azurerm_dedicated_host_group": dataSourceArmDedicatedHostGroup(), "azurerm_disk_encryption_set": dataSourceArmDiskEncryptionSet(), "azurerm_managed_disk": dataSourceArmManagedDisk(), + "azurerm_disk_access": dataSourceArmDiskAccess(), "azurerm_image": dataSourceArmImage(), + "azurerm_images": dataSourceArmImages(), "azurerm_platform_image": dataSourceArmPlatformImage(), "azurerm_proximity_placement_group": dataSourceArmProximityPlacementGroup(), 
"azurerm_shared_image_gallery": dataSourceArmSharedImageGallery(), @@ -48,6 +50,7 @@ func (r Registration) SupportedResources() map[string]*schema.Resource { "azurerm_disk_encryption_set": resourceArmDiskEncryptionSet(), "azurerm_image": resourceArmImage(), "azurerm_managed_disk": resourceArmManagedDisk(), + "azurerm_disk_access": resourceArmDiskAccess(), "azurerm_marketplace_agreement": resourceArmMarketplaceAgreement(), "azurerm_proximity_placement_group": resourceArmProximityPlacementGroup(), "azurerm_shared_image_gallery": resourceArmSharedImageGallery(), diff --git a/azurerm/internal/services/compute/resourceids.go b/azurerm/internal/services/compute/resourceids.go new file mode 100644 index 000000000000..c18f1d589296 --- /dev/null +++ b/azurerm/internal/services/compute/resourceids.go @@ -0,0 +1,17 @@ +package compute + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=AvailabilitySet -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/availabilitySets/set1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=DedicatedHostGroup -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/hostGroups/hostGroup1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=DedicatedHost -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/hostGroups/hostGroup1/hosts/host1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=DiskEncryptionSet -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/diskEncryptionSets/set1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Image -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/images/image1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=ManagedDisk -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/disks/disk1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=ProximityPlacementGroup -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/proximityPlacementGroups/group1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=SharedImage -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/gallery1/images/image1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=SharedImageGallery -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/gallery1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=SharedImageVersion -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/gallery1/images/image1/versions/version1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=VirtualMachine -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachines/machine1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=VirtualMachineExtension 
-id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachines/machine1/extensions/extension1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=VirtualMachineScaleSet -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachineScaleSets/scaleSet1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=VirtualMachineScaleSetExtension -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachineScaleSets/scaleSet1/extensions/extension1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=DiskAccess -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/diskAccesses/diskAccess1 diff --git a/azurerm/internal/services/compute/shared_image_data_source.go b/azurerm/internal/services/compute/shared_image_data_source.go index ec013034cb90..e9ab92c19c45 100644 --- a/azurerm/internal/services/compute/shared_image_data_source.go +++ b/azurerm/internal/services/compute/shared_image_data_source.go @@ -99,6 +99,7 @@ func dataSourceArmSharedImage() *schema.Resource { }, } } + func dataSourceArmSharedImageRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Compute.GalleryImagesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) diff --git a/azurerm/internal/services/compute/shared_image_gallery_resource.go b/azurerm/internal/services/compute/shared_image_gallery_resource.go index 8f87afc20c27..36a637e2d88b 100644 --- a/azurerm/internal/services/compute/shared_image_gallery_resource.go +++ b/azurerm/internal/services/compute/shared_image_gallery_resource.go @@ -131,18 +131,18 @@ func resourceArmSharedImageGalleryRead(d *schema.ResourceData, meta interface{}) return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.Name) + resp, err := client.Get(ctx, id.ResourceGroup, id.GalleryName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[DEBUG] Shared Image Gallery %q (Resource Group %q) was not found - removing from state", id.Name, id.ResourceGroup) + log.Printf("[DEBUG] Shared Image Gallery %q (Resource Group %q) was not found - removing from state", id.GalleryName, id.ResourceGroup) d.SetId("") return nil } - return fmt.Errorf("Error making Read request on Shared Image Gallery %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("Error making Read request on Shared Image Gallery %q (Resource Group %q): %+v", id.GalleryName, id.ResourceGroup, err) } - d.Set("name", id.Name) + d.Set("name", id.GalleryName) d.Set("resource_group_name", id.ResourceGroup) if location := resp.Location; location != nil { d.Set("location", azure.NormalizeLocation(*location)) @@ -168,19 +168,19 @@ func resourceArmSharedImageGalleryDelete(d *schema.ResourceData, meta interface{ return err } - future, err := client.Delete(ctx, id.ResourceGroup, id.Name) + future, err := client.Delete(ctx, id.ResourceGroup, id.GalleryName) if err != nil { // deleted outside of Terraform if response.WasNotFound(future.Response()) { return nil } - return fmt.Errorf("Error deleting Shared Image Gallery %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("Error deleting Shared Image Gallery %q (Resource Group %q): %+v", id.GalleryName, id.ResourceGroup, err) } if err = 
future.WaitForCompletionRef(ctx, client.Client); err != nil { if !response.WasNotFound(future.Response()) { - return fmt.Errorf("Error waiting for the deletion of Shared Image Gallery %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("Error waiting for the deletion of Shared Image Gallery %q (Resource Group %q): %+v", id.GalleryName, id.ResourceGroup, err) } } diff --git a/azurerm/internal/services/compute/shared_image_resource.go b/azurerm/internal/services/compute/shared_image_resource.go index 1bee6e107a81..8a3628999c9c 100644 --- a/azurerm/internal/services/compute/shared_image_resource.go +++ b/azurerm/internal/services/compute/shared_image_resource.go @@ -239,19 +239,19 @@ func resourceArmSharedImageRead(d *schema.ResourceData, meta interface{}) error return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.Gallery, id.Name) + resp, err := client.Get(ctx, id.ResourceGroup, id.GalleryName, id.ImageName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[DEBUG] Shared Image %q (Gallery %q / Resource Group %q) was not found - removing from state", id.Name, id.Gallery, id.ResourceGroup) + log.Printf("[DEBUG] Shared Image %q (Gallery %q / Resource Group %q) was not found - removing from state", id.ImageName, id.GalleryName, id.ResourceGroup) d.SetId("") return nil } - return fmt.Errorf("Error making Read request on Shared Image %q (Gallery %q / Resource Group %q): %+v", id.Name, id.Gallery, id.ResourceGroup, err) + return fmt.Errorf("Error making Read request on Shared Image %q (Gallery %q / Resource Group %q): %+v", id.ImageName, id.GalleryName, id.ResourceGroup, err) } - d.Set("name", id.Name) - d.Set("gallery_name", id.Gallery) + d.Set("name", id.ImageName) + d.Set("gallery_name", id.GalleryName) d.Set("resource_group_name", id.ResourceGroup) if location := resp.Location; location != nil { d.Set("location", azure.NormalizeLocation(*location)) @@ -288,27 +288,27 @@ func resourceArmSharedImageDelete(d *schema.ResourceData, meta interface{}) erro return err } - future, err := client.Delete(ctx, id.ResourceGroup, id.Gallery, id.Name) + future, err := client.Delete(ctx, id.ResourceGroup, id.GalleryName, id.ImageName) if err != nil { - return fmt.Errorf("deleting Shared Image %q (Gallery %q / Resource Group %q): %+v", id.Name, id.Gallery, id.ResourceGroup, err) + return fmt.Errorf("deleting Shared Image %q (Gallery %q / Resource Group %q): %+v", id.ImageName, id.GalleryName, id.ResourceGroup, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("failed to wait for deleting Shared Image %q (Gallery %q / Resource Group %q): %+v", id.Name, id.Gallery, id.ResourceGroup, err) + return fmt.Errorf("failed to wait for deleting Shared Image %q (Gallery %q / Resource Group %q): %+v", id.ImageName, id.GalleryName, id.ResourceGroup, err) } - log.Printf("[DEBUG] Waiting for Shared Image %q (Gallery %q / Resource Group %q) to be eventually deleted", id.Name, id.Gallery, id.ResourceGroup) + log.Printf("[DEBUG] Waiting for Shared Image %q (Gallery %q / Resource Group %q) to be eventually deleted", id.ImageName, id.GalleryName, id.ResourceGroup) stateConf := &resource.StateChangeConf{ Pending: []string{"Exists"}, Target: []string{"NotFound"}, - Refresh: sharedImageDeleteStateRefreshFunc(ctx, client, id.ResourceGroup, id.Gallery, id.Name), + Refresh: sharedImageDeleteStateRefreshFunc(ctx, client, id.ResourceGroup, id.GalleryName, id.ImageName), MinTimeout: 10 * time.Second, ContinuousTargetOccurence: 
10, Timeout: d.Timeout(schema.TimeoutDelete), } if _, err := stateConf.WaitForState(); err != nil { - return fmt.Errorf("failed to wait for Shared Image %q (Gallery %q / Resource Group %q) to be deleted: %+v", id.Name, id.Gallery, id.ResourceGroup, err) + return fmt.Errorf("failed to wait for Shared Image %q (Gallery %q / Resource Group %q) to be deleted: %+v", id.ImageName, id.GalleryName, id.ResourceGroup, err) } return nil diff --git a/azurerm/internal/services/compute/shared_image_version_resource.go b/azurerm/internal/services/compute/shared_image_version_resource.go index cc51742f0f8f..9794967c9503 100644 --- a/azurerm/internal/services/compute/shared_image_version_resource.go +++ b/azurerm/internal/services/compute/shared_image_version_resource.go @@ -209,19 +209,19 @@ func resourceArmSharedImageVersionRead(d *schema.ResourceData, meta interface{}) return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.Gallery, id.ImageName, id.Version, compute.ReplicationStatusTypesReplicationStatus) + resp, err := client.Get(ctx, id.ResourceGroup, id.GalleryName, id.ImageName, id.VersionName, compute.ReplicationStatusTypesReplicationStatus) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[DEBUG] Shared Image Version %q (Image %q / Gallery %q / Resource Group %q) was not found - removing from state", id.Version, id.ImageName, id.Gallery, id.ResourceGroup) + log.Printf("[DEBUG] Shared Image Version %q (Image %q / Gallery %q / Resource Group %q) was not found - removing from state", id.VersionName, id.ImageName, id.GalleryName, id.ResourceGroup) d.SetId("") return nil } - return fmt.Errorf("Error retrieving Shared Image Version %q (Image %q / Gallery %q / Resource Group %q): %+v", id.Version, id.ImageName, id.Gallery, id.ResourceGroup, err) + return fmt.Errorf("Error retrieving Shared Image Version %q (Image %q / Gallery %q / Resource Group %q): %+v", id.VersionName, id.ImageName, id.GalleryName, id.ResourceGroup, err) } d.Set("name", resp.Name) d.Set("image_name", id.ImageName) - d.Set("gallery_name", id.Gallery) + d.Set("gallery_name", id.GalleryName) d.Set("resource_group_name", id.ResourceGroup) if location := resp.Location; location != nil { @@ -263,18 +263,18 @@ func resourceArmSharedImageVersionDelete(d *schema.ResourceData, meta interface{ return err } - future, err := client.Delete(ctx, id.ResourceGroup, id.Gallery, id.ImageName, id.Version) + future, err := client.Delete(ctx, id.ResourceGroup, id.GalleryName, id.ImageName, id.VersionName) if err != nil { if response.WasNotFound(future.Response()) { return nil } - return fmt.Errorf("Error deleting Shared Image Version %q (Image %q / Gallery %q / Resource Group %q): %+v", id.Version, id.ImageName, id.Gallery, id.ResourceGroup, err) + return fmt.Errorf("Error deleting Shared Image Version %q (Image %q / Gallery %q / Resource Group %q): %+v", id.VersionName, id.ImageName, id.GalleryName, id.ResourceGroup, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { if !response.WasNotFound(future.Response()) { - return fmt.Errorf("Error deleting Shared Image Version %q (Image %q / Gallery %q / Resource Group %q): %+v", id.Version, id.ImageName, id.Gallery, id.ResourceGroup, err) + return fmt.Errorf("Error deleting Shared Image Version %q (Image %q / Gallery %q / Resource Group %q): %+v", id.VersionName, id.ImageName, id.GalleryName, id.ResourceGroup, err) } } diff --git a/azurerm/internal/services/compute/shared_schema.go b/azurerm/internal/services/compute/shared_schema.go index 
d66c6cb357b3..ff3544e1c03e 100644 --- a/azurerm/internal/services/compute/shared_schema.go +++ b/azurerm/internal/services/compute/shared_schema.go @@ -104,7 +104,7 @@ func bootDiagnosticsSchema() *schema.Schema { // TODO: should this be `storage_account_endpoint`? "storage_account_uri": { Type: schema.TypeString, - Required: true, + Optional: true, // TODO: validation }, }, @@ -113,7 +113,7 @@ func bootDiagnosticsSchema() *schema.Schema { } func expandBootDiagnostics(input []interface{}) *compute.DiagnosticsProfile { - if len(input) == 0 || input[0] == nil { + if len(input) == 0 { return &compute.DiagnosticsProfile{ BootDiagnostics: &compute.BootDiagnostics{ Enabled: utils.Bool(false), @@ -122,6 +122,16 @@ func expandBootDiagnostics(input []interface{}) *compute.DiagnosticsProfile { } } + // this serves the managed boot diagnostics, in this case we only have this empty block without `storage_account_uri` set + if input[0] == nil { + return &compute.DiagnosticsProfile{ + BootDiagnostics: &compute.BootDiagnostics{ + Enabled: utils.Bool(true), + StorageURI: utils.String(""), + }, + } + } + raw := input[0].(map[string]interface{}) storageAccountURI := raw["storage_account_uri"].(string) @@ -337,24 +347,28 @@ func sourceImageReferenceSchema(isVirtualMachine bool) *schema.Schema { Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "publisher": { - Type: schema.TypeString, - Required: true, - ForceNew: isVirtualMachine, + Type: schema.TypeString, + Required: true, + ForceNew: isVirtualMachine, + ValidateFunc: validation.StringIsNotEmpty, }, "offer": { - Type: schema.TypeString, - Required: true, - ForceNew: isVirtualMachine, + Type: schema.TypeString, + Required: true, + ForceNew: isVirtualMachine, + ValidateFunc: validation.StringIsNotEmpty, }, "sku": { - Type: schema.TypeString, - Required: true, - ForceNew: isVirtualMachine, + Type: schema.TypeString, + Required: true, + ForceNew: isVirtualMachine, + ValidateFunc: validation.StringIsNotEmpty, }, "version": { - Type: schema.TypeString, - Required: true, - ForceNew: isVirtualMachine, + Type: schema.TypeString, + Required: true, + ForceNew: isVirtualMachine, + ValidateFunc: validation.StringIsNotEmpty, }, }, }, diff --git a/azurerm/internal/services/compute/ssh_keys.go b/azurerm/internal/services/compute/ssh_keys.go index 8261190c6bd3..7df65461a473 100644 --- a/azurerm/internal/services/compute/ssh_keys.go +++ b/azurerm/internal/services/compute/ssh_keys.go @@ -1,18 +1,21 @@ package compute import ( + "bytes" "crypto/rsa" "encoding/base64" "fmt" + "log" "regexp" "strings" - "golang.org/x/crypto/ssh" - "github.com/Azure/azure-sdk-for-go/services/compute/mgmt/2020-06-01/compute" + "github.com/hashicorp/terraform-plugin-sdk/helper/hashcode" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" + "golang.org/x/crypto/ssh" ) func SSHKeysSchema(isVirtualMachine bool) *schema.Schema { @@ -23,13 +26,15 @@ func SSHKeysSchema(isVirtualMachine bool) *schema.Schema { Type: schema.TypeSet, Optional: true, ForceNew: isVirtualMachine, + Set: SSHKeySchemaHash, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "public_key": { - Type: schema.TypeString, - Required: true, - ForceNew: isVirtualMachine, - ValidateFunc: ValidateSSHKey, + Type: schema.TypeString, + Required: true, + ForceNew: isVirtualMachine, + ValidateFunc: 
ValidateSSHKey, + DiffSuppressFunc: SSHKeyDiffSuppress, }, "username": { @@ -155,3 +160,38 @@ func ValidateSSHKey(i interface{}, k string) (warnings []string, errors []error) return warnings, errors } + +func SSHKeyDiffSuppress(_, old, new string, _ *schema.ResourceData) bool { + oldNormalised, err := azure.NormaliseSSHKey(old) + if err != nil { + log.Printf("[DEBUG] error normalising ssh key %q: %+v", old, err) + return false + } + + newNormalised, err := azure.NormaliseSSHKey(new) + if err != nil { + log.Printf("[DEBUG] error normalising ssh key %q: %+v", new, err) + return false + } + + if *oldNormalised == *newNormalised { + return true + } + + return false +} + +func SSHKeySchemaHash(v interface{}) int { + var buf bytes.Buffer + + if m, ok := v.(map[string]interface{}); ok { + normalisedKey, err := azure.NormaliseSSHKey(m["public_key"].(string)) + if err != nil { + log.Printf("[DEBUG] error normalising ssh key %q: %+v", m["public_key"].(string), err) + } + buf.WriteString(fmt.Sprintf("%s-", *normalisedKey)) + buf.WriteString(fmt.Sprintf("%s", m["username"])) + } + + return hashcode.String(buf.String()) +} diff --git a/azurerm/internal/services/compute/tests/availability_set_resource_test.go b/azurerm/internal/services/compute/tests/availability_set_resource_test.go index fbfd16b795f2..3bfad5105d28 100644 --- a/azurerm/internal/services/compute/tests/availability_set_resource_test.go +++ b/azurerm/internal/services/compute/tests/availability_set_resource_test.go @@ -241,7 +241,6 @@ func testCheckAzureRMAvailabilitySetDestroy(s *terraform.State) error { } resp, err := client.Get(ctx, id.ResourceGroup, id.Name) - if err != nil { if utils.ResponseWasNotFound(resp.Response) { return nil diff --git a/azurerm/internal/services/compute/tests/dedicated_host_resource_test.go b/azurerm/internal/services/compute/tests/dedicated_host_resource_test.go index 716c42dbbf5c..5feab562e720 100644 --- a/azurerm/internal/services/compute/tests/dedicated_host_resource_test.go +++ b/azurerm/internal/services/compute/tests/dedicated_host_resource_test.go @@ -34,6 +34,25 @@ func TestAccAzureRMDedicatedHost_basic(t *testing.T) { }) } +func TestAccAzureRMDedicatedHost_basicNewSku(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dedicated_host", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDedicatedHostDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDedicatedHost_basicNewSku(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDedicatedHostExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + func TestAccAzureRMDedicatedHost_autoReplaceOnFailure(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_dedicated_host", "test") @@ -189,9 +208,9 @@ func testCheckAzureRMDedicatedHostExists(resourceName string) resource.TestCheck client := acceptance.AzureProvider.Meta().(*clients.Client).Compute.DedicatedHostsClient ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - if resp, err := client.Get(ctx, id.ResourceGroup, id.HostGroup, id.Name, ""); err != nil { + if resp, err := client.Get(ctx, id.ResourceGroup, id.HostGroupName, id.HostName, ""); err != nil { if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Dedicated Host %q (Host Group Name %q / Resource Group %q) does not exist", id.Name, id.HostGroup, id.ResourceGroup) + return fmt.Errorf("Bad: Dedicated Host %q (Host Group Name 
%q / Resource Group %q) does not exist", id.HostName, id.HostGroupName, id.ResourceGroup) } return fmt.Errorf("Bad: Get on Compute.DedicatedHostsClient: %+v", err) } @@ -214,7 +233,7 @@ func testCheckAzureRMDedicatedHostDestroy(s *terraform.State) error { return err } - if resp, err := client.Get(ctx, id.ResourceGroup, id.HostGroup, id.Name, ""); err != nil { + if resp, err := client.Get(ctx, id.ResourceGroup, id.HostGroupName, id.HostName, ""); err != nil { if !utils.ResponseWasNotFound(resp.Response) { return fmt.Errorf("Bad: Get on Compute.DedicatedHostsClient: %+v", err) } @@ -241,6 +260,21 @@ resource "azurerm_dedicated_host" "test" { `, template, data.RandomInteger) } +func testAccAzureRMDedicatedHost_basicNewSku(data acceptance.TestData) string { + template := testAccAzureRMDedicatedHost_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_dedicated_host" "test" { + name = "acctest-DH-%d" + location = azurerm_resource_group.test.location + dedicated_host_group_id = azurerm_dedicated_host_group.test.id + sku_name = "DCSv2-Type1" + platform_fault_domain = 1 +} +`, template, data.RandomInteger) +} + func testAccAzureRMDedicatedHost_autoReplaceOnFailure(data acceptance.TestData, replace bool) string { template := testAccAzureRMDedicatedHost_template(data) return fmt.Sprintf(` diff --git a/azurerm/internal/services/compute/tests/disk_access_data_source_test.go b/azurerm/internal/services/compute/tests/disk_access_data_source_test.go new file mode 100644 index 000000000000..141318619565 --- /dev/null +++ b/azurerm/internal/services/compute/tests/disk_access_data_source_test.go @@ -0,0 +1,60 @@ +package tests + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" +) + +func TestAccDataSourceAzureRMDiskAccess_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_disk_access", "test") + + name := fmt.Sprintf("acctestdiskaccess-%d", data.RandomInteger) + resourceGroupName := fmt.Sprintf("acctestRG-%d", data.RandomInteger) + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + Steps: []resource.TestStep{ + { + Config: testAccDataSourceAzureRMDiskAccessBasic(data, name, resourceGroupName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(data.ResourceName, "name", name), + resource.TestCheckResourceAttr(data.ResourceName, "resource_group_name", resourceGroupName), + resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), + resource.TestCheckResourceAttr(data.ResourceName, "tags.environment", "acctest"), + ), + }, + }, + }) +} + +func testAccDataSourceAzureRMDiskAccessBasic(data acceptance.TestData, name string, resourceGroupName string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "%s" + location = "%s" +} + +resource "azurerm_disk_access" "test" { + name = "%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + tags = { + environment = "acctest" + } +} + +data "azurerm_disk_access" "test" { + name = azurerm_disk_access.test.name + resource_group_name = azurerm_resource_group.test.name +} + `, resourceGroupName, data.Locations.Primary, name) +} diff --git a/azurerm/internal/services/compute/tests/disk_access_resource_test.go 
b/azurerm/internal/services/compute/tests/disk_access_resource_test.go new file mode 100644 index 000000000000..cda57956eefb --- /dev/null +++ b/azurerm/internal/services/compute/tests/disk_access_resource_test.go @@ -0,0 +1,201 @@ +package tests + +import ( + "fmt" + "net/http" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/compute/mgmt/2020-06-01/compute" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" +) + +func TestAccAzureRMDiskAccess_empty(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_disk_access", "test") + var d compute.DiskAccess + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDiskAccessDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDiskAccess_empty(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDiskAccessExists(data.ResourceName, &d, true), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMDiskAccess_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_disk_access", "test") + var d compute.DiskAccess + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDiskAccessDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDiskAccess_empty(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDiskAccessExists(data.ResourceName, &d, true), + ), + }, + { + Config: testAccAzureRMDiskAccess_requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_disk_access"), + }, + }, + }) +} + +func TestAccAzureRMDiskAccess_import(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_disk_access", "test") + var d compute.DiskAccess + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDiskAccessDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDiskAccess_import(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDiskAccessExists(data.ResourceName, &d, true), + ), + }, + }, + }) +} + +func testCheckAzureRMDiskAccessExists(resourceName string, d *compute.DiskAccess, shouldExist bool) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).Compute.DiskAccessClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + + daName := rs.Primary.Attributes["name"] + resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] + if !hasResourceGroup { + return fmt.Errorf("Bad: no resource group found in state for disk access: %s", daName) + } + resp, err := client.Get(ctx, resourceGroup, daName) + if err != nil { + return fmt.Errorf("Bad: Get on diskAccessClient: %v", err) + } + if resp.StatusCode == http.StatusNotFound && shouldExist { + return fmt.Errorf("Bad: DiskAccess %q (resource group %q) does not exist", daName, resourceGroup) + } + if resp.StatusCode != http.StatusNotFound && !shouldExist { + 
return fmt.Errorf("Bad: DiskAccess %q (resource group %q) still exists", daName, resourceGroup) + } + + *d = resp + + return nil + } +} + +func testAccAzureRMDiskAccess_empty(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_disk_access" "test" { + name = "acctestda-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + tags = { + environment = "acctest" + cost-center = "ops" + } +} + + `, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func testAccAzureRMDiskAccess_requiresImport(data acceptance.TestData) string { + template := testAccAzureRMDiskAccess_empty(data) + return fmt.Sprintf(` + %s + +resource "azurerm_disk_access" "import" { + name = azurerm_disk_access.test.name + location = azurerm_disk_access.test.location + resource_group_name = azurerm_disk_access.test.resource_group_name + + tags = { + environment = "acctest" + cost-center = "ops" + } +} + `, template) +} + +func testAccAzureRMDiskAccess_import(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_disk_access" "test" { + name = "accda%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + + tags = { + environment = "staging" + } +} + + + `, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func testCheckAzureRMDiskAccessDestroy(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).Compute.DiskAccessClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + for _, rs := range s.RootModule().Resources { + if rs.Type != "azurerm_disk_access" { + continue + } + + name := rs.Primary.Attributes["name"] + resourceGroup := rs.Primary.Attributes["resource_group_name"] + + resp, err := client.Get(ctx, resourceGroup, name) + if err != nil { + return nil + } + + if resp.StatusCode != http.StatusNotFound { + return fmt.Errorf("Disk Access still exists: \n%#v", resp.DiskAccessProperties) + } + } + + return nil +} diff --git a/azurerm/internal/services/compute/tests/disk_encryption_set_resource_test.go b/azurerm/internal/services/compute/tests/disk_encryption_set_resource_test.go index ad9edd1823d8..df089d847d19 100644 --- a/azurerm/internal/services/compute/tests/disk_encryption_set_resource_test.go +++ b/azurerm/internal/services/compute/tests/disk_encryption_set_resource_test.go @@ -95,6 +95,41 @@ func TestAccAzureRMDiskEncryptionSet_update(t *testing.T) { }) } +func TestAccAzureRMDiskEncryptionSet_keyRotate(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_disk_encryption_set", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDiskEncryptionSetDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDiskEncryptionSet_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDiskEncryptionSetExists(data.ResourceName), + ), + }, + data.ImportStep(), + // we have to first grant the permission for DiskEncryptionSet to access the KeyVault + { + Config: testAccAzureRMDiskEncryptionSet_grantAccessToKeyVault(data), + Check: resource.ComposeTestCheckFunc( 
+ testCheckAzureRMDiskEncryptionSetExists(data.ResourceName), + ), + }, + data.ImportStep(), + // after the access is granted, we can rotate the key in DiskEncryptionSet + { + Config: testAccAzureRMDiskEncryptionSet_keyRotate(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDiskEncryptionSetExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + func testCheckAzureRMDiskEncryptionSetExists(resourceName string) resource.TestCheckFunc { return func(s *terraform.State) error { rs, ok := s.RootModule().Resources[resourceName] @@ -148,10 +183,6 @@ func testCheckAzureRMDiskEncryptionSetDestroy(s *terraform.State) error { } func testAccAzureRMDiskEncryptionSet_dependencies(data acceptance.TestData) string { - // whilst this is in Preview it's only supported in: West Central US, Canada Central, North Europe - // TODO: switch back to default location - location := "northeurope" - return fmt.Sprintf(` provider "azurerm" { features {} @@ -165,32 +196,33 @@ resource "azurerm_resource_group" "test" { } resource "azurerm_key_vault" "test" { - name = "acctestkv-%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - sku_name = "premium" - - purge_protection_enabled = true - soft_delete_enabled = true - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - key_permissions = [ - "create", - "delete", - "get", - "update", - ] - - secret_permissions = [ - "get", - "delete", - "set", - ] - } + name = "acctestkv-%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "premium" + soft_delete_enabled = true + purge_protection_enabled = true + enabled_for_disk_encryption = true +} + +resource "azurerm_key_vault_access_policy" "service-principal" { + key_vault_id = azurerm_key_vault.test.id + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + + key_permissions = [ + "create", + "delete", + "get", + "update", + ] + + secret_permissions = [ + "get", + "delete", + "set", + ] } resource "azurerm_key_vault_key" "test" { @@ -207,8 +239,10 @@ resource "azurerm_key_vault_key" "test" { "verify", "wrapKey", ] + + depends_on = ["azurerm_key_vault_access_policy.service-principal"] } -`, data.RandomInteger, location, data.RandomString) +`, data.RandomInteger, data.Locations.Primary, data.RandomString) } func testAccAzureRMDiskEncryptionSet_basic(data acceptance.TestData) string { @@ -268,3 +302,83 @@ resource "azurerm_disk_encryption_set" "test" { } `, template, data.RandomInteger) } + +func testAccAzureRMDiskEncryptionSet_grantAccessToKeyVault(data acceptance.TestData) string { + template := testAccAzureRMDiskEncryptionSet_dependencies(data) + return fmt.Sprintf(` +%s + +resource "azurerm_key_vault_access_policy" "disk-encryption" { + key_vault_id = azurerm_key_vault.test.id + + key_permissions = [ + "get", + "wrapkey", + "unwrapkey", + ] + + tenant_id = azurerm_disk_encryption_set.test.identity.0.tenant_id + object_id = azurerm_disk_encryption_set.test.identity.0.principal_id +} + +resource "azurerm_disk_encryption_set" "test" { + name = "acctestDES-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + key_vault_key_id = 
azurerm_key_vault_key.test.id + + identity { + type = "SystemAssigned" + } +} +`, template, data.RandomInteger) +} + +func testAccAzureRMDiskEncryptionSet_keyRotate(data acceptance.TestData) string { + template := testAccAzureRMDiskEncryptionSet_dependencies(data) + return fmt.Sprintf(` +%s + +resource "azurerm_key_vault_key" "new" { + name = "newKey" + key_vault_id = azurerm_key_vault.test.id + key_type = "RSA" + key_size = 2048 + + key_opts = [ + "decrypt", + "encrypt", + "sign", + "unwrapKey", + "verify", + "wrapKey", + ] + + depends_on = ["azurerm_key_vault_access_policy.service-principal"] +} + +resource "azurerm_key_vault_access_policy" "disk-encryption" { + key_vault_id = azurerm_key_vault.test.id + + key_permissions = [ + "get", + "wrapkey", + "unwrapkey", + ] + + tenant_id = azurerm_disk_encryption_set.test.identity.0.tenant_id + object_id = azurerm_disk_encryption_set.test.identity.0.principal_id +} + +resource "azurerm_disk_encryption_set" "test" { + name = "acctestDES-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + key_vault_key_id = azurerm_key_vault_key.new.id + + identity { + type = "SystemAssigned" + } +} +`, template, data.RandomInteger) +} diff --git a/azurerm/internal/services/compute/tests/image_resource_test.go b/azurerm/internal/services/compute/tests/image_resource_test.go index deea116207b7..4bef5034e409 100644 --- a/azurerm/internal/services/compute/tests/image_resource_test.go +++ b/azurerm/internal/services/compute/tests/image_resource_test.go @@ -485,7 +485,6 @@ func testCheckAzureRMImageDestroy(s *terraform.State) error { resourceGroup := rs.Primary.Attributes["resource_group_name"] resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { return nil } diff --git a/azurerm/internal/services/compute/tests/images_data_source_test.go b/azurerm/internal/services/compute/tests/images_data_source_test.go new file mode 100644 index 000000000000..555db0c24910 --- /dev/null +++ b/azurerm/internal/services/compute/tests/images_data_source_test.go @@ -0,0 +1,165 @@ +package tests + +import ( + "fmt" + "regexp" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" +) + +func TestAccDataSourceAzureRMImages_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_images", "test") + + resourceGroup := fmt.Sprintf("acctestRG-%d", data.RandomInteger) + userName := "testadmin" + password := "Password1234!" 
+ hostName := fmt.Sprintf("tftestcustomimagesrc%d", data.RandomInteger) + sshPort := "22" + storageType := "LRS" + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMImageDestroy, + Steps: []resource.TestStep{ + { + // need to create a vm and then reference it in the image creation + Config: testAccAzureRMImage_standaloneImage_setup(data, userName, password, hostName, storageType), + Check: resource.ComposeTestCheckFunc( + testCheckAzureVMExists("azurerm_virtual_machine.testsource", true), + testGeneralizeVMImage(resourceGroup, "testsource", userName, password, hostName, sshPort, data.Locations.Primary), + ), + }, + { + Config: testAccAzureRMImage_standaloneImage_provision(data, userName, password, hostName, storageType, ""), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMImageExists("azurerm_image.test", true), + ), + }, + { + Config: testAccDataSourceImages_basic(data, userName, password, hostName, storageType), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(data.ResourceName, "images.#", "1"), + resource.TestCheckResourceAttr(data.ResourceName, "images.0.os_disk.0.os_type", "Linux"), + ), + }, + }, + }) +} + +func TestAccDataSourceAzureRMImages_tagsFilterError(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_images", "test") + + resourceGroup := fmt.Sprintf("acctestRG-%d", data.RandomInteger) + userName := "testadmin" + password := "Password1234!" + hostName := fmt.Sprintf("tftestcustomimagesrc%d", data.RandomInteger) + sshPort := "22" + storageType := "LRS" + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMImageDestroy, + Steps: []resource.TestStep{ + { + // need to create a vm and then reference it in the image creation + Config: testAccAzureRMImage_standaloneImage_setup(data, userName, password, hostName, storageType), + Check: resource.ComposeTestCheckFunc( + testCheckAzureVMExists("azurerm_virtual_machine.testsource", true), + testGeneralizeVMImage(resourceGroup, "testsource", userName, password, hostName, sshPort, data.Locations.Primary), + ), + }, + { + Config: testAccAzureRMImage_standaloneImage_provision(data, userName, password, hostName, storageType, ""), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMImageExists("azurerm_image.test", true), + ), + }, + { + Config: testAccDataSourceImages_tagsFilterError(data, userName, password, hostName, storageType), + ExpectError: regexp.MustCompile("no images were found that match the specified tags"), + }, + }, + }) +} + +func TestAccDataSourceAzureRMImages_tagsFilter(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_images", "test") + + resourceGroup := fmt.Sprintf("acctestRG-%d", data.RandomInteger) + userName := "testadmin" + password := "Password1234!" 
+ hostName := fmt.Sprintf("tftestcustomimagesrc%d", data.RandomInteger) + sshPort := "22" + storageType := "LRS" + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMImageDestroy, + Steps: []resource.TestStep{ + { + // need to create a vm and then reference it in the image creation + Config: testAccAzureRMImage_standaloneImage_setup(data, userName, password, hostName, storageType), + Check: resource.ComposeTestCheckFunc( + testCheckAzureVMExists("azurerm_virtual_machine.testsource", true), + testGeneralizeVMImage(resourceGroup, "testsource", userName, password, hostName, sshPort, data.Locations.Primary), + ), + }, + { + Config: testAccAzureRMImage_standaloneImage_provision(data, userName, password, hostName, storageType, ""), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMImageExists("azurerm_image.test", true), + ), + }, + { + Config: testAccDataSourceImages_tagsFilter(data, userName, password, hostName, storageType), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(data.ResourceName, "images.#", "1"), + ), + }, + }, + }) +} + +func testAccDataSourceImages_basic(data acceptance.TestData, userName, password, hostName, storageType string) string { + template := testAccAzureRMImage_standaloneImage_provision(data, userName, password, hostName, storageType, "") + return fmt.Sprintf(` +%s + +data "azurerm_images" "test" { + resource_group_name = azurerm_image.test.resource_group_name +} +`, template) +} + +func testAccDataSourceImages_tagsFilterError(data acceptance.TestData, userName, password, hostName, storageType string) string { + template := testAccAzureRMImage_standaloneImage_provision(data, userName, password, hostName, storageType, "") + return fmt.Sprintf(` +%s + +data "azurerm_images" "test" { + resource_group_name = azurerm_image.test.resource_group_name + tags_filter = { + environment = "error" + } +} +`, template) +} + +func testAccDataSourceImages_tagsFilter(data acceptance.TestData, userName, password, hostName, storageType string) string { + template := testAccAzureRMImage_standaloneImage_provision(data, userName, password, hostName, storageType, "") + return fmt.Sprintf(` +%s + +data "azurerm_images" "test" { + resource_group_name = azurerm_image.test.resource_group_name + tags_filter = { + environment = "Dev" + } +} +`, template) +} diff --git a/azurerm/internal/services/compute/tests/linux_virtual_machine_resource_other_test.go b/azurerm/internal/services/compute/tests/linux_virtual_machine_resource_other_test.go index 0a1de5d0a2e6..ab38f9e832db 100644 --- a/azurerm/internal/services/compute/tests/linux_virtual_machine_resource_other_test.go +++ b/azurerm/internal/services/compute/tests/linux_virtual_machine_resource_other_test.go @@ -105,6 +105,62 @@ func TestAccLinuxVirtualMachine_otherAllowExtensionOperationsUpdatedWithoutVmAge }) } +func TestAccLinuxVirtualMachine_otherExtensionsTimeBudget(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: checkLinuxVirtualMachineIsDestroyed, + Steps: []resource.TestStep{ + { + Config: testLinuxVirtualMachine_otherExtensionsTimeBudget(data, "PT30M"), + Check: resource.ComposeTestCheckFunc( + checkLinuxVirtualMachineExists(data.ResourceName), + resource.TestCheckResourceAttr(data.ResourceName, 
"extensions_time_budget", "PT30M"), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccLinuxVirtualMachine_otherExtensionsTimeBudgetUpdate(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: checkLinuxVirtualMachineIsDestroyed, + Steps: []resource.TestStep{ + { + Config: testLinuxVirtualMachine_otherExtensionsTimeBudget(data, "PT30M"), + Check: resource.ComposeTestCheckFunc( + checkLinuxVirtualMachineExists(data.ResourceName), + resource.TestCheckResourceAttr(data.ResourceName, "extensions_time_budget", "PT30M"), + ), + }, + data.ImportStep(), + { + Config: testLinuxVirtualMachine_otherExtensionsTimeBudget(data, "PT50M"), + Check: resource.ComposeTestCheckFunc( + checkLinuxVirtualMachineExists(data.ResourceName), + resource.TestCheckResourceAttr(data.ResourceName, "extensions_time_budget", "PT50M"), + ), + }, + data.ImportStep(), + { + Config: testLinuxVirtualMachine_otherExtensionsTimeBudget(data, "PT30M"), + Check: resource.ComposeTestCheckFunc( + checkLinuxVirtualMachineExists(data.ResourceName), + resource.TestCheckResourceAttr(data.ResourceName, "extensions_time_budget", "PT30M"), + ), + }, + data.ImportStep(), + }, + }) +} + func TestAccLinuxVirtualMachine_otherBootDiagnostics(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine", "test") @@ -141,6 +197,42 @@ func TestAccLinuxVirtualMachine_otherBootDiagnostics(t *testing.T) { }) } +func TestAccLinuxVirtualMachine_otherBootDiagnosticsManaged(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: checkLinuxVirtualMachineIsDestroyed, + Steps: []resource.TestStep{ + { + // Enabled + Config: testLinuxVirtualMachine_otherBootDiagnosticsManaged(data), + Check: resource.ComposeTestCheckFunc( + checkLinuxVirtualMachineExists(data.ResourceName), + ), + }, + data.ImportStep(), + { + // Disabled + Config: testLinuxVirtualMachine_otherBootDiagnosticsDisabled(data), + Check: resource.ComposeTestCheckFunc( + checkLinuxVirtualMachineExists(data.ResourceName), + ), + }, + data.ImportStep(), + { + // Enabled + Config: testLinuxVirtualMachine_otherBootDiagnosticsManaged(data), + Check: resource.ComposeTestCheckFunc( + checkLinuxVirtualMachineExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + func TestAccLinuxVirtualMachine_otherComputerNameDefault(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine", "test") @@ -521,6 +613,42 @@ func TestAccLinuxVirtualMachine_otherEncryptionAtHostEnabledWithCMK(t *testing.T }) } +func TestAccLinuxVirtualMachine_otherGracefulShutdownDisabled(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: checkLinuxVirtualMachineIsDestroyed, + Steps: []resource.TestStep{ + { + Config: testLinuxVirtualMachine_otherGracefulShutdown(data, false), + Check: resource.ComposeTestCheckFunc( + checkLinuxVirtualMachineExists(data.ResourceName), + ), + }, + }, + }) +} + +func TestAccLinuxVirtualMachine_otherGracefulShutdownEnabled(t *testing.T) { + data := 
acceptance.BuildTestData(t, "azurerm_linux_virtual_machine", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: checkLinuxVirtualMachineIsDestroyed, + Steps: []resource.TestStep{ + { + Config: testLinuxVirtualMachine_otherGracefulShutdown(data, true), + Check: resource.ComposeTestCheckFunc( + checkLinuxVirtualMachineExists(data.ResourceName), + ), + }, + }, + }) +} + func testLinuxVirtualMachine_otherAllowExtensionOperationsDefault(data acceptance.TestData) string { template := testLinuxVirtualMachine_template(data) return fmt.Sprintf(` @@ -666,6 +794,43 @@ resource "azurerm_linux_virtual_machine" "test" { `, template, data.RandomInteger) } +func testLinuxVirtualMachine_otherExtensionsTimeBudget(data acceptance.TestData, duration string) string { + template := testLinuxVirtualMachine_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_linux_virtual_machine" "test" { + name = "acctestVM-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + size = "Standard_F2" + admin_username = "adminuser" + network_interface_ids = [ + azurerm_network_interface.test.id, + ] + + admin_ssh_key { + username = "adminuser" + public_key = local.first_public_key + } + + os_disk { + caching = "ReadWrite" + storage_account_type = "Standard_LRS" + } + + source_image_reference { + publisher = "Canonical" + offer = "UbuntuServer" + sku = "16.04-LTS" + version = "latest" + } + + extensions_time_budget = "%s" +} +`, template, data.RandomInteger, duration) +} + func testLinuxVirtualMachine_otherBootDiagnostics(data acceptance.TestData) string { template := testLinuxVirtualMachine_otherBootDiagnosticsTemplate(data) return fmt.Sprintf(` @@ -705,6 +870,43 @@ resource "azurerm_linux_virtual_machine" "test" { `, template, data.RandomInteger) } +func testLinuxVirtualMachine_otherBootDiagnosticsManaged(data acceptance.TestData) string { + template := testLinuxVirtualMachine_otherBootDiagnosticsTemplate(data) + return fmt.Sprintf(` +%s + +resource "azurerm_linux_virtual_machine" "test" { + name = "acctestVM-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + size = "Standard_F2" + admin_username = "adminuser" + network_interface_ids = [ + azurerm_network_interface.test.id, + ] + + admin_ssh_key { + username = "adminuser" + public_key = local.first_public_key + } + + boot_diagnostics {} + + os_disk { + caching = "ReadWrite" + storage_account_type = "Standard_LRS" + } + + source_image_reference { + publisher = "Canonical" + offer = "UbuntuServer" + sku = "16.04-LTS" + version = "latest" + } +} +`, template, data.RandomInteger) +} + func testLinuxVirtualMachine_otherBootDiagnosticsDisabled(data acceptance.TestData) string { template := testLinuxVirtualMachine_otherBootDiagnosticsTemplate(data) return fmt.Sprintf(` @@ -1573,3 +1775,78 @@ resource "azurerm_linux_virtual_machine" "test" { } `, template, data.RandomInteger, enabled) } + +func testLinuxVirtualMachine_otherGracefulShutdown(data acceptance.TestData, gracefulShutdown bool) string { + return fmt.Sprintf(` +locals { + first_public_key = "ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAABAQC+wWK73dCr+jgQOAxNsHAnNNNMEMWOHYEccp6wJm2gotpr9katuF/ZAdou5AaW1C61slRkHRkpRRX9FA9CYBiitZgvCCz+3nWNN7l/Up54Zps/pHWGZLHNJZRYyAB6j5yVLMVHIHriY49d/GZTZVNB8GoJv9Gakwc/fuEZYYl4YDFiGMBP///TzlI4jhiJzjKnEvqPFki5p2ZRJqcbCiF4pJrxUQR/RXqVFQdbRLZgYfJ8xGB878RENq3yQ39d8dVOkq4edbkzwcUmwwwkYVPIoDGsYLaRHnG+To7FvMeyO7xDVQkMKzopTQV8AuKpyvpqu0a9pWOMaiCyDytO7GGN you@me.com" +} + +provider "azurerm" { + features { + virtual_machine { + graceful_shutdown = %t + } + } +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestnw-%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "internal" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" +} + +resource "azurerm_network_interface" "test" { + name = "acctestnic-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + ip_configuration { + name = "internal" + subnet_id = azurerm_subnet.test.id + private_ip_address_allocation = "Dynamic" + } +} + +resource "azurerm_linux_virtual_machine" "test" { + name = "acctestVM-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + size = "Standard_F2" + admin_username = "adminuser" + network_interface_ids = [ + azurerm_network_interface.test.id, + ] + + admin_ssh_key { + username = "adminuser" + public_key = local.first_public_key + } + + os_disk { + caching = "ReadWrite" + storage_account_type = "Standard_LRS" + } + + source_image_reference { + publisher = "Canonical" + offer = "UbuntuServer" + sku = "16.04-LTS" + version = "latest" + } +} +`, gracefulShutdown, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/compute/tests/linux_virtual_machine_resource_scaling_test.go b/azurerm/internal/services/compute/tests/linux_virtual_machine_resource_scaling_test.go index 7122b52d3fa6..39c2973f60a5 100644 --- a/azurerm/internal/services/compute/tests/linux_virtual_machine_resource_scaling_test.go +++ b/azurerm/internal/services/compute/tests/linux_virtual_machine_resource_scaling_test.go @@ -66,6 +66,46 @@ func TestAccLinuxVirtualMachine_scalingDedicatedHost(t *testing.T) { }) } +func TestAccLinuxVirtualMachine_scalingDedicatedHostUpdate(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: checkLinuxVirtualMachineIsDestroyed, + Steps: []resource.TestStep{ + { + Config: testLinuxVirtualMachine_scalingDedicatedHostInitial(data), + Check: resource.ComposeTestCheckFunc( + checkLinuxVirtualMachineExists(data.ResourceName), + ), + }, + data.ImportStep(), + { + Config: testLinuxVirtualMachine_scalingDedicatedHost(data), + Check: resource.ComposeTestCheckFunc( + checkLinuxVirtualMachineExists(data.ResourceName), + ), + }, + data.ImportStep(), + { + Config: testLinuxVirtualMachine_scalingDedicatedHostUpdate(data), + Check: resource.ComposeTestCheckFunc( + checkLinuxVirtualMachineExists(data.ResourceName), + ), + }, + data.ImportStep(), + { + Config: 
testLinuxVirtualMachine_scalingDedicatedHostRemoved(data), + Check: resource.ComposeTestCheckFunc( + checkLinuxVirtualMachineExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + func TestAccLinuxVirtualMachine_scalingProximityPlacementGroup(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine", "test") @@ -220,6 +260,48 @@ resource "azurerm_linux_virtual_machine" "test" { `, template, data.RandomInteger, data.RandomInteger) } +func testLinuxVirtualMachine_scalingDedicatedHostInitial(data acceptance.TestData) string { + template := testLinuxVirtualMachine_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_dedicated_host_group" "test" { + name = "acctestDHG-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + platform_fault_domain_count = 2 +} + +resource "azurerm_linux_virtual_machine" "test" { + name = "acctestVM-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + size = "Standard_D2s_v3" # NOTE: SKU's are limited by the Dedicated Host + admin_username = "adminuser" + network_interface_ids = [ + azurerm_network_interface.test.id, + ] + + admin_ssh_key { + username = "adminuser" + public_key = local.first_public_key + } + + os_disk { + caching = "ReadWrite" + storage_account_type = "Standard_LRS" + } + + source_image_reference { + publisher = "Canonical" + offer = "UbuntuServer" + sku = "16.04-LTS" + version = "latest" + } +} +`, template, data.RandomInteger, data.RandomInteger) +} + func testLinuxVirtualMachine_scalingDedicatedHost(data acceptance.TestData) string { template := testLinuxVirtualMachine_template(data) return fmt.Sprintf(` @@ -271,6 +353,115 @@ resource "azurerm_linux_virtual_machine" "test" { `, template, data.RandomInteger, data.RandomInteger, data.RandomInteger) } +func testLinuxVirtualMachine_scalingDedicatedHostUpdate(data acceptance.TestData) string { + template := testLinuxVirtualMachine_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_dedicated_host_group" "test" { + name = "acctestDHG-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + platform_fault_domain_count = 2 +} + +resource "azurerm_dedicated_host" "test" { + name = "acctestDH-%d" + dedicated_host_group_id = azurerm_dedicated_host_group.test.id + location = azurerm_resource_group.test.location + sku_name = "DSv3-Type1" + platform_fault_domain = 1 +} + +resource "azurerm_dedicated_host" "second" { + name = "acctestDH2-%d" + dedicated_host_group_id = azurerm_dedicated_host_group.test.id + location = azurerm_resource_group.test.location + sku_name = "DSv3-Type1" + platform_fault_domain = 1 +} + +resource "azurerm_linux_virtual_machine" "test" { + name = "acctestVM-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + size = "Standard_D2s_v3" # NOTE: SKU's are limited by the Dedicated Host + admin_username = "adminuser" + dedicated_host_id = azurerm_dedicated_host.second.id + network_interface_ids = [ + azurerm_network_interface.test.id, + ] + + admin_ssh_key { + username = "adminuser" + public_key = local.first_public_key + } + + os_disk { + caching = "ReadWrite" + storage_account_type = "Standard_LRS" + } + + source_image_reference { + publisher = "Canonical" + offer = "UbuntuServer" + sku = "16.04-LTS" + version = "latest" + } +} +`, template, data.RandomInteger, data.RandomInteger, 
data.RandomInteger, data.RandomInteger) +} + +func testLinuxVirtualMachine_scalingDedicatedHostRemoved(data acceptance.TestData) string { + template := testLinuxVirtualMachine_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_dedicated_host_group" "test" { + name = "acctestDHG-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + platform_fault_domain_count = 2 +} + +resource "azurerm_dedicated_host" "second" { + name = "acctestDH2-%d" + dedicated_host_group_id = azurerm_dedicated_host_group.test.id + location = azurerm_resource_group.test.location + sku_name = "DSv3-Type1" + platform_fault_domain = 1 +} + +resource "azurerm_linux_virtual_machine" "test" { + name = "acctestVM-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + size = "Standard_D2s_v3" # NOTE: SKU's are limited by the Dedicated Host + admin_username = "adminuser" + network_interface_ids = [ + azurerm_network_interface.test.id, + ] + + admin_ssh_key { + username = "adminuser" + public_key = local.first_public_key + } + + os_disk { + caching = "ReadWrite" + storage_account_type = "Standard_LRS" + } + + source_image_reference { + publisher = "Canonical" + offer = "UbuntuServer" + sku = "16.04-LTS" + version = "latest" + } +} +`, template, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + func testLinuxVirtualMachine_scalingProximityPlacementGroup(data acceptance.TestData) string { template := testLinuxVirtualMachine_template(data) return fmt.Sprintf(` diff --git a/azurerm/internal/services/compute/tests/linux_virtual_machine_scale_set_disk_data_resource_test.go b/azurerm/internal/services/compute/tests/linux_virtual_machine_scale_set_disk_data_resource_test.go index 9fec76f33a27..d804f74c84aa 100644 --- a/azurerm/internal/services/compute/tests/linux_virtual_machine_scale_set_disk_data_resource_test.go +++ b/azurerm/internal/services/compute/tests/linux_virtual_machine_scale_set_disk_data_resource_test.go @@ -45,7 +45,8 @@ func TestAccAzureRMLinuxVirtualMachineScaleSet_disksDataDiskCaching(t *testing.T }, data.ImportStep( "admin_password", - ), { + ), + { Config: testAccAzureRMLinuxVirtualMachineScaleSet_disksDataDiskCaching(data, "ReadOnly"), Check: resource.ComposeTestCheckFunc( testCheckAzureRMLinuxVirtualMachineScaleSetExists(data.ResourceName), @@ -308,6 +309,72 @@ func TestAccAzureRMLinuxVirtualMachineScaleSet_disksDataDiskStorageAccountTypeUl }) } +func TestAccAzureRMLinuxVirtualMachineScaleSet_disksDataDiskStorageAccountTypeUltraSSDLRSWithIOPS(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine_scale_set", "test") + // Are supported in East US 2, SouthEast Asia, and North Europe, in two availability zones per region + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMLinuxVirtualMachineScaleSetDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMLinuxVirtualMachineScaleSet_disksDataDiskStorageAccountTypeUltraSSDLRSWithIOPS(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMLinuxVirtualMachineScaleSetExists(data.ResourceName), + ), + }, + data.ImportStep( + "admin_password", + ), + }, + }) +} + +func TestAccAzureRMLinuxVirtualMachineScaleSet_disksDataDiskStorageAccountTypeUltraSSDLRSWithMBPS(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine_scale_set", "test") + 
// Are supported in East US 2, SouthEast Asia, and North Europe, in two availability zones per region + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMLinuxVirtualMachineScaleSetDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMLinuxVirtualMachineScaleSet_disksDataDiskStorageAccountTypeUltraSSDLRSWithMBPS(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMLinuxVirtualMachineScaleSetExists(data.ResourceName), + ), + }, + data.ImportStep( + "admin_password", + ), + }, + }) +} + +func TestAccAzureRMLinuxVirtualMachineScaleSet_disksDataDiskStorageAccountTypeUltraSSDLRSWithIOPSAndMBPS(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine_scale_set", "test") + // Are supported in East US 2, SouthEast Asia, and North Europe, in two availability zones per region + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMLinuxVirtualMachineScaleSetDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMLinuxVirtualMachineScaleSet_disksDataDiskStorageAccountTypeUltraSSDLRSWithIOPSAndMBPS(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMLinuxVirtualMachineScaleSetExists(data.ResourceName), + ), + }, + data.ImportStep( + "admin_password", + ), + }, + }) +} + func TestAccAzureRMLinuxVirtualMachineScaleSet_disksDataDiskWriteAcceleratorEnabled(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine_scale_set", "test") @@ -804,6 +871,172 @@ resource "azurerm_linux_virtual_machine_scale_set" "test" { `, template, data.RandomInteger) } +func testAccAzureRMLinuxVirtualMachineScaleSet_disksDataDiskStorageAccountTypeUltraSSDLRSWithIOPS(data acceptance.TestData) string { + template := testAccAzureRMLinuxVirtualMachineScaleSet_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_linux_virtual_machine_scale_set" "test" { + name = "acctestvmss-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + sku = "Standard_D2s_v3" + instances = 1 + admin_username = "adminuser" + admin_password = "P@ssword1234!" + zones = [1, 2, 3] + + disable_password_authentication = false + + source_image_reference { + publisher = "Canonical" + offer = "UbuntuServer" + sku = "16.04-LTS" + version = "latest" + } + + os_disk { + storage_account_type = "Standard_LRS" + caching = "ReadWrite" + } + + data_disk { + storage_account_type = "UltraSSD_LRS" + caching = "None" + disk_size_gb = 10 + lun = 10 + disk_iops_read_write = 101 + } + + additional_capabilities { + ultra_ssd_enabled = true + } + + network_interface { + name = "example" + primary = true + + ip_configuration { + name = "internal" + primary = true + subnet_id = azurerm_subnet.test.id + } + } +} +`, template, data.RandomInteger) +} + +func testAccAzureRMLinuxVirtualMachineScaleSet_disksDataDiskStorageAccountTypeUltraSSDLRSWithMBPS(data acceptance.TestData) string { + template := testAccAzureRMLinuxVirtualMachineScaleSet_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_linux_virtual_machine_scale_set" "test" { + name = "acctestvmss-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + sku = "Standard_D2s_v3" + instances = 1 + admin_username = "adminuser" + admin_password = "P@ssword1234!" 
+ zones = [1, 2, 3] + + disable_password_authentication = false + + source_image_reference { + publisher = "Canonical" + offer = "UbuntuServer" + sku = "16.04-LTS" + version = "latest" + } + + os_disk { + storage_account_type = "Standard_LRS" + caching = "ReadWrite" + } + + data_disk { + storage_account_type = "UltraSSD_LRS" + caching = "None" + disk_size_gb = 10 + lun = 10 + disk_mbps_read_write = 11 + } + + additional_capabilities { + ultra_ssd_enabled = true + } + + network_interface { + name = "example" + primary = true + + ip_configuration { + name = "internal" + primary = true + subnet_id = azurerm_subnet.test.id + } + } +} +`, template, data.RandomInteger) +} + +func testAccAzureRMLinuxVirtualMachineScaleSet_disksDataDiskStorageAccountTypeUltraSSDLRSWithIOPSAndMBPS(data acceptance.TestData) string { + template := testAccAzureRMLinuxVirtualMachineScaleSet_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_linux_virtual_machine_scale_set" "test" { + name = "acctestvmss-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + sku = "Standard_D2s_v3" + instances = 1 + admin_username = "adminuser" + admin_password = "P@ssword1234!" + zones = [1, 2, 3] + + disable_password_authentication = false + + source_image_reference { + publisher = "Canonical" + offer = "UbuntuServer" + sku = "16.04-LTS" + version = "latest" + } + + os_disk { + storage_account_type = "Standard_LRS" + caching = "ReadWrite" + } + + data_disk { + storage_account_type = "UltraSSD_LRS" + caching = "None" + disk_size_gb = 10 + lun = 10 + disk_iops_read_write = 101 + disk_mbps_read_write = 11 + } + + additional_capabilities { + ultra_ssd_enabled = true + } + + network_interface { + name = "example" + primary = true + + ip_configuration { + name = "internal" + primary = true + subnet_id = azurerm_subnet.test.id + } + } +} +`, template, data.RandomInteger) +} + func testAccAzureRMLinuxVirtualMachineScaleSet_disksDataDiskWriteAcceleratorEnabled(data acceptance.TestData) string { template := testAccAzureRMLinuxVirtualMachineScaleSet_template(data) return fmt.Sprintf(` diff --git a/azurerm/internal/services/compute/tests/linux_virtual_machine_scale_set_disk_os_resource_test.go b/azurerm/internal/services/compute/tests/linux_virtual_machine_scale_set_disk_os_resource_test.go index a6e3a70329dc..2471b0cb0b3c 100644 --- a/azurerm/internal/services/compute/tests/linux_virtual_machine_scale_set_disk_os_resource_test.go +++ b/azurerm/internal/services/compute/tests/linux_virtual_machine_scale_set_disk_os_resource_test.go @@ -24,7 +24,8 @@ func TestAccAzureRMLinuxVirtualMachineScaleSet_disksOSDiskCaching(t *testing.T) }, data.ImportStep( "admin_password", - ), { + ), + { Config: testAccAzureRMLinuxVirtualMachineScaleSet_disksOSDiskCaching(data, "ReadOnly"), Check: resource.ComposeTestCheckFunc( testCheckAzureRMLinuxVirtualMachineScaleSetExists(data.ResourceName), @@ -32,7 +33,8 @@ func TestAccAzureRMLinuxVirtualMachineScaleSet_disksOSDiskCaching(t *testing.T) }, data.ImportStep( "admin_password", - ), { + ), + { Config: testAccAzureRMLinuxVirtualMachineScaleSet_disksOSDiskCaching(data, "ReadWrite"), Check: resource.ComposeTestCheckFunc( testCheckAzureRMLinuxVirtualMachineScaleSetExists(data.ResourceName), diff --git a/azurerm/internal/services/compute/tests/linux_virtual_machine_scale_set_other_resource_test.go b/azurerm/internal/services/compute/tests/linux_virtual_machine_scale_set_other_resource_test.go index ba003fc2ba80..73af37a79afd 100644 --- 
a/azurerm/internal/services/compute/tests/linux_virtual_machine_scale_set_other_resource_test.go +++ b/azurerm/internal/services/compute/tests/linux_virtual_machine_scale_set_other_resource_test.go @@ -51,6 +51,48 @@ func TestAccAzureRMLinuxVirtualMachineScaleSet_otherBootDiagnostics(t *testing.T }) } +func TestAccAzureRMLinuxVirtualMachineScaleSet_otherBootDiagnosticsManaged(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine_scale_set", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMLinuxVirtualMachineScaleSetDestroy, + Steps: []resource.TestStep{ + { + // Enabled + Config: testAccAzureRMLinuxVirtualMachineScaleSet_otherBootDiagnosticsManaged(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMLinuxVirtualMachineScaleSetExists(data.ResourceName), + ), + }, + data.ImportStep( + "admin_password", + ), + { + // Removed + Config: testAccAzureRMLinuxVirtualMachineScaleSet_otherBootDiagnosticsDisabled(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMLinuxVirtualMachineScaleSetExists(data.ResourceName), + ), + }, + data.ImportStep( + "admin_password", + ), + { + // Enabled + Config: testAccAzureRMLinuxVirtualMachineScaleSet_otherBootDiagnosticsManaged(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMLinuxVirtualMachineScaleSetExists(data.ResourceName), + ), + }, + data.ImportStep( + "admin_password", + ), + }, + }) +} + func TestAccAzureRMLinuxVirtualMachineScaleSet_otherComputerNamePrefix(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine_scale_set", "test") @@ -781,6 +823,26 @@ func TestAccAzureRMLinuxVirtualMachineScaleSet_otherEncryptionAtHostWithCMK(t *t }) } +func TestAccAzureRMLinuxVirtualMachineScaleSet_otherPlatformFaultDomainCount(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine_scale_set", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMLinuxVirtualMachineScaleSetDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMLinuxVirtualMachineScaleSet_otherPlatformFaultDomainCount(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMLinuxVirtualMachineScaleSetExists(data.ResourceName), + ), + }, + // TODO - extension should be changed to extension.0.protected_settings when either binary testing is available or this feature is promoted from beta + data.ImportStep("admin_password", "extension"), + }, + }) +} + func testAccAzureRMLinuxVirtualMachineScaleSet_otherBootDiagnostics(data acceptance.TestData) string { template := testAccAzureRMLinuxVirtualMachineScaleSet_template(data) return fmt.Sprintf(` @@ -835,6 +897,58 @@ resource "azurerm_linux_virtual_machine_scale_set" "test" { `, template, data.RandomString, data.RandomInteger) } +func testAccAzureRMLinuxVirtualMachineScaleSet_otherBootDiagnosticsManaged(data acceptance.TestData) string { + template := testAccAzureRMLinuxVirtualMachineScaleSet_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_storage_account" "test" { + name = "accsa%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_linux_virtual_machine_scale_set" "test" { + name = "acctestvmss-%d" + 
resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + sku = "Standard_F2" + instances = 1 + admin_username = "adminuser" + admin_password = "P@ssword1234!" + + disable_password_authentication = false + + source_image_reference { + publisher = "Canonical" + offer = "UbuntuServer" + sku = "16.04-LTS" + version = "latest" + } + + os_disk { + storage_account_type = "Standard_LRS" + caching = "ReadWrite" + } + + network_interface { + name = "example" + primary = true + + ip_configuration { + name = "internal" + primary = true + subnet_id = azurerm_subnet.test.id + } + } + + boot_diagnostics {} +} +`, template, data.RandomString, data.RandomInteger) +} + func testAccAzureRMLinuxVirtualMachineScaleSet_otherBootDiagnosticsDisabled(data acceptance.TestData) string { template := testAccAzureRMLinuxVirtualMachineScaleSet_template(data) return fmt.Sprintf(` @@ -2564,3 +2678,47 @@ resource "azurerm_linux_virtual_machine_scale_set" "test" { } `, template, data.RandomInteger, enabled) } + +func testAccAzureRMLinuxVirtualMachineScaleSet_otherPlatformFaultDomainCount(data acceptance.TestData) string { + template := testAccAzureRMLinuxVirtualMachineScaleSet_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_linux_virtual_machine_scale_set" "test" { + name = "acctestvmss-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + sku = "Standard_F2" + instances = 1 + admin_username = "adminuser" + admin_password = "P@ssword1234!" + + disable_password_authentication = false + + source_image_reference { + publisher = "Canonical" + offer = "UbuntuServer" + sku = "16.04-LTS" + version = "latest" + } + + os_disk { + storage_account_type = "Standard_LRS" + caching = "ReadWrite" + } + + network_interface { + name = "example" + primary = true + + ip_configuration { + name = "internal" + primary = true + subnet_id = azurerm_subnet.test.id + } + } + + platform_fault_domain_count = 3 +} +`, template, data.RandomInteger) +} diff --git a/azurerm/internal/services/compute/tests/managed_disk_resource_test.go b/azurerm/internal/services/compute/tests/managed_disk_resource_test.go index 862b2b15853c..79cb0ad959b8 100644 --- a/azurerm/internal/services/compute/tests/managed_disk_resource_test.go +++ b/azurerm/internal/services/compute/tests/managed_disk_resource_test.go @@ -446,7 +446,6 @@ func testCheckAzureRMManagedDiskDestroy(s *terraform.State) error { resourceGroup := rs.Primary.Attributes["resource_group_name"] resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { return nil } diff --git a/azurerm/internal/services/compute/tests/proximity_placement_group_resource_test.go b/azurerm/internal/services/compute/tests/proximity_placement_group_resource_test.go index 79eebabe24d3..ae7382a4baf0 100644 --- a/azurerm/internal/services/compute/tests/proximity_placement_group_resource_test.go +++ b/azurerm/internal/services/compute/tests/proximity_placement_group_resource_test.go @@ -174,7 +174,6 @@ func testCheckAzureRMProximityPlacementGroupDestroy(s *terraform.State) error { resourceGroup := rs.Primary.Attributes["resource_group_name"] resp, err := client.Get(ctx, resourceGroup, name, "") - if err != nil { if utils.ResponseWasNotFound(resp.Response) { return nil diff --git a/azurerm/internal/services/compute/tests/shared_image_gallery_resource_test.go b/azurerm/internal/services/compute/tests/shared_image_gallery_resource_test.go index fd426c3024cc..4f362a0ff5aa 100644 --- 
a/azurerm/internal/services/compute/tests/shared_image_gallery_resource_test.go +++ b/azurerm/internal/services/compute/tests/shared_image_gallery_resource_test.go @@ -31,6 +31,7 @@ func TestAccAzureRMSharedImageGallery_basic(t *testing.T) { }, }) } + func TestAccAzureRMSharedImageGallery_requiresImport(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_shared_image_gallery", "test") diff --git a/azurerm/internal/services/compute/tests/snapshot_resource_test.go b/azurerm/internal/services/compute/tests/snapshot_resource_test.go index 1a514f48d134..5599bd302876 100644 --- a/azurerm/internal/services/compute/tests/snapshot_resource_test.go +++ b/azurerm/internal/services/compute/tests/snapshot_resource_test.go @@ -61,6 +61,7 @@ func TestSnapshotName_validation(t *testing.T) { } } } + func TestAccAzureRMSnapshot_fromManagedDisk(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_snapshot", "test") @@ -79,6 +80,7 @@ func TestAccAzureRMSnapshot_fromManagedDisk(t *testing.T) { }, }) } + func TestAccAzureRMSnapshot_requiresImport(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_snapshot", "test") diff --git a/azurerm/internal/services/compute/tests/virtual_machine_extension_resource_test.go b/azurerm/internal/services/compute/tests/virtual_machine_extension_resource_test.go index 7ab22ff77f84..01dd8aa3f157 100644 --- a/azurerm/internal/services/compute/tests/virtual_machine_extension_resource_test.go +++ b/azurerm/internal/services/compute/tests/virtual_machine_extension_resource_test.go @@ -119,9 +119,9 @@ func testCheckAzureRMVirtualMachineExtensionExists(resourceName string) resource return err } - if resp, err := client.Get(ctx, id.ResourceGroup, id.VirtualMachine, id.Name, ""); err != nil { + if resp, err := client.Get(ctx, id.ResourceGroup, id.VirtualMachineName, id.ExtensionName, ""); err != nil { if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: VirtualMachine Extension %q (resource group: %q) does not exist", id.Name, id.ResourceGroup) + return fmt.Errorf("Bad: VirtualMachine Extension %q (resource group: %q) does not exist", id.ExtensionName, id.ResourceGroup) } return fmt.Errorf("Bad: Get on vmExtensionClient: %s", err) } @@ -144,7 +144,7 @@ func testCheckAzureRMVirtualMachineExtensionDestroy(s *terraform.State) error { return err } - if resp, err := client.Get(ctx, id.ResourceGroup, id.VirtualMachine, id.Name, ""); err != nil { + if resp, err := client.Get(ctx, id.ResourceGroup, id.VirtualMachineName, id.ExtensionName, ""); err != nil { if !utils.ResponseWasNotFound(resp.Response) { return fmt.Errorf("Bad: Get on Compute.VMExtensionClient: %+v", err) } diff --git a/azurerm/internal/services/compute/tests/virtual_machine_resource_test.go b/azurerm/internal/services/compute/tests/virtual_machine_resource_test.go index cdf4417ac73d..ad3aa1238af4 100644 --- a/azurerm/internal/services/compute/tests/virtual_machine_resource_test.go +++ b/azurerm/internal/services/compute/tests/virtual_machine_resource_test.go @@ -169,7 +169,6 @@ func testCheckAzureRMVirtualMachineDestroy(s *terraform.State) error { resourceGroup := rs.Primary.Attributes["resource_group_name"] resp, err := client.Get(ctx, resourceGroup, name, "") - if err != nil { if resp.StatusCode == http.StatusNotFound { return nil diff --git a/azurerm/internal/services/compute/tests/virtual_machine_scale_set_extension_resource_test.go b/azurerm/internal/services/compute/tests/virtual_machine_scale_set_extension_resource_test.go index 156e83614140..bab402b6e4e3 100644 --- 
a/azurerm/internal/services/compute/tests/virtual_machine_scale_set_extension_resource_test.go +++ b/azurerm/internal/services/compute/tests/virtual_machine_scale_set_extension_resource_test.go @@ -213,10 +213,10 @@ func testCheckAzureRMVirtualMachineScaleSetExtensionExists(resourceName string) return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.VirtualMachineScaleSetName, id.Name, "") + resp, err := client.Get(ctx, id.ResourceGroup, id.VirtualMachineScaleSetName, id.ExtensionName, "") if err != nil { if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Extension %q (VirtualMachineScaleSet %q / Resource Group: %q) does not exist", id.Name, id.VirtualMachineScaleSetName, id.ResourceGroup) + return fmt.Errorf("Bad: Extension %q (VirtualMachineScaleSet %q / Resource Group: %q) does not exist", id.ExtensionName, id.VirtualMachineScaleSetName, id.ResourceGroup) } return fmt.Errorf("Bad: Get on vmScaleSetClient: %+v", err) } @@ -239,7 +239,7 @@ func testCheckAzureRMVirtualMachineScaleSetExtensionDestroy(s *terraform.State) return err } - if resp, err := client.Get(ctx, id.ResourceGroup, id.VirtualMachineScaleSetName, id.Name, ""); err != nil { + if resp, err := client.Get(ctx, id.ResourceGroup, id.VirtualMachineScaleSetName, id.ExtensionName, ""); err != nil { if !utils.ResponseWasNotFound(resp.Response) { return fmt.Errorf("Bad: Get on Compute.VMScaleSetExtensionsClient: %+v", err) } diff --git a/azurerm/internal/services/compute/tests/virtual_machine_scale_set_resource_test.go b/azurerm/internal/services/compute/tests/virtual_machine_scale_set_resource_test.go index b4826fba599d..2bc1d8121556 100644 --- a/azurerm/internal/services/compute/tests/virtual_machine_scale_set_resource_test.go +++ b/azurerm/internal/services/compute/tests/virtual_machine_scale_set_resource_test.go @@ -1105,7 +1105,6 @@ func testCheckAzureRMVirtualMachineScaleSetDestroy(s *terraform.State) error { resourceGroup := rs.Primary.Attributes["resource_group_name"] resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { return nil } diff --git a/azurerm/internal/services/compute/tests/virtual_machine_unmanaged_disks_resource_test.go b/azurerm/internal/services/compute/tests/virtual_machine_unmanaged_disks_resource_test.go index e5540b0e1d14..515a99e14a44 100644 --- a/azurerm/internal/services/compute/tests/virtual_machine_unmanaged_disks_resource_test.go +++ b/azurerm/internal/services/compute/tests/virtual_machine_unmanaged_disks_resource_test.go @@ -12,7 +12,7 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" - "github.com/tombuildsstuff/giovanni/storage/2018-11-09/blob/blobs" + "github.com/tombuildsstuff/giovanni/storage/2019-12-12/blob/blobs" ) func TestAccAzureRMVirtualMachine_basicLinuxMachine(t *testing.T) { diff --git a/azurerm/internal/services/compute/tests/windows_virtual_machine_resource_other_test.go b/azurerm/internal/services/compute/tests/windows_virtual_machine_resource_other_test.go index 804130948a07..601145e5f134 100644 --- a/azurerm/internal/services/compute/tests/windows_virtual_machine_resource_other_test.go +++ b/azurerm/internal/services/compute/tests/windows_virtual_machine_resource_other_test.go @@ -9,6 +9,108 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" ) +func 
TestAccWindowsVirtualMachine_otherPatchModeManual(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_windows_virtual_machine", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: checkWindowsVirtualMachineIsDestroyed, + Steps: []resource.TestStep{ + { + Config: testWindowsVirtualMachine_otherPatchModeManual(data), + Check: resource.ComposeTestCheckFunc( + checkWindowsVirtualMachineExists(data.ResourceName), + ), + }, + data.ImportStep( + "admin_password", + ), + }, + }) +} + +func TestAccWindowsVirtualMachine_otherPatchModeAutomaticByOS(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_windows_virtual_machine", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: checkWindowsVirtualMachineIsDestroyed, + Steps: []resource.TestStep{ + { + Config: testWindowsVirtualMachine_otherPatchModeAutomaticByOS(data), + Check: resource.ComposeTestCheckFunc( + checkWindowsVirtualMachineExists(data.ResourceName), + ), + }, + data.ImportStep( + "admin_password", + ), + }, + }) +} + +func TestAccWindowsVirtualMachine_otherPatchModeAutomaticByPlatform(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_windows_virtual_machine", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: checkWindowsVirtualMachineIsDestroyed, + Steps: []resource.TestStep{ + { + Config: testWindowsVirtualMachine_otherPatchModeAutomaticByPlatform(data), + Check: resource.ComposeTestCheckFunc( + checkWindowsVirtualMachineExists(data.ResourceName), + ), + }, + data.ImportStep( + "admin_password", + ), + }, + }) +} + +func TestAccWindowsVirtualMachine_otherPatchModeUpdate(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_windows_virtual_machine", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: checkWindowsVirtualMachineIsDestroyed, + Steps: []resource.TestStep{ + { + Config: testWindowsVirtualMachine_otherPatchModeAutomaticByOS(data), + Check: resource.ComposeTestCheckFunc( + checkWindowsVirtualMachineExists(data.ResourceName), + ), + }, + data.ImportStep( + "admin_password", + ), + { + Config: testWindowsVirtualMachine_otherPatchModeAutomaticByPlatform(data), + Check: resource.ComposeTestCheckFunc( + checkWindowsVirtualMachineExists(data.ResourceName), + ), + }, + data.ImportStep( + "admin_password", + ), + { + Config: testWindowsVirtualMachine_otherPatchModeManual(data), // this update requires force replacement actually + Check: resource.ComposeTestCheckFunc( + checkWindowsVirtualMachineExists(data.ResourceName), + ), + }, + data.ImportStep( + "admin_password", + ), + }, + }) +} + func TestAccWindowsVirtualMachine_otherAdditionalUnattendContent(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_windows_virtual_machine", "test") @@ -139,6 +241,70 @@ func TestAccWindowsVirtualMachine_otherAllowExtensionOperationsUpdatedWithoutVmA }) } +func TestAccWindowsVirtualMachine_otherExtensionsTimeBudget(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_windows_virtual_machine", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: 
checkWindowsVirtualMachineIsDestroyed, + Steps: []resource.TestStep{ + { + Config: testWindowsVirtualMachine_otherExtensionsTimeBudget(data, "PT30M"), + Check: resource.ComposeTestCheckFunc( + checkWindowsVirtualMachineExists(data.ResourceName), + resource.TestCheckResourceAttr(data.ResourceName, "extensions_time_budget", "PT30M"), + ), + }, + data.ImportStep( + "admin_password", + ), + }, + }) +} + +func TestAccWindowsVirtualMachine_otherExtensionsTimeBudgetUpdate(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_windows_virtual_machine", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: checkWindowsVirtualMachineIsDestroyed, + Steps: []resource.TestStep{ + { + Config: testWindowsVirtualMachine_otherExtensionsTimeBudget(data, "PT30M"), + Check: resource.ComposeTestCheckFunc( + checkWindowsVirtualMachineExists(data.ResourceName), + resource.TestCheckResourceAttr(data.ResourceName, "extensions_time_budget", "PT30M"), + ), + }, + data.ImportStep( + "admin_password", + ), + { + Config: testWindowsVirtualMachine_otherExtensionsTimeBudget(data, "PT50M"), + Check: resource.ComposeTestCheckFunc( + checkWindowsVirtualMachineExists(data.ResourceName), + resource.TestCheckResourceAttr(data.ResourceName, "extensions_time_budget", "PT50M"), + ), + }, + data.ImportStep( + "admin_password", + ), + { + Config: testWindowsVirtualMachine_otherExtensionsTimeBudget(data, "PT30M"), + Check: resource.ComposeTestCheckFunc( + checkWindowsVirtualMachineExists(data.ResourceName), + resource.TestCheckResourceAttr(data.ResourceName, "extensions_time_budget", "PT30M"), + ), + }, + data.ImportStep( + "admin_password", + ), + }, + }) +} + func TestAccWindowsVirtualMachine_otherBootDiagnostics(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_windows_virtual_machine", "test") @@ -181,6 +347,48 @@ func TestAccWindowsVirtualMachine_otherBootDiagnostics(t *testing.T) { }) } +func TestAccWindowsVirtualMachine_otherBootDiagnosticsManaged(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_windows_virtual_machine", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: checkWindowsVirtualMachineIsDestroyed, + Steps: []resource.TestStep{ + { + // Enabled + Config: testWindowsVirtualMachine_otherBootDiagnosticsManaged(data), + Check: resource.ComposeTestCheckFunc( + checkWindowsVirtualMachineExists(data.ResourceName), + ), + }, + data.ImportStep( + "admin_password", + ), + { + // Disabled + Config: testWindowsVirtualMachine_otherBootDiagnosticsDisabled(data), + Check: resource.ComposeTestCheckFunc( + checkWindowsVirtualMachineExists(data.ResourceName), + ), + }, + data.ImportStep( + "admin_password", + ), + { + // Enabled + Config: testWindowsVirtualMachine_otherBootDiagnosticsManaged(data), + Check: resource.ComposeTestCheckFunc( + checkWindowsVirtualMachineExists(data.ResourceName), + ), + }, + data.ImportStep( + "admin_password", + ), + }, + }) +} + func TestAccWindowsVirtualMachine_otherComputerNameDefault(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_windows_virtual_machine", "test") @@ -813,6 +1021,151 @@ func TestAccWindowsVirtualMachine_otherEncryptionAtHostEnabledWithCMK(t *testing }) } +func testWindowsVirtualMachine_otherPatchModeManual(data acceptance.TestData) string { + template := testWindowsVirtualMachine_template(data) + return fmt.Sprintf(` +%s 
+ +resource "azurerm_windows_virtual_machine" "test" { + name = local.vm_name + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + size = "Standard_F2" + admin_username = "adminuser" + admin_password = "P@$$w0rd1234!" + + network_interface_ids = [ + azurerm_network_interface.test.id, + ] + + os_disk { + caching = "ReadWrite" + storage_account_type = "Standard_LRS" + } + + source_image_reference { + publisher = "MicrosoftWindowsServer" + offer = "WindowsServer" + sku = "2016-Datacenter" + version = "latest" + } + + enable_automatic_updates = false + patch_mode = "Manual" +} +`, template) +} + +func testWindowsVirtualMachine_otherPatchModeAutomaticByOS(data acceptance.TestData) string { + template := testWindowsVirtualMachine_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_windows_virtual_machine" "test" { + name = local.vm_name + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + size = "Standard_F2" + admin_username = "adminuser" + admin_password = "P@$$w0rd1234!" + + network_interface_ids = [ + azurerm_network_interface.test.id, + ] + + os_disk { + caching = "ReadWrite" + storage_account_type = "Standard_LRS" + } + + source_image_reference { + publisher = "MicrosoftWindowsServer" + offer = "WindowsServer" + sku = "2016-Datacenter" + version = "latest" + } + + patch_mode = "AutomaticByOS" +} +`, template) +} + +func testWindowsVirtualMachine_otherPatchModeAutomaticByPlatform(data acceptance.TestData) string { + template := testWindowsVirtualMachine_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_windows_virtual_machine" "test" { + name = local.vm_name + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + size = "Standard_F2" + admin_username = "adminuser" + admin_password = "P@$$w0rd1234!" 
+ + network_interface_ids = [ + azurerm_network_interface.test.id, + ] + + os_disk { + caching = "ReadWrite" + storage_account_type = "Standard_LRS" + } + + source_image_reference { + publisher = "MicrosoftWindowsServer" + offer = "WindowsServer" + sku = "2016-Datacenter" + version = "latest" + } + + patch_mode = "AutomaticByPlatform" +} +`, template) +} + +func TestAccWindowsVirtualMachine_otherGracefulShutdownDisabled(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_windows_virtual_machine", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: checkWindowsVirtualMachineIsDestroyed, + Steps: []resource.TestStep{ + { + Config: testWindowsVirtualMachine_otherGracefulShutdown(data, false), + Check: resource.ComposeTestCheckFunc( + checkWindowsVirtualMachineExists(data.ResourceName), + ), + }, + data.ImportStep( + "admin_password", + ), + }, + }) +} + +func TestAccWindowsVirtualMachine_otherGracefulShutdownEnabled(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_windows_virtual_machine", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: checkWindowsVirtualMachineIsDestroyed, + Steps: []resource.TestStep{ + { + Config: testWindowsVirtualMachine_otherGracefulShutdown(data, true), + Check: resource.ComposeTestCheckFunc( + checkWindowsVirtualMachineExists(data.ResourceName), + ), + }, + data.ImportStep( + "admin_password", + ), + }, + }) +} + func testWindowsVirtualMachine_otherAdditionalUnattendContent(data acceptance.TestData) string { template := testWindowsVirtualMachine_template(data) return fmt.Sprintf(` @@ -978,6 +1331,39 @@ resource "azurerm_windows_virtual_machine" "test" { `, template) } +func testWindowsVirtualMachine_otherExtensionsTimeBudget(data acceptance.TestData, duration string) string { + template := testWindowsVirtualMachine_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_windows_virtual_machine" "test" { + name = local.vm_name + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + size = "Standard_F2" + admin_username = "adminuser" + admin_password = "P@$$w0rd1234!" + network_interface_ids = [ + azurerm_network_interface.test.id, + ] + + os_disk { + caching = "ReadWrite" + storage_account_type = "Standard_LRS" + } + + source_image_reference { + publisher = "MicrosoftWindowsServer" + offer = "WindowsServer" + sku = "2016-Datacenter" + version = "latest" + } + + extensions_time_budget = "%s" +} +`, template, duration) +} + func testWindowsVirtualMachine_otherBootDiagnostics(data acceptance.TestData) string { template := testWindowsVirtualMachine_otherBootDiagnosticsTemplate(data) return fmt.Sprintf(` @@ -1013,6 +1399,39 @@ resource "azurerm_windows_virtual_machine" "test" { `, template) } +func testWindowsVirtualMachine_otherBootDiagnosticsManaged(data acceptance.TestData) string { + template := testWindowsVirtualMachine_otherBootDiagnosticsTemplate(data) + return fmt.Sprintf(` +%s + +resource "azurerm_windows_virtual_machine" "test" { + name = local.vm_name + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + size = "Standard_F2" + admin_username = "adminuser" + admin_password = "P@$$w0rd1234!" 
+ network_interface_ids = [ + azurerm_network_interface.test.id, + ] + + boot_diagnostics {} + + os_disk { + caching = "ReadWrite" + storage_account_type = "Standard_LRS" + } + + source_image_reference { + publisher = "MicrosoftWindowsServer" + offer = "WindowsServer" + sku = "2016-Datacenter" + version = "latest" + } +} +`, template) +} + func testWindowsVirtualMachine_otherBootDiagnosticsDisabled(data acceptance.TestData) string { template := testWindowsVirtualMachine_otherBootDiagnosticsTemplate(data) return fmt.Sprintf(` @@ -2190,3 +2609,74 @@ resource "azurerm_windows_virtual_machine" "test" { } `, template, enabled) } + +func testWindowsVirtualMachine_otherGracefulShutdown(data acceptance.TestData, gracefulShutdown bool) string { + return fmt.Sprintf(` +locals { + vm_name = "acctestvm%s" +} + +provider "azurerm" { + features { + virtual_machine { + graceful_shutdown = %t + } + } +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestnw-%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "internal" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" +} + +resource "azurerm_network_interface" "test" { + name = "acctestnic-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + ip_configuration { + name = "internal" + subnet_id = azurerm_subnet.test.id + private_ip_address_allocation = "Dynamic" + } +} + +resource "azurerm_windows_virtual_machine" "test" { + name = local.vm_name + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + size = "Standard_F2" + admin_username = "adminuser" + admin_password = "P@$$w0rd1234!" 
+ network_interface_ids = [ + azurerm_network_interface.test.id, + ] + + os_disk { + caching = "ReadWrite" + storage_account_type = "Standard_LRS" + } + + source_image_reference { + publisher = "MicrosoftWindowsServer" + offer = "WindowsServer" + sku = "2016-Datacenter" + version = "latest" + } +} +`, data.RandomString, gracefulShutdown, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/compute/tests/windows_virtual_machine_resource_scaling_test.go b/azurerm/internal/services/compute/tests/windows_virtual_machine_resource_scaling_test.go index d90fb3b16a0a..ba0fe139bde4 100644 --- a/azurerm/internal/services/compute/tests/windows_virtual_machine_resource_scaling_test.go +++ b/azurerm/internal/services/compute/tests/windows_virtual_machine_resource_scaling_test.go @@ -72,6 +72,54 @@ func TestAccWindowsVirtualMachine_scalingDedicatedHost(t *testing.T) { }) } +func TestAccWindowsVirtualMachine_scalingDedicatedHostUpdate(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_windows_virtual_machine", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: checkWindowsVirtualMachineIsDestroyed, + Steps: []resource.TestStep{ + { + Config: testWindowsVirtualMachine_scalingDedicatedHostInitial(data), + Check: resource.ComposeTestCheckFunc( + checkWindowsVirtualMachineExists(data.ResourceName), + ), + }, + data.ImportStep( + "admin_password", + ), + { + Config: testWindowsVirtualMachine_scalingDedicatedHost(data), + Check: resource.ComposeTestCheckFunc( + checkWindowsVirtualMachineExists(data.ResourceName), + ), + }, + data.ImportStep( + "admin_password", + ), + { + Config: testWindowsVirtualMachine_scalingDedicatedHostUpdate(data), + Check: resource.ComposeTestCheckFunc( + checkWindowsVirtualMachineExists(data.ResourceName), + ), + }, + data.ImportStep( + "admin_password", + ), + { + Config: testWindowsVirtualMachine_scalingDedicatedHostRemoved(data), + Check: resource.ComposeTestCheckFunc( + checkWindowsVirtualMachineExists(data.ResourceName), + ), + }, + data.ImportStep( + "admin_password", + ), + }, + }) +} + func TestAccWindowsVirtualMachine_scalingProximityPlacementGroup(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_windows_virtual_machine", "test") @@ -228,6 +276,44 @@ resource "azurerm_windows_virtual_machine" "test" { `, template, data.RandomInteger) } +func testWindowsVirtualMachine_scalingDedicatedHostInitial(data acceptance.TestData) string { + template := testWindowsVirtualMachine_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_dedicated_host_group" "test" { + name = "acctestDHG-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + platform_fault_domain_count = 2 +} + +resource "azurerm_windows_virtual_machine" "test" { + name = local.vm_name + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + size = "Standard_D2s_v3" # NOTE: SKU's are limited by the Dedicated Host + admin_username = "adminuser" + admin_password = "P@$$w0rd1234!" 
+ network_interface_ids = [ + azurerm_network_interface.test.id, + ] + + os_disk { + caching = "ReadWrite" + storage_account_type = "Standard_LRS" + } + + source_image_reference { + publisher = "MicrosoftWindowsServer" + offer = "WindowsServer" + sku = "2016-Datacenter" + version = "latest" + } +} +`, template, data.RandomInteger) +} + func testWindowsVirtualMachine_scalingDedicatedHost(data acceptance.TestData) string { template := testWindowsVirtualMachine_template(data) return fmt.Sprintf(` @@ -275,6 +361,108 @@ resource "azurerm_windows_virtual_machine" "test" { `, template, data.RandomInteger, data.RandomInteger) } +func testWindowsVirtualMachine_scalingDedicatedHostUpdate(data acceptance.TestData) string { + template := testWindowsVirtualMachine_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_dedicated_host_group" "test" { + name = "acctestDHG-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + platform_fault_domain_count = 2 +} + +resource "azurerm_dedicated_host" "test" { + name = "acctestDH-%d" + dedicated_host_group_id = azurerm_dedicated_host_group.test.id + location = azurerm_resource_group.test.location + sku_name = "DSv3-Type1" + platform_fault_domain = 1 +} + +resource "azurerm_dedicated_host" "second" { + name = "acctestDH2-%d" + dedicated_host_group_id = azurerm_dedicated_host_group.test.id + location = azurerm_resource_group.test.location + sku_name = "DSv3-Type1" + platform_fault_domain = 1 +} + +resource "azurerm_windows_virtual_machine" "test" { + name = local.vm_name + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + size = "Standard_D2s_v3" # NOTE: SKU's are limited by the Dedicated Host + admin_username = "adminuser" + admin_password = "P@$$w0rd1234!" + dedicated_host_id = azurerm_dedicated_host.second.id + network_interface_ids = [ + azurerm_network_interface.test.id, + ] + + os_disk { + caching = "ReadWrite" + storage_account_type = "Standard_LRS" + } + + source_image_reference { + publisher = "MicrosoftWindowsServer" + offer = "WindowsServer" + sku = "2016-Datacenter" + version = "latest" + } +} +`, template, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func testWindowsVirtualMachine_scalingDedicatedHostRemoved(data acceptance.TestData) string { + template := testWindowsVirtualMachine_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_dedicated_host_group" "test" { + name = "acctestDHG-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + platform_fault_domain_count = 2 +} + +resource "azurerm_dedicated_host" "second" { + name = "acctestDH2-%d" + dedicated_host_group_id = azurerm_dedicated_host_group.test.id + location = azurerm_resource_group.test.location + sku_name = "DSv3-Type1" + platform_fault_domain = 1 +} + +resource "azurerm_windows_virtual_machine" "test" { + name = local.vm_name + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + size = "Standard_D2s_v3" # NOTE: SKU's are limited by the Dedicated Host + admin_username = "adminuser" + admin_password = "P@$$w0rd1234!" 
+ dedicated_host_id = azurerm_dedicated_host.second.id + network_interface_ids = [ + azurerm_network_interface.test.id, + ] + + os_disk { + caching = "ReadWrite" + storage_account_type = "Standard_LRS" + } + + source_image_reference { + publisher = "MicrosoftWindowsServer" + offer = "WindowsServer" + sku = "2016-Datacenter" + version = "latest" + } +} +`, template, data.RandomInteger, data.RandomInteger) +} + func testWindowsVirtualMachine_scalingProximityPlacementGroup(data acceptance.TestData) string { template := testWindowsVirtualMachine_template(data) return fmt.Sprintf(` diff --git a/azurerm/internal/services/compute/tests/windows_virtual_machine_scale_set_disk_data_resource_test.go b/azurerm/internal/services/compute/tests/windows_virtual_machine_scale_set_disk_data_resource_test.go index d2d9c00827e6..3f3657f0883a 100644 --- a/azurerm/internal/services/compute/tests/windows_virtual_machine_scale_set_disk_data_resource_test.go +++ b/azurerm/internal/services/compute/tests/windows_virtual_machine_scale_set_disk_data_resource_test.go @@ -45,7 +45,8 @@ func TestAccAzureRMWindowsVirtualMachineScaleSet_disksDataDiskCaching(t *testing }, data.ImportStep( "admin_password", - ), { + ), + { Config: testAccAzureRMWindowsVirtualMachineScaleSet_disksDataDiskCaching(data, "ReadOnly"), Check: resource.ComposeTestCheckFunc( testCheckAzureRMWindowsVirtualMachineScaleSetExists(data.ResourceName), @@ -308,6 +309,72 @@ func TestAccAzureRMWindowsVirtualMachineScaleSet_disksDataDiskStorageAccountType }) } +func TestAccAzureRMWindowsVirtualMachineScaleSet_disksDataDiskStorageAccountTypeUltraSSDLRSWithIOPS(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_windows_virtual_machine_scale_set", "test") + + // Are supported in East US 2, SouthEast Asia, and North Europe, in two availability zones per region + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMWindowsVirtualMachineScaleSetDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMWindowsVirtualMachineScaleSet_disksDataDiskStorageAccountTypeUltraSSDLRSWithIOPS(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMWindowsVirtualMachineScaleSetExists(data.ResourceName), + ), + }, + data.ImportStep( + "admin_password", + ), + }, + }) +} + +func TestAccAzureRMWindowsVirtualMachineScaleSet_disksDataDiskStorageAccountTypeUltraSSDLRSWithMBPS(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_windows_virtual_machine_scale_set", "test") + + // Are supported in East US 2, SouthEast Asia, and North Europe, in two availability zones per region + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMWindowsVirtualMachineScaleSetDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMWindowsVirtualMachineScaleSet_disksDataDiskStorageAccountTypeUltraSSDLRSWithMBPS(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMWindowsVirtualMachineScaleSetExists(data.ResourceName), + ), + }, + data.ImportStep( + "admin_password", + ), + }, + }) +} + +func TestAccAzureRMWindowsVirtualMachineScaleSet_disksDataDiskStorageAccountTypeUltraSSDLRSWithIOPSAndMBPS(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_windows_virtual_machine_scale_set", "test") + + // Are supported in East US 2, SouthEast Asia, and North Europe, in two availability zones per region + 
resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMWindowsVirtualMachineScaleSetDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMWindowsVirtualMachineScaleSet_disksDataDiskStorageAccountTypeUltraSSDLRSWithIOPSAndMBPS(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMWindowsVirtualMachineScaleSetExists(data.ResourceName), + ), + }, + data.ImportStep( + "admin_password", + ), + }, + }) +} + func TestAccAzureRMWindowsVirtualMachineScaleSet_disksDataDiskWriteAcceleratorEnabled(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_windows_virtual_machine_scale_set", "test") @@ -597,6 +664,8 @@ resource "azurerm_windows_virtual_machine_scale_set" "test" { admin_username = "adminuser" admin_password = "P@ssword1234!" + computer_name_prefix = "acctestVM" + source_image_reference { publisher = "MicrosoftWindowsServer" offer = "WindowsServer" @@ -789,6 +858,166 @@ resource "azurerm_windows_virtual_machine_scale_set" "test" { `, template) } +func testAccAzureRMWindowsVirtualMachineScaleSet_disksDataDiskStorageAccountTypeUltraSSDLRSWithIOPS(data acceptance.TestData) string { + template := testAccAzureRMWindowsVirtualMachineScaleSet_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_windows_virtual_machine_scale_set" "test" { + name = local.vm_name + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + sku = "Standard_D2s_v3" + instances = 1 + admin_username = "adminuser" + admin_password = "P@ssword1234!" + zones = [1, 2, 3] + + source_image_reference { + publisher = "MicrosoftWindowsServer" + offer = "WindowsServer" + sku = "2019-Datacenter" + version = "latest" + } + + os_disk { + storage_account_type = "Standard_LRS" + caching = "ReadWrite" + } + + data_disk { + storage_account_type = "UltraSSD_LRS" + caching = "None" + disk_size_gb = 10 + lun = 10 + disk_iops_read_write = 101 + } + + additional_capabilities { + ultra_ssd_enabled = true + } + + network_interface { + name = "example" + primary = true + + ip_configuration { + name = "internal" + primary = true + subnet_id = azurerm_subnet.test.id + } + } +} +`, template) +} + +func testAccAzureRMWindowsVirtualMachineScaleSet_disksDataDiskStorageAccountTypeUltraSSDLRSWithMBPS(data acceptance.TestData) string { + template := testAccAzureRMWindowsVirtualMachineScaleSet_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_windows_virtual_machine_scale_set" "test" { + name = local.vm_name + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + sku = "Standard_D2s_v3" + instances = 1 + admin_username = "adminuser" + admin_password = "P@ssword1234!" 
+ zones = [1, 2, 3] + + source_image_reference { + publisher = "MicrosoftWindowsServer" + offer = "WindowsServer" + sku = "2019-Datacenter" + version = "latest" + } + + os_disk { + storage_account_type = "Standard_LRS" + caching = "ReadWrite" + } + + data_disk { + storage_account_type = "UltraSSD_LRS" + caching = "None" + disk_size_gb = 10 + lun = 10 + disk_mbps_read_write = 11 + } + + additional_capabilities { + ultra_ssd_enabled = true + } + + network_interface { + name = "example" + primary = true + + ip_configuration { + name = "internal" + primary = true + subnet_id = azurerm_subnet.test.id + } + } +} +`, template) +} + +func testAccAzureRMWindowsVirtualMachineScaleSet_disksDataDiskStorageAccountTypeUltraSSDLRSWithIOPSAndMBPS(data acceptance.TestData) string { + template := testAccAzureRMWindowsVirtualMachineScaleSet_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_windows_virtual_machine_scale_set" "test" { + name = local.vm_name + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + sku = "Standard_D2s_v3" + instances = 1 + admin_username = "adminuser" + admin_password = "P@ssword1234!" + zones = [1, 2, 3] + + source_image_reference { + publisher = "MicrosoftWindowsServer" + offer = "WindowsServer" + sku = "2019-Datacenter" + version = "latest" + } + + os_disk { + storage_account_type = "Standard_LRS" + caching = "ReadWrite" + } + + data_disk { + storage_account_type = "UltraSSD_LRS" + caching = "None" + disk_size_gb = 10 + lun = 10 + disk_iops_read_write = 101 + disk_mbps_read_write = 11 + } + + additional_capabilities { + ultra_ssd_enabled = true + } + + network_interface { + name = "example" + primary = true + + ip_configuration { + name = "internal" + primary = true + subnet_id = azurerm_subnet.test.id + } + } +} +`, template) +} + func testAccAzureRMWindowsVirtualMachineScaleSet_disksDataDiskWriteAcceleratorEnabled(data acceptance.TestData) string { template := testAccAzureRMWindowsVirtualMachineScaleSet_template(data) return fmt.Sprintf(` diff --git a/azurerm/internal/services/compute/tests/windows_virtual_machine_scale_set_disk_os_resource_test.go b/azurerm/internal/services/compute/tests/windows_virtual_machine_scale_set_disk_os_resource_test.go index 5936e1394901..55c2557c3f0a 100644 --- a/azurerm/internal/services/compute/tests/windows_virtual_machine_scale_set_disk_os_resource_test.go +++ b/azurerm/internal/services/compute/tests/windows_virtual_machine_scale_set_disk_os_resource_test.go @@ -24,7 +24,8 @@ func TestAccAzureRMWindowsVirtualMachineScaleSet_disksOSDiskCaching(t *testing.T }, data.ImportStep( "admin_password", - ), { + ), + { Config: testAccAzureRMWindowsVirtualMachineScaleSet_disksOSDiskCaching(data, "ReadOnly"), Check: resource.ComposeTestCheckFunc( testCheckAzureRMWindowsVirtualMachineScaleSetExists(data.ResourceName), @@ -32,7 +33,8 @@ func TestAccAzureRMWindowsVirtualMachineScaleSet_disksOSDiskCaching(t *testing.T }, data.ImportStep( "admin_password", - ), { + ), + { Config: testAccAzureRMWindowsVirtualMachineScaleSet_disksOSDiskCaching(data, "ReadWrite"), Check: resource.ComposeTestCheckFunc( testCheckAzureRMWindowsVirtualMachineScaleSetExists(data.ResourceName), diff --git a/azurerm/internal/services/compute/tests/windows_virtual_machine_scale_set_other_resource_test.go b/azurerm/internal/services/compute/tests/windows_virtual_machine_scale_set_other_resource_test.go index bdfabe08c3ae..dba13fcfedfc 100644 --- 
a/azurerm/internal/services/compute/tests/windows_virtual_machine_scale_set_other_resource_test.go +++ b/azurerm/internal/services/compute/tests/windows_virtual_machine_scale_set_other_resource_test.go @@ -73,6 +73,48 @@ func TestAccAzureRMWindowsVirtualMachineScaleSet_otherBootDiagnostics(t *testing }) } +func TestAccAzureRMWindowsVirtualMachineScaleSet_otherBootDiagnosticsManaged(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_windows_virtual_machine_scale_set", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMWindowsVirtualMachineScaleSetDestroy, + Steps: []resource.TestStep{ + { + // Enabled + Config: testAccAzureRMWindowsVirtualMachineScaleSet_otherBootDiagnosticsManaged(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMWindowsVirtualMachineScaleSetExists(data.ResourceName), + ), + }, + data.ImportStep( + "admin_password", + ), + { + // Removed + Config: testAccAzureRMWindowsVirtualMachineScaleSet_otherBootDiagnosticsDisabled(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMWindowsVirtualMachineScaleSetExists(data.ResourceName), + ), + }, + data.ImportStep( + "admin_password", + ), + { + // Enabled + Config: testAccAzureRMWindowsVirtualMachineScaleSet_otherBootDiagnosticsManaged(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMWindowsVirtualMachineScaleSetExists(data.ResourceName), + ), + }, + data.ImportStep( + "admin_password", + ), + }, + }) +} + func TestAccAzureRMWindowsVirtualMachineScaleSet_otherComputerNamePrefix(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_windows_virtual_machine_scale_set", "test") @@ -940,6 +982,26 @@ func TestAccAzureRMWindowsVirtualMachineScaleSet_otherEncryptionAtHostEnabledWit }) } +func TestAccAzureRMWindowsVirtualMachineScaleSet_otherPlatformFaultDomainCount(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_windows_virtual_machine_scale_set", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMWindowsVirtualMachineScaleSetDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMWindowsVirtualMachineScaleSet_otherPlatformFaultDomainCount(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMWindowsVirtualMachineScaleSetExists(data.ResourceName), + ), + }, + // TODO - extension should be changed to extension.0.protected_settings when either binary testing is available or this feature is promoted from beta + data.ImportStep("admin_password", "extension"), + }, + }) +} + func testAccAzureRMWindowsVirtualMachineScaleSet_otherBootDiagnostics(data acceptance.TestData) string { template := testAccAzureRMWindowsVirtualMachineScaleSet_template(data) return fmt.Sprintf(` @@ -992,6 +1054,56 @@ resource "azurerm_windows_virtual_machine_scale_set" "test" { `, template, data.RandomString) } +func testAccAzureRMWindowsVirtualMachineScaleSet_otherBootDiagnosticsManaged(data acceptance.TestData) string { + template := testAccAzureRMWindowsVirtualMachineScaleSet_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_storage_account" "test" { + name = "accsa%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_windows_virtual_machine_scale_set" "test" { + 
name = local.vm_name + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + sku = "Standard_F2" + instances = 1 + admin_username = "adminuser" + admin_password = "P@ssword1234!" + + source_image_reference { + publisher = "MicrosoftWindowsServer" + offer = "WindowsServer" + sku = "2019-Datacenter" + version = "latest" + } + + os_disk { + storage_account_type = "Standard_LRS" + caching = "ReadWrite" + } + + network_interface { + name = "example" + primary = true + + ip_configuration { + name = "internal" + primary = true + subnet_id = azurerm_subnet.test.id + } + } + + boot_diagnostics {} +} +`, template, data.RandomString) +} + func testAccAzureRMWindowsVirtualMachineScaleSet_otherBootDiagnosticsDisabled(data acceptance.TestData) string { template := testAccAzureRMWindowsVirtualMachineScaleSet_template(data) return fmt.Sprintf(` @@ -2876,3 +2988,45 @@ resource "azurerm_windows_virtual_machine_scale_set" "test" { } `, template, enabled) } + +func testAccAzureRMWindowsVirtualMachineScaleSet_otherPlatformFaultDomainCount(data acceptance.TestData) string { + template := testAccAzureRMWindowsVirtualMachineScaleSet_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_windows_virtual_machine_scale_set" "test" { + name = local.vm_name + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + sku = "Standard_F2" + instances = 1 + admin_username = "adminuser" + admin_password = "P@ssword1234!" + + source_image_reference { + publisher = "MicrosoftWindowsServer" + offer = "WindowsServer" + sku = "2019-Datacenter" + version = "latest" + } + + os_disk { + storage_account_type = "Standard_LRS" + caching = "ReadWrite" + } + + network_interface { + name = "example" + primary = true + + ip_configuration { + name = "internal" + primary = true + subnet_id = azurerm_subnet.test.id + } + } + + platform_fault_domain_count = 3 +} +`, template) +} diff --git a/azurerm/internal/services/compute/validate/availability_set.go b/azurerm/internal/services/compute/validate/availability_set.go deleted file mode 100644 index 73e152605584..000000000000 --- a/azurerm/internal/services/compute/validate/availability_set.go +++ /dev/null @@ -1,22 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" -) - -func AvailabilitySetID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.AvailabilitySetID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} diff --git a/azurerm/internal/services/compute/validate/availability_set_id.go b/azurerm/internal/services/compute/validate/availability_set_id.go new file mode 100644 index 000000000000..5012cfbcc958 --- /dev/null +++ b/azurerm/internal/services/compute/validate/availability_set_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" +) + +func AvailabilitySetID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", 
key)) + return + } + + if _, err := parse.AvailabilitySetID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/compute/validate/availability_set_id_test.go b/azurerm/internal/services/compute/validate/availability_set_id_test.go new file mode 100644 index 000000000000..dfa31d551aaa --- /dev/null +++ b/azurerm/internal/services/compute/validate/availability_set_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestAvailabilitySetID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/availabilitySets/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/availabilitySets/set1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/AVAILABILITYSETS/SET1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := AvailabilitySetID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/compute/validate/dedicated_host.go b/azurerm/internal/services/compute/validate/dedicated_host.go deleted file mode 100644 index 148e77a6dffa..000000000000 --- a/azurerm/internal/services/compute/validate/dedicated_host.go +++ /dev/null @@ -1,22 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" -) - -func DedicatedHostID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.DedicatedHostID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} diff --git a/azurerm/internal/services/compute/validate/dedicated_host_group.go b/azurerm/internal/services/compute/validate/dedicated_host_group.go deleted file mode 100644 index b5964eb82c03..000000000000 --- a/azurerm/internal/services/compute/validate/dedicated_host_group.go +++ /dev/null @@ -1,22 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" -) - -func DedicatedHostGroupID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, 
fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.DedicatedHostGroupID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} diff --git a/azurerm/internal/services/compute/validate/dedicated_host_group_id.go b/azurerm/internal/services/compute/validate/dedicated_host_group_id.go new file mode 100644 index 000000000000..99aa116e96f6 --- /dev/null +++ b/azurerm/internal/services/compute/validate/dedicated_host_group_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" +) + +func DedicatedHostGroupID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.DedicatedHostGroupID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/compute/validate/dedicated_host_group_id_test.go b/azurerm/internal/services/compute/validate/dedicated_host_group_id_test.go new file mode 100644 index 000000000000..2c2af34a70c0 --- /dev/null +++ b/azurerm/internal/services/compute/validate/dedicated_host_group_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestDedicatedHostGroupID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing HostGroupName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", + Valid: false, + }, + + { + // missing value for HostGroupName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/hostGroups/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/hostGroups/hostGroup1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/HOSTGROUPS/HOSTGROUP1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := DedicatedHostGroupID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/compute/validate/dedicated_host_id.go b/azurerm/internal/services/compute/validate/dedicated_host_id.go new file mode 100644 index 000000000000..4ed7a8d3979e --- /dev/null +++ b/azurerm/internal/services/compute/validate/dedicated_host_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be 
overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" +) + +func DedicatedHostID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.DedicatedHostID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/compute/validate/dedicated_host_id_test.go b/azurerm/internal/services/compute/validate/dedicated_host_id_test.go new file mode 100644 index 000000000000..359fdd9f7cad --- /dev/null +++ b/azurerm/internal/services/compute/validate/dedicated_host_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestDedicatedHostID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing HostGroupName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", + Valid: false, + }, + + { + // missing value for HostGroupName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/hostGroups/", + Valid: false, + }, + + { + // missing HostName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/hostGroups/hostGroup1/", + Valid: false, + }, + + { + // missing value for HostName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/hostGroups/hostGroup1/hosts/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/hostGroups/hostGroup1/hosts/host1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/HOSTGROUPS/HOSTGROUP1/HOSTS/HOST1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := DedicatedHostID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/compute/validate/disk_access_id.go b/azurerm/internal/services/compute/validate/disk_access_id.go new file mode 100644 index 000000000000..b27da32b9e67 --- /dev/null +++ b/azurerm/internal/services/compute/validate/disk_access_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" +) + +func DiskAccessID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, 
fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.DiskAccessID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/compute/validate/disk_access_id_test.go b/azurerm/internal/services/compute/validate/disk_access_id_test.go new file mode 100644 index 000000000000..8f547a3ae271 --- /dev/null +++ b/azurerm/internal/services/compute/validate/disk_access_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestDiskAccessID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/diskAccesses/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/diskAccesses/diskAccess1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/DISKACCESSES/DISKACCESS1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := DiskAccessID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/compute/validate/disk_encryption_set.go b/azurerm/internal/services/compute/validate/disk_encryption_set.go deleted file mode 100644 index 6b2ff8ca5f75..000000000000 --- a/azurerm/internal/services/compute/validate/disk_encryption_set.go +++ /dev/null @@ -1,22 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" -) - -func DiskEncryptionSetID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.DiskEncryptionSetID(v); err != nil { - errors = append(errors, fmt.Errorf("Cannot parse %q as a resource id: %+v", k, err)) - return - } - - return warnings, errors -} diff --git a/azurerm/internal/services/compute/validate/disk_encryption_set_id.go b/azurerm/internal/services/compute/validate/disk_encryption_set_id.go new file mode 100644 index 000000000000..81fc3d493cb2 --- /dev/null +++ b/azurerm/internal/services/compute/validate/disk_encryption_set_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" +) + +func 
DiskEncryptionSetID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.DiskEncryptionSetID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/compute/validate/disk_encryption_set_id_test.go b/azurerm/internal/services/compute/validate/disk_encryption_set_id_test.go new file mode 100644 index 000000000000..8bbe68b3635f --- /dev/null +++ b/azurerm/internal/services/compute/validate/disk_encryption_set_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestDiskEncryptionSetID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/diskEncryptionSets/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/diskEncryptionSets/set1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/DISKENCRYPTIONSETS/SET1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := DiskEncryptionSetID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/compute/validate/image.go b/azurerm/internal/services/compute/validate/image.go deleted file mode 100644 index 35162b9a7058..000000000000 --- a/azurerm/internal/services/compute/validate/image.go +++ /dev/null @@ -1,22 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" -) - -func ImageID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.ImageID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} diff --git a/azurerm/internal/services/compute/validate/image_id.go b/azurerm/internal/services/compute/validate/image_id.go new file mode 100644 index 000000000000..333bbac2f867 --- /dev/null +++ b/azurerm/internal/services/compute/validate/image_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" +) + +func ImageID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ImageID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/compute/validate/image_id_test.go b/azurerm/internal/services/compute/validate/image_id_test.go new file mode 100644 index 000000000000..76f2ab6c6dcf --- /dev/null +++ b/azurerm/internal/services/compute/validate/image_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestImageID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/images/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/images/image1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/IMAGES/IMAGE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ImageID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/compute/validate/managed_disk_id.go b/azurerm/internal/services/compute/validate/managed_disk_id.go new file mode 100644 index 000000000000..da5ccf334c82 --- /dev/null +++ b/azurerm/internal/services/compute/validate/managed_disk_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" +) + +func ManagedDiskID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ManagedDiskID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/compute/validate/managed_disk_id_test.go b/azurerm/internal/services/compute/validate/managed_disk_id_test.go new file mode 100644 index 000000000000..d78f640062d9 --- /dev/null +++ b/azurerm/internal/services/compute/validate/managed_disk_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes 
will be overwritten + +import "testing" + +func TestManagedDiskID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing DiskName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", + Valid: false, + }, + + { + // missing value for DiskName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/disks/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/disks/disk1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/DISKS/DISK1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ManagedDiskID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/compute/validate/proximity_placement_group.go b/azurerm/internal/services/compute/validate/proximity_placement_group.go deleted file mode 100644 index 9cb8462a10f8..000000000000 --- a/azurerm/internal/services/compute/validate/proximity_placement_group.go +++ /dev/null @@ -1,22 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" -) - -func ProximityPlacementGroupID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.ProximityPlacementGroupID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} diff --git a/azurerm/internal/services/compute/validate/proximity_placement_group_id.go b/azurerm/internal/services/compute/validate/proximity_placement_group_id.go new file mode 100644 index 000000000000..af2fbc56380f --- /dev/null +++ b/azurerm/internal/services/compute/validate/proximity_placement_group_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" +) + +func ProximityPlacementGroupID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ProximityPlacementGroupID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/compute/validate/proximity_placement_group_id_test.go b/azurerm/internal/services/compute/validate/proximity_placement_group_id_test.go new file mode 100644 index 
000000000000..d54cf408e124 --- /dev/null +++ b/azurerm/internal/services/compute/validate/proximity_placement_group_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestProximityPlacementGroupID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/proximityPlacementGroups/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/proximityPlacementGroups/group1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/PROXIMITYPLACEMENTGROUPS/GROUP1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ProximityPlacementGroupID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/compute/validate/shared_image.go b/azurerm/internal/services/compute/validate/shared_image.go deleted file mode 100644 index 593b2265d639..000000000000 --- a/azurerm/internal/services/compute/validate/shared_image.go +++ /dev/null @@ -1,22 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" -) - -func SharedImageID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.SharedImageID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} diff --git a/azurerm/internal/services/compute/validate/shared_image_gallery_id.go b/azurerm/internal/services/compute/validate/shared_image_gallery_id.go new file mode 100644 index 000000000000..9910eec58453 --- /dev/null +++ b/azurerm/internal/services/compute/validate/shared_image_gallery_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" +) + +func SharedImageGalleryID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.SharedImageGalleryID(v); err != nil { + errors = append(errors, err) + } + + 
return +} diff --git a/azurerm/internal/services/compute/validate/shared_image_gallery_id_test.go b/azurerm/internal/services/compute/validate/shared_image_gallery_id_test.go new file mode 100644 index 000000000000..20ea26cb356c --- /dev/null +++ b/azurerm/internal/services/compute/validate/shared_image_gallery_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestSharedImageGalleryID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing GalleryName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", + Valid: false, + }, + + { + // missing value for GalleryName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/gallery1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/GALLERIES/GALLERY1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := SharedImageGalleryID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/compute/validate/shared_image_id.go b/azurerm/internal/services/compute/validate/shared_image_id.go new file mode 100644 index 000000000000..5e88ee8a4dd7 --- /dev/null +++ b/azurerm/internal/services/compute/validate/shared_image_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" +) + +func SharedImageID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.SharedImageID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/compute/validate/shared_image_id_test.go b/azurerm/internal/services/compute/validate/shared_image_id_test.go new file mode 100644 index 000000000000..9df3325dfa57 --- /dev/null +++ b/azurerm/internal/services/compute/validate/shared_image_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestSharedImageID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for 
SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing GalleryName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", + Valid: false, + }, + + { + // missing value for GalleryName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/", + Valid: false, + }, + + { + // missing ImageName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/gallery1/", + Valid: false, + }, + + { + // missing value for ImageName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/gallery1/images/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/gallery1/images/image1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/GALLERIES/GALLERY1/IMAGES/IMAGE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := SharedImageID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/compute/validate/shared_image_version.go b/azurerm/internal/services/compute/validate/shared_image_version.go deleted file mode 100644 index 22bd1beccca5..000000000000 --- a/azurerm/internal/services/compute/validate/shared_image_version.go +++ /dev/null @@ -1,22 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" -) - -func SharedImageVersionID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.SharedImageVersionID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} diff --git a/azurerm/internal/services/compute/validate/shared_image_version_id.go b/azurerm/internal/services/compute/validate/shared_image_version_id.go new file mode 100644 index 000000000000..a66d686710fa --- /dev/null +++ b/azurerm/internal/services/compute/validate/shared_image_version_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" +) + +func SharedImageVersionID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.SharedImageVersionID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/compute/validate/shared_image_version_id_test.go 
b/azurerm/internal/services/compute/validate/shared_image_version_id_test.go new file mode 100644 index 000000000000..6a5296508fdb --- /dev/null +++ b/azurerm/internal/services/compute/validate/shared_image_version_id_test.go @@ -0,0 +1,100 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestSharedImageVersionID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing GalleryName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", + Valid: false, + }, + + { + // missing value for GalleryName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/", + Valid: false, + }, + + { + // missing ImageName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/gallery1/", + Valid: false, + }, + + { + // missing value for ImageName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/gallery1/images/", + Valid: false, + }, + + { + // missing VersionName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/gallery1/images/image1/", + Valid: false, + }, + + { + // missing value for VersionName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/gallery1/images/image1/versions/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/galleries/gallery1/images/image1/versions/version1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/GALLERIES/GALLERY1/IMAGES/IMAGE1/VERSIONS/VERSION1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := SharedImageVersionID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/compute/validate/virtual_machine.go b/azurerm/internal/services/compute/validate/virtual_machine.go deleted file mode 100644 index d23b7abede9a..000000000000 --- a/azurerm/internal/services/compute/validate/virtual_machine.go +++ /dev/null @@ -1,22 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" -) - -func VirtualMachineID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.VirtualMachineID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not 
parse %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} diff --git a/azurerm/internal/services/compute/validate/virtual_machine_extension.go b/azurerm/internal/services/compute/validate/virtual_machine_extension.go deleted file mode 100644 index 9efd43348e76..000000000000 --- a/azurerm/internal/services/compute/validate/virtual_machine_extension.go +++ /dev/null @@ -1,22 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" -) - -func VirtualMachineExtensionID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.VirtualMachineExtensionID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} diff --git a/azurerm/internal/services/compute/validate/virtual_machine_extension_id.go b/azurerm/internal/services/compute/validate/virtual_machine_extension_id.go new file mode 100644 index 000000000000..79adf3acbeff --- /dev/null +++ b/azurerm/internal/services/compute/validate/virtual_machine_extension_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" +) + +func VirtualMachineExtensionID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.VirtualMachineExtensionID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/compute/validate/virtual_machine_extension_id_test.go b/azurerm/internal/services/compute/validate/virtual_machine_extension_id_test.go new file mode 100644 index 000000000000..0da5b309791c --- /dev/null +++ b/azurerm/internal/services/compute/validate/virtual_machine_extension_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestVirtualMachineExtensionID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing VirtualMachineName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", + Valid: false, + }, + + { + // missing value for VirtualMachineName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachines/", + Valid: false, + }, + + { + // missing ExtensionName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachines/machine1/", + Valid: false, + }, + + { + // missing value for 
ExtensionName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachines/machine1/extensions/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachines/machine1/extensions/extension1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/VIRTUALMACHINES/MACHINE1/EXTENSIONS/EXTENSION1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := VirtualMachineExtensionID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/compute/validate/virtual_machine_id.go b/azurerm/internal/services/compute/validate/virtual_machine_id.go new file mode 100644 index 000000000000..1b151c45461d --- /dev/null +++ b/azurerm/internal/services/compute/validate/virtual_machine_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" +) + +func VirtualMachineID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.VirtualMachineID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/compute/validate/virtual_machine_id_test.go b/azurerm/internal/services/compute/validate/virtual_machine_id_test.go new file mode 100644 index 000000000000..d0c160dcb3aa --- /dev/null +++ b/azurerm/internal/services/compute/validate/virtual_machine_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestVirtualMachineID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachines/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachines/machine1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/VIRTUALMACHINES/MACHINE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := 
VirtualMachineID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/compute/validate/virtual_machine_scale_set.go b/azurerm/internal/services/compute/validate/virtual_machine_scale_set.go deleted file mode 100644 index 12ada49ddebe..000000000000 --- a/azurerm/internal/services/compute/validate/virtual_machine_scale_set.go +++ /dev/null @@ -1,28 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" -) - -func VirtualMachineScaleSetID(i interface{}, k string) (s []string, es []error) { - v, ok := i.(string) - if !ok { - es = append(es, fmt.Errorf("expected type of %s to be string", k)) - return - } - - id, err := parse.VirtualMachineScaleSetID(v) - if err != nil { - es = append(es, fmt.Errorf("Error parsing %q as a VM Scale Set Resource ID: %s", v, err)) - return - } - - if id.Name == "" { - es = append(es, fmt.Errorf("Error parsing %q as a VM Scale Set Resource ID: `virtualMachineScaleSets` segment was empty", v)) - return - } - - return -} diff --git a/azurerm/internal/services/compute/validate/virtual_machine_scale_set_extension_id.go b/azurerm/internal/services/compute/validate/virtual_machine_scale_set_extension_id.go new file mode 100644 index 000000000000..1ccf707a39c3 --- /dev/null +++ b/azurerm/internal/services/compute/validate/virtual_machine_scale_set_extension_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" +) + +func VirtualMachineScaleSetExtensionID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.VirtualMachineScaleSetExtensionID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/compute/validate/virtual_machine_scale_set_extension_id_test.go b/azurerm/internal/services/compute/validate/virtual_machine_scale_set_extension_id_test.go new file mode 100644 index 000000000000..9c394748e32e --- /dev/null +++ b/azurerm/internal/services/compute/validate/virtual_machine_scale_set_extension_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestVirtualMachineScaleSetExtensionID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing VirtualMachineScaleSetName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", + Valid: false, + }, + + { + // missing value for VirtualMachineScaleSetName + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachineScaleSets/", + Valid: false, + }, + + { + // missing ExtensionName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachineScaleSets/scaleSet1/", + Valid: false, + }, + + { + // missing value for ExtensionName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachineScaleSets/scaleSet1/extensions/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachineScaleSets/scaleSet1/extensions/extension1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/VIRTUALMACHINESCALESETS/SCALESET1/EXTENSIONS/EXTENSION1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := VirtualMachineScaleSetExtensionID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/compute/validate/virtual_machine_scale_set_id.go b/azurerm/internal/services/compute/validate/virtual_machine_scale_set_id.go new file mode 100644 index 000000000000..349e16d0006c --- /dev/null +++ b/azurerm/internal/services/compute/validate/virtual_machine_scale_set_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" +) + +func VirtualMachineScaleSetID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.VirtualMachineScaleSetID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/compute/validate/virtual_machine_scale_set_id_test.go b/azurerm/internal/services/compute/validate/virtual_machine_scale_set_id_test.go new file mode 100644 index 000000000000..b04cd6feac73 --- /dev/null +++ b/azurerm/internal/services/compute/validate/virtual_machine_scale_set_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestVirtualMachineScaleSetID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/", + Valid: false, + }, + + { + // missing value for Name + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachineScaleSets/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Compute/virtualMachineScaleSets/scaleSet1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.COMPUTE/VIRTUALMACHINESCALESETS/SCALESET1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := VirtualMachineScaleSetID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/compute/validation.go b/azurerm/internal/services/compute/validation.go index 84cc11074ebc..2c70f34db50a 100644 --- a/azurerm/internal/services/compute/validation.go +++ b/azurerm/internal/services/compute/validation.go @@ -49,19 +49,19 @@ func ValidateVmName(i interface{}, k string) (warnings []string, errors []error) func ValidateLinuxComputerNameFull(i interface{}, k string) (warnings []string, errors []error) { // Linux host name cannot exceed 64 characters in length - return ValidateLinuxComputerName(i, k, 64) + return ValidateLinuxComputerName(i, k, 64, false) } func ValidateLinuxComputerNamePrefix(i interface{}, k string) (warnings []string, errors []error) { // Linux host name prefix cannot exceed 58 characters in length - return ValidateLinuxComputerName(i, k, 58) + return ValidateLinuxComputerName(i, k, 58, true) } func ValidateOrchestratedVMSSName(i interface{}, k string) (warnings []string, errors []error) { return ValidateVmName(i, k) } -func ValidateLinuxComputerName(i interface{}, k string, maxLength int) (warnings []string, errors []error) { +func ValidateLinuxComputerName(i interface{}, k string, maxLength int, allowDashSuffix bool) (warnings []string, errors []error) { v, ok := i.(string) if !ok { errors = append(errors, fmt.Errorf("expected %q to be a string but it wasn't!", k)) @@ -82,8 +82,12 @@ func ValidateLinuxComputerName(i interface{}, k string, maxLength int) (warnings errors = append(errors, fmt.Errorf("%q cannot begin with an underscore", k)) } - if strings.HasSuffix(v, ".") || strings.HasSuffix(v, "-") { - errors = append(errors, fmt.Errorf("%q cannot end with an period or dash", k)) + if strings.HasSuffix(v, ".") { + errors = append(errors, fmt.Errorf("%q cannot end with a period", k)) + } + + if !allowDashSuffix && strings.HasSuffix(v, "-") { + errors = append(errors, fmt.Errorf("%q cannot end with a dash", k)) } // Linux host name cannot contain the following characters diff --git a/azurerm/internal/services/compute/validation_test.go b/azurerm/internal/services/compute/validation_test.go index b11f960fec76..2ff6cb1bab25 100644 --- a/azurerm/internal/services/compute/validation_test.go +++ b/azurerm/internal/services/compute/validation_test.go @@ -165,7 +165,7 @@ func TestValidateLinuxComputerName(t *testing.T) { for _, v := range testData { t.Logf("[DEBUG] Testing %q..", v.input) - _, errors := ValidateLinuxComputerName(v.input, "computer_name", 100) + _, errors := ValidateLinuxComputerName(v.input, "computer_name", 100, false) actual := len(errors) == 0 if v.expected != actual { t.Fatalf("Expected %t but got %t", v.expected, actual) @@ -226,6 +226,11 @@ func TestValidateLinuxComputerNamePrefix(t *testing.T) { input: 
"abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefg", expected: false, }, + { + // dash suffix + input: "abc-", + expected: true, + }, } for _, v := range testData { diff --git a/azurerm/internal/services/compute/virtual_machine.go b/azurerm/internal/services/compute/virtual_machine.go index 3c59d6fd98d2..035926c18b6a 100644 --- a/azurerm/internal/services/compute/virtual_machine.go +++ b/azurerm/internal/services/compute/virtual_machine.go @@ -263,7 +263,7 @@ func virtualMachineOSDiskSchema() *schema.Schema { Type: schema.TypeInt, Optional: true, Computed: true, - ValidateFunc: validation.IntBetween(0, 2048), + ValidateFunc: validation.IntBetween(0, 4095), }, "name": { @@ -358,7 +358,7 @@ func flattenVirtualMachineOSDisk(ctx context.Context, disksClient *compute.Disks return nil, err } - disk, err := disksClient.Get(ctx, id.ResourceGroup, id.Name) + disk, err := disksClient.Get(ctx, id.ResourceGroup, id.DiskName) if err != nil { // turns out ephemeral disks aren't returned/available here if !utils.ResponseWasNotFound(disk.Response) { diff --git a/azurerm/internal/services/compute/virtual_machine_extension_resource.go b/azurerm/internal/services/compute/virtual_machine_extension_resource.go index 9671c5dfbbc1..eba737b4d644 100644 --- a/azurerm/internal/services/compute/virtual_machine_extension_resource.go +++ b/azurerm/internal/services/compute/virtual_machine_extension_resource.go @@ -196,24 +196,24 @@ func resourceArmVirtualMachineExtensionsRead(d *schema.ResourceData, meta interf return err } - virtualMachine, err := vmClient.Get(ctx, id.ResourceGroup, id.VirtualMachine, "") + virtualMachine, err := vmClient.Get(ctx, id.ResourceGroup, id.VirtualMachineName, "") if err != nil { if utils.ResponseWasNotFound(virtualMachine.Response) { d.SetId("") return nil } - return fmt.Errorf("Error making Read request on Virtual Machine %s: %s", id.Name, err) + return fmt.Errorf("Error making Read request on Virtual Machine %s: %s", id.ExtensionName, err) } d.Set("virtual_machine_id", virtualMachine.ID) - resp, err := vmExtensionClient.Get(ctx, id.ResourceGroup, id.VirtualMachine, id.Name, "") + resp, err := vmExtensionClient.Get(ctx, id.ResourceGroup, id.VirtualMachineName, id.ExtensionName, "") if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("Error making Read request on Virtual Machine Extension %s: %s", id.Name, err) + return fmt.Errorf("Error making Read request on Virtual Machine Extension %s: %s", id.ExtensionName, err) } d.Set("name", resp.Name) @@ -247,7 +247,7 @@ func resourceArmVirtualMachineExtensionsDelete(d *schema.ResourceData, meta inte return err } - future, err := client.Delete(ctx, id.ResourceGroup, id.VirtualMachine, id.Name) + future, err := client.Delete(ctx, id.ResourceGroup, id.VirtualMachineName, id.ExtensionName) if err != nil { return err } diff --git a/azurerm/internal/services/compute/virtual_machine_instance_test.go b/azurerm/internal/services/compute/virtual_machine_instance_test.go index df301635ef9d..8ea19bddd440 100644 --- a/azurerm/internal/services/compute/virtual_machine_instance_test.go +++ b/azurerm/internal/services/compute/virtual_machine_instance_test.go @@ -8,7 +8,7 @@ import ( ) func TestVirtualMachineShouldBeStarted(t *testing.T) { - var buildInstanceViewStatus = func(statuses ...string) *[]compute.InstanceViewStatus { + buildInstanceViewStatus := func(statuses ...string) *[]compute.InstanceViewStatus { results := make([]compute.InstanceViewStatus, 0) for _, v := range statuses { diff --git 
a/azurerm/internal/services/compute/virtual_machine_resource.go b/azurerm/internal/services/compute/virtual_machine_resource.go index 2cb6a528d5a4..4b4f3cb03331 100644 --- a/azurerm/internal/services/compute/virtual_machine_resource.go +++ b/azurerm/internal/services/compute/virtual_machine_resource.go @@ -24,7 +24,7 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" - "github.com/tombuildsstuff/giovanni/storage/2018-11-09/blob/blobs" + "github.com/tombuildsstuff/giovanni/storage/2019-12-12/blob/blobs" "golang.org/x/net/context" ) @@ -1964,6 +1964,7 @@ func resourceArmVirtualMachineGetManagedDiskInfo(d *schema.ResourceData, disk *c return &diskResp, nil } + func determineVirtualMachineIPAddress(ctx context.Context, meta interface{}, props *compute.VirtualMachineProperties) (string, error) { nicClient := meta.(*clients.Client).Network.InterfacesClient pipClient := meta.(*clients.Client).Network.PublicIPsClient diff --git a/azurerm/internal/services/compute/virtual_machine_scale_set.go b/azurerm/internal/services/compute/virtual_machine_scale_set.go index cd8dca6e33b3..0a0ccd454553 100644 --- a/azurerm/internal/services/compute/virtual_machine_scale_set.go +++ b/azurerm/internal/services/compute/virtual_machine_scale_set.go @@ -779,12 +779,26 @@ func VirtualMachineScaleSetDataDiskSchema() *schema.Schema { Optional: true, Default: false, }, + + // TODO 3.0 - change this to ultra_ssd_disk_iops_read_write + "disk_iops_read_write": { + Type: schema.TypeInt, + Optional: true, + Computed: true, + }, + + // TODO 3.0 - change this to ultra_ssd_disk_iops_read_write + "disk_mbps_read_write": { + Type: schema.TypeInt, + Optional: true, + Computed: true, + }, }, }, } } -func ExpandVirtualMachineScaleSetDataDisk(input []interface{}) *[]compute.VirtualMachineScaleSetDataDisk { +func ExpandVirtualMachineScaleSetDataDisk(input []interface{}, ultraSSDEnabled bool) (*[]compute.VirtualMachineScaleSetDataDisk, error) { disks := make([]compute.VirtualMachineScaleSetDataDisk, 0) for _, v := range input { @@ -807,10 +821,24 @@ func ExpandVirtualMachineScaleSetDataDisk(input []interface{}) *[]compute.Virtua } } + if iops := raw["disk_iops_read_write"].(int); iops != 0 { + if !ultraSSDEnabled { + return nil, fmt.Errorf("`disk_iops_read_write` are only available for UltraSSD disks") + } + disk.DiskIOPSReadWrite = utils.Int64(int64(iops)) + } + + if mbps := raw["disk_mbps_read_write"].(int); mbps != 0 { + if !ultraSSDEnabled { + return nil, fmt.Errorf("`disk_mbps_read_write` are only available for UltraSSD disks") + } + disk.DiskMBpsReadWrite = utils.Int64(int64(mbps)) + } + disks = append(disks, disk) } - return &disks + return &disks, nil } func FlattenVirtualMachineScaleSetDataDisk(input *[]compute.VirtualMachineScaleSetDataDisk) []interface{} { @@ -845,6 +873,16 @@ func FlattenVirtualMachineScaleSetDataDisk(input *[]compute.VirtualMachineScaleS writeAcceleratorEnabled = *v.WriteAcceleratorEnabled } + iops := 0 + if v.DiskIOPSReadWrite != nil { + iops = int(*v.DiskIOPSReadWrite) + } + + mbps := 0 + if v.DiskMBpsReadWrite != nil { + mbps = int(*v.DiskMBpsReadWrite) + } + output = append(output, map[string]interface{}{ "caching": string(v.Caching), "create_option": string(v.CreateOption), @@ -853,6 +891,8 @@ func FlattenVirtualMachineScaleSetDataDisk(input *[]compute.VirtualMachineScaleS "disk_size_gb": diskSizeGb, 
"storage_account_type": storageAccountType, "write_accelerator_enabled": writeAcceleratorEnabled, + "disk_iops_read_write": iops, + "disk_mbps_read_write": mbps, }) } @@ -922,7 +962,7 @@ func VirtualMachineScaleSetOSDiskSchema() *schema.Schema { Type: schema.TypeInt, Optional: true, Computed: true, - ValidateFunc: validation.IntBetween(0, 2048), + ValidateFunc: validation.IntBetween(0, 4095), }, "write_accelerator_enabled": { diff --git a/azurerm/internal/services/compute/virtual_machine_scale_set_extension_resource.go b/azurerm/internal/services/compute/virtual_machine_scale_set_extension_resource.go index 3451b0ae9d1f..211c60fedfbf 100644 --- a/azurerm/internal/services/compute/virtual_machine_scale_set_extension_resource.go +++ b/azurerm/internal/services/compute/virtual_machine_scale_set_extension_resource.go @@ -255,16 +255,16 @@ func resourceArmVirtualMachineScaleSetExtensionUpdate(d *schema.ResourceData, me } extension := compute.VirtualMachineScaleSetExtension{ - Name: utils.String(id.Name), + Name: utils.String(id.ExtensionName), VirtualMachineScaleSetExtensionProperties: &props, } - future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.VirtualMachineScaleSetName, id.Name, extension) + future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.VirtualMachineScaleSetName, id.ExtensionName, extension) if err != nil { - return fmt.Errorf("Error updating Extension %q (Virtual Machine Scale Set %q / Resource Group %q): %+v", id.Name, id.VirtualMachineScaleSetName, id.ResourceGroup, err) + return fmt.Errorf("Error updating Extension %q (Virtual Machine Scale Set %q / Resource Group %q): %+v", id.ExtensionName, id.VirtualMachineScaleSetName, id.ResourceGroup, err) } if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for update of Extension %q (Virtual Machine Scale Set %q / Resource Group %q): %+v", id.Name, id.VirtualMachineScaleSetName, id.ResourceGroup, err) + return fmt.Errorf("Error waiting for update of Extension %q (Virtual Machine Scale Set %q / Resource Group %q): %+v", id.ExtensionName, id.VirtualMachineScaleSetName, id.ResourceGroup, err) } return resourceArmVirtualMachineScaleSetExtensionRead(d, meta) @@ -292,18 +292,18 @@ func resourceArmVirtualMachineScaleSetExtensionRead(d *schema.ResourceData, meta return fmt.Errorf("Error retrieving Virtual Machine Scale Set %q (Resource Group %q): %+v", id.VirtualMachineScaleSetName, id.ResourceGroup, err) } - resp, err := client.Get(ctx, id.ResourceGroup, id.VirtualMachineScaleSetName, id.Name, "") + resp, err := client.Get(ctx, id.ResourceGroup, id.VirtualMachineScaleSetName, id.ExtensionName, "") if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("Extension %q (Virtual Machine Scale Set %q / Resource Group %q) was not found - removing from state!", id.Name, id.VirtualMachineScaleSetName, id.ResourceGroup) + log.Printf("Extension %q (Virtual Machine Scale Set %q / Resource Group %q) was not found - removing from state!", id.ExtensionName, id.VirtualMachineScaleSetName, id.ResourceGroup) d.SetId("") return nil } - return fmt.Errorf("Error retrieving Extension %q (Virtual Machine Scale Set %q / Resource Group %q): %+v", id.Name, id.VirtualMachineScaleSetName, id.ResourceGroup, err) + return fmt.Errorf("Error retrieving Extension %q (Virtual Machine Scale Set %q / Resource Group %q): %+v", id.ExtensionName, id.VirtualMachineScaleSetName, id.ResourceGroup, err) } - d.Set("name", id.Name) + d.Set("name", id.ExtensionName) 
d.Set("virtual_machine_scale_set_id", vmss.ID) if props := resp.VirtualMachineScaleSetExtensionProperties; props != nil { @@ -341,17 +341,17 @@ func resourceArmVirtualMachineScaleSetExtensionDelete(d *schema.ResourceData, me return err } - future, err := client.Delete(ctx, id.ResourceGroup, id.VirtualMachineScaleSetName, id.Name) + future, err := client.Delete(ctx, id.ResourceGroup, id.VirtualMachineScaleSetName, id.ExtensionName) if err != nil { if response.WasNotFound(future.Response()) { return nil } - return fmt.Errorf("Error deleting Extension %q (Virtual Machine Scale Set %q / Resource Group %q): %+v", id.Name, id.VirtualMachineScaleSetName, id.ResourceGroup, err) + return fmt.Errorf("Error deleting Extension %q (Virtual Machine Scale Set %q / Resource Group %q): %+v", id.ExtensionName, id.VirtualMachineScaleSetName, id.ResourceGroup, err) } if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for deletion of Extension %q (Virtual Machine Scale Set %q / Resource Group %q): %+v", id.Name, id.VirtualMachineScaleSetName, id.ResourceGroup, err) + return fmt.Errorf("Error waiting for deletion of Extension %q (Virtual Machine Scale Set %q / Resource Group %q): %+v", id.ExtensionName, id.VirtualMachineScaleSetName, id.ResourceGroup, err) } return nil diff --git a/azurerm/internal/services/compute/windows_virtual_machine_resource.go b/azurerm/internal/services/compute/windows_virtual_machine_resource.go index 438981836448..fc5521251353 100644 --- a/azurerm/internal/services/compute/windows_virtual_machine_resource.go +++ b/azurerm/internal/services/compute/windows_virtual_machine_resource.go @@ -16,6 +16,7 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" + azValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" @@ -139,7 +140,6 @@ func resourceWindowsVirtualMachine() *schema.Resource { "dedicated_host_id": { Type: schema.TypeString, Optional: true, - ForceNew: true, // TODO: investigate, looks like the Portal allows migration ValidateFunc: computeValidate.DedicatedHostID, // the Compute/VM API is broken and returns the Resource Group name in UPPERCASE :shrug: DiffSuppressFunc: suppress.CaseDifference, @@ -150,7 +150,7 @@ func resourceWindowsVirtualMachine() *schema.Resource { "enable_automatic_updates": { Type: schema.TypeBool, Optional: true, - ForceNew: true, // TODO: confirm + ForceNew: true, // updating this is not allowed "Changing property 'windowsConfiguration.enableAutomaticUpdates' is not allowed." 
Target="windowsConfiguration.enableAutomaticUpdates" Default: true, }, @@ -170,6 +170,13 @@ func resourceWindowsVirtualMachine() *schema.Resource { }, false), }, + "extensions_time_budget": { + Type: schema.TypeString, + Optional: true, + Default: "PT1H30M", + ValidateFunc: azValidate.ISO8601DurationBetween("PT15M", "PT2H"), + }, + "identity": virtualMachineIdentitySchema(), "license_type": { @@ -196,6 +203,18 @@ func resourceWindowsVirtualMachine() *schema.Resource { ValidateFunc: validation.FloatAtLeast(-1.0), }, + // This is a preview feature: `az feature register -n InGuestAutoPatchVMPreview --namespace Microsoft.Compute` + "patch_mode": { + Type: schema.TypeString, + Optional: true, + Default: string(compute.AutomaticByOS), + ValidateFunc: validation.StringInSlice([]string{ + string(compute.AutomaticByOS), + string(compute.AutomaticByPlatform), + string(compute.Manual), + }, false), + }, + "plan": planSchema(), "priority": { @@ -335,8 +354,10 @@ func resourceWindowsVirtualMachineCreate(d *schema.ResourceData, meta interface{ adminPassword := d.Get("admin_password").(string) adminUsername := d.Get("admin_username").(string) allowExtensionOperations := d.Get("allow_extension_operations").(bool) + bootDiagnosticsRaw := d.Get("boot_diagnostics").([]interface{}) bootDiagnostics := expandBootDiagnostics(bootDiagnosticsRaw) + var computerName string if v, ok := d.GetOk("computer_name"); ok && len(v.(string)) > 0 { computerName = v.(string) @@ -417,6 +438,7 @@ func resourceWindowsVirtualMachineCreate(d *schema.ResourceData, meta interface{ // Optional AdditionalCapabilities: additionalCapabilities, DiagnosticsProfile: bootDiagnostics, + ExtensionsTimeBudget: utils.String(d.Get("extensions_time_budget").(string)), }, Tags: tags.Expand(t), } @@ -429,6 +451,13 @@ func resourceWindowsVirtualMachineCreate(d *schema.ResourceData, meta interface{ params.OsProfile.WindowsConfiguration.AdditionalUnattendContent = additionalUnattendContent } + patchMode := d.Get("patch_mode").(string) + if patchMode != string(compute.AutomaticByOS) { + params.OsProfile.WindowsConfiguration.PatchSettings = &compute.PatchSettings{ + PatchMode: compute.InGuestPatchMode(patchMode), + } + } + if v, ok := d.GetOk("availability_set_id"); ok { params.AvailabilitySet = &compute.SubResource{ ID: utils.String(v.(string)), @@ -582,6 +611,12 @@ func resourceWindowsVirtualMachineRead(d *schema.ResourceData, meta interface{}) } d.Set("license_type", props.LicenseType) + extensionsTimeBudget := "PT1H30M" + if props.ExtensionsTimeBudget != nil { + extensionsTimeBudget = *props.ExtensionsTimeBudget + } + d.Set("extensions_time_budget", extensionsTimeBudget) + // defaulted since BillingProfile isn't returned if it's unset maxBidPrice := float64(-1.0) if props.BillingProfile != nil && props.BillingProfile.MaxPrice != nil { @@ -620,6 +655,11 @@ func resourceWindowsVirtualMachineRead(d *schema.ResourceData, meta interface{}) d.Set("enable_automatic_updates", config.EnableAutomaticUpdates) d.Set("provision_vm_agent", config.ProvisionVMAgent) + + if patchSettings := config.PatchSettings; patchSettings != nil { + d.Set("patch_mode", patchSettings.PatchMode) + } + d.Set("timezone", config.TimeZone) if err := d.Set("winrm_listener", flattenWinRMListener(config.WinRM)); err != nil { @@ -773,6 +813,22 @@ func resourceWindowsVirtualMachineUpdate(d *schema.ResourceData, meta interface{ update.OsProfile.AllowExtensionOperations = utils.Bool(allowExtensionOperations) } + if d.HasChange("patch_mode") { + shouldUpdate = true + + if update.OsProfile == 
nil { + update.OsProfile = &compute.OSProfile{} + } + + if update.OsProfile.WindowsConfiguration == nil { + update.OsProfile.WindowsConfiguration = &compute.WindowsConfiguration{} + } + + update.OsProfile.WindowsConfiguration.PatchSettings = &compute.PatchSettings{ + PatchMode: compute.InGuestPatchMode(d.Get("patch_mode").(string)), + } + } + if d.HasChange("identity") { shouldUpdate = true @@ -784,6 +840,26 @@ func resourceWindowsVirtualMachineUpdate(d *schema.ResourceData, meta interface{ update.Identity = identity } + if d.HasChange("dedicated_host_id") { + shouldUpdate = true + + // Code="PropertyChangeNotAllowed" Message="Updating Host of VM 'VMNAME' is not allowed as the VM is currently allocated. Please Deallocate the VM and retry the operation." + shouldDeallocate = true + + if v, ok := d.GetOk("dedicated_host_id"); ok { + update.Host = &compute.SubResource{ + ID: utils.String(v.(string)), + } + } else { + update.Host = &compute.SubResource{} + } + } + + if d.HasChange("extensions_time_budget") { + shouldUpdate = true + update.ExtensionsTimeBudget = utils.String(d.Get("extensions_time_budget").(string)) + } + if d.HasChange("max_bid_price") { shouldUpdate = true @@ -1098,10 +1174,8 @@ func resourceWindowsVirtualMachineDelete(d *schema.ResourceData, meta interface{ // ISSUE: XXX // shutting down the Virtual Machine prior to removing it means users are no longer charged for the compute // thus this can be a large cost-saving when deleting larger instances - // in addition - since we're shutting down the machine to remove it, forcing a power-off is fine (as opposed - // to waiting for a graceful shut down) log.Printf("[DEBUG] Powering Off Windows Virtual Machine %q (Resource Group %q)..", id.Name, id.ResourceGroup) - skipShutdown := true + skipShutdown := !meta.(*clients.Client).Features.VirtualMachine.GracefulShutdown powerOffFuture, err := client.PowerOff(ctx, id.ResourceGroup, id.Name, utils.Bool(skipShutdown)) if err != nil { return fmt.Errorf("powering off Windows Virtual Machine %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) @@ -1138,19 +1212,19 @@ func resourceWindowsVirtualMachineDelete(d *schema.ResourceData, meta interface{ return err } - diskDeleteFuture, err := disksClient.Delete(ctx, diskId.ResourceGroup, diskId.Name) + diskDeleteFuture, err := disksClient.Delete(ctx, diskId.ResourceGroup, diskId.DiskName) if err != nil { if !response.WasNotFound(diskDeleteFuture.Response()) { - return fmt.Errorf("deleting OS Disk %q (Resource Group %q) for Windows Virtual Machine %q (Resource Group %q): %+v", diskId.Name, diskId.ResourceGroup, id.Name, id.ResourceGroup, err) + return fmt.Errorf("deleting OS Disk %q (Resource Group %q) for Windows Virtual Machine %q (Resource Group %q): %+v", diskId.DiskName, diskId.ResourceGroup, id.Name, id.ResourceGroup, err) } } if !response.WasNotFound(diskDeleteFuture.Response()) { if err := diskDeleteFuture.WaitForCompletionRef(ctx, disksClient.Client); err != nil { - return fmt.Errorf("OS Disk %q (Resource Group %q) for Windows Virtual Machine %q (Resource Group %q): %+v", diskId.Name, diskId.ResourceGroup, id.Name, id.ResourceGroup, err) + return fmt.Errorf("OS Disk %q (Resource Group %q) for Windows Virtual Machine %q (Resource Group %q): %+v", diskId.DiskName, diskId.ResourceGroup, id.Name, id.ResourceGroup, err) } } - log.Printf("[DEBUG] Deleted OS Disk from Windows Virtual Machine %q (Resource Group %q).", diskId.Name, diskId.ResourceGroup) + log.Printf("[DEBUG] Deleted OS Disk from Windows Virtual Machine %q (Resource Group 
%q).", diskId.DiskName, diskId.ResourceGroup) } else { log.Printf("[DEBUG] Skipping Deleting OS Disk from Windows Virtual Machine %q (Resource Group %q) - cannot determine OS Disk ID.", id.Name, id.ResourceGroup) } @@ -1179,7 +1253,6 @@ func resourceWindowsVirtualMachineDelete(d *schema.ResourceData, meta interface{ Refresh: func() (interface{}, string, error) { log.Printf("[INFO] checking on state of Windows Virtual Machine %q", id.Name) resp, err := client.Get(ctx, id.ResourceGroup, id.Name, "") - if err != nil { if utils.ResponseWasNotFound(resp.Response) { return resp, strconv.Itoa(resp.StatusCode), nil diff --git a/azurerm/internal/services/compute/windows_virtual_machine_scale_set_resource.go b/azurerm/internal/services/compute/windows_virtual_machine_scale_set_resource.go index 2b8446117acb..728dd7d11c81 100644 --- a/azurerm/internal/services/compute/windows_virtual_machine_scale_set_resource.go +++ b/azurerm/internal/services/compute/windows_virtual_machine_scale_set_resource.go @@ -181,6 +181,13 @@ func resourceArmWindowsVirtualMachineScaleSet() *schema.Resource { "plan": planSchema(), + "platform_fault_domain_count": { + Type: schema.TypeInt, + Optional: true, + ForceNew: true, + Computed: true, + }, + "priority": { Type: schema.TypeString, Optional: true, @@ -311,7 +318,11 @@ func resourceArmWindowsVirtualMachineScaleSetCreate(d *schema.ResourceData, meta bootDiagnostics := expandBootDiagnostics(bootDiagnosticsRaw) dataDisksRaw := d.Get("data_disk").([]interface{}) - dataDisks := ExpandVirtualMachineScaleSetDataDisk(dataDisksRaw) + ultraSSDEnabled := d.Get("additional_capabilities.0.ultra_ssd_enabled").(bool) + dataDisks, err := ExpandVirtualMachineScaleSetDataDisk(dataDisksRaw, ultraSSDEnabled) + if err != nil { + return fmt.Errorf("expanding `data_disk`: %+v", err) + } identityRaw := d.Get("identity").([]interface{}) identity, err := ExpandVirtualMachineScaleSetIdentity(identityRaw) @@ -511,6 +522,10 @@ func resourceArmWindowsVirtualMachineScaleSetCreate(d *schema.ResourceData, meta Zones: zones, } + if v, ok := d.GetOk("platform_fault_domain_count"); ok { + props.VirtualMachineScaleSetProperties.PlatformFaultDomainCount = utils.Int32(int32(v.(int))) + } + if v, ok := d.GetOk("proximity_placement_group_id"); ok { props.VirtualMachineScaleSetProperties.ProximityPlacementGroup = &compute.SubResource{ ID: utils.String(v.(string)), @@ -693,8 +708,12 @@ func resourceArmWindowsVirtualMachineScaleSetUpdate(d *schema.ResourceData, meta } if d.HasChange("data_disk") { - dataDisksRaw := d.Get("data_disk").([]interface{}) - updateProps.VirtualMachineProfile.StorageProfile.DataDisks = ExpandVirtualMachineScaleSetDataDisk(dataDisksRaw) + ultraSSDEnabled := d.Get("additional_capabilities.0.ultra_ssd_enabled").(bool) + dataDisks, err := ExpandVirtualMachineScaleSetDataDisk(d.Get("data_disk").([]interface{}), ultraSSDEnabled) + if err != nil { + return fmt.Errorf("expanding `data_disk`: %+v", err) + } + updateProps.VirtualMachineProfile.StorageProfile.DataDisks = dataDisks } if d.HasChange("os_disk") { @@ -896,6 +915,7 @@ func resourceArmWindowsVirtualMachineScaleSetRead(d *schema.ResourceData, meta i d.Set("do_not_run_extensions_on_overprovisioned_machines", props.DoNotRunExtensionsOnOverprovisionedVMs) d.Set("overprovision", props.Overprovision) + d.Set("platform_fault_domain_count", props.PlatformFaultDomainCount) proximityPlacementGroupId := "" if props.ProximityPlacementGroup != nil && props.ProximityPlacementGroup.ID != nil { proximityPlacementGroupId = *props.ProximityPlacementGroup.ID 
diff --git a/azurerm/internal/services/containers/client/client.go b/azurerm/internal/services/containers/client/client.go index 49ad4b4d8778..9f7c8b51e476 100644 --- a/azurerm/internal/services/containers/client/client.go +++ b/azurerm/internal/services/containers/client/client.go @@ -3,7 +3,7 @@ package client import ( "github.com/Azure/azure-sdk-for-go/services/containerinstance/mgmt/2019-12-01/containerinstance" "github.com/Azure/azure-sdk-for-go/services/containerregistry/mgmt/2019-05-01/containerregistry" - "github.com/Azure/azure-sdk-for-go/services/containerservice/mgmt/2020-04-01/containerservice" + "github.com/Azure/azure-sdk-for-go/services/containerservice/mgmt/2020-09-01/containerservice" "github.com/Azure/go-autorest/autorest/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/common" ) diff --git a/azurerm/internal/services/containers/container_group_resource.go b/azurerm/internal/services/containers/container_group_resource.go index e8c987563d4e..1c4741412630 100644 --- a/azurerm/internal/services/containers/container_group_resource.go +++ b/azurerm/internal/services/containers/container_group_resource.go @@ -345,6 +345,13 @@ func resourceArmContainerGroup() *schema.Resource { ValidateFunc: validation.StringIsNotEmpty, }, + "empty_dir": { + Type: schema.TypeBool, + Optional: true, + ForceNew: true, + Default: false, + }, + "git_repo": { Type: schema.TypeList, Optional: true, @@ -372,6 +379,16 @@ func resourceArmContainerGroup() *schema.Resource { }, }, }, + + "secret": { + Type: schema.TypeMap, + ForceNew: true, + Optional: true, + Sensitive: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, }, }, }, @@ -592,7 +609,6 @@ func resourceArmContainerGroupRead(d *schema.ResourceData, meta interface{}) err name := id.Path["containerGroups"] resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { if utils.ResponseWasNotFound(resp.Response) { log.Printf("[DEBUG] Container Group %q was not found in Resource Group %q - removing from state!", name, resourceGroup) @@ -962,6 +978,7 @@ func expandContainerVolumes(input interface{}) (*[]containerinstance.VolumeMount name := volumeConfig["name"].(string) mountPath := volumeConfig["mount_path"].(string) readOnly := volumeConfig["read_only"].(bool) + emptyDir := volumeConfig["empty_dir"].(bool) shareName := volumeConfig["share_name"].(string) storageAccountName := volumeConfig["storage_account_name"].(string) storageAccountKey := volumeConfig["storage_account_key"].(string) @@ -978,15 +995,29 @@ func expandContainerVolumes(input interface{}) (*[]containerinstance.VolumeMount Name: utils.String(name), } + secret := expandSecrets(volumeConfig["secret"].(map[string]interface{})) + gitRepoVolume := expandGitRepoVolume(volumeConfig["git_repo"].([]interface{})) - if gitRepoVolume != nil { - if shareName != "" || storageAccountName != "" || storageAccountKey != "" { - return nil, nil, fmt.Errorf("only one of `git_repo` volume or `share_name`, `storage_account_name`, and `storage_account_key` can be specified") + + switch { + case emptyDir: + if shareName != "" || storageAccountName != "" || storageAccountKey != "" || secret != nil || gitRepoVolume != nil { + return nil, nil, fmt.Errorf("only one of `empty_dir` volume, `git_repo` volume, `secret` volume or storage account volume (`share_name`, `storage_account_name`, and `storage_account_key`) can be specified") + } + cv.EmptyDir = map[string]string{} + case gitRepoVolume != nil: + if shareName != "" || storageAccountName != "" || 
storageAccountKey != "" || secret != nil { + return nil, nil, fmt.Errorf("only one of `empty_dir` volume, `git_repo` volume, `secret` volume or storage account volume (`share_name`, `storage_account_name`, and `storage_account_key`) can be specified") } cv.GitRepo = gitRepoVolume - } else { + case secret != nil: + if shareName != "" || storageAccountName != "" || storageAccountKey != "" { + return nil, nil, fmt.Errorf("only one of `empty_dir` volume, `git_repo` volume, `secret` volume or storage account volume (`share_name`, `storage_account_name`, and `storage_account_key`) can be specified") + } + cv.Secret = secret + default: if shareName == "" && storageAccountName == "" && storageAccountKey == "" { - return nil, nil, fmt.Errorf("one of `git_repo` or `share_name`, `storage_account_name`, and `storage_account_key` must be specified") + return nil, nil, fmt.Errorf("only one of `empty_dir` volume, `git_repo` volume, `secret` volume or storage account volume (`share_name`, `storage_account_name`, and `storage_account_key`) can be specified") } else if shareName == "" || storageAccountName == "" || storageAccountKey == "" { return nil, nil, fmt.Errorf("when using a storage account volume, all of `share_name`, `storage_account_name`, `storage_account_key` must be specified") } @@ -1021,6 +1052,19 @@ func expandGitRepoVolume(input []interface{}) *containerinstance.GitRepoVolume { return gitRepoVolume } +func expandSecrets(secretsMap map[string]interface{}) map[string]*string { + if len(secretsMap) == 0 { + return nil + } + output := make(map[string]*string, len(secretsMap)) + + for name, value := range secretsMap { + output[name] = utils.String(value.(string)) + } + + return output +} + func expandContainerProbe(input interface{}) *containerinstance.ContainerProbe { probe := containerinstance.ContainerProbe{} probeRaw := input.([]interface{}) @@ -1309,6 +1353,10 @@ func flattenContainerVolumes(volumeMounts *[]containerinstance.VolumeMount, cont // skip storage_account_key, is always nil } + if cgv.EmptyDir != nil { + volumeConfig["empty_dir"] = true + } + volumeConfig["git_repo"] = flattenGitRepoVolume(cgv.GitRepo) } } @@ -1323,6 +1371,7 @@ func flattenContainerVolumes(volumeMounts *[]containerinstance.VolumeMount, cont if vm.Name != nil && *vm.Name == rawName { storageAccountKey := cv["storage_account_key"].(string) volumeConfig["storage_account_key"] = storageAccountKey + volumeConfig["secret"] = cv["secret"] } } } @@ -1509,21 +1558,21 @@ func flattenContainerGroupDnsConfig(input *containerinstance.DNSConfiguration) [ return make([]interface{}, 0) } - //We're converting to TypeSet here from an API response that looks like "a b c" (assumes space delimited) + // We're converting to TypeSet here from an API response that looks like "a b c" (assumes space delimited) var searchDomains []string if input.SearchDomains != nil { searchDomains = strings.Split(*input.SearchDomains, " ") } output["search_domains"] = searchDomains - //We're converting to TypeSet here from an API response that looks like "a b c" (assumes space delimited) + // We're converting to TypeSet here from an API response that looks like "a b c" (assumes space delimited) var options []string if input.Options != nil { options = strings.Split(*input.Options, " ") } output["options"] = options - //Nameservers is already an array from the API + // Nameservers is already an array from the API var nameservers []string if input.NameServers != nil { nameservers = *input.NameServers diff --git 
a/azurerm/internal/services/containers/kubernetes_addons.go b/azurerm/internal/services/containers/kubernetes_addons.go index d101b83deae6..9cb8d188e349 100644 --- a/azurerm/internal/services/containers/kubernetes_addons.go +++ b/azurerm/internal/services/containers/kubernetes_addons.go @@ -4,12 +4,14 @@ import ( "fmt" "strings" - "github.com/Azure/azure-sdk-for-go/services/containerservice/mgmt/2020-04-01/containerservice" + "github.com/Azure/azure-sdk-for-go/services/containerservice/mgmt/2020-09-01/containerservice" "github.com/Azure/go-autorest/autorest/azure" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" azureHelpers "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" + + laparse "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" ) const ( @@ -99,13 +101,11 @@ func schemaKubernetesAddOnProfiles() *schema.Schema { "http_application_routing": { Type: schema.TypeList, MaxItems: 1, - ForceNew: true, Optional: true, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "enabled": { Type: schema.TypeBool, - ForceNew: true, Required: true, }, "http_application_routing_zone_name": { @@ -195,7 +195,11 @@ func expandKubernetesAddOnProfiles(input []interface{}, env azure.Environment) ( enabled := value["enabled"].(bool) if workspaceID, ok := value["log_analytics_workspace_id"]; ok && workspaceID != "" { - config["logAnalyticsWorkspaceResourceID"] = utils.String(workspaceID.(string)) + lawid, err := laparse.LogAnalyticsWorkspaceID(workspaceID.(string)) + if err != nil { + return nil, fmt.Errorf("parsing Log Analytics Workspace ID: %+v", err) + } + config["logAnalyticsWorkspaceResourceID"] = utils.String(lawid.ID()) } addonProfiles[omsAgentKey] = &containerservice.ManagedClusterAddonProfile{ @@ -248,7 +252,7 @@ func expandKubernetesAddOnProfiles(input []interface{}, env azure.Environment) ( } func filterUnsupportedKubernetesAddOns(input map[string]*containerservice.ManagedClusterAddonProfile, env azure.Environment) (*map[string]*containerservice.ManagedClusterAddonProfile, error) { - var filter = func(input map[string]*containerservice.ManagedClusterAddonProfile, key string) (*map[string]*containerservice.ManagedClusterAddonProfile, error) { + filter := func(input map[string]*containerservice.ManagedClusterAddonProfile, key string) (*map[string]*containerservice.ManagedClusterAddonProfile, error) { output := input if v, ok := output[key]; ok { if v.Enabled != nil && *v.Enabled { @@ -346,7 +350,9 @@ func flattenKubernetesAddOnProfiles(profile map[string]*containerservice.Managed workspaceID := "" if v := kubernetesAddonProfilelocateInConfig(omsAgent.Config, "logAnalyticsWorkspaceResourceID"); v != nil { - workspaceID = *v + if lawid, err := laparse.LogAnalyticsWorkspaceID(*v); err == nil { + workspaceID = lawid.ID() + } } omsagentIdentity := flattenKubernetesClusterOmsAgentIdentityProfile(omsAgent.Identity) diff --git a/azurerm/internal/services/containers/kubernetes_cluster_data_source.go b/azurerm/internal/services/containers/kubernetes_cluster_data_source.go index 2ded0e8396a1..3d84017497c4 100644 --- a/azurerm/internal/services/containers/kubernetes_cluster_data_source.go +++ b/azurerm/internal/services/containers/kubernetes_cluster_data_source.go @@ -5,7 +5,7 @@ import ( "strings" "time" - 
"github.com/Azure/azure-sdk-for-go/services/containerservice/mgmt/2020-04-01/containerservice" + "github.com/Azure/azure-sdk-for-go/services/containerservice/mgmt/2020-09-01/containerservice" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" @@ -242,16 +242,13 @@ func dataSourceArmKubernetesCluster() *schema.Resource { }, "private_link_enabled": { - Type: schema.TypeBool, - Computed: true, - ConflictsWith: []string{"private_cluster_enabled"}, - Deprecated: "Deprecated in favor of `private_cluster_enabled`", // TODO -- remove this in next major version + Type: schema.TypeBool, + Computed: true, }, "private_cluster_enabled": { - Type: schema.TypeBool, - Computed: true, // TODO -- remove this when deprecation resolves - ConflictsWith: []string{"private_link_enabled"}, + Type: schema.TypeBool, + Computed: true, // TODO -- remove this when deprecation resolves }, "private_fqdn": { @@ -719,7 +716,6 @@ func flattenKubernetesClusterDataSourceAccessProfile(profile containerservice.Ma if strings.Contains(rawConfig, "apiserver-id:") { kubeConfigAAD, err := kubernetes.ParseKubeConfigAAD(rawConfig) - if err != nil { return utils.String(rawConfig), []interface{}{} } @@ -727,7 +723,6 @@ func flattenKubernetesClusterDataSourceAccessProfile(profile containerservice.Ma flattenedKubeConfig = flattenKubernetesClusterDataSourceKubeConfigAAD(*kubeConfigAAD) } else { kubeConfig, err := kubernetes.ParseKubeConfig(rawConfig) - if err != nil { return utils.String(rawConfig), []interface{}{} } @@ -1085,7 +1080,7 @@ func flattenKubernetesClusterDataSourceKubeConfigAAD(config kubernetes.KubeConfi func flattenKubernetesClusterDataSourceManagedClusterIdentity(input *containerservice.ManagedClusterIdentity) []interface{} { // if it's none, omit the block - if input == nil || input.Type == containerservice.None { + if input == nil || input.Type == containerservice.ResourceIdentityTypeNone { return []interface{}{} } diff --git a/azurerm/internal/services/containers/kubernetes_cluster_node_pool_data_source.go b/azurerm/internal/services/containers/kubernetes_cluster_node_pool_data_source.go index 1e666a44310a..4b9af295ceeb 100644 --- a/azurerm/internal/services/containers/kubernetes_cluster_node_pool_data_source.go +++ b/azurerm/internal/services/containers/kubernetes_cluster_node_pool_data_source.go @@ -4,7 +4,7 @@ import ( "fmt" "time" - "github.com/Azure/azure-sdk-for-go/services/containerservice/mgmt/2020-04-01/containerservice" + "github.com/Azure/azure-sdk-for-go/services/containerservice/mgmt/2020-09-01/containerservice" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" @@ -113,6 +113,11 @@ func dataSourceKubernetesClusterNodePool() *schema.Resource { Computed: true, }, + "os_disk_type": { + Type: schema.TypeString, + Computed: true, + }, + "os_type": { Type: schema.TypeString, Computed: true, @@ -123,6 +128,11 @@ func dataSourceKubernetesClusterNodePool() *schema.Resource { Computed: true, }, + "proximity_placement_group_id": { + Type: schema.TypeString, + Computed: true, + }, + "spot_max_price": { Type: schema.TypeFloat, Computed: true, @@ -239,6 +249,12 @@ func dataSourceKubernetesClusterNodePoolRead(d *schema.ResourceData, meta interf osDiskSizeGB = int(*props.OsDiskSizeGB) } 
d.Set("os_disk_size_gb", osDiskSizeGB) + + osDiskType := containerservice.Managed + if props.OsDiskType != "" { + osDiskType = props.OsDiskType + } + d.Set("os_disk_type", string(osDiskType)) d.Set("os_type", string(props.OsType)) // not returned from the API if not Spot @@ -248,6 +264,12 @@ func dataSourceKubernetesClusterNodePoolRead(d *schema.ResourceData, meta interf } d.Set("priority", priority) + proximityPlacementGroupId := "" + if props.ProximityPlacementGroupID != nil { + proximityPlacementGroupId = *props.ProximityPlacementGroupID + } + d.Set("proximity_placement_group_id", proximityPlacementGroupId) + spotMaxPrice := -1.0 if props.SpotMaxPrice != nil { spotMaxPrice = *props.SpotMaxPrice diff --git a/azurerm/internal/services/containers/kubernetes_cluster_node_pool_resource.go b/azurerm/internal/services/containers/kubernetes_cluster_node_pool_resource.go index 1ed375caad6f..0c14323310c2 100644 --- a/azurerm/internal/services/containers/kubernetes_cluster_node_pool_resource.go +++ b/azurerm/internal/services/containers/kubernetes_cluster_node_pool_resource.go @@ -6,7 +6,7 @@ import ( "strings" "time" - "github.com/Azure/azure-sdk-for-go/services/containerservice/mgmt/2020-04-01/containerservice" + "github.com/Azure/azure-sdk-for-go/services/containerservice/mgmt/2020-09-01/containerservice" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" @@ -30,7 +30,7 @@ func resourceArmKubernetesClusterNodePool() *schema.Resource { Delete: resourceArmKubernetesClusterNodePoolDelete, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.KubernetesNodePoolID(id) + _, err := parse.NodePoolID(id) return err }), @@ -53,14 +53,14 @@ func resourceArmKubernetesClusterNodePool() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: containerValidate.KubernetesClusterID, + ValidateFunc: containerValidate.ClusterID, }, "node_count": { Type: schema.TypeInt, Optional: true, Computed: true, - ValidateFunc: validation.IntBetween(0, 100), + ValidateFunc: validation.IntBetween(0, 1000), }, "tags": tags.Schema(), @@ -76,6 +76,7 @@ func resourceArmKubernetesClusterNodePool() *schema.Resource { "availability_zones": { Type: schema.TypeList, Optional: true, + ForceNew: true, Elem: &schema.Schema{ Type: schema.TypeString, }, @@ -104,7 +105,7 @@ func resourceArmKubernetesClusterNodePool() *schema.Resource { "max_count": { Type: schema.TypeInt, Optional: true, - ValidateFunc: validation.IntBetween(0, 100), + ValidateFunc: validation.IntBetween(0, 1000), }, "max_pods": { @@ -128,7 +129,7 @@ func resourceArmKubernetesClusterNodePool() *schema.Resource { Type: schema.TypeInt, Optional: true, // NOTE: rather than setting `0` users should instead pass `null` here - ValidateFunc: validation.IntBetween(0, 100), + ValidateFunc: validation.IntBetween(0, 1000), }, "node_labels": { @@ -164,6 +165,17 @@ func resourceArmKubernetesClusterNodePool() *schema.Resource { ValidateFunc: validation.IntAtLeast(1), }, + "os_disk_type": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Default: containerservice.Managed, + ValidateFunc: validation.StringInSlice([]string{ + string(containerservice.Ephemeral), + string(containerservice.Managed), + }, false), + }, + "os_type": { Type: schema.TypeString, Optional: true, @@ -186,6 +198,13 @@ func resourceArmKubernetesClusterNodePool() *schema.Resource { 
}, false), }, + "proximity_placement_group_id": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: computeValidate.ProximityPlacementGroupID, + }, + "spot_max_price": { Type: schema.TypeFloat, Optional: true, @@ -211,13 +230,13 @@ func resourceArmKubernetesClusterNodePoolCreate(d *schema.ResourceData, meta int ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() - kubernetesClusterId, err := parse.KubernetesClusterID(d.Get("kubernetes_cluster_id").(string)) + kubernetesClusterId, err := parse.ClusterID(d.Get("kubernetes_cluster_id").(string)) if err != nil { return err } resourceGroup := kubernetesClusterId.ResourceGroup - clusterName := kubernetesClusterId.Name + clusterName := kubernetesClusterId.ManagedClusterName name := d.Get("name").(string) log.Printf("[DEBUG] Retrieving Kubernetes Cluster %q (Resource Group %q)..", clusterName, resourceGroup) @@ -326,6 +345,15 @@ func resourceArmKubernetesClusterNodePoolCreate(d *schema.ResourceData, meta int profile.OsDiskSizeGB = utils.Int32(int32(osDiskSizeGB)) } + proximityPlacementGroupId := d.Get("proximity_placement_group_id").(string) + if proximityPlacementGroupId != "" { + profile.ProximityPlacementGroupID = &proximityPlacementGroupId + } + + if osDiskType := d.Get("os_disk_type").(string); osDiskType != "" { + profile.OsDiskType = containerservice.OSDiskType(osDiskType) + } + if vnetSubnetID := d.Get("vnet_subnet_id").(string); vnetSubnetID != "" { profile.VnetSubnetID = utils.String(vnetSubnetID) } @@ -392,24 +420,24 @@ func resourceArmKubernetesClusterNodePoolUpdate(d *schema.ResourceData, meta int ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.KubernetesNodePoolID(d.Id()) + id, err := parse.NodePoolID(d.Id()) if err != nil { return err } d.Partial(true) - log.Printf("[DEBUG] Retrieving existing Node Pool %q (Kubernetes Cluster %q / Resource Group %q)..", id.Name, id.ClusterName, id.ResourceGroup) - existing, err := client.Get(ctx, id.ResourceGroup, id.ClusterName, id.Name) + log.Printf("[DEBUG] Retrieving existing Node Pool %q (Kubernetes Cluster %q / Resource Group %q)..", id.AgentPoolName, id.ManagedClusterName, id.ResourceGroup) + existing, err := client.Get(ctx, id.ResourceGroup, id.ManagedClusterName, id.AgentPoolName) if err != nil { if utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Node Pool %q was not found in Managed Kubernetes Cluster %q / Resource Group %q!", id.Name, id.ClusterName, id.ResourceGroup) + return fmt.Errorf("Node Pool %q was not found in Managed Kubernetes Cluster %q / Resource Group %q!", id.AgentPoolName, id.ManagedClusterName, id.ResourceGroup) } - return fmt.Errorf("retrieving Node Pool %q (Managed Kubernetes Cluster %q / Resource Group %q): %+v", id.Name, id.ClusterName, id.ResourceGroup, err) + return fmt.Errorf("retrieving Node Pool %q (Managed Kubernetes Cluster %q / Resource Group %q): %+v", id.AgentPoolName, id.ManagedClusterName, id.ResourceGroup, err) } if existing.ManagedClusterAgentPoolProfileProperties == nil { - return fmt.Errorf("retrieving Node Pool %q (Managed Kubernetes Cluster %q / Resource Group %q): `properties` was nil", id.Name, id.ClusterName, id.ResourceGroup) + return fmt.Errorf("retrieving Node Pool %q (Managed Kubernetes Cluster %q / Resource Group %q): `properties` was nil", id.AgentPoolName, id.ManagedClusterName, id.ResourceGroup) } props := existing.ManagedClusterAgentPoolProfileProperties @@ -420,7 +448,7 @@ func 
resourceArmKubernetesClusterNodePoolUpdate(d *schema.ResourceData, meta int enableAutoScaling = *props.EnableAutoScaling } - log.Printf("[DEBUG] Determining delta for existing Node Pool %q (Kubernetes Cluster %q / Resource Group %q)..", id.Name, id.ClusterName, id.ResourceGroup) + log.Printf("[DEBUG] Determining delta for existing Node Pool %q (Kubernetes Cluster %q / Resource Group %q)..", id.AgentPoolName, id.ManagedClusterName, id.ResourceGroup) // delta patching if d.HasChange("availability_zones") { @@ -466,7 +494,7 @@ func resourceArmKubernetesClusterNodePoolUpdate(d *schema.ResourceData, meta int } orchestratorVersion := d.Get("orchestrator_version").(string) - if err := validateNodePoolSupportsVersion(ctx, containersClient, id.ResourceGroup, id.ClusterName, id.Name, orchestratorVersion); err != nil { + if err := validateNodePoolSupportsVersion(ctx, containersClient, id.ResourceGroup, id.ManagedClusterName, id.AgentPoolName, orchestratorVersion); err != nil { return err } @@ -505,15 +533,15 @@ func resourceArmKubernetesClusterNodePoolUpdate(d *schema.ResourceData, meta int props.MinCount = nil } - log.Printf("[DEBUG] Updating existing Node Pool %q (Kubernetes Cluster %q / Resource Group %q)..", id.Name, id.ClusterName, id.ResourceGroup) + log.Printf("[DEBUG] Updating existing Node Pool %q (Kubernetes Cluster %q / Resource Group %q)..", id.AgentPoolName, id.ManagedClusterName, id.ResourceGroup) existing.ManagedClusterAgentPoolProfileProperties = props - future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.ClusterName, id.Name, existing) + future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.ManagedClusterName, id.AgentPoolName, existing) if err != nil { - return fmt.Errorf("updating Node Pool %q (Kubernetes Cluster %q / Resource Group %q): %+v", id.Name, id.ClusterName, id.ResourceGroup, err) + return fmt.Errorf("updating Node Pool %q (Kubernetes Cluster %q / Resource Group %q): %+v", id.AgentPoolName, id.ManagedClusterName, id.ResourceGroup, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("waiting for update of Node Pool %q (Kubernetes Cluster %q / Resource Group %q): %+v", id.Name, id.ClusterName, id.ResourceGroup, err) + return fmt.Errorf("waiting for update of Node Pool %q (Kubernetes Cluster %q / Resource Group %q): %+v", id.AgentPoolName, id.ManagedClusterName, id.ResourceGroup, err) } d.Partial(false) @@ -527,35 +555,35 @@ func resourceArmKubernetesClusterNodePoolRead(d *schema.ResourceData, meta inter ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.KubernetesNodePoolID(d.Id()) + id, err := parse.NodePoolID(d.Id()) if err != nil { return err } // if the parent cluster doesn't exist then the node pool won't - cluster, err := clustersClient.Get(ctx, id.ResourceGroup, id.ClusterName) + cluster, err := clustersClient.Get(ctx, id.ResourceGroup, id.ManagedClusterName) if err != nil { if utils.ResponseWasNotFound(cluster.Response) { - log.Printf("[DEBUG] Managed Kubernetes Cluster %q was not found in Resource Group %q - removing from state!", id.ClusterName, id.ResourceGroup) + log.Printf("[DEBUG] Managed Kubernetes Cluster %q was not found in Resource Group %q - removing from state!", id.ManagedClusterName, id.ResourceGroup) d.SetId("") return nil } - return fmt.Errorf("retrieving Managed Kubernetes Cluster %q (Resource Group %q): %+v", id.ClusterName, id.ResourceGroup, err) + return fmt.Errorf("retrieving Managed Kubernetes Cluster %q (Resource Group %q): 
%+v", id.ManagedClusterName, id.ResourceGroup, err) } - resp, err := poolsClient.Get(ctx, id.ResourceGroup, id.ClusterName, id.Name) + resp, err := poolsClient.Get(ctx, id.ResourceGroup, id.ManagedClusterName, id.AgentPoolName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[DEBUG] Node Pool %q was not found in Managed Kubernetes Cluster %q / Resource Group %q - removing from state!", id.Name, id.ClusterName, id.ResourceGroup) + log.Printf("[DEBUG] Node Pool %q was not found in Managed Kubernetes Cluster %q / Resource Group %q - removing from state!", id.AgentPoolName, id.ManagedClusterName, id.ResourceGroup) d.SetId("") return nil } - return fmt.Errorf("retrieving Node Pool %q (Managed Kubernetes Cluster %q / Resource Group %q): %+v", id.Name, id.ClusterName, id.ResourceGroup, err) + return fmt.Errorf("retrieving Node Pool %q (Managed Kubernetes Cluster %q / Resource Group %q): %+v", id.AgentPoolName, id.ManagedClusterName, id.ResourceGroup, err) } - d.Set("name", id.Name) + d.Set("name", id.AgentPoolName) d.Set("kubernetes_cluster_id", cluster.ID) if props := resp.ManagedClusterAgentPoolProfileProperties; props != nil { @@ -616,6 +644,12 @@ func resourceArmKubernetesClusterNodePoolRead(d *schema.ResourceData, meta inter osDiskSizeGB = int(*props.OsDiskSizeGB) } d.Set("os_disk_size_gb", osDiskSizeGB) + + osDiskType := containerservice.Managed + if props.OsDiskType != "" { + osDiskType = props.OsDiskType + } + d.Set("os_disk_type", osDiskType) d.Set("os_type", string(props.OsType)) // not returned from the API if not Spot @@ -625,6 +659,8 @@ func resourceArmKubernetesClusterNodePoolRead(d *schema.ResourceData, meta inter } d.Set("priority", priority) + d.Set("proximity_placement_group_id", props.ProximityPlacementGroupID) + spotMaxPrice := -1.0 if props.SpotMaxPrice != nil { spotMaxPrice = *props.SpotMaxPrice @@ -643,18 +679,18 @@ func resourceArmKubernetesClusterNodePoolDelete(d *schema.ResourceData, meta int ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.KubernetesNodePoolID(d.Id()) + id, err := parse.NodePoolID(d.Id()) if err != nil { return err } - future, err := client.Delete(ctx, id.ResourceGroup, id.ClusterName, id.Name) + future, err := client.Delete(ctx, id.ResourceGroup, id.ManagedClusterName, id.AgentPoolName) if err != nil { - return fmt.Errorf("deleting Node Pool %q (Managed Kubernetes Cluster %q / Resource Group %q): %+v", id.Name, id.ClusterName, id.ResourceGroup, err) + return fmt.Errorf("deleting Node Pool %q (Managed Kubernetes Cluster %q / Resource Group %q): %+v", id.AgentPoolName, id.ManagedClusterName, id.ResourceGroup, err) } if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("waiting for the deletion of Node Pool %q (Managed Kubernetes Cluster %q / Resource Group %q): %+v", id.Name, id.ClusterName, id.ResourceGroup, err) + return fmt.Errorf("waiting for the deletion of Node Pool %q (Managed Kubernetes Cluster %q / Resource Group %q): %+v", id.AgentPoolName, id.ManagedClusterName, id.ResourceGroup, err) } return nil diff --git a/azurerm/internal/services/containers/kubernetes_cluster_resource.go b/azurerm/internal/services/containers/kubernetes_cluster_resource.go index 551a6346452e..f61b72c321b4 100644 --- a/azurerm/internal/services/containers/kubernetes_cluster_resource.go +++ b/azurerm/internal/services/containers/kubernetes_cluster_resource.go @@ -7,7 +7,7 @@ import ( "strings" "time" - 
"github.com/Azure/azure-sdk-for-go/services/containerservice/mgmt/2020-04-01/containerservice" + "github.com/Azure/azure-sdk-for-go/services/containerservice/mgmt/2020-09-01/containerservice" "github.com/hashicorp/terraform-plugin-sdk/helper/customdiff" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" @@ -34,7 +34,7 @@ func resourceArmKubernetesCluster() *schema.Resource { Delete: resourceArmKubernetesClusterDelete, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.KubernetesClusterID(id) + _, err := parse.ClusterID(id) return err }), @@ -178,7 +178,7 @@ func resourceArmKubernetesCluster() *schema.Resource { Required: true, ForceNew: true, ValidateFunc: validation.StringInSlice([]string{ - string(containerservice.SystemAssigned), + string(containerservice.ResourceIdentityTypeSystemAssigned), }, false), }, "principal_id": { @@ -414,7 +414,7 @@ func resourceArmKubernetesCluster() *schema.Resource { ForceNew: true, Computed: true, ConflictsWith: []string{"private_cluster_enabled"}, - Deprecated: "Deprecated in favor of `private_cluster_enabled`", // TODO -- remove this in next major version + Deprecated: "Deprecated in favour of `private_cluster_enabled`", // TODO -- remove this in next major version }, "private_cluster_enabled": { @@ -729,7 +729,9 @@ func resourceArmKubernetesClusterCreate(d *schema.ResourceData, meta interface{} nodeResourceGroup := d.Get("node_resource_group").(string) - enablePodSecurityPolicy := d.Get("enable_pod_security_policy").(bool) + if d.Get("enable_pod_security_policy").(bool) { + return fmt.Errorf("The AKS API has removed support for this field on 2020-10-15 and is no longer possible to configure this the Pod Security Policy - as such you'll need to set `enable_pod_security_policy` to `false`") + } autoScalerProfileRaw := d.Get("auto_scaler_profile").([]interface{}) autoScalerProfile := expandKubernetesClusterAutoScalerProfile(autoScalerProfileRaw) @@ -742,19 +744,18 @@ func resourceArmKubernetesClusterCreate(d *schema.ResourceData, meta interface{} Tier: containerservice.ManagedClusterSKUTier(d.Get("sku_tier").(string)), }, ManagedClusterProperties: &containerservice.ManagedClusterProperties{ - APIServerAccessProfile: &apiAccessProfile, - AadProfile: azureADProfile, - AddonProfiles: *addonProfiles, - AgentPoolProfiles: agentProfiles, - AutoScalerProfile: autoScalerProfile, - DNSPrefix: utils.String(dnsPrefix), - EnableRBAC: utils.Bool(rbacEnabled), - KubernetesVersion: utils.String(kubernetesVersion), - LinuxProfile: linuxProfile, - WindowsProfile: windowsProfile, - NetworkProfile: networkProfile, - NodeResourceGroup: utils.String(nodeResourceGroup), - EnablePodSecurityPolicy: utils.Bool(enablePodSecurityPolicy), + APIServerAccessProfile: &apiAccessProfile, + AadProfile: azureADProfile, + AddonProfiles: *addonProfiles, + AgentPoolProfiles: agentProfiles, + AutoScalerProfile: autoScalerProfile, + DNSPrefix: utils.String(dnsPrefix), + EnableRBAC: utils.Bool(rbacEnabled), + KubernetesVersion: utils.String(kubernetesVersion), + LinuxProfile: linuxProfile, + WindowsProfile: windowsProfile, + NetworkProfile: networkProfile, + NodeResourceGroup: utils.String(nodeResourceGroup), }, Tags: tags.Expand(t), } @@ -819,7 +820,7 @@ func resourceArmKubernetesClusterUpdate(d *schema.ResourceData, meta interface{} log.Printf("[INFO] preparing arguments for Managed Kubernetes Cluster update.") - id, err := parse.KubernetesClusterID(d.Id()) + id, err := 
parse.ClusterID(d.Id()) if err != nil { return err } @@ -827,20 +828,20 @@ func resourceArmKubernetesClusterUpdate(d *schema.ResourceData, meta interface{} d.Partial(true) // we need to conditionally update the cluster - existing, err := clusterClient.Get(ctx, id.ResourceGroup, id.Name) + existing, err := clusterClient.Get(ctx, id.ResourceGroup, id.ManagedClusterName) if err != nil { - return fmt.Errorf("retrieving existing Kubernetes Cluster %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("retrieving existing Kubernetes Cluster %q (Resource Group %q): %+v", id.ManagedClusterName, id.ResourceGroup, err) } if existing.ManagedClusterProperties == nil { - return fmt.Errorf("retrieving existing Kubernetes Cluster %q (Resource Group %q): `properties` was nil", id.Name, id.ResourceGroup) + return fmt.Errorf("retrieving existing Kubernetes Cluster %q (Resource Group %q): `properties` was nil", id.ManagedClusterName, id.ResourceGroup) } - if err := validateKubernetesCluster(d, &existing, id.ResourceGroup, id.Name); err != nil { + if err := validateKubernetesCluster(d, &existing, id.ResourceGroup, id.ManagedClusterName); err != nil { return err } if d.HasChange("service_principal") { - log.Printf("[DEBUG] Updating the Service Principal for Kubernetes Cluster %q (Resource Group %q)..", id.Name, id.ResourceGroup) + log.Printf("[DEBUG] Updating the Service Principal for Kubernetes Cluster %q (Resource Group %q)..", id.ManagedClusterName, id.ResourceGroup) servicePrincipals := d.Get("service_principal").([]interface{}) // we'll be rotating the Service Principal - removing the SP block is handled by the validate function servicePrincipalRaw := servicePrincipals[0].(map[string]interface{}) @@ -852,23 +853,23 @@ func resourceArmKubernetesClusterUpdate(d *schema.ResourceData, meta interface{} Secret: utils.String(clientSecret), } - future, err := clusterClient.ResetServicePrincipalProfile(ctx, id.ResourceGroup, id.Name, params) + future, err := clusterClient.ResetServicePrincipalProfile(ctx, id.ResourceGroup, id.ManagedClusterName, params) if err != nil { - return fmt.Errorf("updating Service Principal for Kubernetes Cluster %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("updating Service Principal for Kubernetes Cluster %q (Resource Group %q): %+v", id.ManagedClusterName, id.ResourceGroup, err) } if err = future.WaitForCompletionRef(ctx, clusterClient.Client); err != nil { - return fmt.Errorf("waiting for update of Service Principal for Kubernetes Cluster %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("waiting for update of Service Principal for Kubernetes Cluster %q (Resource Group %q): %+v", id.ManagedClusterName, id.ResourceGroup, err) } - log.Printf("[DEBUG] Updated the Service Principal for Kubernetes Cluster %q (Resource Group %q).", id.Name, id.ResourceGroup) + log.Printf("[DEBUG] Updated the Service Principal for Kubernetes Cluster %q (Resource Group %q).", id.ManagedClusterName, id.ResourceGroup) // since we're patching it, re-retrieve the latest version of the cluster - existing, err = clusterClient.Get(ctx, id.ResourceGroup, id.Name) + existing, err = clusterClient.Get(ctx, id.ResourceGroup, id.ManagedClusterName) if err != nil { - return fmt.Errorf("retrieving existing Kubernetes Cluster %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("retrieving existing Kubernetes Cluster %q (Resource Group %q): %+v", id.ManagedClusterName, id.ResourceGroup, err) } if 
existing.ManagedClusterProperties == nil { - return fmt.Errorf("retrieving existing Kubernetes Cluster %q (Resource Group %q): `properties` was nil", id.Name, id.ResourceGroup) + return fmt.Errorf("retrieving existing Kubernetes Cluster %q (Resource Group %q): `properties` was nil", id.ManagedClusterName, id.ResourceGroup) } } @@ -896,13 +897,13 @@ func resourceArmKubernetesClusterUpdate(d *schema.ResourceData, meta interface{} // Reset AAD profile is only possible if not managed if props.AadProfile.Managed == nil || !*props.AadProfile.Managed { log.Printf("[DEBUG] Updating the RBAC AAD profile") - future, err := clusterClient.ResetAADProfile(ctx, id.ResourceGroup, id.Name, *props.AadProfile) + future, err := clusterClient.ResetAADProfile(ctx, id.ResourceGroup, id.ManagedClusterName, *props.AadProfile) if err != nil { - return fmt.Errorf("updating Managed Kubernetes Cluster AAD Profile in cluster %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("updating Managed Kubernetes Cluster AAD Profile in cluster %q (Resource Group %q): %+v", id.ManagedClusterName, id.ResourceGroup, err) } if err = future.WaitForCompletionRef(ctx, clusterClient.Client); err != nil { - return fmt.Errorf("waiting for update of RBAC AAD profile of Managed Cluster %q (Resource Group %q):, %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("waiting for update of RBAC AAD profile of Managed Cluster %q (Resource Group %q):, %+v", id.ManagedClusterName, id.ResourceGroup, err) } } } else { @@ -951,10 +952,8 @@ func resourceArmKubernetesClusterUpdate(d *schema.ResourceData, meta interface{} existing.ManagedClusterProperties.AutoScalerProfile = autoScalerProfile } - if d.HasChange("enable_pod_security_policy") { - updateCluster = true - enablePodSecurityPolicy := d.Get("enable_pod_security_policy").(bool) - existing.ManagedClusterProperties.EnablePodSecurityPolicy = utils.Bool(enablePodSecurityPolicy) + if d.HasChange("enable_pod_security_policy") && d.Get("enable_pod_security_policy").(bool) { + return fmt.Errorf("The AKS API has removed support for this field on 2020-10-15 and is no longer possible to configure this the Pod Security Policy - as such you'll need to set `enable_pod_security_policy` to `false`") } if d.HasChange("linux_profile") { @@ -1051,39 +1050,39 @@ func resourceArmKubernetesClusterUpdate(d *schema.ResourceData, meta interface{} } if updateCluster { - log.Printf("[DEBUG] Updating the Kubernetes Cluster %q (Resource Group %q)..", id.Name, id.ResourceGroup) - future, err := clusterClient.CreateOrUpdate(ctx, id.ResourceGroup, id.Name, existing) + log.Printf("[DEBUG] Updating the Kubernetes Cluster %q (Resource Group %q)..", id.ManagedClusterName, id.ResourceGroup) + future, err := clusterClient.CreateOrUpdate(ctx, id.ResourceGroup, id.ManagedClusterName, existing) if err != nil { - return fmt.Errorf("updating Managed Kubernetes Cluster %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("updating Managed Kubernetes Cluster %q (Resource Group %q): %+v", id.ManagedClusterName, id.ResourceGroup, err) } if err = future.WaitForCompletionRef(ctx, clusterClient.Client); err != nil { - return fmt.Errorf("waiting for update of Managed Kubernetes Cluster %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("waiting for update of Managed Kubernetes Cluster %q (Resource Group %q): %+v", id.ManagedClusterName, id.ResourceGroup, err) } - log.Printf("[DEBUG] Updated the Kubernetes Cluster %q (Resource Group %q)..", id.Name, 
id.ResourceGroup) + log.Printf("[DEBUG] Updated the Kubernetes Cluster %q (Resource Group %q)..", id.ManagedClusterName, id.ResourceGroup) } // then roll the version of Kubernetes if necessary if d.HasChange("kubernetes_version") { - existing, err = clusterClient.Get(ctx, id.ResourceGroup, id.Name) + existing, err = clusterClient.Get(ctx, id.ResourceGroup, id.ManagedClusterName) if err != nil { - return fmt.Errorf("retrieving existing Kubernetes Cluster %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("retrieving existing Kubernetes Cluster %q (Resource Group %q): %+v", id.ManagedClusterName, id.ResourceGroup, err) } if existing.ManagedClusterProperties == nil { - return fmt.Errorf("retrieving existing Kubernetes Cluster %q (Resource Group %q): `properties` was nil", id.Name, id.ResourceGroup) + return fmt.Errorf("retrieving existing Kubernetes Cluster %q (Resource Group %q): `properties` was nil", id.ManagedClusterName, id.ResourceGroup) } kubernetesVersion := d.Get("kubernetes_version").(string) log.Printf("[DEBUG] Upgrading the version of Kubernetes to %q..", kubernetesVersion) existing.ManagedClusterProperties.KubernetesVersion = utils.String(kubernetesVersion) - future, err := clusterClient.CreateOrUpdate(ctx, id.ResourceGroup, id.Name, existing) + future, err := clusterClient.CreateOrUpdate(ctx, id.ResourceGroup, id.ManagedClusterName, existing) if err != nil { - return fmt.Errorf("updating Managed Kubernetes Cluster %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("updating Managed Kubernetes Cluster %q (Resource Group %q): %+v", id.ManagedClusterName, id.ResourceGroup, err) } if err = future.WaitForCompletionRef(ctx, clusterClient.Client); err != nil { - return fmt.Errorf("waiting for update of Managed Kubernetes Cluster %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("waiting for update of Managed Kubernetes Cluster %q (Resource Group %q): %+v", id.ManagedClusterName, id.ResourceGroup, err) } log.Printf("[DEBUG] Upgraded the version of Kubernetes to %q..", kubernetesVersion) @@ -1103,18 +1102,18 @@ func resourceArmKubernetesClusterUpdate(d *schema.ResourceData, meta interface{} // if a users specified a version - confirm that version is supported on the cluster if nodePoolVersion := agentProfile.ManagedClusterAgentPoolProfileProperties.OrchestratorVersion; nodePoolVersion != nil { - if err := validateNodePoolSupportsVersion(ctx, containersClient, id.ResourceGroup, id.Name, nodePoolName, *nodePoolVersion); err != nil { + if err := validateNodePoolSupportsVersion(ctx, containersClient, id.ResourceGroup, id.ManagedClusterName, nodePoolName, *nodePoolVersion); err != nil { return err } } - agentPool, err := nodePoolsClient.CreateOrUpdate(ctx, id.ResourceGroup, id.Name, nodePoolName, agentProfile) + agentPool, err := nodePoolsClient.CreateOrUpdate(ctx, id.ResourceGroup, id.ManagedClusterName, nodePoolName, agentProfile) if err != nil { - return fmt.Errorf("updating Default Node Pool %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("updating Default Node Pool %q (Resource Group %q): %+v", id.ManagedClusterName, id.ResourceGroup, err) } if err := agentPool.WaitForCompletionRef(ctx, nodePoolsClient.Client); err != nil { - return fmt.Errorf("waiting for update of Default Node Pool %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("waiting for update of Default Node Pool %q (Resource Group %q): %+v", id.ManagedClusterName, 
id.ResourceGroup, err) } log.Printf("[DEBUG] Updated Default Node Pool.") } @@ -1129,25 +1128,25 @@ func resourceArmKubernetesClusterRead(d *schema.ResourceData, meta interface{}) ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.KubernetesClusterID(d.Id()) + id, err := parse.ClusterID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.Name) + resp, err := client.Get(ctx, id.ResourceGroup, id.ManagedClusterName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[DEBUG] Managed Kubernetes Cluster %q was not found in Resource Group %q - removing from state!", id.Name, id.ResourceGroup) + log.Printf("[DEBUG] Managed Kubernetes Cluster %q was not found in Resource Group %q - removing from state!", id.ManagedClusterName, id.ResourceGroup) d.SetId("") return nil } - return fmt.Errorf("retrieving Managed Kubernetes Cluster %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("retrieving Managed Kubernetes Cluster %q (Resource Group %q): %+v", id.ManagedClusterName, id.ResourceGroup, err) } - profile, err := client.GetAccessProfile(ctx, id.ResourceGroup, id.Name, "clusterUser") + profile, err := client.GetAccessProfile(ctx, id.ResourceGroup, id.ManagedClusterName, "clusterUser") if err != nil { - return fmt.Errorf("retrieving Access Profile for Managed Kubernetes Cluster %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("retrieving Access Profile for Managed Kubernetes Cluster %q (Resource Group %q): %+v", id.ManagedClusterName, id.ResourceGroup, err) } d.Set("name", resp.Name) @@ -1232,9 +1231,9 @@ func resourceArmKubernetesClusterRead(d *schema.ResourceData, meta interface{}) // adminProfile is only available for RBAC enabled clusters with AAD if props.AadProfile != nil { - adminProfile, err := client.GetAccessProfile(ctx, id.ResourceGroup, id.Name, "clusterAdmin") + adminProfile, err := client.GetAccessProfile(ctx, id.ResourceGroup, id.ManagedClusterName, "clusterAdmin") if err != nil { - return fmt.Errorf("retrieving Admin Access Profile for Managed Kubernetes Cluster %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("retrieving Admin Access Profile for Managed Kubernetes Cluster %q (Resource Group %q): %+v", id.ManagedClusterName, id.ResourceGroup, err) } adminKubeConfigRaw, adminKubeConfig := flattenKubernetesClusterAccessProfile(adminProfile) @@ -1266,18 +1265,18 @@ func resourceArmKubernetesClusterDelete(d *schema.ResourceData, meta interface{} ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.KubernetesClusterID(d.Id()) + id, err := parse.ClusterID(d.Id()) if err != nil { return err } - future, err := client.Delete(ctx, id.ResourceGroup, id.Name) + future, err := client.Delete(ctx, id.ResourceGroup, id.ManagedClusterName) if err != nil { - return fmt.Errorf("deleting Managed Kubernetes Cluster %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("deleting Managed Kubernetes Cluster %q (Resource Group %q): %+v", id.ManagedClusterName, id.ResourceGroup, err) } if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("waiting for the deletion of Managed Kubernetes Cluster %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("waiting for the deletion of Managed Kubernetes Cluster %q (Resource Group %q): %+v", id.ManagedClusterName, 
id.ResourceGroup, err) } return nil @@ -1706,7 +1705,7 @@ func expandKubernetesClusterRoleBasedAccessControl(input []interface{}, provider func expandKubernetesClusterManagedClusterIdentity(input []interface{}) *containerservice.ManagedClusterIdentity { if len(input) == 0 || input[0] == nil { return &containerservice.ManagedClusterIdentity{ - Type: containerservice.None, + Type: containerservice.ResourceIdentityTypeNone, } } @@ -1859,7 +1858,7 @@ func flattenKubernetesClusterKubeConfigAAD(config kubernetes.KubeConfigAAD) []in func flattenKubernetesClusterManagedClusterIdentity(input *containerservice.ManagedClusterIdentity) []interface{} { // if it's none, omit the block - if input == nil || input.Type == containerservice.None { + if input == nil || input.Type == containerservice.ResourceIdentityTypeNone { return []interface{}{} } diff --git a/azurerm/internal/services/containers/kubernetes_cluster_validate.go b/azurerm/internal/services/containers/kubernetes_cluster_validate.go index 4bff33b6380f..0e495fc0da73 100644 --- a/azurerm/internal/services/containers/kubernetes_cluster_validate.go +++ b/azurerm/internal/services/containers/kubernetes_cluster_validate.go @@ -6,7 +6,7 @@ import ( "net/http" "strings" - "github.com/Azure/azure-sdk-for-go/services/containerservice/mgmt/2020-04-01/containerservice" + "github.com/Azure/azure-sdk-for-go/services/containerservice/mgmt/2020-09-01/containerservice" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/containers/client" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" @@ -105,7 +105,7 @@ func validateKubernetesCluster(d *schema.ResourceData, cluster *containerservice hasIdentity := false if identity := cluster.Identity; identity != nil { - hasIdentity = identity.Type != containerservice.None + hasIdentity = identity.Type != containerservice.ResourceIdentityTypeNone } if hasIdentity { diff --git a/azurerm/internal/services/containers/kubernetes_nodepool.go b/azurerm/internal/services/containers/kubernetes_nodepool.go index 21ab500fab69..beccd539ecb5 100644 --- a/azurerm/internal/services/containers/kubernetes_nodepool.go +++ b/azurerm/internal/services/containers/kubernetes_nodepool.go @@ -3,7 +3,9 @@ package containers import ( "fmt" - "github.com/Azure/azure-sdk-for-go/services/containerservice/mgmt/2020-04-01/containerservice" + computeValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/validate" + + "github.com/Azure/azure-sdk-for-go/services/containerservice/mgmt/2020-09-01/containerservice" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" @@ -49,6 +51,7 @@ func SchemaDefaultNodePool() *schema.Schema { "availability_zones": { Type: schema.TypeList, Optional: true, + ForceNew: true, Elem: &schema.Schema{ Type: schema.TypeString, }, @@ -69,7 +72,7 @@ func SchemaDefaultNodePool() *schema.Schema { Type: schema.TypeInt, Optional: true, // NOTE: rather than setting `0` users should instead pass `null` here - ValidateFunc: validation.IntBetween(1, 100), + ValidateFunc: validation.IntBetween(1, 1000), }, "max_pods": { @@ -83,14 +86,14 @@ func SchemaDefaultNodePool() *schema.Schema { Type: schema.TypeInt, Optional: true, // NOTE: rather than setting `0` users should instead pass `null` here - ValidateFunc: validation.IntBetween(1, 
100), + ValidateFunc: validation.IntBetween(1, 1000), }, "node_count": { Type: schema.TypeInt, Optional: true, Computed: true, - ValidateFunc: validation.IntBetween(1, 100), + ValidateFunc: validation.IntBetween(1, 1000), }, "node_labels": { @@ -121,6 +124,17 @@ func SchemaDefaultNodePool() *schema.Schema { ValidateFunc: validation.IntAtLeast(1), }, + "os_disk_type": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Default: containerservice.Managed, + ValidateFunc: validation.StringInSlice([]string{ + string(containerservice.Ephemeral), + string(containerservice.Managed), + }, false), + }, + "vnet_subnet_id": { Type: schema.TypeString, Optional: true, @@ -133,6 +147,12 @@ func SchemaDefaultNodePool() *schema.Schema { Computed: true, ValidateFunc: validation.StringIsNotEmpty, }, + "proximity_placement_group_id": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: computeValidate.ProximityPlacementGroupID, + }, }, }, } @@ -143,26 +163,28 @@ func ConvertDefaultNodePoolToAgentPool(input *[]containerservice.ManagedClusterA return containerservice.AgentPool{ Name: defaultCluster.Name, ManagedClusterAgentPoolProfileProperties: &containerservice.ManagedClusterAgentPoolProfileProperties{ - Count: defaultCluster.Count, - VMSize: defaultCluster.VMSize, - OsDiskSizeGB: defaultCluster.OsDiskSizeGB, - VnetSubnetID: defaultCluster.VnetSubnetID, - MaxPods: defaultCluster.MaxPods, - OsType: defaultCluster.OsType, - MaxCount: defaultCluster.MaxCount, - MinCount: defaultCluster.MinCount, - EnableAutoScaling: defaultCluster.EnableAutoScaling, - Type: defaultCluster.Type, - OrchestratorVersion: defaultCluster.OrchestratorVersion, - AvailabilityZones: defaultCluster.AvailabilityZones, - EnableNodePublicIP: defaultCluster.EnableNodePublicIP, - ScaleSetPriority: defaultCluster.ScaleSetPriority, - ScaleSetEvictionPolicy: defaultCluster.ScaleSetEvictionPolicy, - SpotMaxPrice: defaultCluster.SpotMaxPrice, - Mode: defaultCluster.Mode, - NodeLabels: defaultCluster.NodeLabels, - NodeTaints: defaultCluster.NodeTaints, - Tags: defaultCluster.Tags, + Count: defaultCluster.Count, + VMSize: defaultCluster.VMSize, + OsDiskSizeGB: defaultCluster.OsDiskSizeGB, + OsDiskType: defaultCluster.OsDiskType, + VnetSubnetID: defaultCluster.VnetSubnetID, + MaxPods: defaultCluster.MaxPods, + OsType: defaultCluster.OsType, + MaxCount: defaultCluster.MaxCount, + MinCount: defaultCluster.MinCount, + EnableAutoScaling: defaultCluster.EnableAutoScaling, + Type: defaultCluster.Type, + OrchestratorVersion: defaultCluster.OrchestratorVersion, + ProximityPlacementGroupID: defaultCluster.ProximityPlacementGroupID, + AvailabilityZones: defaultCluster.AvailabilityZones, + EnableNodePublicIP: defaultCluster.EnableNodePublicIP, + ScaleSetPriority: defaultCluster.ScaleSetPriority, + ScaleSetEvictionPolicy: defaultCluster.ScaleSetEvictionPolicy, + SpotMaxPrice: defaultCluster.SpotMaxPrice, + Mode: defaultCluster.Mode, + NodeLabels: defaultCluster.NodeLabels, + NodeTaints: defaultCluster.NodeTaints, + Tags: defaultCluster.Tags, }, } } @@ -176,6 +198,11 @@ func ExpandDefaultNodePool(d *schema.ResourceData) (*[]containerservice.ManagedC nodeLabels := utils.ExpandMapStringPtrString(nodeLabelsRaw) nodeTaintsRaw := raw["node_taints"].([]interface{}) nodeTaints := utils.ExpandStringSlice(nodeTaintsRaw) + + if len(*nodeTaints) != 0 { + return nil, fmt.Errorf("The AKS API has removed support for tainting all nodes in the default node pool and it is no longer possible to configure this. 
To taint a node pool, create a separate one") + } + t := raw["tags"].(map[string]interface{}) profile := containerservice.ManagedClusterAgentPoolProfile{ @@ -183,7 +210,6 @@ func ExpandDefaultNodePool(d *schema.ResourceData) (*[]containerservice.ManagedC EnableNodePublicIP: utils.Bool(raw["enable_node_public_ip"].(bool)), Name: utils.String(raw["name"].(string)), NodeLabels: nodeLabels, - NodeTaints: nodeTaints, Tags: tags.Expand(t), Type: containerservice.AgentPoolType(raw["type"].(string)), VMSize: containerservice.VMSizeTypes(raw["vm_size"].(string)), @@ -219,6 +245,11 @@ func ExpandDefaultNodePool(d *schema.ResourceData) (*[]containerservice.ManagedC profile.OsDiskSizeGB = utils.Int32(osDiskSizeGB) } + profile.OsDiskType = containerservice.Managed + if osDiskType := raw["os_disk_type"].(string); osDiskType != "" { + profile.OsDiskType = containerservice.OSDiskType(raw["os_disk_type"].(string)) + } + if vnetSubnetID := raw["vnet_subnet_id"].(string); vnetSubnetID != "" { profile.VnetSubnetID = utils.String(vnetSubnetID) } @@ -227,11 +258,15 @@ func ExpandDefaultNodePool(d *schema.ResourceData) (*[]containerservice.ManagedC profile.OrchestratorVersion = utils.String(orchestratorVersion) } + if proximityPlacementGroupId := raw["proximity_placement_group_id"].(string); proximityPlacementGroupId != "" { + profile.ProximityPlacementGroupID = utils.String(proximityPlacementGroupId) + } + count := raw["node_count"].(int) maxCount := raw["max_count"].(int) minCount := raw["min_count"].(int) - // Count must always be set (see #6094), RP behavior has changed + // Count must always be set (see #6094), RP behaviour has changed // since the API version upgrade in v2.1.0 making Count required // for all create/update requests profile.Count = utils.Int32(int32(count)) @@ -337,16 +372,16 @@ func FlattenDefaultNodePool(input *[]containerservice.ManagedClusterAgentPoolPro } } - var nodeTaints []string - if agentPool.NodeTaints != nil { - nodeTaints = *agentPool.NodeTaints - } - osDiskSizeGB := 0 if agentPool.OsDiskSizeGB != nil { osDiskSizeGB = int(*agentPool.OsDiskSizeGB) } + osDiskType := containerservice.Managed + if agentPool.OsDiskType != "" { + osDiskType = agentPool.OsDiskType + } + vnetSubnetId := "" if agentPool.VnetSubnetID != nil { vnetSubnetId = *agentPool.VnetSubnetID @@ -357,24 +392,31 @@ func FlattenDefaultNodePool(input *[]containerservice.ManagedClusterAgentPoolPro orchestratorVersion = *agentPool.OrchestratorVersion } + proximityPlacementGroupId := "" + if agentPool.ProximityPlacementGroupID != nil { + proximityPlacementGroupId = *agentPool.ProximityPlacementGroupID + } + return &[]interface{}{ map[string]interface{}{ - "availability_zones": availabilityZones, - "enable_auto_scaling": enableAutoScaling, - "enable_node_public_ip": enableNodePublicIP, - "max_count": maxCount, - "max_pods": maxPods, - "min_count": minCount, - "name": name, - "node_count": count, - "node_labels": nodeLabels, - "node_taints": nodeTaints, - "os_disk_size_gb": osDiskSizeGB, - "tags": tags.Flatten(agentPool.Tags), - "type": string(agentPool.Type), - "vm_size": string(agentPool.VMSize), - "orchestrator_version": orchestratorVersion, - "vnet_subnet_id": vnetSubnetId, + "availability_zones": availabilityZones, + "enable_auto_scaling": enableAutoScaling, + "enable_node_public_ip": enableNodePublicIP, + "max_count": maxCount, + "max_pods": maxPods, + "min_count": minCount, + "name": name, + "node_count": count, + "node_labels": nodeLabels, + "node_taints": []string{}, + "os_disk_size_gb": osDiskSizeGB, + 
"os_disk_type": string(osDiskType), + "tags": tags.Flatten(agentPool.Tags), + "type": string(agentPool.Type), + "vm_size": string(agentPool.VMSize), + "orchestrator_version": orchestratorVersion, + "proximity_placement_group_id": proximityPlacementGroupId, + "vnet_subnet_id": vnetSubnetId, }, }, nil } diff --git a/azurerm/internal/services/containers/parse/cluster.go b/azurerm/internal/services/containers/parse/cluster.go new file mode 100644 index 000000000000..71f3eb1a176f --- /dev/null +++ b/azurerm/internal/services/containers/parse/cluster.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ClusterId struct { + SubscriptionId string + ResourceGroup string + ManagedClusterName string +} + +func NewClusterID(subscriptionId, resourceGroup, managedClusterName string) ClusterId { + return ClusterId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ManagedClusterName: managedClusterName, + } +} + +func (id ClusterId) String() string { + segments := []string{ + fmt.Sprintf("Managed Cluster Name %q", id.ManagedClusterName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Cluster", segmentsStr) +} + +func (id ClusterId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.ContainerService/managedClusters/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ManagedClusterName) +} + +// ClusterID parses a Cluster ID into an ClusterId struct +func ClusterID(input string) (*ClusterId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ClusterId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ManagedClusterName, err = id.PopSegment("managedClusters"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/containers/parse/cluster_test.go b/azurerm/internal/services/containers/parse/cluster_test.go new file mode 100644 index 000000000000..15e19e4c1bd0 --- /dev/null +++ b/azurerm/internal/services/containers/parse/cluster_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ClusterId{} + +func TestClusterIDFormatter(t *testing.T) { + actual := NewClusterID("12345678-1234-9876-4563-123456789012", "resGroup1", "cluster1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ContainerService/managedClusters/cluster1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestClusterID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ClusterId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing 
SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ManagedClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ContainerService/", + Error: true, + }, + + { + // missing value for ManagedClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ContainerService/managedClusters/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ContainerService/managedClusters/cluster1", + Expected: &ClusterId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + ManagedClusterName: "cluster1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.CONTAINERSERVICE/MANAGEDCLUSTERS/CLUSTER1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ClusterID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ManagedClusterName != v.Expected.ManagedClusterName { + t.Fatalf("Expected %q but got %q for ManagedClusterName", v.Expected.ManagedClusterName, actual.ManagedClusterName) + } + } +} diff --git a/azurerm/internal/services/containers/parse/kubernetes_cluster_id.go b/azurerm/internal/services/containers/parse/kubernetes_cluster_id.go deleted file mode 100644 index 69ee8c5d39ef..000000000000 --- a/azurerm/internal/services/containers/parse/kubernetes_cluster_id.go +++ /dev/null @@ -1,29 +0,0 @@ -package parse - -import "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - -type KubernetesClusterId struct { - Name string - ResourceGroup string -} - -func KubernetesClusterID(input string) (*KubernetesClusterId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, err - } - - cluster := KubernetesClusterId{ - ResourceGroup: id.ResourceGroup, - } - - if cluster.Name, err = id.PopSegment("managedClusters"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &cluster, nil -} diff --git a/azurerm/internal/services/containers/parse/kubernetes_cluster_id_test.go b/azurerm/internal/services/containers/parse/kubernetes_cluster_id_test.go deleted file mode 100644 index 2d2c3ac4496e..000000000000 --- a/azurerm/internal/services/containers/parse/kubernetes_cluster_id_test.go +++ /dev/null @@ -1,74 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestKubernetesClusterID(t *testing.T) { - testData := []struct { - input string - expected 
*KubernetesClusterId - }{ - { - input: "", - expected: nil, - }, - { - input: "/subscriptions/00000000-0000-0000-0000-000000000000", - expected: nil, - }, - { - input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups", - expected: nil, - }, - { - input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/hello", - expected: nil, - }, - { - // wrong case - input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/hello/managedclusters/cluster1", - expected: nil, - }, - { - input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/hello/managedClusters/cluster1", - expected: &KubernetesClusterId{ - Name: "cluster1", - ResourceGroup: "hello", - }, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q..", v.input) - actual, err := KubernetesClusterID(v.input) - - // if we get something there shouldn't be an error - if v.expected != nil && err == nil { - continue - } - - // if nothing's expected we should get an error - if v.expected == nil && err != nil { - continue - } - - if v.expected == nil && actual == nil { - continue - } - - if v.expected == nil && actual != nil { - t.Fatalf("Expected nothing but got %+v", actual) - } - if v.expected != nil && actual == nil { - t.Fatalf("Expected %+v but got nil", actual) - } - - if v.expected.ResourceGroup != actual.ResourceGroup { - t.Fatalf("Expected ResourceGroup to be %q but got %q", v.expected.ResourceGroup, actual.ResourceGroup) - } - if v.expected.Name != actual.Name { - t.Fatalf("Expected Name to be %q but got %q", v.expected.Name, actual.Name) - } - } -} diff --git a/azurerm/internal/services/containers/parse/kubernetes_node_pool_id.go b/azurerm/internal/services/containers/parse/kubernetes_node_pool_id.go deleted file mode 100644 index e403e1b16b1b..000000000000 --- a/azurerm/internal/services/containers/parse/kubernetes_node_pool_id.go +++ /dev/null @@ -1,36 +0,0 @@ -package parse - -import ( - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type KubernetesNodePoolId struct { - Name string - ClusterName string - ResourceGroup string -} - -func KubernetesNodePoolID(input string) (*KubernetesNodePoolId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, err - } - - pool := KubernetesNodePoolId{ - ResourceGroup: id.ResourceGroup, - } - - if pool.ClusterName, err = id.PopSegment("managedClusters"); err != nil { - return nil, err - } - - if pool.Name, err = id.PopSegment("agentPools"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &pool, nil -} diff --git a/azurerm/internal/services/containers/parse/kubernetes_node_pool_id_test.go b/azurerm/internal/services/containers/parse/kubernetes_node_pool_id_test.go deleted file mode 100644 index 5c0bd0046785..000000000000 --- a/azurerm/internal/services/containers/parse/kubernetes_node_pool_id_test.go +++ /dev/null @@ -1,87 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestKubernetesNodePoolID(t *testing.T) { - testData := []struct { - input string - expected *KubernetesNodePoolId - }{ - { - input: "", - expected: nil, - }, - { - input: "/subscriptions/00000000-0000-0000-0000-000000000000", - expected: nil, - }, - { - input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups", - expected: nil, - }, - { - input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/hello", - expected: nil, - }, - { - input: 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/hello/managedClusters/", - expected: nil, - }, - { - input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/hello/managedClusters/cluster1", - expected: nil, - }, - { - input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/hello/managedClusters/cluster1/agentPools/", - expected: nil, - }, - { - input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/hello/managedClusters/cluster1/agentPools/pool1", - expected: &KubernetesNodePoolId{ - Name: "pool1", - ClusterName: "cluster1", - ResourceGroup: "hello", - }, - }, - { - // wrong case - input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/hello/managedClusters/cluster1/agentpools/pool1", - expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q..", v.input) - actual, err := KubernetesNodePoolID(v.input) - - // if we get something there shouldn't be an error - if v.expected != nil && err == nil { - continue - } - - // if nothing's expected we should get an error - if v.expected == nil && err != nil { - continue - } - - if v.expected == nil && actual == nil { - continue - } - - if v.expected == nil && actual != nil { - t.Fatalf("Expected nothing but got %+v", actual) - } - if v.expected != nil && actual == nil { - t.Fatalf("Expected %+v but got nil", actual) - } - - if v.expected.ResourceGroup != actual.ResourceGroup { - t.Fatalf("Expected ResourceGroup to be %q but got %q", v.expected.ResourceGroup, actual.ResourceGroup) - } - if v.expected.Name != actual.Name { - t.Fatalf("Expected Name to be %q but got %q", v.expected.Name, actual.Name) - } - } -} diff --git a/azurerm/internal/services/containers/parse/node_pool.go b/azurerm/internal/services/containers/parse/node_pool.go new file mode 100644 index 000000000000..04f0930ed519 --- /dev/null +++ b/azurerm/internal/services/containers/parse/node_pool.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type NodePoolId struct { + SubscriptionId string + ResourceGroup string + ManagedClusterName string + AgentPoolName string +} + +func NewNodePoolID(subscriptionId, resourceGroup, managedClusterName, agentPoolName string) NodePoolId { + return NodePoolId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ManagedClusterName: managedClusterName, + AgentPoolName: agentPoolName, + } +} + +func (id NodePoolId) String() string { + segments := []string{ + fmt.Sprintf("Agent Pool Name %q", id.AgentPoolName), + fmt.Sprintf("Managed Cluster Name %q", id.ManagedClusterName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Node Pool", segmentsStr) +} + +func (id NodePoolId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.ContainerService/managedClusters/%s/agentPools/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ManagedClusterName, id.AgentPoolName) +} + +// NodePoolID parses a NodePool ID into an NodePoolId struct +func NodePoolID(input string) (*NodePoolId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := NodePoolId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if 
resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ManagedClusterName, err = id.PopSegment("managedClusters"); err != nil { + return nil, err + } + if resourceId.AgentPoolName, err = id.PopSegment("agentPools"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/containers/parse/node_pool_test.go b/azurerm/internal/services/containers/parse/node_pool_test.go new file mode 100644 index 000000000000..2875c6d11917 --- /dev/null +++ b/azurerm/internal/services/containers/parse/node_pool_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = NodePoolId{} + +func TestNodePoolIDFormatter(t *testing.T) { + actual := NewNodePoolID("12345678-1234-9876-4563-123456789012", "resGroup1", "cluster1", "pool1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ContainerService/managedClusters/cluster1/agentPools/pool1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestNodePoolID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *NodePoolId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ManagedClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ContainerService/", + Error: true, + }, + + { + // missing value for ManagedClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ContainerService/managedClusters/", + Error: true, + }, + + { + // missing AgentPoolName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ContainerService/managedClusters/cluster1/", + Error: true, + }, + + { + // missing value for AgentPoolName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ContainerService/managedClusters/cluster1/agentPools/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ContainerService/managedClusters/cluster1/agentPools/pool1", + Expected: &NodePoolId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + ManagedClusterName: "cluster1", + AgentPoolName: "pool1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.CONTAINERSERVICE/MANAGEDCLUSTERS/CLUSTER1/AGENTPOOLS/POOL1", + Error: true, + }, + } + + for _, v := range 
testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := NodePoolID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ManagedClusterName != v.Expected.ManagedClusterName { + t.Fatalf("Expected %q but got %q for ManagedClusterName", v.Expected.ManagedClusterName, actual.ManagedClusterName) + } + if actual.AgentPoolName != v.Expected.AgentPoolName { + t.Fatalf("Expected %q but got %q for AgentPoolName", v.Expected.AgentPoolName, actual.AgentPoolName) + } + } +} diff --git a/azurerm/internal/services/containers/resourceids.go b/azurerm/internal/services/containers/resourceids.go new file mode 100644 index 000000000000..a8eafdc5a59f --- /dev/null +++ b/azurerm/internal/services/containers/resourceids.go @@ -0,0 +1,4 @@ +package containers + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Cluster -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ContainerService/managedClusters/cluster1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=NodePool -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ContainerService/managedClusters/cluster1/agentPools/pool1 diff --git a/azurerm/internal/services/containers/tests/container_group_resource_test.go b/azurerm/internal/services/containers/tests/container_group_resource_test.go index 60dc167bdab9..306670bb5d22 100644 --- a/azurerm/internal/services/containers/tests/container_group_resource_test.go +++ b/azurerm/internal/services/containers/tests/container_group_resource_test.go @@ -465,6 +465,44 @@ func TestAccAzureRMContainerGroup_gitRepoVolume(t *testing.T) { }) } +func TestAccAzureRMContainerGroup_emptyDirVolume(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_container_group", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMContainerGroupDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMContainerGroup_emptyDirVolume(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMContainerGroupExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMContainerGroup_secretVolume(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_container_group", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMContainerGroupDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMContainerGroup_secretVolume(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMContainerGroupExists(data.ResourceName), + ), + }, + data.ImportStep("container.0.volume.0.secret"), + }, + }) +} + func testAccAzureRMContainerGroup_SystemAssignedIdentity(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { @@ -1309,6 +1347,124 @@ resource "azurerm_container_group" "test" { `, 
data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) } +func testAccAzureRMContainerGroup_emptyDirVolume(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_container_group" "test" { + name = "acctestcontainergroup-%d" + location = "${azurerm_resource_group.test.location}" + resource_group_name = "${azurerm_resource_group.test.name}" + ip_address_type = "public" + dns_name_label = "acctestcontainergroup-%d" + os_type = "Linux" + restart_policy = "OnFailure" + + container { + name = "hf" + image = "seanmckenna/aci-hellofiles" + cpu = "1" + memory = "1.5" + + ports { + port = 80 + protocol = "TCP" + } + + volume { + name = "logs" + mount_path = "/aci/logs" + read_only = false + empty_dir = true + } + + environment_variables = { + foo = "bar" + foo1 = "bar1" + } + + readiness_probe { + exec = ["cat", "/tmp/healthy"] + initial_delay_seconds = 1 + period_seconds = 1 + failure_threshold = 1 + success_threshold = 1 + timeout_seconds = 1 + } + + liveness_probe { + http_get { + path = "/" + port = 443 + scheme = "Http" + } + + initial_delay_seconds = 1 + period_seconds = 1 + failure_threshold = 1 + success_threshold = 1 + timeout_seconds = 1 + } + + commands = ["/bin/bash", "-c", "ls"] + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func testAccAzureRMContainerGroup_secretVolume(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_container_group" "test" { + name = "acctestcontainergroup-%d" + location = "${azurerm_resource_group.test.location}" + resource_group_name = "${azurerm_resource_group.test.name}" + ip_address_type = "public" + os_type = "Linux" + + container { + name = "hw" + image = "microsoft/aci-helloworld:latest" + cpu = "0.5" + memory = "0.5" + ports { + port = 80 + protocol = "TCP" + } + + volume { + name = "config" + mount_path = "/var/config" + + secret = { + mysecret1 = "TXkgZmlyc3Qgc2VjcmV0IEZPTwo=" + mysecret2 = "TXkgc2Vjb25kIHNlY3JldCBCQVIK" + } + } + } + + tags = { + environment = "Testing" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + func testCheckAzureRMContainerGroupExists(resourceName string) resource.TestCheckFunc { return func(s *terraform.State) error { conn := acceptance.AzureProvider.Meta().(*clients.Client).Containers.GroupsClient @@ -1350,7 +1506,6 @@ func testCheckAzureRMContainerGroupDestroy(s *terraform.State) error { resourceGroup := rs.Primary.Attributes["resource_group_name"] resp, err := conn.Get(ctx, resourceGroup, name) - if err != nil { if !utils.ResponseWasNotFound(resp.Response) { return fmt.Errorf("Container Group still exists:\n%#v", resp) diff --git a/azurerm/internal/services/containers/tests/container_registry_webhook_resource_test.go b/azurerm/internal/services/containers/tests/container_registry_webhook_resource_test.go index 4949daf8ac43..d4e90c5e61ae 100644 --- a/azurerm/internal/services/containers/tests/container_registry_webhook_resource_test.go +++ b/azurerm/internal/services/containers/tests/container_registry_webhook_resource_test.go @@ -658,7 +658,6 @@ func testCheckAzureRMContainerRegistryWebhookDestroy(s *terraform.State) error { name := rs.Primary.Attributes["name"] resp, err := client.Get(ctx, resourceGroup, 
registryName, name) - if err != nil { if utils.ResponseWasNotFound(resp.Response) { return nil diff --git a/azurerm/internal/services/containers/tests/helpers_test.go b/azurerm/internal/services/containers/tests/helpers_test.go index a697ca486a27..f35de388c205 100644 --- a/azurerm/internal/services/containers/tests/helpers_test.go +++ b/azurerm/internal/services/containers/tests/helpers_test.go @@ -1,18 +1,13 @@ package tests import ( - "os" "testing" ) -func checkIfShouldRunTestsCombined(t *testing.T) { - if os.Getenv("TF_PROVIDER_SPLIT_COMBINED_TESTS") != "" { - t.Skip("Skipping since this is being run Individually") - } -} - func checkIfShouldRunTestsIndividually(t *testing.T) { - if os.Getenv("TF_PROVIDER_SPLIT_COMBINED_TESTS") == "" { - t.Skip("Skipping since this is being run as a Combined Test") - } + // NOTE: leaving this around so we can remove this gradually without + // causing merge conflicts on open PR's + // + // This is no longer necessary since we limit the concurrent tests + // for this package at the CI level } diff --git a/azurerm/internal/services/containers/tests/kubernetes_cluster_addons_resource_test.go b/azurerm/internal/services/containers/tests/kubernetes_cluster_addons_resource_test.go index 119044f82969..891127f43da9 100644 --- a/azurerm/internal/services/containers/tests/kubernetes_cluster_addons_resource_test.go +++ b/azurerm/internal/services/containers/tests/kubernetes_cluster_addons_resource_test.go @@ -15,7 +15,7 @@ var kubernetesAddOnTests = map[string]func(t *testing.T){ "addonProfileKubeDashboard": testAccAzureRMKubernetesCluster_addonProfileKubeDashboard, "addonProfileOMS": testAccAzureRMKubernetesCluster_addonProfileOMS, "addonProfileOMSToggle": testAccAzureRMKubernetesCluster_addonProfileOMSToggle, - "addonProfileRouting": testAccAzureRMKubernetesCluster_addonProfileRouting, + "addonProfileRouting": testAccAzureRMKubernetesCluster_addonProfileRoutingToggle, } func TestAccAzureRMKubernetesCluster_addonProfileAciConnectorLinux(t *testing.T) { @@ -244,12 +244,12 @@ func testAccAzureRMKubernetesCluster_addonProfileOMSToggle(t *testing.T) { }) } -func TestAccAzureRMKubernetesCluster_addonProfileRouting(t *testing.T) { +func TestAccAzureRMKubernetesCluster_addonProfileRoutingToggle(t *testing.T) { checkIfShouldRunTestsIndividually(t) - testAccAzureRMKubernetesCluster_addonProfileRouting(t) + testAccAzureRMKubernetesCluster_addonProfileRoutingToggle(t) } -func testAccAzureRMKubernetesCluster_addonProfileRouting(t *testing.T) { +func testAccAzureRMKubernetesCluster_addonProfileRoutingToggle(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_kubernetes_cluster", "test") resource.ParallelTest(t, resource.TestCase{ @@ -262,7 +262,18 @@ func testAccAzureRMKubernetesCluster_addonProfileRouting(t *testing.T) { Check: resource.ComposeTestCheckFunc( testCheckAzureRMKubernetesClusterExists(data.ResourceName), resource.TestCheckResourceAttr(data.ResourceName, "addon_profile.0.http_application_routing.#", "1"), - resource.TestCheckResourceAttrSet(data.ResourceName, "addon_profile.0.http_application_routing.0.enabled"), + resource.TestCheckResourceAttr(data.ResourceName, "addon_profile.0.http_application_routing.0.enabled", "true"), + resource.TestCheckResourceAttrSet(data.ResourceName, "addon_profile.0.http_application_routing.0.http_application_routing_zone_name"), + resource.TestCheckResourceAttr(data.ResourceName, "addon_profile.0.oms_agent.#", "0"), + ), + }, + data.ImportStep(), + { + Config: 
testAccAzureRMKubernetesCluster_addonProfileRoutingConfigDisabled(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMKubernetesClusterExists(data.ResourceName), + resource.TestCheckResourceAttr(data.ResourceName, "addon_profile.0.http_application_routing.#", "1"), + resource.TestCheckResourceAttr(data.ResourceName, "addon_profile.0.http_application_routing.0.enabled", "false"), resource.TestCheckResourceAttrSet(data.ResourceName, "addon_profile.0.http_application_routing.0.http_application_routing_zone_name"), resource.TestCheckResourceAttr(data.ResourceName, "addon_profile.0.oms_agent.#", "0"), ), @@ -685,6 +696,56 @@ resource "azurerm_kubernetes_cluster" "test" { http_application_routing { enabled = true } + kube_dashboard { + enabled = false + } + } + + identity { + type = "SystemAssigned" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func testAccAzureRMKubernetesCluster_addonProfileRoutingConfigDisabled(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-aks-%d" + location = "%s" +} + +resource "azurerm_kubernetes_cluster" "test" { + name = "acctestaks%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + dns_prefix = "acctestaks%d" + + linux_profile { + admin_username = "acctestuser%d" + + ssh_key { + key_data = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCqaZoyiz1qbdOQ8xEf6uEu1cCwYowo5FHtsBhqLoDnnp7KUTEBN+L2NxRIfQ781rxV6Iq5jSav6b2Q8z5KiseOlvKA/RF2wqU0UPYqQviQhLmW6THTpmrv/YkUCuzxDpsH7DUDhZcwySLKVVe0Qm3+5N2Ta6UYH3lsDf9R9wTP2K/+vAnflKebuypNlmocIvakFWoZda18FOmsOoIVXQ8HWFNCuw9ZCunMSN62QGamCe3dL5cXlkgHYv7ekJE15IA9aOJcM7e90oeTqo+7HTcWfdu0qQqPWY5ujyMw/llas8tsXY85LFqRnr3gJ02bAscjc477+X+j/gkpFoN1QEmt terraform@demo.tld" + } + } + + default_node_pool { + name = "default" + node_count = 1 + vm_size = "Standard_DS2_v2" + } + + addon_profile { + http_application_routing { + enabled = false + } + kube_dashboard { + enabled = false + } } identity { diff --git a/azurerm/internal/services/containers/tests/kubernetes_cluster_auth_resource_test.go b/azurerm/internal/services/containers/tests/kubernetes_cluster_auth_resource_test.go index 675f8107629d..04aecdf53a64 100644 --- a/azurerm/internal/services/containers/tests/kubernetes_cluster_auth_resource_test.go +++ b/azurerm/internal/services/containers/tests/kubernetes_cluster_auth_resource_test.go @@ -10,7 +10,6 @@ import ( var kubernetesAuthTests = map[string]func(t *testing.T){ "apiServerAuthorizedIPRanges": testAccAzureRMKubernetesCluster_apiServerAuthorizedIPRanges, - "enablePodSecurityPolicy": testAccAzureRMKubernetesCluster_enablePodSecurityPolicy, "managedClusterIdentity": testAccAzureRMKubernetesCluster_managedClusterIdentity, "roleBasedAccessControl": testAccAzureRMKubernetesCluster_roleBasedAccessControl, "AAD": testAccAzureRMKubernetesCluster_roleBasedAccessControlAAD, @@ -57,31 +56,6 @@ func testAccAzureRMKubernetesCluster_apiServerAuthorizedIPRanges(t *testing.T) { }) } -func TestAccAzureRMKubernetesCluster_enablePodSecurityPolicy(t *testing.T) { - checkIfShouldRunTestsIndividually(t) - testAccAzureRMKubernetesCluster_enablePodSecurityPolicy(t) -} - -func testAccAzureRMKubernetesCluster_enablePodSecurityPolicy(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_kubernetes_cluster", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) 
}, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKubernetesClusterDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKubernetesCluster_enablePodSecurityPolicyConfig(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKubernetesClusterExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "enable_pod_security_policy", "true"), - ), - }, - data.ImportStep(), - }, - }) -} - func TestAccAzureRMKubernetesCluster_managedClusterIdentity(t *testing.T) { checkIfShouldRunTestsIndividually(t) testAccAzureRMKubernetesCluster_managedClusterIdentity(t) @@ -475,41 +449,6 @@ resource "azurerm_kubernetes_cluster" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) } -func testAccAzureRMKubernetesCluster_enablePodSecurityPolicyConfig(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-aks-%d" - location = "%s" -} - -resource "azurerm_kubernetes_cluster" "test" { - name = "acctestaks%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - dns_prefix = "acctestaks%d" - enable_pod_security_policy = true - - role_based_access_control { - enabled = true - } - - default_node_pool { - name = "default" - node_count = 1 - vm_size = "Standard_DS2_v2" - } - - identity { - type = "SystemAssigned" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - func testAccAzureRMKubernetesCluster_managedClusterIdentityConfig(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { diff --git a/azurerm/internal/services/containers/tests/kubernetes_cluster_data_source_test.go b/azurerm/internal/services/containers/tests/kubernetes_cluster_data_source_test.go index ad24f17ffcac..1ece94b8f32f 100644 --- a/azurerm/internal/services/containers/tests/kubernetes_cluster_data_source_test.go +++ b/azurerm/internal/services/containers/tests/kubernetes_cluster_data_source_test.go @@ -29,7 +29,6 @@ var kubernetesDataSourceTests = map[string]func(t *testing.T){ "autoscalingNoAvailabilityZones": testAccDataSourceAzureRMKubernetesCluster_autoscalingNoAvailabilityZones, "autoscalingWithAvailabilityZones": testAccDataSourceAzureRMKubernetesCluster_autoscalingWithAvailabilityZones, "nodeLabels": testAccDataSourceAzureRMKubernetesCluster_nodeLabels, - "nodeTaints": testAccDataSourceAzureRMKubernetesCluster_nodeTaints, "enableNodePublicIP": testAccDataSourceAzureRMKubernetesCluster_enableNodePublicIP, "privateCluster": testAccDataSourceAzureRMKubernetesCluster_privateCluster, } @@ -500,7 +499,6 @@ func testAccDataSourceAzureRMKubernetesCluster_addOnProfileAzurePolicy(t *testin testCheckAzureRMKubernetesClusterExists(data.ResourceName), resource.TestCheckResourceAttr(data.ResourceName, "addon_profile.0.azure_policy.#", "1"), resource.TestCheckResourceAttr(data.ResourceName, "addon_profile.0.azure_policy.0.enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "addon_profile.0.azure_policy.0.version", "v2"), ), }, }, @@ -617,30 +615,6 @@ func testAccDataSourceAzureRMKubernetesCluster_nodeLabels(t *testing.T) { }) } -func TestAccDataSourceAzureRMKubernetesCluster_nodeTaints(t *testing.T) { - checkIfShouldRunTestsIndividually(t) - testAccDataSourceAzureRMKubernetesCluster_nodeTaints(t) -} - -func 
testAccDataSourceAzureRMKubernetesCluster_nodeTaints(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_kubernetes_cluster", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKubernetesClusterDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMKubernetesCluster_nodeTaintsConfig(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKubernetesClusterExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "agent_pool_profile.1.node_taints.0", "key=value:NoSchedule"), - ), - }, - }, - }) -} - func TestAccDataSourceAzureRMKubernetesCluster_enableNodePublicIP(t *testing.T) { checkIfShouldRunTestsIndividually(t) testAccDataSourceAzureRMKubernetesCluster_enableNodePublicIP(t) @@ -893,18 +867,6 @@ data "azurerm_kubernetes_cluster" "test" { `, r) } -func testAccDataSourceAzureRMKubernetesCluster_nodeTaintsConfig(data acceptance.TestData) string { - r := testAccAzureRMKubernetesCluster_nodeTaintsConfig(data) - return fmt.Sprintf(` -%s - -data "azurerm_kubernetes_cluster" "test" { - name = azurerm_kubernetes_cluster.test.name - resource_group_name = azurerm_kubernetes_cluster.test.resource_group_name -} -`, r) -} - func testAccDataSourceAzureRMKubernetesCluster_enableNodePublicIPConfig(data acceptance.TestData) string { r := testAccAzureRMKubernetesCluster_enableNodePublicIPConfig(data, true) return fmt.Sprintf(` diff --git a/azurerm/internal/services/containers/tests/kubernetes_cluster_network_resource_test.go b/azurerm/internal/services/containers/tests/kubernetes_cluster_network_resource_test.go index 899cc287f4d2..7abb17f974a1 100644 --- a/azurerm/internal/services/containers/tests/kubernetes_cluster_network_resource_test.go +++ b/azurerm/internal/services/containers/tests/kubernetes_cluster_network_resource_test.go @@ -328,30 +328,6 @@ func testAccAzureRMKubernetesCluster_outboundTypeLoadBalancer(t *testing.T) { }) } -func TestAccAzureRMKubernetesCluster_outboundTypeUserDefinedRouting(t *testing.T) { - checkIfShouldRunTestsIndividually(t) - testAccAzureRMKubernetesCluster_outboundTypeUserDefinedRouting(t) -} - -func testAccAzureRMKubernetesCluster_outboundTypeUserDefinedRouting(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_kubernetes_cluster", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKubernetesClusterDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKubernetesCluster_outboundTypeUserDefinedRoutingConfig(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKubernetesClusterExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - func TestAccAzureRMKubernetesCluster_privateClusterOn(t *testing.T) { checkIfShouldRunTestsIndividually(t) testAccAzureRMKubernetesCluster_privateClusterOn(t) @@ -709,19 +685,6 @@ resource "azurerm_resource_group" "test" { location = "%s" } -resource "azurerm_route_table" "test" { - name = "akc-routetable-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - route { - name = "akc-route-%d" - address_prefix = "10.100.0.0/14" - next_hop_type = "VirtualAppliance" - next_hop_in_ip_address = "10.10.1.1" - } -} - resource "azurerm_virtual_network" "test" { name = "acctestvirtnet%d" address_space = ["10.1.0.0/16"] @@ -736,11 
+699,6 @@ resource "azurerm_subnet" "test" { address_prefix = "10.1.0.0/24" } -resource "azurerm_subnet_route_table_association" "test" { - subnet_id = azurerm_subnet.test.id - route_table_id = azurerm_route_table.test.id -} - resource "azurerm_kubernetes_cluster" "test" { name = "acctestaks%d" location = azurerm_resource_group.test.location @@ -773,7 +731,7 @@ resource "azurerm_kubernetes_cluster" "test" { service_cidr = "10.10.0.0/16" } } -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, networkPlugin) +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, networkPlugin) } // nolint unparam @@ -1041,89 +999,6 @@ resource "azurerm_kubernetes_cluster" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) } -func testAccAzureRMKubernetesCluster_outboundTypeUserDefinedRoutingConfig(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-aks-%d" - location = "%s" -} - - - -resource "azurerm_route_table" "test" { - name = "akc-routetable-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - route { - name = "first" - address_prefix = "10.100.0.0/14" - next_hop_type = "VirtualAppliance" - next_hop_in_ip_address = "10.10.1.1" - } - - route { - name = "second" - address_prefix = "0.0.0.0/0" - next_hop_type = "VirtualAppliance" - next_hop_in_ip_address = "10.10.1.1" - } -} - -resource "azurerm_virtual_network" "test" { - name = "acctestvirtnet%d" - address_space = ["10.1.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "internal" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.1.0.0/24" -} - -resource "azurerm_subnet_route_table_association" "test" { - subnet_id = azurerm_subnet.test.id - route_table_id = azurerm_route_table.test.id -} - -resource "azurerm_kubernetes_cluster" "test" { - name = "acctestaks%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - dns_prefix = "acctestaks%d" - - default_node_pool { - name = "default" - node_count = 2 - vm_size = "Standard_DS2_v2" - vnet_subnet_id = azurerm_subnet.test.id - max_pods = 60 - } - - identity { - type = "SystemAssigned" - } - - network_profile { - network_plugin = "kubenet" - load_balancer_sku = "Standard" - pod_cidr = "10.244.0.0/16" - service_cidr = "10.0.0.0/16" - dns_service_ip = "10.0.0.10" - docker_bridge_cidr = "172.17.0.1/16" - outbound_type = "userDefinedRouting" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - func testAccAzureRMKubernetesCluster_privateClusterConfig(data acceptance.TestData, enablePrivateCluster bool) string { return fmt.Sprintf(` provider "azurerm" { diff --git a/azurerm/internal/services/containers/tests/kubernetes_cluster_node_pool_resource_test.go b/azurerm/internal/services/containers/tests/kubernetes_cluster_node_pool_resource_test.go index 79d759a6576e..e02eeb5c4c62 100644 --- 
a/azurerm/internal/services/containers/tests/kubernetes_cluster_node_pool_resource_test.go +++ b/azurerm/internal/services/containers/tests/kubernetes_cluster_node_pool_resource_test.go @@ -27,12 +27,15 @@ var kubernetesNodePoolTests = map[string]func(t *testing.T){ "manualScaleMultiplePoolsUpdate": testAccAzureRMKubernetesClusterNodePool_manualScaleMultiplePoolsUpdate, "manualScaleUpdate": testAccAzureRMKubernetesClusterNodePool_manualScaleUpdate, "manualScaleVMSku": testAccAzureRMKubernetesClusterNodePool_manualScaleVMSku, + "maxSize": testAccAzureRMKubernetesClusterNodePool_maxSize, "nodeLabels": testAccAzureRMKubernetesClusterNodePool_nodeLabels, "nodePublicIP": testAccAzureRMKubernetesClusterNodePool_nodePublicIP, "nodeTaints": testAccAzureRMKubernetesClusterNodePool_nodeTaints, "requiresImport": testAccAzureRMKubernetesClusterNodePool_requiresImport, "spot": testAccAzureRMKubernetesClusterNodePool_spot, "osDiskSizeGB": testAccAzureRMKubernetesClusterNodePool_osDiskSizeGB, + "proximityPlacementGroupId": testAccAzureRMKubernetesClusterNodePool_proximityPlacementGroupId, + "osDiskType": testAccAzureRMKubernetesClusterNodePool_osDiskType, "modeSystem": testAccAzureRMKubernetesClusterNodePool_modeSystem, "modeUpdate": testAccAzureRMKubernetesClusterNodePool_modeUpdate, "virtualNetworkAutomatic": testAccAzureRMKubernetesClusterNodePool_virtualNetworkAutomatic, @@ -572,6 +575,54 @@ func testAccAzureRMKubernetesClusterNodePool_osDiskSizeGB(t *testing.T) { }) } +func TestAccAzureRMKubernetesClusterNodePool_proximityPlacementGroupId(t *testing.T) { + checkIfShouldRunTestsIndividually(t) + testAccAzureRMKubernetesClusterNodePool_proximityPlacementGroupId(t) +} + +func testAccAzureRMKubernetesClusterNodePool_proximityPlacementGroupId(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_kubernetes_cluster_node_pool", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMKubernetesClusterNodePoolDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMKubernetesClusterNodePool_proximityPlacementGroupIdConfig(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMKubernetesNodePoolExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMKubernetesClusterNodePool_osDiskType(t *testing.T) { + checkIfShouldRunTestsIndividually(t) + testAccAzureRMKubernetesClusterNodePool_osDiskType(t) +} + +func testAccAzureRMKubernetesClusterNodePool_osDiskType(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_kubernetes_cluster_node_pool", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMKubernetesClusterNodePoolDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMKubernetesClusterNodePool_osDiskTypeConfig(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMKubernetesNodePoolExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + func TestAccAzureRMKubernetesClusterNodePool_requiresImport(t *testing.T) { checkIfShouldRunTestsIndividually(t) testAccAzureRMKubernetesClusterNodePool_requiresImport(t) @@ -735,12 +786,31 @@ func TestAccAzureRMKubernetesClusterNodePool_zeroSize(t *testing.T) { testAccAzureRMKubernetesClusterNodePool_zeroSize(t) } -func TestAccAzureRMKubernetesClusterNodePool_sameSize(t *testing.T) { +func 
testAccAzureRMKubernetesClusterNodePool_zeroSize(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_kubernetes_cluster_node_pool", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMKubernetesClusterNodePoolDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMKubernetesClusterNodePool_zeroSizeConfig(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMKubernetesNodePoolExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMKubernetesClusterNodePool_maxSize(t *testing.T) { checkIfShouldRunTestsIndividually(t) - testAccAzureRMKubernetesClusterNodePool_sameSize(t) + testAccAzureRMKubernetesClusterNodePool_maxSize(t) } -func testAccAzureRMKubernetesClusterNodePool_zeroSize(t *testing.T) { +func testAccAzureRMKubernetesClusterNodePool_maxSize(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_kubernetes_cluster_node_pool", "test") resource.ParallelTest(t, resource.TestCase{ @@ -749,7 +819,7 @@ func testAccAzureRMKubernetesClusterNodePool_zeroSize(t *testing.T) { CheckDestroy: testCheckAzureRMKubernetesClusterNodePoolDestroy, Steps: []resource.TestStep{ { - Config: testAccAzureRMKubernetesClusterNodePool_zeroSizeConfig(data), + Config: testAccAzureRMKubernetesClusterNodePool_maxSizeConfig(data), Check: resource.ComposeTestCheckFunc( testCheckAzureRMKubernetesNodePoolExists(data.ResourceName), ), @@ -759,6 +829,11 @@ func testAccAzureRMKubernetesClusterNodePool_zeroSize(t *testing.T) { }) } +func TestAccAzureRMKubernetesClusterNodePool_sameSize(t *testing.T) { + checkIfShouldRunTestsIndividually(t) + testAccAzureRMKubernetesClusterNodePool_sameSize(t) +} + func testAccAzureRMKubernetesClusterNodePool_sameSize(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_kubernetes_cluster_node_pool", "test") @@ -787,13 +862,12 @@ func testCheckAzureRMKubernetesClusterNodePoolDestroy(s *terraform.State) error continue } - parsedK8sId, err := parse.KubernetesNodePoolID(rs.Primary.ID) + parsedK8sId, err := parse.NodePoolID(rs.Primary.ID) if err != nil { return fmt.Errorf("Error parsing kubernetes node pool id: %+v", err) } - resp, err := client.Get(ctx, parsedK8sId.ResourceGroup, parsedK8sId.ClusterName, parsedK8sId.Name) - + resp, err := client.Get(ctx, parsedK8sId.ResourceGroup, parsedK8sId.ManagedClusterName, parsedK8sId.AgentPoolName) if err != nil { return nil } @@ -819,18 +893,18 @@ func testCheckAzureRMKubernetesNodePoolExists(resourceName string) resource.Test name := rs.Primary.Attributes["name"] kubernetesClusterId := rs.Primary.Attributes["kubernetes_cluster_id"] - parsedK8sId, err := parse.KubernetesClusterID(kubernetesClusterId) + parsedK8sId, err := parse.ClusterID(kubernetesClusterId) if err != nil { return fmt.Errorf("Error parsing kubernetes cluster id: %+v", err) } - agentPool, err := client.Get(ctx, parsedK8sId.ResourceGroup, parsedK8sId.Name, name) + agentPool, err := client.Get(ctx, parsedK8sId.ResourceGroup, parsedK8sId.ManagedClusterName, name) if err != nil { return fmt.Errorf("Bad: Get on kubernetesClustersClient: %+v", err) } if agentPool.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Node Pool %q (Kubernetes Cluster %q / Resource Group: %q) does not exist", name, parsedK8sId.Name, parsedK8sId.ResourceGroup) + return fmt.Errorf("Bad: Node Pool %q (Kubernetes Cluster %q / Resource Group: %q) does not exist", name, parsedK8sId.ManagedClusterName, 
parsedK8sId.ResourceGroup) } return nil @@ -850,12 +924,12 @@ func testCheckAzureRMKubernetesNodePoolScale(resourceName string, nodeCount int) nodePoolName := rs.Primary.Attributes["name"] kubernetesClusterId := rs.Primary.Attributes["kubernetes_cluster_id"] - parsedK8sId, err := parse.KubernetesClusterID(kubernetesClusterId) + parsedK8sId, err := parse.ClusterID(kubernetesClusterId) if err != nil { return fmt.Errorf("Error parsing kubernetes cluster id: %+v", err) } - clusterName := parsedK8sId.Name + clusterName := parsedK8sId.ManagedClusterName resourceGroup := parsedK8sId.ResourceGroup nodePool, err := client.Get(ctx, resourceGroup, clusterName, nodePoolName) @@ -899,18 +973,18 @@ func testCheckAzureRMKubernetesNodePoolNodeLabels(resourceName string, expectedL name := rs.Primary.Attributes["name"] kubernetesClusterId := rs.Primary.Attributes["kubernetes_cluster_id"] - parsedK8sId, err := parse.KubernetesClusterID(kubernetesClusterId) + parsedK8sId, err := parse.ClusterID(kubernetesClusterId) if err != nil { return fmt.Errorf("Error parsing kubernetes cluster id: %+v", err) } - agent_pool, err := client.Get(ctx, parsedK8sId.ResourceGroup, parsedK8sId.Name, name) + agent_pool, err := client.Get(ctx, parsedK8sId.ResourceGroup, parsedK8sId.ManagedClusterName, name) if err != nil { return fmt.Errorf("Bad: Get on kubernetesClustersClient: %+v", err) } if agent_pool.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Node Pool %q (Kubernetes Cluster %q / Resource Group: %q) does not exist", name, parsedK8sId.Name, parsedK8sId.ResourceGroup) + return fmt.Errorf("Bad: Node Pool %q (Kubernetes Cluster %q / Resource Group: %q) does not exist", name, parsedK8sId.ManagedClusterName, parsedK8sId.ResourceGroup) } labels := make(map[string]string) @@ -918,7 +992,7 @@ func testCheckAzureRMKubernetesNodePoolNodeLabels(resourceName string, expectedL labels[k] = *v } if !reflect.DeepEqual(labels, expectedLabels) { - return fmt.Errorf("Bad: Node Pool %q (Kubernetes Cluster %q / Resource Group: %q) nodeLabels %v do not match expected %v", name, parsedK8sId.Name, parsedK8sId.ResourceGroup, labels, expectedLabels) + return fmt.Errorf("Bad: Node Pool %q (Kubernetes Cluster %q / Resource Group: %q) nodeLabels %v do not match expected %v", name, parsedK8sId.ManagedClusterName, parsedK8sId.ResourceGroup, labels, expectedLabels) } return nil @@ -1386,6 +1460,65 @@ resource "azurerm_kubernetes_cluster_node_pool" "test" { `, template) } +func testAccAzureRMKubernetesClusterNodePool_proximityPlacementGroupIdConfig(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} +resource "azurerm_resource_group" "test" { + name = "acctestRG-aks-%d" + location = "%s" +} +resource "azurerm_kubernetes_cluster" "test" { + name = "acctestaks%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + dns_prefix = "acctestaks%d" + default_node_pool { + name = "default" + node_count = 1 + vm_size = "Standard_DS2_v2" + } + identity { + type = "SystemAssigned" + } +} +resource "azurerm_proximity_placement_group" "test" { + name = "acctestPPG-aks-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tags = { + environment = "Production" + } +} +resource "azurerm_kubernetes_cluster_node_pool" "test" { + name = "internal" + kubernetes_cluster_id = azurerm_kubernetes_cluster.test.id + vm_size = "Standard_DS2_v2" + node_count = 1 + proximity_placement_group_id = 
azurerm_proximity_placement_group.test.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func testAccAzureRMKubernetesClusterNodePool_osDiskTypeConfig(data acceptance.TestData) string { + template := testAccAzureRMKubernetesClusterNodePool_templateConfig(data) + return fmt.Sprintf(` +provider "azurerm" { + features {} +} +%s +resource "azurerm_kubernetes_cluster_node_pool" "test" { + name = "internal" + kubernetes_cluster_id = azurerm_kubernetes_cluster.test.id + vm_size = "Standard_DS3_v2" + node_count = 1 + os_disk_size_gb = 100 + os_disk_type = "Ephemeral" +} +`, template) +} + func testAccAzureRMKubernetesClusterNodePool_spotConfig(data acceptance.TestData) string { template := testAccAzureRMKubernetesClusterNodePool_templateConfig(data) return fmt.Sprintf(` @@ -1535,19 +1668,6 @@ resource "azurerm_resource_group" "test" { location = "%s" } -resource "azurerm_route_table" "test" { - name = "acctestrt-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - route { - name = "akc-route-%d" - address_prefix = "10.100.0.0/14" - next_hop_type = "VirtualAppliance" - next_hop_in_ip_address = "10.10.1.1" - } -} - resource "azurerm_virtual_network" "test" { name = "acctestvirtnet%d" address_space = ["10.1.0.0/16"] @@ -1562,11 +1682,6 @@ resource "azurerm_subnet" "test" { address_prefix = "10.1.0.0/24" } -resource "azurerm_subnet_route_table_association" "test" { - subnet_id = azurerm_subnet.test.id - route_table_id = azurerm_route_table.test.id -} - resource "azurerm_kubernetes_cluster" "test" { name = "acctestaks%d" location = azurerm_resource_group.test.location @@ -1584,7 +1699,7 @@ resource "azurerm_kubernetes_cluster" "test" { type = "SystemAssigned" } } -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) } func testAccAzureRMKubernetesClusterNodePool_templateWindowsConfig(data acceptance.TestData) string { @@ -1647,6 +1762,27 @@ resource "azurerm_kubernetes_cluster_node_pool" "test" { `, template) } +func testAccAzureRMKubernetesClusterNodePool_maxSizeConfig(data acceptance.TestData) string { + template := testAccAzureRMKubernetesClusterNodePool_templateConfig(data) + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +%s + +resource "azurerm_kubernetes_cluster_node_pool" "test" { + name = "internal" + kubernetes_cluster_id = azurerm_kubernetes_cluster.test.id + vm_size = "Standard_DS2_v2" + enable_auto_scaling = true + min_count = 1 + max_count = 1000 + node_count = 1 +} +`, template) +} + func testAccAzureRMKubernetesClusterNodePool_sameSizeConfig(data acceptance.TestData) string { template := testAccAzureRMKubernetesClusterNodePool_templateConfig(data) return fmt.Sprintf(` diff --git a/azurerm/internal/services/containers/tests/kubernetes_cluster_other_resource_test.go b/azurerm/internal/services/containers/tests/kubernetes_cluster_other_resource_test.go index f9a79c0687ee..6f9ba378df49 100644 --- a/azurerm/internal/services/containers/tests/kubernetes_cluster_other_resource_test.go +++ b/azurerm/internal/services/containers/tests/kubernetes_cluster_other_resource_test.go @@ -2,7 +2,6 @@ package tests import ( "fmt" - "regexp" "strings" "testing" @@ -11,21 +10,19 @@ import ( ) var kubernetesOtherTests = 
map[string]func(t *testing.T){ - "basicAvailabilitySet": testAccAzureRMKubernetesCluster_basicAvailabilitySet, - "basicVMSS": testAccAzureRMKubernetesCluster_basicVMSS, - "requiresImport": testAccAzureRMKubernetesCluster_requiresImport, - "linuxProfile": testAccAzureRMKubernetesCluster_linuxProfile, - "nodeLabels": testAccAzureRMKubernetesCluster_nodeLabels, - "nodeTaints": testAccAzureRMKubernetesCluster_nodeTaints, - "nodeResourceGroup": testAccAzureRMKubernetesCluster_nodeResourceGroup, - "paidSku": testAccAzureRMKubernetesCluster_paidSku, - "upgradeConfig": testAccAzureRMKubernetesCluster_upgrade, - "tags": testAccAzureRMKubernetesCluster_tags, - "windowsProfile": testAccAzureRMKubernetesCluster_windowsProfile, - "outboundTypeLoadBalancer": testAccAzureRMKubernetesCluster_outboundTypeLoadBalancer, - "outboundTypeUserDefinedRouting": testAccAzureRMKubernetesCluster_outboundTypeUserDefinedRouting, - "privateClusterOn": testAccAzureRMKubernetesCluster_privateClusterOn, - "privateClusterOff": testAccAzureRMKubernetesCluster_privateClusterOff, + "basicAvailabilitySet": testAccAzureRMKubernetesCluster_basicAvailabilitySet, + "basicVMSS": testAccAzureRMKubernetesCluster_basicVMSS, + "requiresImport": testAccAzureRMKubernetesCluster_requiresImport, + "linuxProfile": testAccAzureRMKubernetesCluster_linuxProfile, + "nodeLabels": testAccAzureRMKubernetesCluster_nodeLabels, + "nodeResourceGroup": testAccAzureRMKubernetesCluster_nodeResourceGroup, + "paidSku": testAccAzureRMKubernetesCluster_paidSku, + "upgradeConfig": testAccAzureRMKubernetesCluster_upgrade, + "tags": testAccAzureRMKubernetesCluster_tags, + "windowsProfile": testAccAzureRMKubernetesCluster_windowsProfile, + "outboundTypeLoadBalancer": testAccAzureRMKubernetesCluster_outboundTypeLoadBalancer, + "privateClusterOn": testAccAzureRMKubernetesCluster_privateClusterOn, + "privateClusterOff": testAccAzureRMKubernetesCluster_privateClusterOff, } func TestAccAzureRMKubernetesCluster_basicAvailabilitySet(t *testing.T) { @@ -61,7 +58,7 @@ func testAccAzureRMKubernetesCluster_basicAvailabilitySet(t *testing.T) { resource.TestCheckResourceAttrSet(data.ResourceName, "kube_config.0.password"), resource.TestCheckResourceAttr(data.ResourceName, "kube_admin_config.#", "0"), resource.TestCheckResourceAttr(data.ResourceName, "kube_admin_config_raw", ""), - resource.TestCheckResourceAttr(data.ResourceName, "network_profile.0.load_balancer_sku", "Basic"), + resource.TestCheckResourceAttr(data.ResourceName, "network_profile.0.load_balancer_sku", "Standard"), ), }, data.ImportStep(), @@ -128,7 +125,7 @@ func testAccAzureRMKubernetesCluster_basicVMSS(t *testing.T) { resource.TestCheckResourceAttrSet(data.ResourceName, "kube_config.0.password"), resource.TestCheckResourceAttr(data.ResourceName, "kube_admin_config.#", "0"), resource.TestCheckResourceAttr(data.ResourceName, "kube_admin_config_raw", ""), - resource.TestCheckResourceAttr(data.ResourceName, "network_profile.0.load_balancer_sku", "Basic"), + resource.TestCheckResourceAttr(data.ResourceName, "network_profile.0.load_balancer_sku", "Standard"), ), }, data.ImportStep(), @@ -194,85 +191,6 @@ func testAccAzureRMKubernetesCluster_linuxProfile(t *testing.T) { }) } -func TestAccAzureRMKubernetesCluster_autoScalingError(t *testing.T) { - checkIfShouldRunTestsIndividually(t) - testAccAzureRMKubernetesCluster_autoScalingError(t) -} - -func testAccAzureRMKubernetesCluster_autoScalingError(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_kubernetes_cluster", "test") - - resource.ParallelTest(t, 
resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKubernetesClusterDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKubernetesCluster_autoScalingEnabled(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKubernetesClusterExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "default_node_pool.0.node_count", "2"), - ), - }, - { - Config: testAccAzureRMKubernetesCluster_autoScalingEnabledUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKubernetesClusterExists(data.ResourceName), - ), - ExpectError: regexp.MustCompile("cannot change `node_count` when `enable_auto_scaling` is set to `true`"), - }, - }, - }) -} - -func TestAccAzureRMKubernetesCluster_autoScalingErrorMax(t *testing.T) { - checkIfShouldRunTestsIndividually(t) - testAccAzureRMKubernetesCluster_autoScalingErrorMax(t) -} - -func testAccAzureRMKubernetesCluster_autoScalingErrorMax(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_kubernetes_cluster", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKubernetesClusterDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKubernetesCluster_autoScalingEnabledUpdateMax(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKubernetesClusterExists(data.ResourceName), - ), - ExpectError: regexp.MustCompile("`node_count`\\(11\\) must be equal to or less than `max_count`\\(10\\) when `enable_auto_scaling` is set to `true`"), - }, - }, - }) -} - -func TestAccAzureRMKubernetesCluster_autoScalingErrorMin(t *testing.T) { - checkIfShouldRunTestsIndividually(t) - testAccAzureRMKubernetesCluster_autoScalingErrorMin(t) -} - -func testAccAzureRMKubernetesCluster_autoScalingErrorMin(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_kubernetes_cluster", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKubernetesClusterDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKubernetesCluster_autoScalingEnabledUpdateMin(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKubernetesClusterExists(data.ResourceName), - ), - ExpectError: regexp.MustCompile("`node_count`\\(1\\) must be equal to or greater than `min_count`\\(2\\) when `enable_auto_scaling` is set to `true`"), - }, - }, - }) -} - func TestAccAzureRMKubernetesCluster_nodeLabels(t *testing.T) { checkIfShouldRunTestsIndividually(t) testAccAzureRMKubernetesCluster_nodeLabels(t) @@ -314,31 +232,6 @@ func testAccAzureRMKubernetesCluster_nodeLabels(t *testing.T) { }) } -func TestAccAzureRMKubernetesCluster_nodeTaints(t *testing.T) { - checkIfShouldRunTestsIndividually(t) - testAccAzureRMKubernetesCluster_nodeTaints(t) -} - -func testAccAzureRMKubernetesCluster_nodeTaints(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_kubernetes_cluster", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKubernetesClusterDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKubernetesCluster_nodeTaintsConfig(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKubernetesClusterExists(data.ResourceName), - 
resource.TestCheckResourceAttr(data.ResourceName, "default_node_pool.0.node_taints.0", "key=value:PreferNoSchedule"), - ), - }, - data.ImportStep(), - }, - }) -} - func TestAccAzureRMKubernetesCluster_nodeResourceGroup(t *testing.T) { checkIfShouldRunTestsIndividually(t) testAccAzureRMKubernetesCluster_nodeResourceGroup(t) @@ -705,6 +598,39 @@ resource "azurerm_kubernetes_cluster" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) } +func testAccAzureRMKubernetesCluster_autoScalingWithMaxCountConfig(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-AKS-%d" + location = "%s" +} + +resource "azurerm_kubernetes_cluster" "test" { + name = "acctestAKS%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + dns_prefix = "acctestAKS%d" + + default_node_pool { + name = "default" + vm_size = "Standard_DS2_v2" + enable_auto_scaling = true + min_count = 1 + max_count = 1000 + node_count = 1 + } + + identity { + type = "SystemAssigned" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + func testAccAzureRMKubernetesCluster_autoScalingEnabledUpdateMin(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { @@ -864,39 +790,6 @@ resource "azurerm_kubernetes_cluster" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, labelsStr) } -func testAccAzureRMKubernetesCluster_nodeTaintsConfig(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-aks-%d" - location = "%s" -} - -resource "azurerm_kubernetes_cluster" "test" { - name = "acctestaks%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - dns_prefix = "acctestaks%d" - - default_node_pool { - name = "default" - node_count = 2 - vm_size = "Standard_DS2_v2" - node_taints = [ - "key=value:PreferNoSchedule" - ] - } - - identity { - type = "SystemAssigned" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - func testAccAzureRMKubernetesCluster_nodeResourceGroupConfig(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { diff --git a/azurerm/internal/services/containers/tests/kubernetes_cluster_resource_test.go b/azurerm/internal/services/containers/tests/kubernetes_cluster_resource_test.go index 63b29bafd2ce..968085695557 100644 --- a/azurerm/internal/services/containers/tests/kubernetes_cluster_resource_test.go +++ b/azurerm/internal/services/containers/tests/kubernetes_cluster_resource_test.go @@ -12,16 +12,15 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -var olderKubernetesVersion = "1.16.9" -var currentKubernetesVersion = "1.17.5" +var ( + olderKubernetesVersion = "1.16.15" + currentKubernetesVersion = "1.17.11" +) func TestAccAzureRMKubernetes_all(t *testing.T) { - // we can conditionally run tests tests individually, or combined - checkIfShouldRunTestsCombined(t) - - // NOTE: this is a combined test rather than separate split out tests to - // ease the load on the kubernetes api - testCases := map[string]map[string]func(t *testing.T){ + // NOTE: this test is no longer used, but this assignment kicks around temporarily + // to allow us to migrate off this 
without causing conflicts in open PR's + _ = map[string]map[string]func(t *testing.T){ "auth": kubernetesAuthTests, "clusterAddOn": kubernetesAddOnTests, "datasource": kubernetesDataSourceTests, @@ -33,18 +32,7 @@ func TestAccAzureRMKubernetes_all(t *testing.T) { "upgrade": kubernetesUpgradeTests, } - for group, m := range testCases { - m := m - t.Run(group, func(t *testing.T) { - for name, tc := range m { - tc := tc - - t.Run(name, func(t *testing.T) { - tc(t) - }) - } - }) - } + t.Skip("Skipping since this is being run Individually") } func testCheckAzureRMKubernetesClusterExists(resourceName string) resource.TestCheckFunc { @@ -90,7 +78,6 @@ func testCheckAzureRMKubernetesClusterDestroy(s *terraform.State) error { resourceGroup := rs.Primary.Attributes["resource_group_name"] resp, err := conn.Get(ctx, resourceGroup, name) - if err != nil { return nil } diff --git a/azurerm/internal/services/containers/tests/kubernetes_cluster_scaling_resource_test.go b/azurerm/internal/services/containers/tests/kubernetes_cluster_scaling_resource_test.go index 08292ca31b10..63fa0534bb2b 100644 --- a/azurerm/internal/services/containers/tests/kubernetes_cluster_scaling_resource_test.go +++ b/azurerm/internal/services/containers/tests/kubernetes_cluster_scaling_resource_test.go @@ -2,6 +2,7 @@ package tests import ( "fmt" + "regexp" "testing" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" @@ -15,6 +16,7 @@ var kubernetesScalingTests = map[string]func(t *testing.T){ "autoScalingEnabledError": testAccAzureRMKubernetesCluster_autoScalingError, "autoScalingEnabledErrorMax": testAccAzureRMKubernetesCluster_autoScalingErrorMax, "autoScalingEnabledErrorMin": testAccAzureRMKubernetesCluster_autoScalingErrorMin, + "autoScalingEnabledWithMaxCount": testAccAzureRMKubernetesCluster_autoScalingWithMaxCount, "autoScalingNodeCountUnset": testAccAzureRMKubernetesCluster_autoScalingNodeCountUnset, "autoScalingNoAvailabilityZones": testAccAzureRMKubernetesCluster_autoScalingNoAvailabilityZones, "autoScalingWithAvailabilityZones": testAccAzureRMKubernetesCluster_autoScalingWithAvailabilityZones, @@ -112,6 +114,109 @@ func testAccAzureRMKubernetesCluster_removeAgent(t *testing.T) { }) } +func TestAccAzureRMKubernetesCluster_autoScalingError(t *testing.T) { + checkIfShouldRunTestsIndividually(t) + testAccAzureRMKubernetesCluster_autoScalingError(t) +} + +func testAccAzureRMKubernetesCluster_autoScalingError(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_kubernetes_cluster", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMKubernetesClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMKubernetesCluster_autoScalingEnabled(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMKubernetesClusterExists(data.ResourceName), + resource.TestCheckResourceAttr(data.ResourceName, "default_node_pool.0.node_count", "2"), + ), + }, + { + Config: testAccAzureRMKubernetesCluster_autoScalingEnabledUpdate(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMKubernetesClusterExists(data.ResourceName), + ), + ExpectError: regexp.MustCompile("cannot change `node_count` when `enable_auto_scaling` is set to `true`"), + }, + }, + }) +} + +func TestAccAzureRMKubernetesCluster_autoScalingErrorMax(t *testing.T) { + checkIfShouldRunTestsIndividually(t) + testAccAzureRMKubernetesCluster_autoScalingErrorMax(t) +} + +func 
testAccAzureRMKubernetesCluster_autoScalingErrorMax(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_kubernetes_cluster", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMKubernetesClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMKubernetesCluster_autoScalingEnabledUpdateMax(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMKubernetesClusterExists(data.ResourceName), + ), + ExpectError: regexp.MustCompile("`node_count`\\(11\\) must be equal to or less than `max_count`\\(10\\) when `enable_auto_scaling` is set to `true`"), + }, + }, + }) +} + +func TestAccAzureRMKubernetesCluster_autoScalingWithMaxCount(t *testing.T) { + checkIfShouldRunTestsIndividually(t) + testAccAzureRMKubernetesCluster_autoScalingWithMaxCount(t) +} + +func testAccAzureRMKubernetesCluster_autoScalingWithMaxCount(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_kubernetes_cluster", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMKubernetesClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMKubernetesCluster_autoScalingWithMaxCountConfig(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMKubernetesClusterExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMKubernetesCluster_autoScalingErrorMin(t *testing.T) { + checkIfShouldRunTestsIndividually(t) + testAccAzureRMKubernetesCluster_autoScalingErrorMin(t) +} + +func testAccAzureRMKubernetesCluster_autoScalingErrorMin(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_kubernetes_cluster", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMKubernetesClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMKubernetesCluster_autoScalingEnabledUpdateMin(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMKubernetesClusterExists(data.ResourceName), + ), + ExpectError: regexp.MustCompile("`node_count`\\(1\\) must be equal to or greater than `min_count`\\(2\\) when `enable_auto_scaling` is set to `true`"), + }, + }, + }) +} + func TestAccAzureRMKubernetesCluster_autoScalingNodeCountUnset(t *testing.T) { checkIfShouldRunTestsIndividually(t) testAccAzureRMKubernetesCluster_autoScalingNodeCountUnset(t) diff --git a/azurerm/internal/services/containers/validate/cluster_id.go b/azurerm/internal/services/containers/validate/cluster_id.go new file mode 100644 index 000000000000..d8a0e8f80ff1 --- /dev/null +++ b/azurerm/internal/services/containers/validate/cluster_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/containers/parse" +) + +func ClusterID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ClusterID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/containers/validate/cluster_id_test.go 
b/azurerm/internal/services/containers/validate/cluster_id_test.go new file mode 100644 index 000000000000..cd25d8219f3f --- /dev/null +++ b/azurerm/internal/services/containers/validate/cluster_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestClusterID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ManagedClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ContainerService/", + Valid: false, + }, + + { + // missing value for ManagedClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ContainerService/managedClusters/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ContainerService/managedClusters/cluster1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.CONTAINERSERVICE/MANAGEDCLUSTERS/CLUSTER1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ClusterID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/containers/validate/duration.go b/azurerm/internal/services/containers/validate/duration.go index 01c2dc0028ec..a051709bf68f 100644 --- a/azurerm/internal/services/containers/validate/duration.go +++ b/azurerm/internal/services/containers/validate/duration.go @@ -6,7 +6,11 @@ import ( ) func Duration(i interface{}, k string) (warnings []string, errors []error) { - value := i.(string) + value, ok := i.(string) + if !ok { + return nil, []error{fmt.Errorf("expected type of %q to be string", k)} + } + duration, err := time.ParseDuration(value) if err != nil { errors = append(errors, fmt.Errorf( diff --git a/azurerm/internal/services/containers/validate/kubernetes_cluster_id.go b/azurerm/internal/services/containers/validate/kubernetes_cluster_id.go deleted file mode 100644 index a64c3794ce8f..000000000000 --- a/azurerm/internal/services/containers/validate/kubernetes_cluster_id.go +++ /dev/null @@ -1,21 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/containers/parse" -) - -func KubernetesClusterID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.KubernetesClusterID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a Resource Id: %v", v, err)) - } - - return warnings, errors -} diff --git a/azurerm/internal/services/containers/validate/node_pool_id.go 
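// Illustrative sketch only (not part of this change): ClusterID (above) and the
// NodePoolID validator that follows use the standard SDK ValidateFunc signature,
// so they can be wired straight into a schema attribute. The package name and
// the "kubernetes_cluster_id" field below are hypothetical.
package example

import (
	"github.com/hashicorp/terraform-plugin-sdk/helper/schema"

	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/containers/validate"
)

func exampleSchema() map[string]*schema.Schema {
	return map[string]*schema.Schema{
		"kubernetes_cluster_id": {
			Type:     schema.TypeString,
			Required: true,
			ForceNew: true,
			// rejects any value that parse.ClusterID cannot parse, as exercised
			// by the generated TestClusterID cases above
			ValidateFunc: validate.ClusterID,
		},
	}
}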
b/azurerm/internal/services/containers/validate/node_pool_id.go new file mode 100644 index 000000000000..c2c2f29d985a --- /dev/null +++ b/azurerm/internal/services/containers/validate/node_pool_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/containers/parse" +) + +func NodePoolID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.NodePoolID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/containers/validate/node_pool_id_test.go b/azurerm/internal/services/containers/validate/node_pool_id_test.go new file mode 100644 index 000000000000..db918da1a000 --- /dev/null +++ b/azurerm/internal/services/containers/validate/node_pool_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestNodePoolID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ManagedClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ContainerService/", + Valid: false, + }, + + { + // missing value for ManagedClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ContainerService/managedClusters/", + Valid: false, + }, + + { + // missing AgentPoolName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ContainerService/managedClusters/cluster1/", + Valid: false, + }, + + { + // missing value for AgentPoolName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ContainerService/managedClusters/cluster1/agentPools/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.ContainerService/managedClusters/cluster1/agentPools/pool1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.CONTAINERSERVICE/MANAGEDCLUSTERS/CLUSTER1/AGENTPOOLS/POOL1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := NodePoolID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/cosmos/client/client.go b/azurerm/internal/services/cosmos/client/client.go index 9dd30bd85fcf..77f4584a9067 100644 --- a/azurerm/internal/services/cosmos/client/client.go +++ b/azurerm/internal/services/cosmos/client/client.go @@ -1,7 +1,7 @@ package 
client import ( - "github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2020-04-01/documentdb" + "github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/common" ) diff --git a/azurerm/internal/services/cosmos/common/autoscale_settings.go b/azurerm/internal/services/cosmos/common/autoscale_settings.go index 4229e090d2df..8ca5c235e2ad 100644 --- a/azurerm/internal/services/cosmos/common/autoscale_settings.go +++ b/azurerm/internal/services/cosmos/common/autoscale_settings.go @@ -3,7 +3,7 @@ package common import ( "log" - "github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2020-04-01/documentdb" + "github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) diff --git a/azurerm/internal/services/cosmos/common/indexing_policy.go b/azurerm/internal/services/cosmos/common/indexing_policy.go index 10c3cbd34ffd..4353f377a447 100644 --- a/azurerm/internal/services/cosmos/common/indexing_policy.go +++ b/azurerm/internal/services/cosmos/common/indexing_policy.go @@ -3,7 +3,7 @@ package common import ( "fmt" - "github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2020-04-01/documentdb" + "github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) diff --git a/azurerm/internal/services/cosmos/common/indexing_policy_test.go b/azurerm/internal/services/cosmos/common/indexing_policy_test.go index ff4cfef42d9e..a464eb8b8a56 100644 --- a/azurerm/internal/services/cosmos/common/indexing_policy_test.go +++ b/azurerm/internal/services/cosmos/common/indexing_policy_test.go @@ -3,7 +3,7 @@ package common import ( "testing" - "github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2020-04-01/documentdb" + "github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) diff --git a/azurerm/internal/services/cosmos/common/ip_rules.go b/azurerm/internal/services/cosmos/common/ip_rules.go index d084d9318557..a7673de0e07d 100644 --- a/azurerm/internal/services/cosmos/common/ip_rules.go +++ b/azurerm/internal/services/cosmos/common/ip_rules.go @@ -3,7 +3,7 @@ package common import ( "strings" - "github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2020-04-01/documentdb" + "github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) diff --git a/azurerm/internal/services/cosmos/common/ip_rules_test.go b/azurerm/internal/services/cosmos/common/ip_rules_test.go index cf99b3021781..be4c536d2173 100644 --- a/azurerm/internal/services/cosmos/common/ip_rules_test.go +++ b/azurerm/internal/services/cosmos/common/ip_rules_test.go @@ -4,7 +4,7 @@ import ( "reflect" "testing" - "github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2020-04-01/documentdb" + "github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb" ) var ( diff --git a/azurerm/internal/services/cosmos/common/schema.go b/azurerm/internal/services/cosmos/common/schema.go index d24208b993bc..bd190f484a0f 100644 --- 
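// NOTE: the documentdb imports across the Cosmos service now point at the
// 2020-04-01-preview package; presumably the preview models are needed for the
// account properties consumed below (KeyVaultKeyURI for customer-managed keys,
// PublicNetworkAccess), though that motivation is an assumption rather than
// something stated in the code.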
a/azurerm/internal/services/cosmos/common/schema.go +++ b/azurerm/internal/services/cosmos/common/schema.go @@ -1,7 +1,7 @@ package common import ( - "github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2020-04-01/documentdb" + "github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" diff --git a/azurerm/internal/services/cosmos/common/throughput.go b/azurerm/internal/services/cosmos/common/throughput.go index 47c4325bf585..cb4468ae5bb1 100644 --- a/azurerm/internal/services/cosmos/common/throughput.go +++ b/azurerm/internal/services/cosmos/common/throughput.go @@ -3,7 +3,7 @@ package common import ( "fmt" - "github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2020-04-01/documentdb" + "github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) diff --git a/azurerm/internal/services/cosmos/cosmosdb_account_data_source.go b/azurerm/internal/services/cosmos/cosmosdb_account_data_source.go index 4962225df882..69b84c3873cb 100644 --- a/azurerm/internal/services/cosmos/cosmosdb_account_data_source.go +++ b/azurerm/internal/services/cosmos/cosmosdb_account_data_source.go @@ -5,7 +5,7 @@ import ( "log" "time" - "github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2020-04-01/documentdb" + "github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/common" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" @@ -16,9 +16,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmCosmosDbAccount() *schema.Resource { +func dataSourceCosmosDbAccount() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmCosmosDbAccountRead, + Read: dataSourceCosmosDbAccountRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -138,6 +138,11 @@ func dataSourceArmCosmosDbAccount() *schema.Resource { }, }, + "key_vault_key_id": { + Type: schema.TypeString, + Computed: true, + }, + "enable_multiple_write_locations": { Type: schema.TypeBool, Computed: true, @@ -219,7 +224,7 @@ func dataSourceArmCosmosDbAccount() *schema.Resource { } } -func dataSourceArmCosmosDbAccountRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceCosmosDbAccountRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.DatabaseClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -253,6 +258,10 @@ func dataSourceArmCosmosDbAccountRead(d *schema.ResourceData, meta interface{}) d.Set("enable_free_tier", resp.EnableFreeTier) d.Set("enable_automatic_failover", resp.EnableAutomaticFailover) + if v := props.KeyVaultKeyURI; v != nil { + d.Set("key_vault_key_id", resp.KeyVaultKeyURI) + } + if err = d.Set("consistency_policy", flattenAzureRmCosmosDBAccountConsistencyPolicy(resp.ConsistencyPolicy)); err != nil { return fmt.Errorf("Error setting `consistency_policy`: %+v", err) } diff --git 
a/azurerm/internal/services/cosmos/cosmosdb_account_data_source_test.go b/azurerm/internal/services/cosmos/cosmosdb_account_data_source_test.go index 808d2d12ba1d..8ee3eeb2c764 100644 --- a/azurerm/internal/services/cosmos/cosmosdb_account_data_source_test.go +++ b/azurerm/internal/services/cosmos/cosmosdb_account_data_source_test.go @@ -4,54 +4,50 @@ import ( "fmt" "testing" - "github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2020-04-01/documentdb" + "github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" ) -func TestAccDataSourceAzureRMCosmosDBAccount_basic(t *testing.T) { +type CosmosDBAccountDataSourceResource struct { +} + +func TestAccDataSourceCosmosDBAccount_basic(t *testing.T) { data := acceptance.BuildTestData(t, "data.azurerm_cosmosdb_account", "test") + r := CosmosDBAccountDataSourceResource{} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDBAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMCosmosDBAccount_basic(data), - Check: resource.ComposeAggregateTestCheckFunc( - checkAccAzureRMCosmosDBAccount_basic(data, documentdb.BoundedStaleness, 1), - ), - }, + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + checkAccCosmosDBAccount_basic(data, documentdb.BoundedStaleness, 1), + ), }, }) } -func TestAccDataSourceAzureRMCosmosDBAccount_complete(t *testing.T) { +func TestAccDataSourceCosmosDBAccount_complete(t *testing.T) { data := acceptance.BuildTestData(t, "data.azurerm_cosmosdb_account", "test") + r := CosmosDBAccountDataSourceResource{} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDBAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMCosmosDBAccount_complete(data), - Check: resource.ComposeAggregateTestCheckFunc( - checkAccAzureRMCosmosDBAccount_basic(data, documentdb.BoundedStaleness, 3), - resource.TestCheckResourceAttr(data.ResourceName, "geo_location.0.location", data.Locations.Primary), - resource.TestCheckResourceAttr(data.ResourceName, "geo_location.1.location", data.Locations.Secondary), - resource.TestCheckResourceAttr(data.ResourceName, "geo_location.2.location", data.Locations.Ternary), - resource.TestCheckResourceAttr(data.ResourceName, "geo_location.0.failover_priority", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "geo_location.1.failover_priority", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "geo_location.2.failover_priority", "2"), - ), - }, + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeAggregateTestCheckFunc( + checkAccCosmosDBAccount_basic(data, documentdb.BoundedStaleness, 3), + check.That(data.ResourceName).Key("geo_location.0.location").HasValue(data.Locations.Primary), + check.That(data.ResourceName).Key("geo_location.1.location").HasValue(data.Locations.Secondary), + check.That(data.ResourceName).Key("geo_location.2.location").HasValue(data.Locations.Ternary), + 
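// The data source tests now use the acceptance/check helpers:
// check.That(name).Key(k).HasValue(v) replaces resource.TestCheckResourceAttr,
// and data.DataSourceTest appears to wrap the previous resource.ParallelTest /
// Providers / CheckDestroy boilerplate behind a typed test resource struct.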
check.That(data.ResourceName).Key("geo_location.0.failover_priority").HasValue("0"), + check.That(data.ResourceName).Key("geo_location.1.failover_priority").HasValue("1"), + check.That(data.ResourceName).Key("geo_location.2.failover_priority").HasValue("2"), + ), }, }) } -func testAccDataSourceAzureRMCosmosDBAccount_basic(data acceptance.TestData) string { +func (CosmosDBAccountDataSourceResource) basic(data acceptance.TestData) string { return fmt.Sprintf(` %s @@ -59,10 +55,10 @@ data "azurerm_cosmosdb_account" "test" { name = azurerm_cosmosdb_account.test.name resource_group_name = azurerm_resource_group.test.name } -`, testAccAzureRMCosmosDBAccount_basic(data, documentdb.GlobalDocumentDB, documentdb.BoundedStaleness)) +`, CosmosDBAccountResource{}.basic(data, documentdb.GlobalDocumentDB, documentdb.BoundedStaleness)) } -func testAccDataSourceAzureRMCosmosDBAccount_complete(data acceptance.TestData) string { +func (CosmosDBAccountDataSourceResource) complete(data acceptance.TestData) string { return fmt.Sprintf(` %s @@ -70,5 +66,5 @@ data "azurerm_cosmosdb_account" "test" { name = azurerm_cosmosdb_account.test.name resource_group_name = azurerm_resource_group.test.name } -`, testAccAzureRMCosmosDBAccount_complete(data, documentdb.GlobalDocumentDB, documentdb.BoundedStaleness)) +`, CosmosDBAccountResource{}.complete(data, documentdb.GlobalDocumentDB, documentdb.BoundedStaleness)) } diff --git a/azurerm/internal/services/cosmos/cosmosdb_account_resource.go b/azurerm/internal/services/cosmos/cosmosdb_account_resource.go index 30b24c5a2ea1..e8cc96e0b28a 100644 --- a/azurerm/internal/services/cosmos/cosmosdb_account_resource.go +++ b/azurerm/internal/services/cosmos/cosmosdb_account_resource.go @@ -10,10 +10,7 @@ import ( "strings" "time" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/common" - - "github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2020-04-01/documentdb" + "github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb" "github.com/hashicorp/terraform-plugin-sdk/helper/hashcode" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" @@ -22,6 +19,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/common" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" @@ -40,12 +40,12 @@ func suppressConsistencyPolicyStalenessConfiguration(_, _, _ string, d *schema.R return consistencyPolicy["consistency_level"].(string) != string(documentdb.BoundedStaleness) } -func resourceArmCosmosDbAccount() *schema.Resource { +func resourceCosmosDbAccount() *schema.Resource { return &schema.Resource{ - Create: resourceArmCosmosDbAccountCreate, - Read: resourceArmCosmosDbAccountRead, - Update: 
resourceArmCosmosDbAccountUpdate, - Delete: resourceArmCosmosDbAccountDelete, + Create: resourceCosmosDbAccountCreate, + Read: resourceCosmosDbAccountRead, + Update: resourceCosmosDbAccountUpdate, + Delete: resourceCosmosDbAccountDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -111,12 +111,26 @@ func resourceArmCosmosDbAccount() *schema.Resource { ForceNew: true, }, + "public_network_access_enabled": { + Type: schema.TypeBool, + Optional: true, + Default: true, + }, + "enable_automatic_failover": { Type: schema.TypeBool, Optional: true, Default: false, }, + "key_vault_key_id": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + DiffSuppressFunc: diffSuppressIgnoreKeyVaultKeyVersion, + ValidateFunc: azure.ValidateKeyVaultChildIdVersionOptional, + }, + "consistency_policy": { Type: schema.TypeList, Required: true, @@ -341,7 +355,7 @@ func resourceArmCosmosDbAccount() *schema.Resource { } } -func resourceArmCosmosDbAccountCreate(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbAccountCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.DatabaseClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -389,6 +403,11 @@ func resourceArmCosmosDbAccountCreate(d *schema.ResourceData, meta interface{}) return fmt.Errorf("Error expanding CosmosDB Account %q (Resource Group %q) geo locations: %+v", name, resourceGroup, err) } + publicNetworkAccess := documentdb.Enabled + if enabled := d.Get("public_network_access_enabled").(bool); !enabled { + publicNetworkAccess = documentdb.Disabled + } + account := documentdb.DatabaseAccountCreateUpdateParameters{ Location: utils.String(location), Kind: documentdb.DatabaseAccountKind(kind), @@ -403,10 +422,20 @@ func resourceArmCosmosDbAccountCreate(d *schema.ResourceData, meta interface{}) Capabilities: expandAzureRmCosmosDBAccountCapabilities(d), VirtualNetworkRules: expandAzureRmCosmosDBAccountVirtualNetworkRules(d), EnableMultipleWriteLocations: utils.Bool(enableMultipleWriteLocations), + PublicNetworkAccess: publicNetworkAccess, }, Tags: tags.Expand(t), } + if keyVaultKeyIDRaw, ok := d.GetOk("key_vault_key_id"); ok { + keyVaultKey, err := azure.ParseKeyVaultChildIDVersionOptional(keyVaultKeyIDRaw.(string)) + if err != nil { + return fmt.Errorf("could not parse Key Vault Key ID: %+v", err) + } + keyVaultKeyURI := fmt.Sprintf("%skeys/%s", keyVaultKey.KeyVaultBaseUrl, keyVaultKey.Name) + account.DatabaseAccountCreateUpdateProperties.KeyVaultKeyURI = utils.String(keyVaultKeyURI) + } + // additional validation on MaxStalenessPrefix as it varies depending on if the DB is multi region or not consistencyPolicy := account.DatabaseAccountCreateUpdateProperties.ConsistencyPolicy if len(geoLocations) > 1 && consistencyPolicy != nil && consistencyPolicy.DefaultConsistencyLevel == documentdb.BoundedStaleness { @@ -418,7 +447,7 @@ func resourceArmCosmosDbAccountCreate(d *schema.ResourceData, meta interface{}) } } - resp, err := resourceArmCosmosDbAccountApiUpsert(client, ctx, resourceGroup, name, account, d) + resp, err := resourceCosmosDbAccountApiUpsert(client, ctx, resourceGroup, name, account, d) if err != nil { return fmt.Errorf("Error creating CosmosDB Account %q (Resource Group %q): %+v", name, resourceGroup, err) } @@ -430,10 +459,10 @@ func resourceArmCosmosDbAccountCreate(d *schema.ResourceData, meta interface{}) d.SetId(*id) - return resourceArmCosmosDbAccountRead(d, meta) + return 
resourceCosmosDbAccountRead(d, meta) } -func resourceArmCosmosDbAccountUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbAccountUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.DatabaseClient ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -460,23 +489,30 @@ func resourceArmCosmosDbAccountUpdate(d *schema.ResourceData, meta interface{}) // get existing locations (if exists) resp, err := client.Get(ctx, resourceGroup, name) + if err != nil { return fmt.Errorf("Error making Read request on AzureRM CosmosDB Account '%s': %s", name, err) } oldLocations := make([]documentdb.Location, 0) oldLocationsMap := map[string]documentdb.Location{} - for _, l := range *resp.FailoverPolicies { + for _, l := range *resp.Locations { location := documentdb.Location{ ID: l.ID, LocationName: l.LocationName, FailoverPriority: l.FailoverPriority, + IsZoneRedundant: l.IsZoneRedundant, } oldLocations = append(oldLocations, location) oldLocationsMap[azure.NormalizeLocation(*location.LocationName)] = location } + publicNetworkAccess := documentdb.Enabled + if enabled := d.Get("public_network_access_enabled").(bool); !enabled { + publicNetworkAccess = documentdb.Disabled + } + // cannot update properties and add/remove replication locations or updating enabling of multiple // write locations at the same time. so first just update any changed properties account := documentdb.DatabaseAccountCreateUpdateParameters{ @@ -493,18 +529,28 @@ func resourceArmCosmosDbAccountUpdate(d *schema.ResourceData, meta interface{}) Locations: &oldLocations, VirtualNetworkRules: expandAzureRmCosmosDBAccountVirtualNetworkRules(d), EnableMultipleWriteLocations: resp.EnableMultipleWriteLocations, + PublicNetworkAccess: publicNetworkAccess, }, Tags: tags.Expand(t), } - if _, err = resourceArmCosmosDbAccountApiUpsert(client, ctx, resourceGroup, name, account, d); err != nil { + if keyVaultKeyIDRaw, ok := d.GetOk("key_vault_key_id"); ok { + keyVaultKey, err := azure.ParseKeyVaultChildIDVersionOptional(keyVaultKeyIDRaw.(string)) + if err != nil { + return fmt.Errorf("could not parse Key Vault Key ID: %+v", err) + } + keyVaultKeyURI := fmt.Sprintf("%skeys/%s", keyVaultKey.KeyVaultBaseUrl, keyVaultKey.Name) + account.DatabaseAccountCreateUpdateProperties.KeyVaultKeyURI = utils.String(keyVaultKeyURI) + } + + if _, err = resourceCosmosDbAccountApiUpsert(client, ctx, resourceGroup, name, account, d); err != nil { return fmt.Errorf("Error updating CosmosDB Account %q properties (Resource Group %q): %+v", name, resourceGroup, err) } // Update the property independently after the initial upsert as no other properties may change at the same time. 
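	// Taken together, the update is staged: (1) the changed top-level properties
	// are upserted against the existing locations, (2) EnableMultipleWriteLocations
	// is flipped on its own below if it changed, (3) a further upsert drops any
	// removed or renamed locations, and (4) a final upsert applies the full new
	// set of locations, since (per the comment above) the API cannot combine
	// these operations in a single call.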
account.DatabaseAccountCreateUpdateProperties.EnableMultipleWriteLocations = utils.Bool(enableMultipleWriteLocations) if *resp.EnableMultipleWriteLocations != enableMultipleWriteLocations { - if _, err = resourceArmCosmosDbAccountApiUpsert(client, ctx, resourceGroup, name, account, d); err != nil { + if _, err = resourceCosmosDbAccountApiUpsert(client, ctx, resourceGroup, name, account, d); err != nil { return fmt.Errorf("Error updating CosmosDB Account %q EnableMultipleWriteLocations (Resource Group %q): %+v", name, resourceGroup, err) } } @@ -531,14 +577,14 @@ func resourceArmCosmosDbAccountUpdate(d *schema.ResourceData, meta interface{}) } account.DatabaseAccountCreateUpdateProperties.Locations = &locationsUnchanged - if _, err = resourceArmCosmosDbAccountApiUpsert(client, ctx, resourceGroup, name, account, d); err != nil { + if _, err = resourceCosmosDbAccountApiUpsert(client, ctx, resourceGroup, name, account, d); err != nil { return fmt.Errorf("Error removing CosmosDB Account %q renamed locations (Resource Group %q): %+v", name, resourceGroup, err) } } // add any new/renamed locations account.DatabaseAccountCreateUpdateProperties.Locations = &newLocations - upsertResponse, err := resourceArmCosmosDbAccountApiUpsert(client, ctx, resourceGroup, name, account, d) + upsertResponse, err := resourceCosmosDbAccountApiUpsert(client, ctx, resourceGroup, name, account, d) if err != nil { return fmt.Errorf("Error updating CosmosDB Account %q locations (Resource Group %q): %+v", name, resourceGroup, err) } @@ -549,37 +595,33 @@ func resourceArmCosmosDbAccountUpdate(d *schema.ResourceData, meta interface{}) d.SetId(*upsertResponse.ID) - return resourceArmCosmosDbAccountRead(d, meta) + return resourceCosmosDbAccountRead(d, meta) } -func resourceArmCosmosDbAccountRead(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbAccountRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.DatabaseClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.DatabaseAccountID(d.Id()) if err != nil { return err } - name := id.Path["databaseAccounts"] - resourceGroup := id.ResourceGroup - - resp, err := client.Get(ctx, resourceGroup, name) + resp, err := client.Get(ctx, id.ResourceGroup, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("Error making Read request on AzureRM CosmosDB Account '%s': %s", name, err) + return fmt.Errorf("retrieving CosmosDB Account %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } - d.Set("name", resp.Name) - if location := resp.Location; location != nil { - d.Set("location", azure.NormalizeLocation(*location)) - } - d.Set("resource_group_name", resourceGroup) + d.Set("name", id.Name) + d.Set("resource_group_name", id.ResourceGroup) + + d.Set("location", location.NormalizeNilable(resp.Location)) d.Set("kind", string(resp.Kind)) d.Set("offer_type", string(resp.DatabaseAccountOfferType)) @@ -587,6 +629,7 @@ func resourceArmCosmosDbAccountRead(d *schema.ResourceData, meta interface{}) er d.Set("endpoint", resp.DocumentEndpoint) d.Set("enable_free_tier", resp.EnableFreeTier) + d.Set("public_network_access_enabled", resp.PublicNetworkAccess == documentdb.Enabled) if v := resp.IsVirtualNetworkFilterEnabled; v != nil { d.Set("is_virtual_network_filter_enabled", resp.IsVirtualNetworkFilterEnabled) @@ -596,12 +639,16 @@ func resourceArmCosmosDbAccountRead(d 
*schema.ResourceData, meta interface{}) er d.Set("enable_automatic_failover", resp.EnableAutomaticFailover) } + if v := resp.KeyVaultKeyURI; v != nil { + d.Set("key_vault_key_id", resp.KeyVaultKeyURI) + } + if v := resp.EnableMultipleWriteLocations; v != nil { d.Set("enable_multiple_write_locations", resp.EnableMultipleWriteLocations) } if err = d.Set("consistency_policy", flattenAzureRmCosmosDBAccountConsistencyPolicy(resp.ConsistencyPolicy)); err != nil { - return fmt.Errorf("Error setting CosmosDB Account %q `consistency_policy` (Resource Group %q): %+v", name, resourceGroup, err) + return fmt.Errorf("Error setting CosmosDB Account %q `consistency_policy` (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } if err = d.Set("geo_location", flattenAzureRmCosmosDBAccountGeoLocations(resp.DatabaseAccountGetProperties)); err != nil { @@ -646,45 +693,45 @@ func resourceArmCosmosDbAccountRead(d *schema.ResourceData, meta interface{}) er // ListKeys returns a data structure containing a DatabaseAccountListReadOnlyKeysResult pointer // implying that it also returns the read only keys, however this appears to not be the case - keys, err := client.ListKeys(ctx, resourceGroup, name) + keys, err := client.ListKeys(ctx, id.ResourceGroup, id.Name) if err != nil { if utils.ResponseWasNotFound(keys.Response) { - log.Printf("[DEBUG] Keys were not found for CosmosDB Account %q (Resource Group %q) - removing from state!", name, resourceGroup) + log.Printf("[DEBUG] Keys were not found for CosmosDB Account %q (Resource Group %q) - removing from state!", id.Name, id.ResourceGroup) d.SetId("") return nil } - return fmt.Errorf("[ERROR] Unable to List Write keys for CosmosDB Account %s: %s", name, err) + return fmt.Errorf("[ERROR] Unable to List Write keys for CosmosDB Account %s: %s", id.Name, err) } d.Set("primary_key", keys.PrimaryMasterKey) d.Set("secondary_key", keys.SecondaryMasterKey) d.Set("primary_master_key", keys.PrimaryMasterKey) d.Set("secondary_master_key", keys.SecondaryMasterKey) - readonlyKeys, err := client.ListReadOnlyKeys(ctx, resourceGroup, name) + readonlyKeys, err := client.ListReadOnlyKeys(ctx, id.ResourceGroup, id.Name) if err != nil { if utils.ResponseWasNotFound(keys.Response) { - log.Printf("[DEBUG] Read Only Keys were not found for CosmosDB Account %q (Resource Group %q) - removing from state!", name, resourceGroup) + log.Printf("[DEBUG] Read Only Keys were not found for CosmosDB Account %q (Resource Group %q) - removing from state!", id.Name, id.ResourceGroup) d.SetId("") return nil } - return fmt.Errorf("[ERROR] Unable to List read-only keys for CosmosDB Account %s: %s", name, err) + return fmt.Errorf("[ERROR] Unable to List read-only keys for CosmosDB Account %s: %s", id.Name, err) } d.Set("primary_readonly_key", readonlyKeys.PrimaryReadonlyMasterKey) d.Set("secondary_readonly_key", readonlyKeys.SecondaryReadonlyMasterKey) d.Set("primary_readonly_master_key", readonlyKeys.PrimaryReadonlyMasterKey) d.Set("secondary_readonly_master_key", readonlyKeys.SecondaryReadonlyMasterKey) - connStringResp, err := client.ListConnectionStrings(ctx, resourceGroup, name) + connStringResp, err := client.ListConnectionStrings(ctx, id.ResourceGroup, id.Name) if err != nil { if utils.ResponseWasNotFound(keys.Response) { - log.Printf("[DEBUG] Connection Strings were not found for CosmosDB Account %q (Resource Group %q) - removing from state!", name, resourceGroup) + log.Printf("[DEBUG] Connection Strings were not found for CosmosDB Account %q (Resource Group %q) - removing from state!", id.Name, 
id.ResourceGroup) d.SetId("") return nil } - return fmt.Errorf("[ERROR] Unable to List connection strings for CosmosDB Account %s: %s", name, err) + return fmt.Errorf("[ERROR] Unable to List connection strings for CosmosDB Account %s: %s", id.Name, err) } var connStrings []string @@ -699,25 +746,22 @@ func resourceArmCosmosDbAccountRead(d *schema.ResourceData, meta interface{}) er return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmCosmosDbAccountDelete(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbAccountDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.DatabaseClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.DatabaseAccountID(d.Id()) if err != nil { return err } - resourceGroup := id.ResourceGroup - name := id.Path["databaseAccounts"] - - future, err := client.Delete(ctx, resourceGroup, name) + future, err := client.Delete(ctx, id.ResourceGroup, id.Name) if err != nil { if future.Response().StatusCode == http.StatusNoContent { return nil } - return fmt.Errorf("Error issuing AzureRM delete request for CosmosDB Account '%s': %+v", name, err) + return fmt.Errorf("deleting CosmosDB Account %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } // the SDK now will return a `WasNotFound` response even when still deleting @@ -727,12 +771,12 @@ func resourceArmCosmosDbAccountDelete(d *schema.ResourceData, meta interface{}) MinTimeout: 30 * time.Second, Timeout: d.Timeout(schema.TimeoutDelete), Refresh: func() (interface{}, string, error) { - resp, err2 := client.Get(ctx, resourceGroup, name) + resp, err2 := client.Get(ctx, id.ResourceGroup, id.Name) if err2 != nil { if utils.ResponseWasNotFound(resp.Response) { return resp, "NotFound", nil } - return nil, "", fmt.Errorf("Error reading CosmosDB Account %q after delete (Resource Group %q): %+v", name, resourceGroup, err2) + return nil, "", err2 } return resp, "Deleting", nil @@ -740,13 +784,13 @@ func resourceArmCosmosDbAccountDelete(d *schema.ResourceData, meta interface{}) } if _, err = stateConf.WaitForState(); err != nil { - return fmt.Errorf("Waiting for CosmosDB Account %q to delete (Resource Group %q): %+v", name, resourceGroup, err) + return fmt.Errorf("waiting for CosmosDB Account %q (Resource Group %q) to be deleted: %+v", id.Name, id.ResourceGroup, err) } return nil } -func resourceArmCosmosDbAccountApiUpsert(client *documentdb.DatabaseAccountsClient, ctx context.Context, resourceGroup string, name string, account documentdb.DatabaseAccountCreateUpdateParameters, d *schema.ResourceData) (*documentdb.DatabaseAccountGetResults, error) { +func resourceCosmosDbAccountApiUpsert(client *documentdb.DatabaseAccountsClient, ctx context.Context, resourceGroup string, name string, account documentdb.DatabaseAccountCreateUpdateParameters, d *schema.ResourceData) (*documentdb.DatabaseAccountGetResults, error) { future, err := client.CreateOrUpdate(ctx, resourceGroup, name, account) if err != nil { return nil, fmt.Errorf("Error creating/updating CosmosDB Account %q (Resource Group %q): %+v", name, resourceGroup, err) @@ -1020,3 +1064,16 @@ func resourceAzureRMCosmosDBAccountVirtualNetworkRuleHash(v interface{}) int { return hashcode.String(buf.String()) } + +func diffSuppressIgnoreKeyVaultKeyVersion(k, old, new string, d *schema.ResourceData) bool { + oldKey, err := azure.ParseKeyVaultChildIDVersionOptional(old) + if err != nil { + return false + } + 
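	// For example (hypothetical values), the following two IDs differ only in the
	// trailing key version, parse to the same KeyVaultBaseUrl and Name, and are
	// therefore treated as equal, so no diff is reported for key_vault_key_id:
	//   old: https://example-vault.vault.azure.net/keys/example-key/9c9a3fc5a55f4da8b2a36b0e61b40bed
	//   new: https://example-vault.vault.azure.net/keys/example-key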
newKey, err := azure.ParseKeyVaultChildIDVersionOptional(new) + if err != nil { + return false + } + + return (oldKey.KeyVaultBaseUrl == newKey.KeyVaultBaseUrl) && (oldKey.Name == newKey.Name) +} diff --git a/azurerm/internal/services/cosmos/cosmosdb_account_resource_failover_test.go b/azurerm/internal/services/cosmos/cosmosdb_account_resource_failover_test.go index 735e31786e95..0b3230338dcc 100644 --- a/azurerm/internal/services/cosmos/cosmosdb_account_resource_failover_test.go +++ b/azurerm/internal/services/cosmos/cosmosdb_account_resource_failover_test.go @@ -6,137 +6,110 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" ) -func TestAccAzureRMCosmosDBAccount_failover_boundedStaleness(t *testing.T) { +func TestAccCosmosDBAccount_failover_boundedStaleness(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDBAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDBAccount_failover_boundedStaleness(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCosmosDBAccountExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "kind", "GlobalDocumentDB"), - ), - }, + r := CosmosDBAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.failover_boundedStaleness(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("kind").HasValue("GlobalDocumentDB"), + ), }, }) } -func TestAccAzureRMCosmosDBAccount_failover_boundedStalenessComplete(t *testing.T) { +func TestAccCosmosDBAccount_failover_boundedStalenessComplete(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDBAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDBAccount_failover_boundedStalenessComplete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCosmosDBAccountExists("azurerm_cosmosdb_account.test"), - ), - }, + r := CosmosDBAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.failover_boundedStalenessComplete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), }, }) } -func TestAccAzureRMCosmosDBAccount_failover_eventualConsistency(t *testing.T) { +func TestAccCosmosDBAccount_failover_eventualConsistency(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDBAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDBAccount_failover_eventualConsistency(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCosmosDBAccountExists("azurerm_cosmosdb_account.test"), - ), - }, + r := CosmosDBAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: 
r.failover_eventualConsistency(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), }, }) } -func TestAccAzureRMCosmosDBAccount_failover_mongoDB(t *testing.T) { +func TestAccCosmosDBAccount_failover_mongoDB(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDBAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDBAccount_failover_mongoDB(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCosmosDBAccountExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "kind", "MongoDB"), - ), - }, + r := CosmosDBAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.failover_mongoDB(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("kind").HasValue("MongoDB"), + ), }, }) } -func TestAccAzureRMCosmosDBAccount_failover_session(t *testing.T) { +func TestAccCosmosDBAccount_failover_session(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDBAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDBAccount_failover_session(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCosmosDBAccountExists("azurerm_cosmosdb_account.test"), - ), - }, + r := CosmosDBAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.failover_session(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), }, }) } -func TestAccAzureRMCosmosDBAccount_failover_strong(t *testing.T) { +func TestAccCosmosDBAccount_failover_strong(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDBAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDBAccount_failover_strong(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCosmosDBAccountExists("azurerm_cosmosdb_account.test"), - ), - }, + r := CosmosDBAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.failover_strong(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), }, }) } -func TestAccAzureRMCosmosDBAccount_failover_geoReplicated(t *testing.T) { +func TestAccCosmosDBAccount_failover_geoReplicated(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDBAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDBAccount_failover_geoReplicated(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCosmosDBAccountExists("azurerm_cosmosdb_account.test"), - ), - }, + r := CosmosDBAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: 
r.failover_geoReplicated(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), }, }) } -func testAccAzureRMCosmosDBAccount_failover_boundedStaleness(data acceptance.TestData) string { +func (CosmosDBAccountResource) failover_boundedStaleness(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -165,7 +138,7 @@ resource "azurerm_cosmosdb_account" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger) } -func testAccAzureRMCosmosDBAccount_failover_boundedStalenessComplete(data acceptance.TestData) string { +func (CosmosDBAccountResource) failover_boundedStalenessComplete(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -196,7 +169,7 @@ resource "azurerm_cosmosdb_account" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger) } -func testAccAzureRMCosmosDBAccount_failover_eventualConsistency(data acceptance.TestData) string { +func (CosmosDBAccountResource) failover_eventualConsistency(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -225,7 +198,7 @@ resource "azurerm_cosmosdb_account" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger) } -func testAccAzureRMCosmosDBAccount_failover_session(data acceptance.TestData) string { +func (CosmosDBAccountResource) failover_session(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -254,7 +227,7 @@ resource "azurerm_cosmosdb_account" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger) } -func testAccAzureRMCosmosDBAccount_failover_mongoDB(data acceptance.TestData) string { +func (CosmosDBAccountResource) failover_mongoDB(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -284,7 +257,7 @@ resource "azurerm_cosmosdb_account" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger) } -func testAccAzureRMCosmosDBAccount_failover_strong(data acceptance.TestData) string { +func (CosmosDBAccountResource) failover_strong(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -313,7 +286,7 @@ resource "azurerm_cosmosdb_account" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger) } -func testAccAzureRMCosmosDBAccount_failover_geoReplicated(data acceptance.TestData) string { +func (CosmosDBAccountResource) failover_geoReplicated(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} diff --git a/azurerm/internal/services/cosmos/cosmosdb_account_resource_test.go b/azurerm/internal/services/cosmos/cosmosdb_account_resource_test.go index 69c5e13557c8..0806a27208f1 100644 --- a/azurerm/internal/services/cosmos/cosmosdb_account_resource_test.go +++ b/azurerm/internal/services/cosmos/cosmosdb_account_resource_test.go @@ -1,522 +1,523 @@ package cosmos_test import ( + "context" "fmt" - "net/http" "strconv" "testing" - "github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2020-04-01/documentdb" + "github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/terraform" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func TestAccAzureRMCosmosDBAccount_basic_global_boundedStaleness(t *testing.T) { - testAccAzureRMCosmosDBAccount_basicWith(t, documentdb.GlobalDocumentDB, documentdb.BoundedStaleness) +type CosmosDBAccountResource struct { } -func TestAccAzureRMCosmosDBAccount_basic_global_consistentPrefix(t *testing.T) { - testAccAzureRMCosmosDBAccount_basicWith(t, documentdb.GlobalDocumentDB, documentdb.ConsistentPrefix) +func TestAccCosmosDBAccount_basic_global_boundedStaleness(t *testing.T) { + testAccCosmosDBAccount_basicWith(t, documentdb.GlobalDocumentDB, documentdb.BoundedStaleness) } -func TestAccAzureRMCosmosDBAccount_basic_global_eventual(t *testing.T) { - testAccAzureRMCosmosDBAccount_basicWith(t, documentdb.GlobalDocumentDB, documentdb.Eventual) +func TestAccCosmosDBAccount_basic_global_consistentPrefix(t *testing.T) { + testAccCosmosDBAccount_basicWith(t, documentdb.GlobalDocumentDB, documentdb.ConsistentPrefix) } -func TestAccAzureRMCosmosDBAccount_basic_global_session(t *testing.T) { - testAccAzureRMCosmosDBAccount_basicWith(t, documentdb.GlobalDocumentDB, documentdb.Session) +func TestAccCosmosDBAccount_basic_global_eventual(t *testing.T) { + testAccCosmosDBAccount_basicWith(t, documentdb.GlobalDocumentDB, documentdb.Eventual) } -func TestAccAzureRMCosmosDBAccount_basic_global_strong(t *testing.T) { - testAccAzureRMCosmosDBAccount_basicWith(t, documentdb.GlobalDocumentDB, documentdb.Strong) +func TestAccCosmosDBAccount_basic_global_session(t *testing.T) { + testAccCosmosDBAccount_basicWith(t, documentdb.GlobalDocumentDB, documentdb.Session) } -func TestAccAzureRMCosmosDBAccount_basic_mongo_boundedStaleness(t *testing.T) { - testAccAzureRMCosmosDBAccount_basicWith(t, documentdb.MongoDB, documentdb.BoundedStaleness) +func TestAccCosmosDBAccount_basic_global_strong(t *testing.T) { + testAccCosmosDBAccount_basicWith(t, documentdb.GlobalDocumentDB, documentdb.Strong) } -func TestAccAzureRMCosmosDBAccount_basic_mongo_consistentPrefix(t *testing.T) { - testAccAzureRMCosmosDBAccount_basicWith(t, documentdb.MongoDB, documentdb.ConsistentPrefix) +func TestAccCosmosDBAccount_basic_mongo_boundedStaleness(t *testing.T) { + testAccCosmosDBAccount_basicWith(t, documentdb.MongoDB, documentdb.BoundedStaleness) } -func TestAccAzureRMCosmosDBAccount_basic_mongo_eventual(t *testing.T) { - testAccAzureRMCosmosDBAccount_basicWith(t, documentdb.MongoDB, documentdb.Eventual) +func TestAccCosmosDBAccount_basic_mongo_consistentPrefix(t *testing.T) { + testAccCosmosDBAccount_basicWith(t, documentdb.MongoDB, documentdb.ConsistentPrefix) } -func TestAccAzureRMCosmosDBAccount_basic_mongo_session(t *testing.T) { - testAccAzureRMCosmosDBAccount_basicWith(t, documentdb.MongoDB, documentdb.Session) +func TestAccCosmosDBAccount_basic_mongo_eventual(t *testing.T) { + testAccCosmosDBAccount_basicWith(t, documentdb.MongoDB, documentdb.Eventual) } -func TestAccAzureRMCosmosDBAccount_basic_mongo_strong(t *testing.T) { - testAccAzureRMCosmosDBAccount_basicWith(t, documentdb.MongoDB, documentdb.Strong) +func TestAccCosmosDBAccount_basic_mongo_session(t *testing.T) { + testAccCosmosDBAccount_basicWith(t, documentdb.MongoDB, documentdb.Session) } -func 
TestAccAzureRMCosmosDBAccount_basic_parse_boundedStaleness(t *testing.T) { - testAccAzureRMCosmosDBAccount_basicWith(t, documentdb.MongoDB, documentdb.BoundedStaleness) +func TestAccCosmosDBAccount_basic_mongo_strong(t *testing.T) { + testAccCosmosDBAccount_basicWith(t, documentdb.MongoDB, documentdb.Strong) } -func TestAccAzureRMCosmosDBAccount_basic_parse_consistentPrefix(t *testing.T) { - testAccAzureRMCosmosDBAccount_basicWith(t, documentdb.MongoDB, documentdb.ConsistentPrefix) +func TestAccCosmosDBAccount_basic_parse_boundedStaleness(t *testing.T) { + testAccCosmosDBAccount_basicWith(t, documentdb.MongoDB, documentdb.BoundedStaleness) } -func TestAccAzureRMCosmosDBAccount_basic_parse_eventual(t *testing.T) { - testAccAzureRMCosmosDBAccount_basicWith(t, documentdb.MongoDB, documentdb.Eventual) +func TestAccCosmosDBAccount_basic_parse_consistentPrefix(t *testing.T) { + testAccCosmosDBAccount_basicWith(t, documentdb.MongoDB, documentdb.ConsistentPrefix) } -func TestAccAzureRMCosmosDBAccount_basic_parse_session(t *testing.T) { - testAccAzureRMCosmosDBAccount_basicWith(t, documentdb.MongoDB, documentdb.Session) +func TestAccCosmosDBAccount_basic_parse_eventual(t *testing.T) { + testAccCosmosDBAccount_basicWith(t, documentdb.MongoDB, documentdb.Eventual) } -func TestAccAzureRMCosmosDBAccount_basic_parse_strong(t *testing.T) { - testAccAzureRMCosmosDBAccount_basicWith(t, documentdb.MongoDB, documentdb.Strong) +func TestAccCosmosDBAccount_basic_parse_session(t *testing.T) { + testAccCosmosDBAccount_basicWith(t, documentdb.MongoDB, documentdb.Session) } -func testAccAzureRMCosmosDBAccount_basicWith(t *testing.T, kind documentdb.DatabaseAccountKind, consistency documentdb.DefaultConsistencyLevel) { +func TestAccCosmosDBAccount_basic_parse_strong(t *testing.T) { + testAccCosmosDBAccount_basicWith(t, documentdb.MongoDB, documentdb.Strong) +} + +func TestAccCosmosDBAccount_public_network_access_enabled(t *testing.T) { + testAccCosmosDBAccount_public_network_access_enabled(t, documentdb.MongoDB, documentdb.Strong) +} + +func testAccCosmosDBAccount_public_network_access_enabled(t *testing.T, kind documentdb.DatabaseAccountKind, consistency documentdb.DefaultConsistencyLevel) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_account", "test") + r := CosmosDBAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.network_access_enabled(data, kind, consistency), + Check: resource.ComposeAggregateTestCheckFunc( + checkAccCosmosDBAccount_basic(data, consistency, 1), + ), + }, + data.ImportStep(), + }) +} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDBAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDBAccount_basic(data, kind, consistency), - Check: resource.ComposeAggregateTestCheckFunc( - checkAccAzureRMCosmosDBAccount_basic(data, consistency, 1), - ), - }, - data.ImportStep(), +func TestAccCosmosDBAccount_keyVaultUri(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cosmosdb_account", "test") + r := CosmosDBAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.key_vault_uri(data, documentdb.MongoDB, documentdb.Strong), + Check: resource.ComposeAggregateTestCheckFunc( + checkAccCosmosDBAccount_basic(data, documentdb.Strong, 1), + ), }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDBAccount_requiresImport(t *testing.T) { +func 
TestAccCosmosDBAccount_keyVaultUriUpdateConsistency(t *testing.T) {
 	data := acceptance.BuildTestData(t, "azurerm_cosmosdb_account", "test")
+	r := CosmosDBAccountResource{}
+
+	data.ResourceTest(t, r, []resource.TestStep{
+		{
+			Config: r.key_vault_uri(data, documentdb.MongoDB, documentdb.Strong),
+			Check: resource.ComposeAggregateTestCheckFunc(
+				checkAccCosmosDBAccount_basic(data, documentdb.Strong, 1),
+			),
+		},
+		data.ImportStep(),
+		{
+			Config: r.key_vault_uri(data, documentdb.MongoDB, documentdb.Session),
+			Check: resource.ComposeAggregateTestCheckFunc(
+				checkAccCosmosDBAccount_basic(data, documentdb.Session, 1),
+			),
+		},
+		data.ImportStep(),
+	})
+}
 
-	resource.ParallelTest(t, resource.TestCase{
-		PreCheck:     func() { acceptance.PreCheck(t) },
-		Providers:    acceptance.SupportedProviders,
-		CheckDestroy: testCheckAzureRMCosmosDBAccountDestroy,
-		Steps: []resource.TestStep{
-			{
-				Config: testAccAzureRMCosmosDBAccount_basic(data, "GlobalDocumentDB", documentdb.Eventual),
-				Check: resource.ComposeAggregateTestCheckFunc(
-					checkAccAzureRMCosmosDBAccount_basic(data, documentdb.Eventual, 1),
-				),
-			},
-			{
-				Config:      testAccAzureRMCosmosDBAccount_requiresImport(data, documentdb.Eventual),
-				ExpectError: acceptance.RequiresImportError("azurerm_cosmosdb_account"),
-			},
+func testAccCosmosDBAccount_basicWith(t *testing.T, kind documentdb.DatabaseAccountKind, consistency documentdb.DefaultConsistencyLevel) {
+	data := acceptance.BuildTestData(t, "azurerm_cosmosdb_account", "test")
+	r := CosmosDBAccountResource{}
+
+	data.ResourceTest(t, r, []resource.TestStep{
+		{
+			Config: r.basic(data, kind, consistency),
+			Check: resource.ComposeAggregateTestCheckFunc(
+				checkAccCosmosDBAccount_basic(data, consistency, 1),
+			),
+		},
+		data.ImportStep(),
+	})
+}
+
+func TestAccCosmosDBAccount_requiresImport(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_cosmosdb_account", "test")
+	r := CosmosDBAccountResource{}
+
+	data.ResourceTest(t, r, []resource.TestStep{
+		{
+			Config: r.basic(data, "GlobalDocumentDB", documentdb.Eventual),
+			Check: resource.ComposeAggregateTestCheckFunc(
+				checkAccCosmosDBAccount_basic(data, documentdb.Eventual, 1),
+			),
+		},
+		{
+			Config:      r.requiresImport(data, documentdb.Eventual),
+			ExpectError: acceptance.RequiresImportError("azurerm_cosmosdb_account"),
 		},
 	})
 }
 
-func TestAccAzureRMCosmosDBAccount_updateConsistency_global(t *testing.T) {
-	testAccAzureRMCosmosDBAccount_updateConsistency(t, documentdb.GlobalDocumentDB)
+func TestAccCosmosDBAccount_updateConsistency_global(t *testing.T) {
+	testAccCosmosDBAccount_updateConsistency(t, documentdb.GlobalDocumentDB)
 }
 
-func TestAccAzureRMCosmosDBAccount_updateConsistency_mongo(t *testing.T) {
-	testAccAzureRMCosmosDBAccount_updateConsistency(t, documentdb.MongoDB)
+func TestAccCosmosDBAccount_updateConsistency_mongo(t *testing.T) {
+	testAccCosmosDBAccount_updateConsistency(t, documentdb.MongoDB)
 }
 
-func testAccAzureRMCosmosDBAccount_updateConsistency(t *testing.T, kind documentdb.DatabaseAccountKind) {
+func testAccCosmosDBAccount_updateConsistency(t *testing.T, kind documentdb.DatabaseAccountKind) {
 	data := acceptance.BuildTestData(t, "azurerm_cosmosdb_account", "test")
+	r := CosmosDBAccountResource{}
 
-	resource.ParallelTest(t, resource.TestCase{
-		PreCheck:     func() { acceptance.PreCheck(t) },
-		Providers:    acceptance.SupportedProviders,
-		CheckDestroy: testCheckAzureRMCosmosDBAccountDestroy,
-		Steps: []resource.TestStep{
-			{
-				Config: testAccAzureRMCosmosDBAccount_basic(data, kind, documentdb.Strong),
-				Check: 
checkAccAzureRMCosmosDBAccount_basic(data, documentdb.Strong, 1), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosDBAccount_consistency(data, kind, documentdb.Strong, 8, 880), - Check: checkAccAzureRMCosmosDBAccount_basic(data, documentdb.Strong, 1), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosDBAccount_basic(data, kind, documentdb.BoundedStaleness), - Check: checkAccAzureRMCosmosDBAccount_basic(data, documentdb.BoundedStaleness, 1), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosDBAccount_consistency(data, kind, documentdb.BoundedStaleness, 7, 770), - Check: checkAccAzureRMCosmosDBAccount_basic(data, documentdb.BoundedStaleness, 1), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosDBAccount_consistency(data, kind, documentdb.BoundedStaleness, 77, 700), - Check: checkAccAzureRMCosmosDBAccount_basic(data, documentdb.BoundedStaleness, 1), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosDBAccount_basic(data, kind, documentdb.ConsistentPrefix), - Check: checkAccAzureRMCosmosDBAccount_basic(data, documentdb.ConsistentPrefix, 1), - }, - data.ImportStep(), + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, kind, documentdb.Strong), + Check: checkAccCosmosDBAccount_basic(data, documentdb.Strong, 1), }, + data.ImportStep(), + { + Config: r.consistency(data, kind, documentdb.Strong, 8, 880), + Check: checkAccCosmosDBAccount_basic(data, documentdb.Strong, 1), + }, + data.ImportStep(), + { + Config: r.basic(data, kind, documentdb.BoundedStaleness), + Check: checkAccCosmosDBAccount_basic(data, documentdb.BoundedStaleness, 1), + }, + data.ImportStep(), + { + Config: r.consistency(data, kind, documentdb.BoundedStaleness, 7, 770), + Check: checkAccCosmosDBAccount_basic(data, documentdb.BoundedStaleness, 1), + }, + data.ImportStep(), + { + Config: r.consistency(data, kind, documentdb.BoundedStaleness, 77, 700), + Check: checkAccCosmosDBAccount_basic(data, documentdb.BoundedStaleness, 1), + }, + data.ImportStep(), + { + Config: r.basic(data, kind, documentdb.ConsistentPrefix), + Check: checkAccCosmosDBAccount_basic(data, documentdb.ConsistentPrefix, 1), + }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDBAccount_complete_mongo(t *testing.T) { - testAccAzureRMCosmosDBAccount_completeWith(t, documentdb.MongoDB) +func TestAccCosmosDBAccount_complete_mongo(t *testing.T) { + testAccCosmosDBAccount_completeWith(t, documentdb.MongoDB) } -func TestAccAzureRMCosmosDBAccount_complete_global(t *testing.T) { - testAccAzureRMCosmosDBAccount_completeWith(t, documentdb.GlobalDocumentDB) +func TestAccCosmosDBAccount_complete_global(t *testing.T) { + testAccCosmosDBAccount_completeWith(t, documentdb.GlobalDocumentDB) } -func TestAccAzureRMCosmosDBAccount_complete_parse(t *testing.T) { - testAccAzureRMCosmosDBAccount_completeWith(t, documentdb.Parse) +func TestAccCosmosDBAccount_complete_parse(t *testing.T) { + testAccCosmosDBAccount_completeWith(t, documentdb.Parse) } -func testAccAzureRMCosmosDBAccount_completeWith(t *testing.T, kind documentdb.DatabaseAccountKind) { +func testAccCosmosDBAccount_completeWith(t *testing.T, kind documentdb.DatabaseAccountKind) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDBAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDBAccount_complete(data, kind, 
documentdb.Eventual), - Check: resource.ComposeAggregateTestCheckFunc( - checkAccAzureRMCosmosDBAccount_basic(data, documentdb.Eventual, 3), - ), - }, - data.ImportStep(), + r := CosmosDBAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data, kind, documentdb.Eventual), + Check: resource.ComposeAggregateTestCheckFunc( + checkAccCosmosDBAccount_basic(data, documentdb.Eventual, 3), + ), }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDBAccount_completeZoneRedundant_mongo(t *testing.T) { - testAccAzureRMCosmosDBAccount_zoneRedundantWith(t, documentdb.MongoDB) +func TestAccCosmosDBAccount_completeZoneRedundant_mongo(t *testing.T) { + testAccCosmosDBAccount_zoneRedundantWith(t, documentdb.MongoDB) } -func TestAccAzureRMCosmosDBAccount_completeZoneRedundant_global(t *testing.T) { - testAccAzureRMCosmosDBAccount_zoneRedundantWith(t, documentdb.GlobalDocumentDB) +func TestAccCosmosDBAccount_completeZoneRedundant_global(t *testing.T) { + testAccCosmosDBAccount_zoneRedundantWith(t, documentdb.GlobalDocumentDB) } -func TestAccAzureRMCosmosDBAccount_completeZoneRedundant_parse(t *testing.T) { - testAccAzureRMCosmosDBAccount_zoneRedundantWith(t, documentdb.Parse) +func TestAccCosmosDBAccount_completeZoneRedundant_parse(t *testing.T) { + testAccCosmosDBAccount_zoneRedundantWith(t, documentdb.Parse) } -func testAccAzureRMCosmosDBAccount_zoneRedundantWith(t *testing.T, kind documentdb.DatabaseAccountKind) { +func testAccCosmosDBAccount_zoneRedundantWith(t *testing.T, kind documentdb.DatabaseAccountKind) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_account", "test") + r := CosmosDBAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.zoneRedundant(data, kind), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDBAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDBAccount_zoneRedundant(data, kind), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDBAccountExists(data.ResourceName), - ), - }, - data.ImportStep(), +func testAccCosmosDBAccount_zoneRedundant_updateWith(t *testing.T, kind documentdb.DatabaseAccountKind) { + data := acceptance.BuildTestData(t, "azurerm_cosmosdb_account", "test") + r := CosmosDBAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, kind, documentdb.Eventual), + Check: resource.ComposeAggregateTestCheckFunc( + checkAccCosmosDBAccount_basic(data, documentdb.Eventual, 1), + ), }, + data.ImportStep(), + { + Config: r.zoneRedundantUpdate(data, kind, documentdb.Eventual), + Check: resource.ComposeAggregateTestCheckFunc( + checkAccCosmosDBAccount_basic(data, documentdb.Eventual, 2), + ), + }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDBAccount_update_mongo(t *testing.T) { - testAccAzureRMCosmosDBAccount_updateWith(t, documentdb.MongoDB) +func TestAccCosmosDBAccount_zoneRedundant_update_mongo(t *testing.T) { + testAccCosmosDBAccount_zoneRedundant_updateWith(t, documentdb.MongoDB) } -func TestAccAzureRMCosmosDBAccount_update_global(t *testing.T) { - testAccAzureRMCosmosDBAccount_updateWith(t, documentdb.GlobalDocumentDB) +func TestAccCosmosDBAccount_update_mongo(t *testing.T) { + testAccCosmosDBAccount_updateWith(t, documentdb.MongoDB) } -func 
TestAccAzureRMCosmosDBAccount_update_parse(t *testing.T) { - testAccAzureRMCosmosDBAccount_updateWith(t, documentdb.Parse) +func TestAccCosmosDBAccount_update_global(t *testing.T) { + testAccCosmosDBAccount_updateWith(t, documentdb.GlobalDocumentDB) } -func testAccAzureRMCosmosDBAccount_updateWith(t *testing.T, kind documentdb.DatabaseAccountKind) { +func TestAccCosmosDBAccount_update_parse(t *testing.T) { + testAccCosmosDBAccount_updateWith(t, documentdb.Parse) +} +func testAccCosmosDBAccount_updateWith(t *testing.T, kind documentdb.DatabaseAccountKind) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDBAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDBAccount_basic(data, kind, documentdb.Eventual), - Check: resource.ComposeAggregateTestCheckFunc( - checkAccAzureRMCosmosDBAccount_basic(data, documentdb.Eventual, 1), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosDBAccount_complete(data, kind, documentdb.Eventual), - Check: resource.ComposeAggregateTestCheckFunc( - checkAccAzureRMCosmosDBAccount_basic(data, documentdb.Eventual, 3), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosDBAccount_completeUpdated(data, kind, documentdb.Eventual), - Check: resource.ComposeAggregateTestCheckFunc( - checkAccAzureRMCosmosDBAccount_basic(data, documentdb.Eventual, 3), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosDBAccount_basicWithResources(data, kind, documentdb.Eventual), - Check: resource.ComposeAggregateTestCheckFunc( - // checkAccAzureRMCosmosDBAccount_basic(data, documentdb.Eventual, 1), - ), - }, - data.ImportStep(), + r := CosmosDBAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, kind, documentdb.Eventual), + Check: resource.ComposeAggregateTestCheckFunc( + checkAccCosmosDBAccount_basic(data, documentdb.Eventual, 1), + ), + }, + data.ImportStep(), + { + Config: r.complete(data, kind, documentdb.Eventual), + Check: resource.ComposeAggregateTestCheckFunc( + checkAccCosmosDBAccount_basic(data, documentdb.Eventual, 3), + ), + }, + data.ImportStep(), + { + Config: r.completeUpdated(data, kind, documentdb.Eventual), + Check: resource.ComposeAggregateTestCheckFunc( + checkAccCosmosDBAccount_basic(data, documentdb.Eventual, 3), + ), }, + data.ImportStep(), + { + Config: r.basicWithResources(data, kind, documentdb.Eventual), + Check: resource.ComposeAggregateTestCheckFunc( + // checkAccCosmosDBAccount_basic(data, documentdb.Eventual, 1), + ), + }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDBAccount_capabilities_EnableAggregationPipeline(t *testing.T) { - testAccAzureRMCosmosDBAccount_capabilitiesWith(t, documentdb.GlobalDocumentDB, []string{"EnableAggregationPipeline"}) +func TestAccCosmosDBAccount_capabilities_EnableAggregationPipeline(t *testing.T) { + testAccCosmosDBAccount_capabilitiesWith(t, documentdb.GlobalDocumentDB, []string{"EnableAggregationPipeline"}) } -func TestAccAzureRMCosmosDBAccount_capabilities_EnableCassandra(t *testing.T) { - testAccAzureRMCosmosDBAccount_capabilitiesWith(t, documentdb.GlobalDocumentDB, []string{"EnableCassandra"}) +func TestAccCosmosDBAccount_capabilities_EnableCassandra(t *testing.T) { + testAccCosmosDBAccount_capabilitiesWith(t, documentdb.GlobalDocumentDB, []string{"EnableCassandra"}) } -func 
TestAccAzureRMCosmosDBAccount_capabilities_EnableGremlin(t *testing.T) { - testAccAzureRMCosmosDBAccount_capabilitiesWith(t, documentdb.GlobalDocumentDB, []string{"EnableGremlin"}) +func TestAccCosmosDBAccount_capabilities_EnableGremlin(t *testing.T) { + testAccCosmosDBAccount_capabilitiesWith(t, documentdb.GlobalDocumentDB, []string{"EnableGremlin"}) } -func TestAccAzureRMCosmosDBAccount_capabilities_EnableTable(t *testing.T) { - testAccAzureRMCosmosDBAccount_capabilitiesWith(t, documentdb.GlobalDocumentDB, []string{"EnableTable"}) +func TestAccCosmosDBAccount_capabilities_EnableTable(t *testing.T) { + testAccCosmosDBAccount_capabilitiesWith(t, documentdb.GlobalDocumentDB, []string{"EnableTable"}) } -func TestAccAzureRMCosmosDBAccount_capabilities_EnableServerless(t *testing.T) { - testAccAzureRMCosmosDBAccount_capabilitiesWith(t, documentdb.GlobalDocumentDB, []string{"EnableServerless"}) +func TestAccCosmosDBAccount_capabilities_EnableServerless(t *testing.T) { + testAccCosmosDBAccount_capabilitiesWith(t, documentdb.GlobalDocumentDB, []string{"EnableServerless"}) } -func TestAccAzureRMCosmosDBAccount_capabilities_EnableMongo(t *testing.T) { - testAccAzureRMCosmosDBAccount_capabilitiesWith(t, documentdb.MongoDB, []string{"EnableMongo"}) +func TestAccCosmosDBAccount_capabilities_EnableMongo(t *testing.T) { + testAccCosmosDBAccount_capabilitiesWith(t, documentdb.MongoDB, []string{"EnableMongo"}) } -func TestAccAzureRMCosmosDBAccount_capabilities_MongoDBv34(t *testing.T) { - testAccAzureRMCosmosDBAccount_capabilitiesWith(t, documentdb.MongoDB, []string{"MongoDBv3.4"}) +func TestAccCosmosDBAccount_capabilities_MongoDBv34(t *testing.T) { + testAccCosmosDBAccount_capabilitiesWith(t, documentdb.MongoDB, []string{"MongoDBv3.4"}) } -func TestAccAzureRMCosmosDBAccount_capabilities_mongoEnableDocLevelTTL(t *testing.T) { - testAccAzureRMCosmosDBAccount_capabilitiesWith(t, documentdb.MongoDB, []string{"mongoEnableDocLevelTTL"}) +func TestAccCosmosDBAccount_capabilities_mongoEnableDocLevelTTL(t *testing.T) { + testAccCosmosDBAccount_capabilitiesWith(t, documentdb.MongoDB, []string{"mongoEnableDocLevelTTL"}) } -func TestAccAzureRMCosmosDBAccount_capabilities_DisableRateLimitingResponses(t *testing.T) { - testAccAzureRMCosmosDBAccount_capabilitiesWith(t, documentdb.MongoDB, []string{"DisableRateLimitingResponses"}) +func TestAccCosmosDBAccount_capabilities_DisableRateLimitingResponses(t *testing.T) { + testAccCosmosDBAccount_capabilitiesWith(t, documentdb.MongoDB, []string{"DisableRateLimitingResponses"}) } -func TestAccAzureRMCosmosDBAccount_capabilities_AllowSelfServeUpgradeToMongo36(t *testing.T) { - testAccAzureRMCosmosDBAccount_capabilitiesWith(t, documentdb.MongoDB, []string{"AllowSelfServeUpgradeToMongo36"}) +func TestAccCosmosDBAccount_capabilities_AllowSelfServeUpgradeToMongo36(t *testing.T) { + testAccCosmosDBAccount_capabilitiesWith(t, documentdb.MongoDB, []string{"AllowSelfServeUpgradeToMongo36"}) } -func testAccAzureRMCosmosDBAccount_capabilitiesWith(t *testing.T, kind documentdb.DatabaseAccountKind, capabilities []string) { +func testAccCosmosDBAccount_capabilitiesWith(t *testing.T, kind documentdb.DatabaseAccountKind, capabilities []string) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDBAccountDestroy, - Steps: []resource.TestStep{ - { - Config: 
testAccAzureRMCosmosDBAccount_capabilities(data, kind, capabilities), - Check: resource.ComposeAggregateTestCheckFunc( - checkAccAzureRMCosmosDBAccount_basic(data, documentdb.Strong, 1), - ), - }, - data.ImportStep(), + r := CosmosDBAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.capabilities(data, kind, capabilities), + Check: resource.ComposeAggregateTestCheckFunc( + checkAccCosmosDBAccount_basic(data, documentdb.Strong, 1), + ), }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDBAccount_capabilitiesAdd(t *testing.T) { +func TestAccCosmosDBAccount_capabilitiesAdd(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDBAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDBAccount_capabilities(data, documentdb.GlobalDocumentDB, []string{"EnableCassandra"}), - Check: resource.ComposeAggregateTestCheckFunc( - checkAccAzureRMCosmosDBAccount_basic(data, documentdb.Strong, 1), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosDBAccount_capabilities(data, documentdb.GlobalDocumentDB, []string{"EnableCassandra", "EnableAggregationPipeline"}), - Check: resource.ComposeAggregateTestCheckFunc( - checkAccAzureRMCosmosDBAccount_basic(data, documentdb.Strong, 1), - ), - }, - data.ImportStep(), + r := CosmosDBAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.capabilities(data, documentdb.GlobalDocumentDB, []string{"EnableCassandra"}), + Check: resource.ComposeAggregateTestCheckFunc( + checkAccCosmosDBAccount_basic(data, documentdb.Strong, 1), + ), + }, + data.ImportStep(), + { + Config: r.capabilities(data, documentdb.GlobalDocumentDB, []string{"EnableCassandra", "EnableAggregationPipeline"}), + Check: resource.ComposeAggregateTestCheckFunc( + checkAccCosmosDBAccount_basic(data, documentdb.Strong, 1), + ), }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDBAccount_capabilitiesUpdate(t *testing.T) { +func TestAccCosmosDBAccount_capabilitiesUpdate(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDBAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDBAccount_capabilities(data, documentdb.GlobalDocumentDB, []string{"EnableCassandra"}), - Check: resource.ComposeAggregateTestCheckFunc( - checkAccAzureRMCosmosDBAccount_basic(data, documentdb.Strong, 1), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosDBAccount_capabilities(data, documentdb.GlobalDocumentDB, []string{"EnableTable", "EnableAggregationPipeline"}), - Check: resource.ComposeAggregateTestCheckFunc( - checkAccAzureRMCosmosDBAccount_basic(data, documentdb.Strong, 1), - ), - }, - data.ImportStep(), + r := CosmosDBAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.capabilities(data, documentdb.GlobalDocumentDB, []string{"EnableCassandra"}), + Check: resource.ComposeAggregateTestCheckFunc( + checkAccCosmosDBAccount_basic(data, documentdb.Strong, 1), + ), + }, + data.ImportStep(), + { + Config: r.capabilities(data, documentdb.GlobalDocumentDB, []string{"EnableTable", "EnableAggregationPipeline"}), + Check: 
resource.ComposeAggregateTestCheckFunc( + checkAccCosmosDBAccount_basic(data, documentdb.Strong, 1), + ), }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDBAccount_geoLocationsUpdate(t *testing.T) { +func TestAccCosmosDBAccount_geoLocationsUpdate(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDBAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDBAccount_basic(data, "GlobalDocumentDB", documentdb.Eventual), - Check: resource.ComposeAggregateTestCheckFunc( - checkAccAzureRMCosmosDBAccount_basic(data, documentdb.Eventual, 1), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosDBAccount_geoLocationUpdate(data, "GlobalDocumentDB", documentdb.Eventual), - Check: resource.ComposeAggregateTestCheckFunc( - checkAccAzureRMCosmosDBAccount_basic(data, documentdb.Eventual, 2), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosDBAccount_basic(data, "GlobalDocumentDB", documentdb.Eventual), - Check: resource.ComposeAggregateTestCheckFunc( - checkAccAzureRMCosmosDBAccount_basic(data, documentdb.Eventual, 1), - ), - }, - data.ImportStep(), + r := CosmosDBAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "GlobalDocumentDB", documentdb.Eventual), + Check: resource.ComposeAggregateTestCheckFunc( + checkAccCosmosDBAccount_basic(data, documentdb.Eventual, 1), + ), + }, + data.ImportStep(), + { + Config: r.geoLocationUpdate(data, "GlobalDocumentDB", documentdb.Eventual), + Check: resource.ComposeAggregateTestCheckFunc( + checkAccCosmosDBAccount_basic(data, documentdb.Eventual, 2), + ), + }, + data.ImportStep(), + { + Config: r.basic(data, "GlobalDocumentDB", documentdb.Eventual), + Check: resource.ComposeAggregateTestCheckFunc( + checkAccCosmosDBAccount_basic(data, documentdb.Eventual, 1), + ), }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDBAccount_freeTier(t *testing.T) { +func TestAccCosmosDBAccount_freeTier(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDBAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDBAccount_freeTier(data, "GlobalDocumentDB", documentdb.Eventual), - Check: resource.ComposeAggregateTestCheckFunc( - checkAccAzureRMCosmosDBAccount_basic(data, documentdb.Eventual, 1), - resource.TestCheckResourceAttr(data.ResourceName, "enable_free_tier", "true"), - ), - }, - data.ImportStep(), + r := CosmosDBAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.freeTier(data, "GlobalDocumentDB", documentdb.Eventual), + Check: resource.ComposeAggregateTestCheckFunc( + checkAccCosmosDBAccount_basic(data, documentdb.Eventual, 1), + check.That(data.ResourceName).Key("enable_free_tier").HasValue("true"), + ), }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDBAccount_vNetFilters(t *testing.T) { +func TestAccCosmosDBAccount_vNetFilters(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: 
testCheckAzureRMCosmosDBAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDBAccount_vNetFilters(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDBAccountExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "is_virtual_network_filter_enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "virtual_network_rule.#", "2"), - ), - }, - data.ImportStep(), + r := CosmosDBAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.vNetFilters(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("is_virtual_network_filter_enabled").HasValue("true"), + check.That(data.ResourceName).Key("virtual_network_rule.#").HasValue("2"), + ), }, + data.ImportStep(), }) } -func testCheckAzureRMCosmosDBAccountDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Cosmos.DatabaseClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_cosmosdb_account" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, name) - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("CosmosDB Account still exists:\n%#v", resp) - } +func (t CosmosDBAccountResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.DatabaseAccountID(state.ID) + if err != nil { + return nil, err } - return nil -} - -func testCheckAzureRMCosmosDBAccountExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Cosmos.DatabaseClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, name) - if err != nil { - return fmt.Errorf("Bad: Get on cosmosAccountsClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: CosmosDB Account '%s' (resource group: '%s') does not exist", name, resourceGroup) - } - - return nil + resp, err := clients.Cosmos.DatabaseClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("reading Cosmos Database (%s): %+v", id.String(), err) } + + return utils.Bool(resp.ID != nil), nil } -func testAccAzureRMCosmosDBAccount_basic(data acceptance.TestData, kind documentdb.DatabaseAccountKind, consistency documentdb.DefaultConsistencyLevel) string { +func (CosmosDBAccountResource) basic(data acceptance.TestData, kind documentdb.DatabaseAccountKind, consistency documentdb.DefaultConsistencyLevel) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -546,7 +547,7 @@ resource "azurerm_cosmosdb_account" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, string(kind), string(consistency)) } -func testAccAzureRMCosmosDBAccount_requiresImport(data acceptance.TestData, consistency documentdb.DefaultConsistencyLevel) string { 
+func (r CosmosDBAccountResource) requiresImport(data acceptance.TestData, consistency documentdb.DefaultConsistencyLevel) string { return fmt.Sprintf(` %s @@ -565,10 +566,10 @@ resource "azurerm_cosmosdb_account" "import" { failover_priority = 0 } } -`, testAccAzureRMCosmosDBAccount_basic(data, "GlobalDocumentDB", consistency)) +`, r.basic(data, "GlobalDocumentDB", consistency)) } -func testAccAzureRMCosmosDBAccount_consistency(data acceptance.TestData, kind documentdb.DatabaseAccountKind, consistency documentdb.DefaultConsistencyLevel, interval, staleness int) string { +func (CosmosDBAccountResource) consistency(data acceptance.TestData, kind documentdb.DatabaseAccountKind, consistency documentdb.DefaultConsistencyLevel, interval, staleness int) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -600,7 +601,7 @@ resource "azurerm_cosmosdb_account" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, string(kind), string(consistency), interval, staleness) } -func testAccAzureRMCosmosDBAccount_completePreReqs(data acceptance.TestData) string { +func (CosmosDBAccountResource) completePreReqs(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -637,7 +638,7 @@ resource "azurerm_subnet" "subnet2" { `, data.RandomInteger, data.Locations.Primary) } -func testAccAzureRMCosmosDBAccount_complete(data acceptance.TestData, kind documentdb.DatabaseAccountKind, consistency documentdb.DefaultConsistencyLevel) string { +func (r CosmosDBAccountResource) complete(data acceptance.TestData, kind documentdb.DatabaseAccountKind, consistency documentdb.DefaultConsistencyLevel) string { return fmt.Sprintf(` %[1]s @@ -681,10 +682,10 @@ resource "azurerm_cosmosdb_account" "test" { failover_priority = 2 } } -`, testAccAzureRMCosmosDBAccount_completePreReqs(data), data.RandomInteger, string(kind), string(consistency), data.Locations.Secondary, data.Locations.Ternary) +`, r.completePreReqs(data), data.RandomInteger, string(kind), string(consistency), data.Locations.Secondary, data.Locations.Ternary) } -func testAccAzureRMCosmosDBAccount_zoneRedundant(data acceptance.TestData, kind documentdb.DatabaseAccountKind) string { +func (CosmosDBAccountResource) zoneRedundant(data acceptance.TestData, kind documentdb.DatabaseAccountKind) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -724,7 +725,7 @@ resource "azurerm_cosmosdb_account" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, string(kind), data.Locations.Secondary) } -func testAccAzureRMCosmosDBAccount_completeUpdated(data acceptance.TestData, kind documentdb.DatabaseAccountKind, consistency documentdb.DefaultConsistencyLevel) string { +func (r CosmosDBAccountResource) completeUpdated(data acceptance.TestData, kind documentdb.DatabaseAccountKind, consistency documentdb.DefaultConsistencyLevel) string { return fmt.Sprintf(` %[1]s @@ -764,10 +765,10 @@ resource "azurerm_cosmosdb_account" "test" { failover_priority = 2 } } -`, testAccAzureRMCosmosDBAccount_completePreReqs(data), data.RandomInteger, string(kind), string(consistency), data.Locations.Secondary, data.Locations.Ternary) +`, r.completePreReqs(data), data.RandomInteger, string(kind), string(consistency), data.Locations.Secondary, data.Locations.Ternary) } -func testAccAzureRMCosmosDBAccount_basicWithResources(data acceptance.TestData, kind documentdb.DatabaseAccountKind, consistency documentdb.DefaultConsistencyLevel) string { +func (r CosmosDBAccountResource) basicWithResources(data 
acceptance.TestData, kind documentdb.DatabaseAccountKind, consistency documentdb.DefaultConsistencyLevel) string { return fmt.Sprintf(` %[1]s @@ -787,10 +788,10 @@ resource "azurerm_cosmosdb_account" "test" { failover_priority = 0 } } -`, testAccAzureRMCosmosDBAccount_completePreReqs(data), data.RandomInteger, string(kind), string(consistency)) +`, r.completePreReqs(data), data.RandomInteger, string(kind), string(consistency)) } -func testAccAzureRMCosmosDBAccount_capabilities(data acceptance.TestData, kind documentdb.DatabaseAccountKind, capabilities []string) string { +func (CosmosDBAccountResource) capabilities(data acceptance.TestData, kind documentdb.DatabaseAccountKind, capabilities []string) string { capeTf := "" for _, c := range capabilities { capeTf += fmt.Sprintf("capabilities {name = \"%s\"}\n", c) @@ -827,7 +828,7 @@ resource "azurerm_cosmosdb_account" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, string(kind), capeTf) } -func testAccAzureRMCosmosDBAccount_geoLocationUpdate(data acceptance.TestData, kind documentdb.DatabaseAccountKind, consistency documentdb.DefaultConsistencyLevel) string { +func (CosmosDBAccountResource) geoLocationUpdate(data acceptance.TestData, kind documentdb.DatabaseAccountKind, consistency documentdb.DefaultConsistencyLevel) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -862,7 +863,64 @@ resource "azurerm_cosmosdb_account" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, string(kind), string(consistency), data.Locations.Secondary) } -func testAccAzureRMCosmosDBAccount_vNetFiltersPreReqs(data acceptance.TestData) string { +func (CosmosDBAccountResource) zoneRedundantUpdate(data acceptance.TestData, kind documentdb.DatabaseAccountKind, consistency documentdb.DefaultConsistencyLevel) string { + return fmt.Sprintf(` +variable "geo_location" { + type = list(object({ + location = string + failover_priority = string + zone_redundant = bool + })) + default = [ + { + location = "%s" + failover_priority = 0 + zone_redundant = false + }, + { + location = "%s" + failover_priority = 1 + zone_redundant = true + } + ] +} + +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-cosmos-%d" + location = "%s" +} + +resource "azurerm_cosmosdb_account" "test" { + name = "acctest-ca-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + offer_type = "Standard" + kind = "%s" + + enable_multiple_write_locations = true + enable_automatic_failover = true + + consistency_policy { + consistency_level = "%s" + } + + dynamic "geo_location" { + for_each = var.geo_location + content { + location = geo_location.value.location + failover_priority = geo_location.value.failover_priority + zone_redundant = geo_location.value.zone_redundant + } + } +} +`, data.Locations.Primary, data.Locations.Secondary, data.RandomInteger, data.Locations.Primary, data.RandomInteger, string(kind), string(consistency)) +} + +func (CosmosDBAccountResource) vNetFiltersPreReqs(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -902,7 +960,7 @@ resource "azurerm_subnet" "subnet2" { `, data.RandomInteger, data.Locations.Primary) } -func testAccAzureRMCosmosDBAccount_vNetFilters(data acceptance.TestData) string { +func (r CosmosDBAccountResource) vNetFilters(data acceptance.TestData) string { return fmt.Sprintf(` %[1]s @@ -940,10 +998,10 @@ resource "azurerm_cosmosdb_account" "test" { 
failover_priority = 0 } } -`, testAccAzureRMCosmosDBAccount_vNetFiltersPreReqs(data), data.RandomInteger) +`, r.vNetFiltersPreReqs(data), data.RandomInteger) } -func testAccAzureRMCosmosDBAccount_freeTier(data acceptance.TestData, kind documentdb.DatabaseAccountKind, consistency documentdb.DefaultConsistencyLevel) string { +func (CosmosDBAccountResource) freeTier(data acceptance.TestData, kind documentdb.DatabaseAccountKind, consistency documentdb.DefaultConsistencyLevel) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -975,21 +1033,155 @@ resource "azurerm_cosmosdb_account" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, string(kind), string(consistency)) } -func checkAccAzureRMCosmosDBAccount_basic(data acceptance.TestData, consistency documentdb.DefaultConsistencyLevel, locationCount int) resource.TestCheckFunc { +func checkAccCosmosDBAccount_basic(data acceptance.TestData, consistency documentdb.DefaultConsistencyLevel, locationCount int) resource.TestCheckFunc { return resource.ComposeTestCheckFunc( - testCheckAzureRMCosmosDBAccountExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "resource_group_name"), - resource.TestCheckResourceAttr(data.ResourceName, "location", azure.NormalizeLocation(data.Locations.Primary)), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "offer_type", string(documentdb.Standard)), - resource.TestCheckResourceAttr(data.ResourceName, "consistency_policy.0.consistency_level", string(consistency)), - resource.TestCheckResourceAttr(data.ResourceName, "geo_location.#", strconv.Itoa(locationCount)), - resource.TestCheckResourceAttrSet(data.ResourceName, "endpoint"), - resource.TestCheckResourceAttr(data.ResourceName, "read_endpoints.#", strconv.Itoa(locationCount)), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_readonly_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_readonly_key"), + check.That(data.ResourceName).Key("name").Exists(), + check.That(data.ResourceName).Key("resource_group_name").Exists(), + check.That(data.ResourceName).Key("location").HasValue(azure.NormalizeLocation(data.Locations.Primary)), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + check.That(data.ResourceName).Key("offer_type").HasValue(string(documentdb.Standard)), + check.That(data.ResourceName).Key("consistency_policy.0.consistency_level").HasValue(string(consistency)), + check.That(data.ResourceName).Key("geo_location.#").HasValue(strconv.Itoa(locationCount)), + check.That(data.ResourceName).Key("endpoint").Exists(), + check.That(data.ResourceName).Key("read_endpoints.#").HasValue(strconv.Itoa(locationCount)), + check.That(data.ResourceName).Key("primary_key").Exists(), + check.That(data.ResourceName).Key("secondary_key").Exists(), + check.That(data.ResourceName).Key("primary_readonly_key").Exists(), + check.That(data.ResourceName).Key("secondary_readonly_key").Exists(), ) } + +func (CosmosDBAccountResource) network_access_enabled(data acceptance.TestData, kind documentdb.DatabaseAccountKind, consistency documentdb.DefaultConsistencyLevel) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = 
"acctestRG-cosmos-%d" + location = "%s" +} + +resource "azurerm_cosmosdb_account" "test" { + name = "acctest-ca-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + offer_type = "Standard" + kind = "%s" + public_network_access_enabled = true + + consistency_policy { + consistency_level = "%s" + } + + geo_location { + location = azurerm_resource_group.test.location + failover_priority = 0 + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, string(kind), string(consistency)) +} + +func (CosmosDBAccountResource) key_vault_uri(data acceptance.TestData, kind documentdb.DatabaseAccountKind, consistency documentdb.DefaultConsistencyLevel) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-cosmos-%d" + location = "%s" +} + +data "azuread_service_principal" "cosmosdb" { + display_name = "Azure Cosmos DB" +} + +data "azurerm_client_config" "current" {} + +resource "azurerm_key_vault" "test" { + name = "acctestkv-%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "premium" + + purge_protection_enabled = true + soft_delete_enabled = true + + access_policy { + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + + key_permissions = [ + "list", + "create", + "delete", + "get", + "update", + ] + + secret_permissions = [ + "get", + "delete", + "set", + ] + } + + access_policy { + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azuread_service_principal.cosmosdb.id + + key_permissions = [ + "list", + "create", + "delete", + "get", + "update", + "unwrapKey", + "wrapKey", + ] + + secret_permissions = [ + "get", + "delete", + "set", + ] + } +} + +resource "azurerm_key_vault_key" "test" { + name = "key-%s" + key_vault_id = azurerm_key_vault.test.id + key_type = "RSA" + key_size = 2048 + + key_opts = [ + "decrypt", + "encrypt", + "sign", + "unwrapKey", + "verify", + "wrapKey", + ] +} + +resource "azurerm_cosmosdb_account" "test" { + name = "acctest-ca-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + offer_type = "Standard" + kind = "%s" + key_vault_key_id = azurerm_key_vault_key.test.id + + consistency_policy { + consistency_level = "%s" + } + + geo_location { + location = azurerm_resource_group.test.location + failover_priority = 0 + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString, data.RandomInteger, string(kind), string(consistency)) +} diff --git a/azurerm/internal/services/cosmos/cosmosdb_cassandra_keyspace_resource.go b/azurerm/internal/services/cosmos/cosmosdb_cassandra_keyspace_resource.go index 6e810df93907..d19c2449b7a0 100644 --- a/azurerm/internal/services/cosmos/cosmosdb_cassandra_keyspace_resource.go +++ b/azurerm/internal/services/cosmos/cosmosdb_cassandra_keyspace_resource.go @@ -5,7 +5,7 @@ import ( "log" "time" - "github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2020-04-01/documentdb" + "github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb" "github.com/hashicorp/go-azure-helpers/response" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" @@ -19,12 +19,12 @@ import 
( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmCosmosDbCassandraKeyspace() *schema.Resource { +func resourceCosmosDbCassandraKeyspace() *schema.Resource { return &schema.Resource{ - Create: resourceArmCosmosDbCassandraKeyspaceCreate, - Read: resourceArmCosmosDbCassandraKeyspaceRead, - Update: resourceArmCosmosDbCassandraKeyspaceUpdate, - Delete: resourceArmCosmosDbCassandraKeyspaceDelete, + Create: resourceCosmosDbCassandraKeyspaceCreate, + Read: resourceCosmosDbCassandraKeyspaceRead, + Update: resourceCosmosDbCassandraKeyspaceUpdate, + Delete: resourceCosmosDbCassandraKeyspaceDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -75,7 +75,7 @@ func resourceArmCosmosDbCassandraKeyspace() *schema.Resource { } } -func resourceArmCosmosDbCassandraKeyspaceCreate(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbCassandraKeyspaceCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.CassandraClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -136,10 +136,10 @@ func resourceArmCosmosDbCassandraKeyspaceCreate(d *schema.ResourceData, meta int d.SetId(*resp.ID) - return resourceArmCosmosDbCassandraKeyspaceRead(d, meta) + return resourceCosmosDbCassandraKeyspaceRead(d, meta) } -func resourceArmCosmosDbCassandraKeyspaceUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbCassandraKeyspaceUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.CassandraClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -151,7 +151,7 @@ func resourceArmCosmosDbCassandraKeyspaceUpdate(d *schema.ResourceData, meta int err = common.CheckForChangeFromAutoscaleAndManualThroughput(d) if err != nil { - return fmt.Errorf("Error updating Cosmos Cassandra Keyspace %q (Account: %q) - %+v", id.Name, id.Account, err) + return fmt.Errorf("Error updating Cosmos Cassandra Keyspace %q (Account: %q) - %+v", id.Name, id.DatabaseAccountName, err) } db := documentdb.CassandraKeyspaceCreateUpdateParameters{ @@ -163,34 +163,34 @@ func resourceArmCosmosDbCassandraKeyspaceUpdate(d *schema.ResourceData, meta int }, } - future, err := client.CreateUpdateCassandraKeyspace(ctx, id.ResourceGroup, id.Account, id.Name, db) + future, err := client.CreateUpdateCassandraKeyspace(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name, db) if err != nil { - return fmt.Errorf("Error issuing create/update request for Cosmos Cassandra Keyspace %q (Account: %q): %+v", id.ResourceGroup, id.Account, err) + return fmt.Errorf("Error issuing create/update request for Cosmos Cassandra Keyspace %q (Account: %q): %+v", id.ResourceGroup, id.DatabaseAccountName, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting on create/update future for Cosmos Cassandra Keyspace %q (Account: %q): %+v", id.ResourceGroup, id.Account, err) + return fmt.Errorf("Error waiting on create/update future for Cosmos Cassandra Keyspace %q (Account: %q): %+v", id.ResourceGroup, id.DatabaseAccountName, err) } if common.HasThroughputChange(d) { throughputParameters := common.ExpandCosmosDBThroughputSettingsUpdateParameters(d) - throughputFuture, err := client.UpdateCassandraKeyspaceThroughput(ctx, id.ResourceGroup, id.Account, id.Name, *throughputParameters) + throughputFuture, err := client.UpdateCassandraKeyspaceThroughput(ctx, 
id.ResourceGroup, id.DatabaseAccountName, id.Name, *throughputParameters) if err != nil { if response.WasNotFound(throughputFuture.Response()) { return fmt.Errorf("Error setting Throughput for Cosmos Cassandra Keyspace %q (Account: %q): %+v - "+ - "If the collection has not been created with an initial throughput, you cannot configure it later.", id.Name, id.Account, err) + "If the collection has not been created with an initial throughput, you cannot configure it later.", id.Name, id.DatabaseAccountName, err) } } if err = throughputFuture.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting on ThroughputUpdate future for Cosmos Cassandra Keyspace %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error waiting on ThroughputUpdate future for Cosmos Cassandra Keyspace %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) } } - return resourceArmCosmosDbCassandraKeyspaceRead(d, meta) + return resourceCosmosDbCassandraKeyspaceRead(d, meta) } -func resourceArmCosmosDbCassandraKeyspaceRead(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbCassandraKeyspaceRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.CassandraClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -200,29 +200,29 @@ func resourceArmCosmosDbCassandraKeyspaceRead(d *schema.ResourceData, meta inter return err } - resp, err := client.GetCassandraKeyspace(ctx, id.ResourceGroup, id.Account, id.Name) + resp, err := client.GetCassandraKeyspace(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[INFO] Error reading Cosmos Cassandra Keyspace %q (Account: %q) - removing from state", id.Name, id.Account) + log.Printf("[INFO] Error reading Cosmos Cassandra Keyspace %q (Account: %q) - removing from state", id.Name, id.DatabaseAccountName) d.SetId("") return nil } - return fmt.Errorf("Error reading Cosmos Cassandra Keyspace %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error reading Cosmos Cassandra Keyspace %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) } d.Set("resource_group_name", id.ResourceGroup) - d.Set("account_name", id.Account) + d.Set("account_name", id.DatabaseAccountName) if props := resp.CassandraKeyspaceGetProperties; props != nil { if res := props.Resource; res != nil { d.Set("name", res.ID) } } - throughputResp, err := client.GetCassandraKeyspaceThroughput(ctx, id.ResourceGroup, id.Account, id.Name) + throughputResp, err := client.GetCassandraKeyspaceThroughput(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name) if err != nil { if !utils.ResponseWasNotFound(throughputResp.Response) { - return fmt.Errorf("Error reading Throughput on Cosmos Cassandra Keyspace %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error reading Throughput on Cosmos Cassandra Keyspace %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) } else { d.Set("throughput", nil) d.Set("autoscale_settings", nil) @@ -234,7 +234,7 @@ func resourceArmCosmosDbCassandraKeyspaceRead(d *schema.ResourceData, meta inter return nil } -func resourceArmCosmosDbCassandraKeyspaceDelete(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbCassandraKeyspaceDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.CassandraClient ctx, cancel := 
timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() @@ -244,16 +244,16 @@ func resourceArmCosmosDbCassandraKeyspaceDelete(d *schema.ResourceData, meta int return err } - future, err := client.DeleteCassandraKeyspace(ctx, id.ResourceGroup, id.Account, id.Name) + future, err := client.DeleteCassandraKeyspace(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name) if err != nil { if !response.WasNotFound(future.Response()) { - return fmt.Errorf("Error deleting Cosmos Cassandra Keyspace %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error deleting Cosmos Cassandra Keyspace %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) } } err = future.WaitForCompletionRef(ctx, client.Client) if err != nil { - return fmt.Errorf("Error waiting on delete future for Cosmos Cassandra Keyspace %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error waiting on delete future for Cosmos Cassandra Keyspace %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) } return nil diff --git a/azurerm/internal/services/cosmos/cosmosdb_cassandra_keyspace_resource_test.go b/azurerm/internal/services/cosmos/cosmosdb_cassandra_keyspace_resource_test.go index 8f6937fa144a..5b04abc05de1 100644 --- a/azurerm/internal/services/cosmos/cosmosdb_cassandra_keyspace_resource_test.go +++ b/azurerm/internal/services/cosmos/cosmosdb_cassandra_keyspace_resource_test.go @@ -1,178 +1,125 @@ package cosmos_test import ( + "context" "fmt" - "net/http" "testing" - "github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2020-04-01/documentdb" + "github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/terraform" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func TestAccAzureRMCosmosDbCassandraKeyspace_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cosmosdb_cassandra_keyspace", "test") +type CosmosDbCassandraKeyspaceResource struct { +} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbCassandraKeyspaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDbCassandraKeyspace_basic(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbCassandraKeyspaceExists(data.ResourceName), - ), - }, - data.ImportStep(), +func TestAccCosmosDbCassandraKeyspace_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cosmosdb_cassandra_keyspace", "test") + r := CosmosDbCassandraKeyspaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDbCassandraKeyspace_complete(t *testing.T) { +func TestAccCosmosDbCassandraKeyspace_complete(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_cassandra_keyspace", "test") - - 
resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbCassandraKeyspaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDbCassandraKeyspace_throughput(data, 700), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbCassandraKeyspaceExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "throughput", "700"), - ), - }, - data.ImportStep(), + r := CosmosDbCassandraKeyspaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.throughput(data, 700), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("throughput").HasValue("700"), + ), }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDbCassandraKeyspace_update(t *testing.T) { +func TestAccCosmosDbCassandraKeyspace_update(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_cassandra_keyspace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbCassandraKeyspaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDbCassandraKeyspace_throughput(data, 700), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbCassandraKeyspaceExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "throughput", "700"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosDbCassandraKeyspace_throughput(data, 1700), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbCassandraKeyspaceExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "throughput", "1700"), - ), - }, - data.ImportStep(), + r := CosmosDbCassandraKeyspaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.throughput(data, 700), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("throughput").HasValue("700"), + ), }, + data.ImportStep(), + { + Config: r.throughput(data, 1700), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("throughput").HasValue("1700"), + ), + }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDbCassandraKeyspace_autoscale(t *testing.T) { +func TestAccCosmosDbCassandraKeyspace_autoscale(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_cassandra_keyspace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbCassandraKeyspaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDbCassandraKeyspace_autoscale(data, 4000), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbCassandraKeyspaceExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "autoscale_settings.0.max_throughput", "4000"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosDbCassandraKeyspace_autoscale(data, 5000), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbCassandraKeyspaceExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "autoscale_settings.0.max_throughput", 
"5000"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosDbCassandraKeyspace_autoscale(data, 4000), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbCassandraKeyspaceExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "autoscale_settings.0.max_throughput", "4000"), - ), - }, - data.ImportStep(), + r := CosmosDbCassandraKeyspaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.autoscale(data, 4000), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("autoscale_settings.0.max_throughput").HasValue("4000"), + ), + }, + data.ImportStep(), + { + Config: r.autoscale(data, 5000), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("autoscale_settings.0.max_throughput").HasValue("5000"), + ), + }, + data.ImportStep(), + { + Config: r.autoscale(data, 4000), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("autoscale_settings.0.max_throughput").HasValue("4000"), + ), }, + data.ImportStep(), }) } -func testCheckAzureRMCosmosDbCassandraKeyspaceDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Cosmos.CassandraClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_cosmosdb_cassandra_keyspace" { - continue - } - - name := rs.Primary.Attributes["name"] - account := rs.Primary.Attributes["account_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.GetCassandraKeyspace(ctx, resourceGroup, account, name) - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Error checking destroy for Cosmos Cassandra Keyspace %s (account %s) still exists:\n%v", name, account, err) - } - } - - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Cosmos Cassandra Keyspace %s (account %s) still exists:\n%#v", name, account, resp) - } +func (t CosmosDbCassandraKeyspaceResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.CassandraKeyspaceID(state.ID) + if err != nil { + return nil, err } - return nil -} - -func testCheckAzureRMCosmosDbCassandraKeyspaceExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Cosmos.CassandraClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - account := rs.Primary.Attributes["account_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.GetCassandraKeyspace(ctx, resourceGroup, account, name) - if err != nil { - return fmt.Errorf("Bad: Get on cosmosAccountsClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Cosmos database '%s' (account: '%s') does not exist", name, account) - } - - return nil + resp, err := clients.Cosmos.CassandraClient.GetCassandraKeyspace(ctx, id.ResourceGroup, id.DatabaseAccountName, 
id.Name) + if err != nil { + return nil, fmt.Errorf("reading Cosmos Cassandra Keyspace (%s): %+v", id.String(), err) } + + return utils.Bool(resp.ID != nil), nil } -func testAccAzureRMCosmosDbCassandraKeyspace_basic(data acceptance.TestData) string { +func (CosmosDbCassandraKeyspaceResource) basic(data acceptance.TestData) string { return fmt.Sprintf(` %[1]s @@ -181,10 +128,10 @@ resource "azurerm_cosmosdb_cassandra_keyspace" "test" { resource_group_name = azurerm_cosmosdb_account.test.resource_group_name account_name = azurerm_cosmosdb_account.test.name } -`, testAccAzureRMCosmosDBAccount_capabilities(data, documentdb.GlobalDocumentDB, []string{"EnableCassandra"}), data.RandomInteger) +`, CosmosDBAccountResource{}.capabilities(data, documentdb.GlobalDocumentDB, []string{"EnableCassandra"}), data.RandomInteger) } -func testAccAzureRMCosmosDbCassandraKeyspace_throughput(data acceptance.TestData, throughput int) string { +func (CosmosDbCassandraKeyspaceResource) throughput(data acceptance.TestData, throughput int) string { return fmt.Sprintf(` %[1]s @@ -195,10 +142,10 @@ resource "azurerm_cosmosdb_cassandra_keyspace" "test" { throughput = %[3]d } -`, testAccAzureRMCosmosDBAccount_capabilities(data, documentdb.GlobalDocumentDB, []string{"EnableCassandra"}), data.RandomInteger, throughput) +`, CosmosDBAccountResource{}.capabilities(data, documentdb.GlobalDocumentDB, []string{"EnableCassandra"}), data.RandomInteger, throughput) } -func testAccAzureRMCosmosDbCassandraKeyspace_autoscale(data acceptance.TestData, maxThroughput int) string { +func (CosmosDbCassandraKeyspaceResource) autoscale(data acceptance.TestData, maxThroughput int) string { return fmt.Sprintf(` %[1]s @@ -210,5 +157,5 @@ resource "azurerm_cosmosdb_cassandra_keyspace" "test" { max_throughput = %[3]d } } -`, testAccAzureRMCosmosDBAccount_capabilities(data, documentdb.GlobalDocumentDB, []string{"EnableCassandra"}), data.RandomInteger, maxThroughput) +`, CosmosDBAccountResource{}.capabilities(data, documentdb.GlobalDocumentDB, []string{"EnableCassandra"}), data.RandomInteger, maxThroughput) } diff --git a/azurerm/internal/services/cosmos/cosmosdb_gremlin_database_resource.go b/azurerm/internal/services/cosmos/cosmosdb_gremlin_database_resource.go index 73f693e3968f..5782ba45bc40 100644 --- a/azurerm/internal/services/cosmos/cosmosdb_gremlin_database_resource.go +++ b/azurerm/internal/services/cosmos/cosmosdb_gremlin_database_resource.go @@ -5,7 +5,7 @@ import ( "log" "time" - "github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2020-04-01/documentdb" + "github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb" "github.com/hashicorp/go-azure-helpers/response" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" @@ -19,12 +19,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmCosmosGremlinDatabase() *schema.Resource { +func resourceCosmosGremlinDatabase() *schema.Resource { return &schema.Resource{ - Create: resourceArmCosmosGremlinDatabaseCreate, - Update: resourceArmCosmosGremlinDatabaseUpdate, - Read: resourceArmCosmosGremlinDatabaseRead, - Delete: resourceArmCosmosGremlinDatabaseDelete, + Create: resourceCosmosGremlinDatabaseCreate, + Update: resourceCosmosGremlinDatabaseUpdate, + Read: resourceCosmosGremlinDatabaseRead, + Delete: resourceCosmosGremlinDatabaseDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ 
-75,7 +75,7 @@ func resourceArmCosmosGremlinDatabase() *schema.Resource { } } -func resourceArmCosmosGremlinDatabaseCreate(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosGremlinDatabaseCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.GremlinClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -136,10 +136,10 @@ func resourceArmCosmosGremlinDatabaseCreate(d *schema.ResourceData, meta interfa d.SetId(*resp.ID) - return resourceArmCosmosGremlinDatabaseRead(d, meta) + return resourceCosmosGremlinDatabaseRead(d, meta) } -func resourceArmCosmosGremlinDatabaseUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosGremlinDatabaseUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.GremlinClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -151,7 +151,7 @@ func resourceArmCosmosGremlinDatabaseUpdate(d *schema.ResourceData, meta interfa err = common.CheckForChangeFromAutoscaleAndManualThroughput(d) if err != nil { - return fmt.Errorf("Error updating Cosmos Gremlin Database %q (Account: %q) - %+v", id.Name, id.Account, err) + return fmt.Errorf("Error updating Cosmos Gremlin Database %q (Account: %q) - %+v", id.Name, id.DatabaseAccountName, err) } db := documentdb.GremlinDatabaseCreateUpdateParameters{ @@ -163,38 +163,38 @@ func resourceArmCosmosGremlinDatabaseUpdate(d *schema.ResourceData, meta interfa }, } - future, err := client.CreateUpdateGremlinDatabase(ctx, id.ResourceGroup, id.Account, id.Name, db) + future, err := client.CreateUpdateGremlinDatabase(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name, db) if err != nil { - return fmt.Errorf("Error issuing create/update request for Cosmos Gremlin Database %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error issuing create/update request for Cosmos Gremlin Database %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting on create/update future for Cosmos Gremlin Database %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error waiting on create/update future for Cosmos Gremlin Database %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) } if common.HasThroughputChange(d) { throughputParameters := common.ExpandCosmosDBThroughputSettingsUpdateParameters(d) - throughputFuture, err := client.UpdateGremlinDatabaseThroughput(ctx, id.ResourceGroup, id.Account, id.Name, *throughputParameters) + throughputFuture, err := client.UpdateGremlinDatabaseThroughput(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name, *throughputParameters) if err != nil { if response.WasNotFound(throughputFuture.Response()) { return fmt.Errorf("Error setting Throughput for Cosmos Gremlin Database %q (Account: %q): %+v - "+ - "If the collection has not been created with and initial throughput, you cannot configure it later.", id.Name, id.Account, err) + "If the collection has not been created with an initial throughput, you cannot configure it later.", id.Name, id.DatabaseAccountName, err) } } if err = throughputFuture.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting on
ThroughputUpdate future for Cosmos Gremlin Database %q (Account: %q, Database %q): %+v", id.Name, id.DatabaseAccountName, id.Name, err) } } - if _, err = client.GetGremlinDatabase(ctx, id.ResourceGroup, id.Account, id.Name); err != nil { - return fmt.Errorf("Error making get request for Cosmos Gremlin Database %q (Account: %q): %+v", id.Name, id.Account, err) + if _, err = client.GetGremlinDatabase(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name); err != nil { + return fmt.Errorf("Error making get request for Cosmos Gremlin Database %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) } - return resourceArmCosmosGremlinDatabaseRead(d, meta) + return resourceCosmosGremlinDatabaseRead(d, meta) } -func resourceArmCosmosGremlinDatabaseRead(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosGremlinDatabaseRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.GremlinClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -204,29 +204,29 @@ func resourceArmCosmosGremlinDatabaseRead(d *schema.ResourceData, meta interface return err } - resp, err := client.GetGremlinDatabase(ctx, id.ResourceGroup, id.Account, id.Name) + resp, err := client.GetGremlinDatabase(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[INFO] Error reading Cosmos Gremlin Database %q (Account: %q) - removing from state", id.Name, id.Account) + log.Printf("[INFO] Error reading Cosmos Gremlin Database %q (Account: %q) - removing from state", id.Name, id.DatabaseAccountName) d.SetId("") return nil } - return fmt.Errorf("Error reading Cosmos Gremlin Database %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error reading Cosmos Gremlin Database %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) } d.Set("resource_group_name", id.ResourceGroup) - d.Set("account_name", id.Account) + d.Set("account_name", id.DatabaseAccountName) if props := resp.GremlinDatabaseGetProperties; props != nil { if res := props.Resource; res != nil { d.Set("name", res.ID) } } - throughputResp, err := client.GetGremlinDatabaseThroughput(ctx, id.ResourceGroup, id.Account, id.Name) + throughputResp, err := client.GetGremlinDatabaseThroughput(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name) if err != nil { if !utils.ResponseWasNotFound(throughputResp.Response) { - return fmt.Errorf("Error reading Throughput on Cosmos Gremlin Database %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error reading Throughput on Cosmos Gremlin Database %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) } else { d.Set("throughput", nil) d.Set("autoscale_settings", nil) @@ -238,7 +238,7 @@ func resourceArmCosmosGremlinDatabaseRead(d *schema.ResourceData, meta interface return nil } -func resourceArmCosmosGremlinDatabaseDelete(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosGremlinDatabaseDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.GremlinClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() @@ -248,15 +248,15 @@ func resourceArmCosmosGremlinDatabaseDelete(d *schema.ResourceData, meta interfa return err } - future, err := client.DeleteGremlinDatabase(ctx, id.ResourceGroup, id.Account, id.Name) + future, err := client.DeleteGremlinDatabase(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name) if err != 
nil { if !response.WasNotFound(future.Response()) { - return fmt.Errorf("Error deleting Cosmos Gremlin Database %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error deleting Cosmos Gremlin Database %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) } } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting on delete future for Cosmos Gremlin Database %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error waiting on delete future for Cosmos Gremlin Database %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) } return nil diff --git a/azurerm/internal/services/cosmos/cosmosdb_gremlin_database_resource_test.go b/azurerm/internal/services/cosmos/cosmosdb_gremlin_database_resource_test.go index b9e501cbc813..14b4f7013227 100644 --- a/azurerm/internal/services/cosmos/cosmosdb_gremlin_database_resource_test.go +++ b/azurerm/internal/services/cosmos/cosmosdb_gremlin_database_resource_test.go @@ -1,180 +1,127 @@ package cosmos_test import ( + "context" "fmt" - "net/http" "testing" - "github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2020-04-01/documentdb" + "github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/terraform" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func TestAccAzureRMCosmosGremlinDatabase_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cosmosdb_gremlin_database", "test") +type CosmosGremlinDatabaseResource struct { +} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosGremlinDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosGremlinDatabase_basic(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosGremlinDatabaseExists(data.ResourceName), - ), - }, - data.ImportStep(), +func TestAccCosmosGremlinDatabase_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cosmosdb_gremlin_database", "test") + r := CosmosGremlinDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosGremlinDatabase_requiresImport(t *testing.T) { +func TestAccCosmosGremlinDatabase_requiresImport(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_gremlin_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosGremlinDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosGremlinDatabase_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCosmosGremlinDatabaseExists(data.ResourceName), - ), - }, - { - Config: 
testAccAzureRMCosmosDatabase_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_cosmosdb_gremlin_database"), - }, + r := CosmosGremlinDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_cosmosdb_gremlin_database"), }, }) } -func TestAccAzureRMCosmosGremlinDatabase_complete(t *testing.T) { +func TestAccCosmosGremlinDatabase_complete(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_gremlin_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosGremlinDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosGremlinDatabase_complete(data, 700), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosGremlinDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "throughput", "700"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosGremlinDatabase_complete(data, 1700), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosGremlinDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "throughput", "1700"), - ), - }, - data.ImportStep(), + r := CosmosGremlinDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data, 700), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("throughput").HasValue("700"), + ), + }, + data.ImportStep(), + { + Config: r.complete(data, 1700), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("throughput").HasValue("1700"), + ), }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosGremlinDatabase_autoscale(t *testing.T) { +func TestAccCosmosGremlinDatabase_autoscale(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_gremlin_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosGremlinDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosGremlinDatabase_autoscale(data, 4000), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosGremlinDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "autoscale_settings.0.max_throughput", "4000"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosGremlinDatabase_autoscale(data, 5000), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosGremlinDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "autoscale_settings.0.max_throughput", "5000"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosGremlinDatabase_autoscale(data, 4000), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosGremlinDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "autoscale_settings.0.max_throughput", "4000"), - ), - }, - data.ImportStep(), + r := CosmosGremlinDatabaseResource{} + + data.ResourceTest(t, r, 
[]resource.TestStep{ + { + Config: r.autoscale(data, 4000), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("autoscale_settings.0.max_throughput").HasValue("4000"), + ), + }, + data.ImportStep(), + { + Config: r.autoscale(data, 5000), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("autoscale_settings.0.max_throughput").HasValue("5000"), + ), }, + data.ImportStep(), + { + Config: r.autoscale(data, 4000), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("autoscale_settings.0.max_throughput").HasValue("4000"), + ), + }, + data.ImportStep(), }) } -func testCheckAzureRMCosmosGremlinDatabaseDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Cosmos.GremlinClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_cosmosdb_gremlin_database" { - continue - } - - name := rs.Primary.Attributes["name"] - account := rs.Primary.Attributes["account_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.GetGremlinDatabase(ctx, resourceGroup, account, name) - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Error checking destroy for Cosmos Gremlin Database %s (Account %s) still exists:\n%v", name, account, err) - } - } - - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Cosmos Gremlin Database %s (Account %s): still exist:\n%#v", name, account, resp) - } +func (t CosmosGremlinDatabaseResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.GremlinDatabaseID(state.ID) + if err != nil { + return nil, err } - return nil -} - -func testCheckAzureRMCosmosGremlinDatabaseExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Cosmos.GremlinClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - account := rs.Primary.Attributes["account_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.GetGremlinDatabase(ctx, resourceGroup, account, name) - if err != nil { - return fmt.Errorf("Bad: Get on cosmosAccountsClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Cosmos database '%s' (Account: '%s') does not exist", name, account) - } - - return nil + resp, err := clients.Cosmos.GremlinClient.GetGremlinDatabase(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name) + if err != nil { + return nil, fmt.Errorf("reading Cosmos Gremlin Database (%s): %+v", id.String(), err) } + + return utils.Bool(resp.ID != nil), nil } -func testAccAzureRMCosmosGremlinDatabase_basic(data acceptance.TestData) string { +func (CosmosGremlinDatabaseResource) basic(data acceptance.TestData) string { return fmt.Sprintf(` %[1]s @@ -183,10 +130,10 @@ resource "azurerm_cosmosdb_gremlin_database" "test" { resource_group_name = 
azurerm_cosmosdb_account.test.resource_group_name account_name = azurerm_cosmosdb_account.test.name } -`, testAccAzureRMCosmosDBAccount_capabilities(data, documentdb.GlobalDocumentDB, []string{"EnableGremlin"}), data.RandomInteger) +`, CosmosDBAccountResource{}.capabilities(data, documentdb.GlobalDocumentDB, []string{"EnableGremlin"}), data.RandomInteger) } -func testAccAzureRMCosmosDatabase_requiresImport(data acceptance.TestData) string { +func (r CosmosGremlinDatabaseResource) requiresImport(data acceptance.TestData) string { return fmt.Sprintf(` %s @@ -195,10 +142,10 @@ resource "azurerm_cosmosdb_gremlin_database" "import" { resource_group_name = azurerm_cosmosdb_gremlin_database.test.resource_group_name account_name = azurerm_cosmosdb_gremlin_database.test.account_name } -`, testAccAzureRMCosmosGremlinDatabase_basic(data)) +`, r.basic(data)) } -func testAccAzureRMCosmosGremlinDatabase_complete(data acceptance.TestData, throughput int) string { +func (CosmosGremlinDatabaseResource) complete(data acceptance.TestData, throughput int) string { return fmt.Sprintf(` %[1]s @@ -208,10 +155,10 @@ resource "azurerm_cosmosdb_gremlin_database" "test" { account_name = azurerm_cosmosdb_account.test.name throughput = %[3]d } -`, testAccAzureRMCosmosDBAccount_capabilities(data, documentdb.GlobalDocumentDB, []string{"EnableGremlin"}), data.RandomInteger, throughput) +`, CosmosDBAccountResource{}.capabilities(data, documentdb.GlobalDocumentDB, []string{"EnableGremlin"}), data.RandomInteger, throughput) } -func testAccAzureRMCosmosGremlinDatabase_autoscale(data acceptance.TestData, maxThroughput int) string { +func (CosmosGremlinDatabaseResource) autoscale(data acceptance.TestData, maxThroughput int) string { return fmt.Sprintf(` %[1]s @@ -223,5 +170,5 @@ resource "azurerm_cosmosdb_gremlin_database" "test" { max_throughput = %[3]d } } -`, testAccAzureRMCosmosDBAccount_capabilities(data, documentdb.GlobalDocumentDB, []string{"EnableGremlin"}), data.RandomInteger, maxThroughput) +`, CosmosDBAccountResource{}.capabilities(data, documentdb.GlobalDocumentDB, []string{"EnableGremlin"}), data.RandomInteger, maxThroughput) } diff --git a/azurerm/internal/services/cosmos/cosmosdb_gremlin_graph_resource.go b/azurerm/internal/services/cosmos/cosmosdb_gremlin_graph_resource.go index 0e76f7ca76fe..ab33fac24416 100644 --- a/azurerm/internal/services/cosmos/cosmosdb_gremlin_graph_resource.go +++ b/azurerm/internal/services/cosmos/cosmosdb_gremlin_graph_resource.go @@ -5,7 +5,7 @@ import ( "log" "time" - "github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2020-04-01/documentdb" + "github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb" "github.com/hashicorp/go-azure-helpers/response" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" @@ -21,12 +21,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmCosmosDbGremlinGraph() *schema.Resource { +func resourceCosmosDbGremlinGraph() *schema.Resource { return &schema.Resource{ - Create: resourceArmCosmosDbGremlinGraphCreate, - Read: resourceArmCosmosDbGremlinGraphRead, - Update: resourceArmCosmosDbGremlinGraphUpdate, - Delete: resourceArmCosmosDbGremlinGraphDelete, + Create: resourceCosmosDbGremlinGraphCreate, + Read: resourceCosmosDbGremlinGraphRead, + Update: resourceCosmosDbGremlinGraphUpdate, + Delete: resourceCosmosDbGremlinGraphDelete, Importer: &schema.ResourceImporter{ State: 
schema.ImportStatePassthrough, @@ -186,7 +186,7 @@ func resourceArmCosmosDbGremlinGraph() *schema.Resource { } } -func resourceArmCosmosDbGremlinGraphCreate(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbGremlinGraphCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.GremlinClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -263,10 +263,10 @@ func resourceArmCosmosDbGremlinGraphCreate(d *schema.ResourceData, meta interfac d.SetId(*resp.ID) - return resourceArmCosmosDbGremlinGraphRead(d, meta) + return resourceCosmosDbGremlinGraphRead(d, meta) } -func resourceArmCosmosDbGremlinGraphUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbGremlinGraphUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.GremlinClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -278,7 +278,7 @@ func resourceArmCosmosDbGremlinGraphUpdate(d *schema.ResourceData, meta interfac err = common.CheckForChangeFromAutoscaleAndManualThroughput(d) if err != nil { - return fmt.Errorf("Error updating Cosmos Gremlin Graph %q (Account: %q, Database: %q): %+v", id.Name, id.Account, id.Database, err) + return fmt.Errorf("Error updating Cosmos Gremlin Graph %q (Account: %q, Database: %q): %+v", id.GraphName, id.DatabaseAccountName, id.GremlinDatabaseName, err) } partitionkeypaths := d.Get("partition_key_path").(string) @@ -286,7 +286,7 @@ func resourceArmCosmosDbGremlinGraphUpdate(d *schema.ResourceData, meta interfac db := documentdb.GremlinGraphCreateUpdateParameters{ GremlinGraphCreateUpdateProperties: &documentdb.GremlinGraphCreateUpdateProperties{ Resource: &documentdb.GremlinGraphResource{ - ID: &id.Name, + ID: &id.GraphName, IndexingPolicy: expandAzureRmCosmosDbGrelinGraphIndexingPolicy(d), ConflictResolutionPolicy: expandAzureRmCosmosDbGremlinGraphConflicResolutionPolicy(d), }, @@ -306,34 +306,34 @@ func resourceArmCosmosDbGremlinGraphUpdate(d *schema.ResourceData, meta interfac } } - future, err := client.CreateUpdateGremlinGraph(ctx, id.ResourceGroup, id.Account, id.Database, id.Name, db) + future, err := client.CreateUpdateGremlinGraph(ctx, id.ResourceGroup, id.DatabaseAccountName, id.GremlinDatabaseName, id.GraphName, db) if err != nil { - return fmt.Errorf("Error issuing create/update request for Cosmos Gremlin Graph %q (Account: %q, Database: %q): %+v", id.Name, id.Account, id.Database, err) + return fmt.Errorf("Error issuing create/update request for Cosmos Gremlin Graph %q (Account: %q, Database: %q): %+v", id.GraphName, id.DatabaseAccountName, id.GremlinDatabaseName, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting on create/update future for Cosmos Gremlin Graph %q (Account: %q, Database: %q): %+v", id.Name, id.Account, id.Database, err) + return fmt.Errorf("Error waiting on create/update future for Cosmos Gremlin Graph %q (Account: %q, Database: %q): %+v", id.GraphName, id.DatabaseAccountName, id.GremlinDatabaseName, err) } if common.HasThroughputChange(d) { throughputParameters := common.ExpandCosmosDBThroughputSettingsUpdateParameters(d) - throughputFuture, err := client.UpdateGremlinGraphThroughput(ctx, id.ResourceGroup, id.Account, id.Database, id.Name, *throughputParameters) + throughputFuture, err := client.UpdateGremlinGraphThroughput(ctx, id.ResourceGroup, id.DatabaseAccountName, id.GremlinDatabaseName, 
id.GraphName, *throughputParameters) if err != nil { if response.WasNotFound(throughputFuture.Response()) { return fmt.Errorf("Error setting Throughput for Cosmos Gremlin Graph %q (Account: %q, Database: %q): %+v - "+ - "If the graph has not been created with an initial throughput, you cannot configure it later.", id.Name, id.Account, id.Database, err) + "If the graph has not been created with an initial throughput, you cannot configure it later.", id.GraphName, id.DatabaseAccountName, id.GremlinDatabaseName, err) } } if err = throughputFuture.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting on ThroughputUpdate future for Cosmos Gremlin Graph %q (Account: %q, Database: %q): %+v", id.Name, id.Account, id.Database, err) + return fmt.Errorf("Error waiting on ThroughputUpdate future for Cosmos Gremlin Graph %q (Account: %q, Database: %q): %+v", id.GraphName, id.DatabaseAccountName, id.GremlinDatabaseName, err) } } - return resourceArmCosmosDbGremlinGraphRead(d, meta) + return resourceCosmosDbGremlinGraphRead(d, meta) } -func resourceArmCosmosDbGremlinGraphRead(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbGremlinGraphRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.GremlinClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -343,21 +343,21 @@ func resourceArmCosmosDbGremlinGraphRead(d *schema.ResourceData, meta interface{ return err } - resp, err := client.GetGremlinGraph(ctx, id.ResourceGroup, id.Account, id.Database, id.Name) + resp, err := client.GetGremlinGraph(ctx, id.ResourceGroup, id.DatabaseAccountName, id.GremlinDatabaseName, id.GraphName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[INFO] Error reading Cosmos Gremlin Graph %q (Account: %q) - removing from state", id.Name, id.Account) + log.Printf("[INFO] Error reading Cosmos Gremlin Graph %q (Account: %q) - removing from state", id.GraphName, id.DatabaseAccountName) d.SetId("") return nil } - return fmt.Errorf("Error reading Cosmos Gremlin Graph %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error reading Cosmos Gremlin Graph %q (Account: %q): %+v", id.GraphName, id.DatabaseAccountName, err) } - d.Set("name", id.Name) + d.Set("name", id.GraphName) d.Set("resource_group_name", id.ResourceGroup) - d.Set("account_name", id.Account) - d.Set("database_name", id.Database) + d.Set("account_name", id.DatabaseAccountName) + d.Set("database_name", id.GremlinDatabaseName) if graphProperties := resp.GremlinGraphGetProperties; graphProperties != nil { if props := graphProperties.Resource; props != nil { @@ -391,10 +391,10 @@ func resourceArmCosmosDbGremlinGraphRead(d *schema.ResourceData, meta interface{ } } - throughputResp, err := client.GetGremlinGraphThroughput(ctx, id.ResourceGroup, id.Account, id.Database, id.Name) + throughputResp, err := client.GetGremlinGraphThroughput(ctx, id.ResourceGroup, id.DatabaseAccountName, id.GremlinDatabaseName, id.GraphName) if err != nil { if !utils.ResponseWasNotFound(throughputResp.Response) { - return fmt.Errorf("Error reading Throughput on Gremlin Graph %q (Account: %q, Database: %q) ID: %v", id.Name, id.Account, id.Database, err) + return fmt.Errorf("Error reading Throughput on Gremlin Graph %q (Account: %q, Database: %q) ID: %v", id.GraphName, id.DatabaseAccountName, id.GremlinDatabaseName, err) } else { d.Set("throughput", nil) d.Set("autoscale_settings", nil) @@ -406,7 +406,7 @@ func 
resourceArmCosmosDbGremlinGraphRead(d *schema.ResourceData, meta interface{ return nil } -func resourceArmCosmosDbGremlinGraphDelete(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbGremlinGraphDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.GremlinClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() @@ -416,15 +416,15 @@ func resourceArmCosmosDbGremlinGraphDelete(d *schema.ResourceData, meta interfac return err } - future, err := client.DeleteGremlinGraph(ctx, id.ResourceGroup, id.Account, id.Database, id.Name) + future, err := client.DeleteGremlinGraph(ctx, id.ResourceGroup, id.DatabaseAccountName, id.GremlinDatabaseName, id.GraphName) if err != nil { if !response.WasNotFound(future.Response()) { - return fmt.Errorf("Error deleting Cosmos Gremlin Graph %q (Account: %q): %+v", id.Database, id.Name, err) + return fmt.Errorf("Error deleting Cosmos Gremlin Graph %q (Account: %q): %+v", id.GraphName, id.DatabaseAccountName, err) } } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting on delete future for Comos Gremlin Graph %q (Account: %q): %+v", id.Database, id.Account, err) + return fmt.Errorf("Error waiting on delete future for Cosmos Gremlin Graph %q (Account: %q): %+v", id.GraphName, id.DatabaseAccountName, err) } return nil diff --git a/azurerm/internal/services/cosmos/cosmosdb_gremlin_graph_resource_test.go b/azurerm/internal/services/cosmos/cosmosdb_gremlin_graph_resource_test.go index dc99084da329..8420cdd707b1 100644 --- a/azurerm/internal/services/cosmos/cosmosdb_gremlin_graph_resource_test.go +++ b/azurerm/internal/services/cosmos/cosmosdb_gremlin_graph_resource_test.go @@ -1,219 +1,156 @@ package cosmos_test import ( + "context" "fmt" - "net/http" "testing" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/terraform" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func TestAccAzureRMCosmosDbGremlinGraph_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cosmosdb_gremlin_graph", "test") +type CosmosGremlinGraphResource struct { +} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbGremlinGraphDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDbGremlinGraph_basic(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRmCosmosDbGremlinGraphExists(data.ResourceName), - ), - }, - data.ImportStep(), +func TestAccCosmosDbGremlinGraph_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cosmosdb_gremlin_graph", "test") + r := CosmosGremlinGraphResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDbGremlinGraph_requiresImport(t *testing.T) { +func
TestAccCosmosDbGremlinGraph_requiresImport(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_gremlin_graph", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbGremlinGraphDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDbGremlinGraph_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRmCosmosDbGremlinGraphExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMCosmosDbGremlinGraph_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_cosmosdb_gremlin_graph"), - }, + r := CosmosGremlinGraphResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_cosmosdb_gremlin_graph"), }, }) } -func TestAccAzureRMCosmosDbGremlinGraph_customConflictResolutionPolicy(t *testing.T) { +func TestAccCosmosDbGremlinGraph_customConflictResolutionPolicy(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_gremlin_graph", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbGremlinGraphDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDbGremlinGraph_customConflictResolutionPolicy(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRmCosmosDbGremlinGraphExists(data.ResourceName), - ), - }, - data.ImportStep(), + r := CosmosGremlinGraphResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.customConflictResolutionPolicy(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDbGremlinGraph_indexPolicy(t *testing.T) { +func TestAccCosmosDbGremlinGraph_indexPolicy(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_gremlin_graph", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbGremlinGraphDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDbGremlinGraph_indexPolicy(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRmCosmosDbGremlinGraphExists(data.ResourceName), - ), - }, - data.ImportStep(), + r := CosmosGremlinGraphResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.indexPolicy(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDbGremlinGraph_update(t *testing.T) { +func TestAccCosmosDbGremlinGraph_update(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_gremlin_graph", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbGremlinGraphDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDbGremlinGraph_update(data, 700), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRmCosmosDbGremlinGraphExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, 
"throughput", "700"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosDbGremlinGraph_update(data, 1700), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRmCosmosDbGremlinGraphExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "throughput", "1700"), - ), - }, - data.ImportStep(), + r := CosmosGremlinGraphResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.update(data, 700), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("throughput").HasValue("700"), + ), }, + data.ImportStep(), + { + Config: r.update(data, 1700), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("throughput").HasValue("1700"), + ), + }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDbGremlinGraph_autoscale(t *testing.T) { +func TestAccCosmosDbGremlinGraph_autoscale(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_gremlin_graph", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbGremlinGraphDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDbGremlinGraph_autoscale(data, 4000), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRmCosmosDbGremlinGraphExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "autoscale_settings.0.max_throughput", "4000"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosDbGremlinGraph_autoscale(data, 5000), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRmCosmosDbGremlinGraphExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "autoscale_settings.0.max_throughput", "5000"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosDbGremlinGraph_autoscale(data, 4000), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRmCosmosDbGremlinGraphExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "autoscale_settings.0.max_throughput", "4000"), - ), - }, - data.ImportStep(), + r := CosmosGremlinGraphResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.autoscale(data, 4000), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("autoscale_settings.0.max_throughput").HasValue("4000"), + ), }, + data.ImportStep(), + { + Config: r.autoscale(data, 5000), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("autoscale_settings.0.max_throughput").HasValue("5000"), + ), + }, + data.ImportStep(), + { + Config: r.autoscale(data, 4000), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("autoscale_settings.0.max_throughput").HasValue("4000"), + ), + }, + data.ImportStep(), }) } -func testCheckAzureRMCosmosDbGremlinGraphDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Cosmos.GremlinClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_cosmosdb_gremlin_graph" { - continue - } - - name := rs.Primary.Attributes["name"] - 
account := rs.Primary.Attributes["account_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - database := rs.Primary.Attributes["database_name"] - - resp, err := client.GetGremlinGraph(ctx, resourceGroup, account, database, name) - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Error checking destroy for Cosmos Gremlin Graph %s (Account %s) still exists:\n%v", name, account, err) - } - } - - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Cosmos Gremlin Graph %s (Account %s) still exists:\n%#v", name, account, resp) - } +func (t CosmosGremlinGraphResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.GremlinGraphID(state.ID) + if err != nil { + return nil, err } - return nil -} - -func testCheckAzureRmCosmosDbGremlinGraphExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Cosmos.GremlinClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not fount: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - account := rs.Primary.Attributes["account_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - database := rs.Primary.Attributes["database_name"] - - resp, err := client.GetGremlinGraph(ctx, resourceGroup, account, database, name) - if err != nil { - return fmt.Errorf("Bad: Get on cosmosAccountsClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Cosmos Graph '%s' (Account: '%s') does not exist", name, account) - } - return nil + resp, err := clients.Cosmos.GremlinClient.GetGremlinGraph(ctx, id.ResourceGroup, id.DatabaseAccountName, id.GremlinDatabaseName, id.GraphName) + if err != nil { + return nil, fmt.Errorf("reading Cosmos Gremlin Graph (%s): %+v", id.String(), err) } + + return utils.Bool(resp.ID != nil), nil } -func testAccAzureRMCosmosDbGremlinGraph_basic(data acceptance.TestData) string { +func (CosmosGremlinGraphResource) basic(data acceptance.TestData) string { return fmt.Sprintf(` %[1]s @@ -236,10 +173,10 @@ resource "azurerm_cosmosdb_gremlin_graph" "test" { conflict_resolution_path = "/_ts" } } -`, testAccAzureRMCosmosGremlinDatabase_basic(data), data.RandomInteger) +`, CosmosGremlinDatabaseResource{}.basic(data), data.RandomInteger) } -func testAccAzureRMCosmosDbGremlinGraph_requiresImport(data acceptance.TestData) string { +func (r CosmosGremlinGraphResource) requiresImport(data acceptance.TestData) string { return fmt.Sprintf(` %s @@ -261,10 +198,10 @@ resource "azurerm_cosmosdb_gremlin_graph" "import" { conflict_resolution_path = "/_ts" } } -`, testAccAzureRMCosmosDbGremlinGraph_basic(data)) +`, r.basic(data)) } -func testAccAzureRMCosmosDbGremlinGraph_customConflictResolutionPolicy(data acceptance.TestData) string { +func (CosmosGremlinGraphResource) customConflictResolutionPolicy(data acceptance.TestData) string { return fmt.Sprintf(` %[1]s @@ -287,10 +224,10 @@ resource "azurerm_cosmosdb_gremlin_graph" "test" { conflict_resolution_procedure = "dbs/{0}/colls/{1}/sprocs/{2}" } } -`, testAccAzureRMCosmosGremlinDatabase_basic(data), data.RandomInteger) +`, CosmosGremlinDatabaseResource{}.basic(data), data.RandomInteger) } -func 
testAccAzureRMCosmosDbGremlinGraph_indexPolicy(data acceptance.TestData) string { +func (CosmosGremlinGraphResource) indexPolicy(data acceptance.TestData) string { return fmt.Sprintf(` %[1]s @@ -311,10 +248,10 @@ resource "azurerm_cosmosdb_gremlin_graph" "test" { conflict_resolution_path = "/_ts" } } -`, testAccAzureRMCosmosGremlinDatabase_basic(data), data.RandomInteger) +`, CosmosGremlinDatabaseResource{}.basic(data), data.RandomInteger) } -func testAccAzureRMCosmosDbGremlinGraph_update(data acceptance.TestData, throughput int) string { +func (CosmosGremlinGraphResource) update(data acceptance.TestData, throughput int) string { return fmt.Sprintf(` %[1]s @@ -342,10 +279,10 @@ resource "azurerm_cosmosdb_gremlin_graph" "test" { paths = ["/definition/id1", "/definition/id2"] } } -`, testAccAzureRMCosmosGremlinDatabase_basic(data), data.RandomInteger, throughput) +`, CosmosGremlinDatabaseResource{}.basic(data), data.RandomInteger, throughput) } -func testAccAzureRMCosmosDbGremlinGraph_autoscale(data acceptance.TestData, maxThroughput int) string { +func (CosmosGremlinGraphResource) autoscale(data acceptance.TestData, maxThroughput int) string { return fmt.Sprintf(` %[1]s @@ -372,5 +309,5 @@ resource "azurerm_cosmosdb_gremlin_graph" "test" { conflict_resolution_path = "/_ts" } } -`, testAccAzureRMCosmosGremlinDatabase_basic(data), data.RandomInteger, maxThroughput) +`, CosmosGremlinDatabaseResource{}.basic(data), data.RandomInteger, maxThroughput) } diff --git a/azurerm/internal/services/cosmos/cosmosdb_mongo_collection_resource.go b/azurerm/internal/services/cosmos/cosmosdb_mongo_collection_resource.go index 5db8e30c5ace..f174fcae46b8 100644 --- a/azurerm/internal/services/cosmos/cosmosdb_mongo_collection_resource.go +++ b/azurerm/internal/services/cosmos/cosmosdb_mongo_collection_resource.go @@ -5,7 +5,7 @@ import ( "log" "time" - "github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2020-04-01/documentdb" + "github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb" "github.com/hashicorp/go-azure-helpers/response" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" @@ -20,12 +20,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmCosmosDbMongoCollection() *schema.Resource { +func resourceCosmosDbMongoCollection() *schema.Resource { return &schema.Resource{ - Create: resourceArmCosmosDbMongoCollectionCreate, - Read: resourceArmCosmosDbMongoCollectionRead, - Update: resourceArmCosmosDbMongoCollectionUpdate, - Delete: resourceArmCosmosDbMongoCollectionDelete, + Create: resourceCosmosDbMongoCollectionCreate, + Read: resourceCosmosDbMongoCollectionRead, + Update: resourceCosmosDbMongoCollectionUpdate, + Delete: resourceCosmosDbMongoCollectionDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -137,7 +137,7 @@ func resourceArmCosmosDbMongoCollection() *schema.Resource { } } -func resourceArmCosmosDbMongoCollectionCreate(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbMongoCollectionCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.MongoDbClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -211,22 +211,22 @@ func resourceArmCosmosDbMongoCollectionCreate(d *schema.ResourceData, meta inter d.SetId(*resp.ID) - return resourceArmCosmosDbMongoCollectionRead(d, meta) + return 
resourceCosmosDbMongoCollectionRead(d, meta) } -func resourceArmCosmosDbMongoCollectionUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbMongoCollectionUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.MongoDbClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.MongoDbCollectionID(d.Id()) + id, err := parse.MongodbCollectionID(d.Id()) if err != nil { return err } err = common.CheckForChangeFromAutoscaleAndManualThroughput(d) if err != nil { - return fmt.Errorf("Error updating Cosmos Mongo Collection %q (Account: %q, Database: %q): %+v", id.Name, id.Account, id.Database, err) + return fmt.Errorf("Error updating Cosmos Mongo Collection %q (Account: %q, Database: %q): %+v", id.CollectionName, id.DatabaseAccountName, id.MongodbDatabaseName, err) } var ttl *int @@ -237,7 +237,7 @@ func resourceArmCosmosDbMongoCollectionUpdate(d *schema.ResourceData, meta inter db := documentdb.MongoDBCollectionCreateUpdateParameters{ MongoDBCollectionCreateUpdateProperties: &documentdb.MongoDBCollectionCreateUpdateProperties{ Resource: &documentdb.MongoDBCollectionResource{ - ID: &id.Name, + ID: &id.CollectionName, Indexes: expandCosmosMongoCollectionIndex(d.Get("index").(*schema.Set).List(), ttl), }, Options: &documentdb.CreateUpdateOptions{}, @@ -250,57 +250,63 @@ func resourceArmCosmosDbMongoCollectionUpdate(d *schema.ResourceData, meta inter } } - future, err := client.CreateUpdateMongoDBCollection(ctx, id.ResourceGroup, id.Account, id.Database, id.Name, db) + future, err := client.CreateUpdateMongoDBCollection(ctx, id.ResourceGroup, id.DatabaseAccountName, id.MongodbDatabaseName, id.CollectionName, db) if err != nil { - return fmt.Errorf("Error issuing create/update request for Cosmos Mongo Collection %q (Account: %q, Database: %q): %+v", id.Name, id.Account, id.Database, err) + return fmt.Errorf("Error issuing create/update request for Cosmos Mongo Collection %q (Account: %q, Database: %q): %+v", id.CollectionName, id.DatabaseAccountName, id.MongodbDatabaseName, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting on create/update future for Cosmos Mongo Collection %q (Account: %q, Database: %q): %+v", id.Name, id.Account, id.Database, err) + return fmt.Errorf("Error waiting on create/update future for Cosmos Mongo Collection %q (Account: %q, Database: %q): %+v", id.CollectionName, id.DatabaseAccountName, id.MongodbDatabaseName, err) } if common.HasThroughputChange(d) { throughputParameters := common.ExpandCosmosDBThroughputSettingsUpdateParameters(d) - throughputFuture, err := client.UpdateMongoDBCollectionThroughput(ctx, id.ResourceGroup, id.Account, id.Database, id.Name, *throughputParameters) + throughputFuture, err := client.UpdateMongoDBCollectionThroughput(ctx, id.ResourceGroup, id.DatabaseAccountName, id.MongodbDatabaseName, id.CollectionName, *throughputParameters) if err != nil { if response.WasNotFound(throughputFuture.Response()) { return fmt.Errorf("Error setting Throughput for Cosmos MongoDB Collection %q (Account: %q, Database: %q): %+v - "+ - "If the collection has not been created with an initial throughput, you cannot configure it later.", id.Name, id.Account, id.Database, err) + "If the collection has not been created with an initial throughput, you cannot configure it later.", id.CollectionName, id.DatabaseAccountName, id.MongodbDatabaseName, err) } } if err = 
throughputFuture.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting on ThroughputUpdate future for Cosmos Mongo Collection %q (Account: %q, Database: %q): %+v", id.Name, id.Account, id.Database, err) + return fmt.Errorf("Error waiting on ThroughputUpdate future for Cosmos Mongo Collection %q (Account: %q, Database: %q): %+v", id.CollectionName, id.DatabaseAccountName, id.MongodbDatabaseName, err) } } - return resourceArmCosmosDbMongoCollectionRead(d, meta) + return resourceCosmosDbMongoCollectionRead(d, meta) } -func resourceArmCosmosDbMongoCollectionRead(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbMongoCollectionRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.MongoDbClient + accClient := meta.(*clients.Client).Cosmos.DatabaseClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.MongoDbCollectionID(d.Id()) + id, err := parse.MongodbCollectionID(d.Id()) if err != nil { return err } - resp, err := client.GetMongoDBCollection(ctx, id.ResourceGroup, id.Account, id.Database, id.Name) + resp, err := client.GetMongoDBCollection(ctx, id.ResourceGroup, id.DatabaseAccountName, id.MongodbDatabaseName, id.CollectionName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[INFO] Error reading Cosmos Mongo Collection %q (Account: %q, Database: %q)", id.Name, id.Account, id.Database) + log.Printf("[INFO] Error reading Cosmos Mongo Collection %q (Account: %q, Database: %q)", id.CollectionName, id.DatabaseAccountName, id.MongodbDatabaseName) d.SetId("") return nil } - return fmt.Errorf("Error reading Cosmos Mongo Collection %q (Account: %q, Database: %q): %+v", id.Name, id.Account, id.Database, err) + return fmt.Errorf("Error reading Cosmos Mongo Collection %q (Account: %q, Database: %q): %+v", id.CollectionName, id.DatabaseAccountName, id.MongodbDatabaseName, err) } d.Set("resource_group_name", id.ResourceGroup) - d.Set("account_name", id.Account) - d.Set("database_name", id.Database) + d.Set("account_name", id.DatabaseAccountName) + d.Set("database_name", id.MongodbDatabaseName) + + accResp, err := accClient.Get(ctx, id.ResourceGroup, id.DatabaseAccountName) + if err != nil { + return fmt.Errorf("reading Cosmos Account %q : %+v", id.DatabaseAccountName, err) + } if props := resp.MongoDBCollectionGetProperties; props != nil { if res := props.Resource; res != nil { d.Set("name", res.ID) @@ -313,8 +319,18 @@ func resourceArmCosmosDbMongoCollectionRead(d *schema.ResourceData, meta interfa for k := range res.ShardKey { d.Set("shard_key", k) } + accountIsVersion36 := false + if accProps := accResp.DatabaseAccountGetProperties; accProps != nil { + if capabilities := accProps.Capabilities; capabilities != nil { + for _, v := range *capabilities { + if v.Name != nil && *v.Name == "EnableMongo" { + accountIsVersion36 = true + } + } + } + } - indexes, systemIndexes, ttl := flattenCosmosMongoCollectionIndex(res.Indexes) + indexes, systemIndexes, ttl := flattenCosmosMongoCollectionIndex(res.Indexes, accountIsVersion36) if err := d.Set("default_ttl_seconds", ttl); err != nil { return fmt.Errorf("failed to set `default_ttl_seconds`: %+v", err) } @@ -327,10 +343,10 @@ func resourceArmCosmosDbMongoCollectionRead(d *schema.ResourceData, meta interfa } } - throughputResp, err := client.GetMongoDBCollectionThroughput(ctx, id.ResourceGroup, id.Account, id.Database, id.Name) + throughputResp, err := 
client.GetMongoDBCollectionThroughput(ctx, id.ResourceGroup, id.DatabaseAccountName, id.MongodbDatabaseName, id.CollectionName) if err != nil { if !utils.ResponseWasNotFound(throughputResp.Response) { - return fmt.Errorf("Error reading Throughput on Cosmos Mongo Collection %q (Account: %q, Database: %q): %+v", id.Name, id.Account, id.Database, err) + return fmt.Errorf("Error reading Throughput on Cosmos Mongo Collection %q (Account: %q, Database: %q): %+v", id.CollectionName, id.DatabaseAccountName, id.MongodbDatabaseName, err) } else { d.Set("throughput", nil) d.Set("autoscale_settings", nil) @@ -342,26 +358,26 @@ func resourceArmCosmosDbMongoCollectionRead(d *schema.ResourceData, meta interfa return nil } -func resourceArmCosmosDbMongoCollectionDelete(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbMongoCollectionDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.MongoDbClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.MongoDbCollectionID(d.Id()) + id, err := parse.MongodbCollectionID(d.Id()) if err != nil { return err } - future, err := client.DeleteMongoDBCollection(ctx, id.ResourceGroup, id.Account, id.Database, id.Name) + future, err := client.DeleteMongoDBCollection(ctx, id.ResourceGroup, id.DatabaseAccountName, id.MongodbDatabaseName, id.CollectionName) if err != nil { if !response.WasNotFound(future.Response()) { - return fmt.Errorf("Error deleting Cosmos Mongo Collection %q (Account: %q, Database: %q): %+v", id.Name, id.Account, id.Database, err) + return fmt.Errorf("Error deleting Cosmos Mongo Collection %q (Account: %q, Database: %q): %+v", id.CollectionName, id.DatabaseAccountName, id.MongodbDatabaseName, err) } } err = future.WaitForCompletionRef(ctx, client.Client) if err != nil { - return fmt.Errorf("Error waiting on delete future for Cosmos Mongo Collection %q (Account: %q, Database: %q): %+v", id.Name, id.Account, id.Database, err) + return fmt.Errorf("Error waiting on delete future for Cosmos Mongo Collection %q (Account: %q, Database: %q): %+v", id.CollectionName, id.DatabaseAccountName, id.MongodbDatabaseName, err) } return nil @@ -399,7 +415,7 @@ func expandCosmosMongoCollectionIndex(indexes []interface{}, defaultTtl *int) *[ return &results } -func flattenCosmosMongoCollectionIndex(input *[]documentdb.MongoIndex) (*[]map[string]interface{}, *[]map[string]interface{}, *int32) { +func flattenCosmosMongoCollectionIndex(input *[]documentdb.MongoIndex, accountIsVersion36 bool) (*[]map[string]interface{}, *[]map[string]interface{}, *int32) { indexes := make([]map[string]interface{}, 0) systemIndexes := make([]map[string]interface{}, 0) var ttl *int32 @@ -422,6 +438,13 @@ func flattenCosmosMongoCollectionIndex(input *[]documentdb.MongoIndex) (*[]map[s systemIndex["unique"] = true systemIndexes = append(systemIndexes, systemIndex) + + if accountIsVersion36 { + index["keys"] = utils.FlattenStringSlice(v.Key.Keys) + index["unique"] = true + indexes = append(indexes, index) + } + case "DocumentDBDefaultIndex": // Updating system index `DocumentDBDefaultIndex` is not a supported scenario. 
systemIndex["keys"] = utils.FlattenStringSlice(v.Key.Keys) diff --git a/azurerm/internal/services/cosmos/cosmosdb_mongo_collection_resource_test.go b/azurerm/internal/services/cosmos/cosmosdb_mongo_collection_resource_test.go index baa182722bae..c61415262e09 100644 --- a/azurerm/internal/services/cosmos/cosmosdb_mongo_collection_resource_test.go +++ b/azurerm/internal/services/cosmos/cosmosdb_mongo_collection_resource_test.go @@ -1,243 +1,196 @@ package cosmos_test import ( + "context" "fmt" - "net/http" "testing" + "github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/terraform" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func TestAccAzureRMCosmosDbMongoCollection_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cosmosdb_mongo_collection", "test") +type CosmosMongoCollectionResource struct { +} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbMongoCollectionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDbMongoCollection_basic(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbMongoCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "throughput", "400"), - ), - }, - data.ImportStep(), +func TestAccCosmosDbMongoCollection_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cosmosdb_mongo_collection", "test") + r := CosmosMongoCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("throughput").HasValue("400"), + ), }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDbMongoCollection_complete(t *testing.T) { +func TestAccCosmosDbMongoCollection_complete(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_mongo_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbMongoCollectionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDbMongoCollection_complete(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbMongoCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "shard_key", "seven"), - resource.TestCheckResourceAttr(data.ResourceName, "default_ttl_seconds", "707"), - ), - }, - data.ImportStep(), + r := CosmosMongoCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("shard_key").HasValue("seven"), + check.That(data.ResourceName).Key("default_ttl_seconds").HasValue("707"), + ), }, + 
data.ImportStep(), }) } -func TestAccAzureRMCosmosDbMongoCollection_update(t *testing.T) { +func TestAccCosmosDbMongoCollection_update(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_mongo_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbMongoCollectionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDbMongoCollection_basic(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbMongoCollectionExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMCosmosDbMongoCollection_complete(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbMongoCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "shard_key", "seven"), - resource.TestCheckResourceAttr(data.ResourceName, "default_ttl_seconds", "707"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosDbMongoCollection_updated(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbMongoCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "default_ttl_seconds", "70707"), - ), - }, - data.ImportStep(), + r := CosmosMongoCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.complete(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("shard_key").HasValue("seven"), + check.That(data.ResourceName).Key("default_ttl_seconds").HasValue("707"), + ), }, + data.ImportStep(), + { + Config: r.updated(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("default_ttl_seconds").HasValue("70707"), + ), + }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDbMongoCollection_throughput(t *testing.T) { +func TestAccCosmosDbMongoCollection_throughput(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_mongo_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbMongoCollectionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDbMongoCollection_throughput(data, 700), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbMongoCollectionExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosDbMongoCollection_throughput(data, 1400), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbMongoCollectionExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosDbMongoCollection_basic(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbMongoCollectionExists(data.ResourceName), - ), - }, - data.ImportStep(), + r := CosmosMongoCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.throughput(data, 700), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.throughput(data, 1400), + Check: 
resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDbMongoCollection_withIndex(t *testing.T) { +func TestAccCosmosDbMongoCollection_withIndex(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_mongo_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbMongoCollectionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDbMongoCollection_withIndex(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbMongoCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "default_ttl_seconds", "707"), - resource.TestCheckResourceAttr(data.ResourceName, "index.#", "3"), - resource.TestCheckResourceAttr(data.ResourceName, "system_indexes.#", "2"), - ), - }, - data.ImportStep(), + r := CosmosMongoCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withIndex(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("default_ttl_seconds").HasValue("707"), + check.That(data.ResourceName).Key("index.#").HasValue("3"), + check.That(data.ResourceName).Key("system_indexes.#").HasValue("2"), + ), }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDbMongoCollection_autoscale(t *testing.T) { +func TestAccCosmosDbMongoCollection_autoscale(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_mongo_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbMongoCollectionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDbMongoCollection_autoscale(data, 4000), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbMongoCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "autoscale_settings.0.max_throughput", "4000"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosDbMongoCollection_autoscale(data, 5000), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbMongoCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "autoscale_settings.0.max_throughput", "5000"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosDbMongoCollection_autoscale(data, 4000), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbMongoCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "autoscale_settings.0.max_throughput", "4000"), - ), - }, - data.ImportStep(), + r := CosmosMongoCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.autoscale(data, 4000), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("autoscale_settings.0.max_throughput").HasValue("4000"), + ), + }, + data.ImportStep(), + { + Config: r.autoscale(data, 5000), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + 
check.That(data.ResourceName).Key("autoscale_settings.0.max_throughput").HasValue("5000"), + ), + }, + data.ImportStep(), + { + Config: r.autoscale(data, 4000), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("autoscale_settings.0.max_throughput").HasValue("4000"), + ), }, + data.ImportStep(), }) } -func testCheckAzureRMCosmosDbMongoCollectionDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Cosmos.MongoDbClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_cosmosdb_mongo_collection" { - continue - } - - name := rs.Primary.Attributes["name"] - account := rs.Primary.Attributes["account_name"] - database := rs.Primary.Attributes["database_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.GetMongoDBCollection(ctx, resourceGroup, account, database, name) - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Error checking destroy for Cosmos Mongo Collection %s (account %s, database %s) still exists:\n%v", name, account, database, err) - } - } - - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Cosmos Mongo Collection %s (account %s) still exists:\n%#v", name, account, resp) - } - } - - return nil +func TestAccCosmosDbMongoCollection_ver36(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cosmosdb_mongo_collection", "test") + r := CosmosMongoCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.ver36(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) } -func testCheckAzureRMCosmosDbMongoCollectionExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Cosmos.MongoDbClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - account := rs.Primary.Attributes["account_name"] - database := rs.Primary.Attributes["database_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.GetMongoDBCollection(ctx, resourceGroup, account, database, name) - if err != nil { - return fmt.Errorf("Bad: Get on cosmosAccountsClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Cosmos database '%s' (account: '%s', database: %s) does not exist", name, account, database) - } +func (t CosmosMongoCollectionResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.MongodbCollectionID(state.ID) + if err != nil { + return nil, err + } - return nil + resp, err := clients.Cosmos.MongoDbClient.GetMongoDBCollection(ctx, id.ResourceGroup, id.DatabaseAccountName, id.MongodbDatabaseName, id.CollectionName) + if err != nil { + return nil, fmt.Errorf("reading Cosmos Mongo Collection (%s): %+v", id.String(), err) } + + return utils.Bool(resp.ID != nil), nil } -func testAccAzureRMCosmosDbMongoCollection_basic(data acceptance.TestData) string { +func 
(CosmosMongoCollectionResource) basic(data acceptance.TestData) string { return fmt.Sprintf(` %[1]s @@ -247,10 +200,10 @@ resource "azurerm_cosmosdb_mongo_collection" "test" { account_name = azurerm_cosmosdb_mongo_database.test.account_name database_name = azurerm_cosmosdb_mongo_database.test.name } -`, testAccAzureRMCosmosDbMongoDatabase_basic(data), data.RandomInteger) +`, CosmosMongoDatabaseResource{}.basic(data), data.RandomInteger) } -func testAccAzureRMCosmosDbMongoCollection_complete(data acceptance.TestData) string { +func (CosmosMongoCollectionResource) complete(data acceptance.TestData) string { return fmt.Sprintf(` %[1]s @@ -263,10 +216,10 @@ resource "azurerm_cosmosdb_mongo_collection" "test" { shard_key = "seven" default_ttl_seconds = 707 } -`, testAccAzureRMCosmosDbMongoDatabase_basic(data), data.RandomInteger) +`, CosmosMongoDatabaseResource{}.basic(data), data.RandomInteger) } -func testAccAzureRMCosmosDbMongoCollection_updated(data acceptance.TestData) string { +func (CosmosMongoCollectionResource) updated(data acceptance.TestData) string { return fmt.Sprintf(` %[1]s @@ -279,10 +232,10 @@ resource "azurerm_cosmosdb_mongo_collection" "test" { shard_key = "seven" default_ttl_seconds = 70707 } -`, testAccAzureRMCosmosDbMongoDatabase_basic(data), data.RandomInteger) +`, CosmosMongoDatabaseResource{}.basic(data), data.RandomInteger) } -func testAccAzureRMCosmosDbMongoCollection_throughput(data acceptance.TestData, throughput int) string { +func (CosmosMongoCollectionResource) throughput(data acceptance.TestData, throughput int) string { return fmt.Sprintf(` %[1]s @@ -294,10 +247,10 @@ resource "azurerm_cosmosdb_mongo_collection" "test" { throughput = %[3]d } -`, testAccAzureRMCosmosDbMongoDatabase_basic(data), data.RandomInteger, throughput) +`, CosmosMongoDatabaseResource{}.basic(data), data.RandomInteger, throughput) } -func testAccAzureRMCosmosDbMongoCollection_autoscale(data acceptance.TestData, maxThroughput int) string { +func (CosmosMongoCollectionResource) autoscale(data acceptance.TestData, maxThroughput int) string { return fmt.Sprintf(` %[1]s @@ -312,10 +265,10 @@ resource "azurerm_cosmosdb_mongo_collection" "test" { max_throughput = %[3]d } } -`, testAccAzureRMCosmosDbMongoDatabase_basic(data), data.RandomInteger, maxThroughput) +`, CosmosMongoDatabaseResource{}.basic(data), data.RandomInteger, maxThroughput) } -func testAccAzureRMCosmosDbMongoCollection_withIndex(data acceptance.TestData) string { +func (CosmosMongoCollectionResource) withIndex(data acceptance.TestData) string { return fmt.Sprintf(` %[1]s @@ -341,5 +294,29 @@ resource "azurerm_cosmosdb_mongo_collection" "test" { keys = ["month"] } } -`, testAccAzureRMCosmosDbMongoDatabase_basic(data), data.RandomInteger) +`, CosmosMongoDatabaseResource{}.basic(data), data.RandomInteger) +} + +func (CosmosMongoCollectionResource) ver36(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_cosmosdb_mongo_database" "test" { + name = "acctest-%[2]d" + resource_group_name = azurerm_cosmosdb_account.test.resource_group_name + account_name = azurerm_cosmosdb_account.test.name +} + +resource "azurerm_cosmosdb_mongo_collection" "test" { + name = "acctest-%[2]d" + resource_group_name = azurerm_cosmosdb_mongo_database.test.resource_group_name + account_name = azurerm_cosmosdb_mongo_database.test.account_name + database_name = azurerm_cosmosdb_mongo_database.test.name + + index { + keys = ["_id"] + unique = true + } +} +`, CosmosDBAccountResource{}.capabilities(data, documentdb.MongoDB, 
[]string{"EnableMongo"}), data.RandomInteger) } diff --git a/azurerm/internal/services/cosmos/cosmosdb_mongo_database_resource.go b/azurerm/internal/services/cosmos/cosmosdb_mongo_database_resource.go index 9972ef2a68e5..9f9238affb0a 100644 --- a/azurerm/internal/services/cosmos/cosmosdb_mongo_database_resource.go +++ b/azurerm/internal/services/cosmos/cosmosdb_mongo_database_resource.go @@ -5,7 +5,7 @@ import ( "log" "time" - "github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2020-04-01/documentdb" + "github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb" "github.com/hashicorp/go-azure-helpers/response" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" @@ -19,12 +19,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmCosmosDbMongoDatabase() *schema.Resource { +func resourceCosmosDbMongoDatabase() *schema.Resource { return &schema.Resource{ - Create: resourceArmCosmosDbMongoDatabaseCreate, - Update: resourceArmCosmosDbMongoDatabaseUpdate, - Read: resourceArmCosmosDbMongoDatabaseRead, - Delete: resourceArmCosmosDbMongoDatabaseDelete, + Create: resourceCosmosDbMongoDatabaseCreate, + Update: resourceCosmosDbMongoDatabaseUpdate, + Read: resourceCosmosDbMongoDatabaseRead, + Delete: resourceCosmosDbMongoDatabaseDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -75,7 +75,7 @@ func resourceArmCosmosDbMongoDatabase() *schema.Resource { } } -func resourceArmCosmosDbMongoDatabaseCreate(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbMongoDatabaseCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.MongoDbClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -136,22 +136,22 @@ func resourceArmCosmosDbMongoDatabaseCreate(d *schema.ResourceData, meta interfa d.SetId(*resp.ID) - return resourceArmCosmosDbMongoDatabaseRead(d, meta) + return resourceCosmosDbMongoDatabaseRead(d, meta) } -func resourceArmCosmosDbMongoDatabaseUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbMongoDatabaseUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.MongoDbClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.MongoDbDatabaseID(d.Id()) + id, err := parse.MongodbDatabaseID(d.Id()) if err != nil { return err } err = common.CheckForChangeFromAutoscaleAndManualThroughput(d) if err != nil { - return fmt.Errorf("Error updating Cosmos Mongo Database %q (Account: %q) - %+v", id.Name, id.Account, err) + return fmt.Errorf("Error updating Cosmos Mongo Database %q (Account: %q) - %+v", id.Name, id.DatabaseAccountName, err) } db := documentdb.MongoDBDatabaseCreateUpdateParameters{ @@ -163,102 +163,124 @@ func resourceArmCosmosDbMongoDatabaseUpdate(d *schema.ResourceData, meta interfa }, } - future, err := client.CreateUpdateMongoDBDatabase(ctx, id.ResourceGroup, id.Account, id.Name, db) + future, err := client.CreateUpdateMongoDBDatabase(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name, db) if err != nil { - return fmt.Errorf("Error issuing create/update request for Cosmos Mongo Database %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error issuing create/update request for Cosmos Mongo Database %q (Account: %q): %+v", id.Name, 
id.DatabaseAccountName, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting on create/update future for Cosmos Mongo Database %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error waiting on create/update future for Cosmos Mongo Database %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) } if common.HasThroughputChange(d) { throughputParameters := common.ExpandCosmosDBThroughputSettingsUpdateParameters(d) - throughputFuture, err := client.UpdateMongoDBDatabaseThroughput(ctx, id.ResourceGroup, id.Account, id.Name, *throughputParameters) + throughputFuture, err := client.UpdateMongoDBDatabaseThroughput(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name, *throughputParameters) if err != nil { if response.WasNotFound(throughputFuture.Response()) { return fmt.Errorf("Error setting Throughput for Cosmos MongoDB Database %q (Account: %q): %+v - "+ - "If the collection has not been created with an initial throughput, you cannot configure it later.", id.Name, id.Account, err) + "If the collection has not been created with an initial throughput, you cannot configure it later.", id.Name, id.DatabaseAccountName, err) } } if err = throughputFuture.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting on ThroughputUpdate future for Cosmos Mongo Database %q (Account: %q, Database %q): %+v", id.Name, id.Account, id.Name, err) + return fmt.Errorf("Error waiting on ThroughputUpdate future for Cosmos Mongo Database %q (Account: %q, Database %q): %+v", id.Name, id.DatabaseAccountName, id.Name, err) } } - _, err = client.GetMongoDBDatabase(ctx, id.ResourceGroup, id.Account, id.Name) + _, err = client.GetMongoDBDatabase(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name) if err != nil { - return fmt.Errorf("Error making get request for Cosmos Mongo Database %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error making get request for Cosmos Mongo Database %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) } - return resourceArmCosmosDbMongoDatabaseRead(d, meta) + return resourceCosmosDbMongoDatabaseRead(d, meta) } -func resourceArmCosmosDbMongoDatabaseRead(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbMongoDatabaseRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.MongoDbClient + accountClient := meta.(*clients.Client).Cosmos.DatabaseClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.MongoDbDatabaseID(d.Id()) + id, err := parse.MongodbDatabaseID(d.Id()) if err != nil { return err } - resp, err := client.GetMongoDBDatabase(ctx, id.ResourceGroup, id.Account, id.Name) + resp, err := client.GetMongoDBDatabase(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[INFO] Error reading Cosmos Mongo Database %q (Account: %q) - removing from state", id.Name, id.Account) + log.Printf("[INFO] Error reading Cosmos Mongo Database %q (Account: %q) - removing from state", id.Name, id.DatabaseAccountName) d.SetId("") return nil } - return fmt.Errorf("Error reading Cosmos Mongo Database %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error reading Cosmos Mongo Database %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) } d.Set("resource_group_name", id.ResourceGroup) - d.Set("account_name", id.Account) + 
d.Set("account_name", id.DatabaseAccountName) if props := resp.MongoDBDatabaseGetProperties; props != nil { if res := props.Resource; res != nil { d.Set("name", res.ID) } } - throughputResp, err := client.GetMongoDBDatabaseThroughput(ctx, id.ResourceGroup, id.Account, id.Name) + accResp, err := accountClient.Get(ctx, id.ResourceGroup, id.DatabaseAccountName) if err != nil { - if !utils.ResponseWasNotFound(throughputResp.Response) { - return fmt.Errorf("Error reading Throughput on Cosmos Mongo Database %q (Account: %q): %+v", id.Name, id.Account, err) - } else { - d.Set("throughput", nil) - d.Set("autoscale_settings", nil) + return fmt.Errorf("reading CosmosDB Account %q (Resource Group %q): %+v", id.DatabaseAccountName, id.ResourceGroup, err) + } + + if accResp.ID == nil || *accResp.ID == "" { + return fmt.Errorf("cosmosDB Account %q (Resource Group %q) ID is empty or nil", id.DatabaseAccountName, id.ResourceGroup) + } + + // if the cosmos Account is serverless, it could not call the get throughput api + if props := accResp.DatabaseAccountGetProperties; props != nil && props.Capabilities != nil { + serverless := false + for _, v := range *props.Capabilities { + if *v.Name == "EnableServerless" { + serverless = true + } + } + + if !serverless { + throughputResp, err := client.GetMongoDBDatabaseThroughput(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name) + if err != nil { + if !utils.ResponseWasNotFound(throughputResp.Response) { + return fmt.Errorf("Error reading Throughput on Cosmos Mongo Database %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) + } else { + d.Set("throughput", nil) + d.Set("autoscale_settings", nil) + } + } else { + common.SetResourceDataThroughputFromResponse(throughputResp, d) + } } - } else { - common.SetResourceDataThroughputFromResponse(throughputResp, d) } return nil } -func resourceArmCosmosDbMongoDatabaseDelete(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbMongoDatabaseDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.MongoDbClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.MongoDbDatabaseID(d.Id()) + id, err := parse.MongodbDatabaseID(d.Id()) if err != nil { return err } - future, err := client.DeleteMongoDBDatabase(ctx, id.ResourceGroup, id.Account, id.Name) + future, err := client.DeleteMongoDBDatabase(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name) if err != nil { if !response.WasNotFound(future.Response()) { - return fmt.Errorf("Error deleting Cosmos Mongo Database %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error deleting Cosmos Mongo Database %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) } } err = future.WaitForCompletionRef(ctx, client.Client) if err != nil { - return fmt.Errorf("Error waiting on delete future for Cosmos Mongo Database %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error waiting on delete future for Cosmos Mongo Database %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) } return nil diff --git a/azurerm/internal/services/cosmos/cosmosdb_mongo_database_resource_test.go b/azurerm/internal/services/cosmos/cosmosdb_mongo_database_resource_test.go index 6e93843d3670..8503fbfd803f 100644 --- a/azurerm/internal/services/cosmos/cosmosdb_mongo_database_resource_test.go +++ b/azurerm/internal/services/cosmos/cosmosdb_mongo_database_resource_test.go @@ -1,149 +1,115 @@ package cosmos_test import ( + "context" 
"fmt" - "net/http" "testing" - "github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2020-04-01/documentdb" + "github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/terraform" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func TestAccAzureRMCosmosDbMongoDatabase_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cosmosdb_mongo_database", "test") +type CosmosMongoDatabaseResource struct { +} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbMongoDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDbMongoDatabase_basic(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbMongoDatabaseExists(data.ResourceName), - ), - }, - data.ImportStep(), +func TestAccCosmosDbMongoDatabase_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cosmosdb_mongo_database", "test") + r := CosmosMongoDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDbMongoDatabase_complete(t *testing.T) { +func TestAccCosmosDbMongoDatabase_complete(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_mongo_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbMongoDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDbMongoDatabase_complete(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbMongoDatabaseExists(data.ResourceName), - ), - }, - data.ImportStep(), + r := CosmosMongoDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDbMongoDatabase_autoscale(t *testing.T) { +func TestAccCosmosDbMongoDatabase_autoscale(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_mongo_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbMongoDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDbMongoDatabase_autoscale(data, 4000), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbMongoDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "autoscale_settings.0.max_throughput", "4000"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosDbMongoDatabase_autoscale(data, 5000), - Check: 
resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbMongoDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "autoscale_settings.0.max_throughput", "5000"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosDbMongoDatabase_autoscale(data, 4000), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbMongoDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "autoscale_settings.0.max_throughput", "4000"), - ), - }, - data.ImportStep(), + r := CosmosMongoDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.autoscale(data, 4000), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("autoscale_settings.0.max_throughput").HasValue("4000"), + ), + }, + data.ImportStep(), + { + Config: r.autoscale(data, 5000), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("autoscale_settings.0.max_throughput").HasValue("5000"), + ), + }, + data.ImportStep(), + { + Config: r.autoscale(data, 4000), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("autoscale_settings.0.max_throughput").HasValue("4000"), + ), }, + data.ImportStep(), }) } -func testCheckAzureRMCosmosDbMongoDatabaseDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Cosmos.MongoDbClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_cosmosdb_mongo_database" { - continue - } - - name := rs.Primary.Attributes["name"] - account := rs.Primary.Attributes["account_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.GetMongoDBDatabase(ctx, resourceGroup, account, name) - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Error checking destroy for Cosmos Mongo Database %s (account %s) still exists:\n%v", name, account, err) - } - } - - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Cosmos Mongo Database %s (account %s) still exists:\n%#v", name, account, resp) - } - } - - return nil +func TestAccCosmosDbMongoDatabase_serverless(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cosmosdb_mongo_database", "test") + r := CosmosMongoDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.serverless(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) } -func testCheckAzureRMCosmosDbMongoDatabaseExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Cosmos.MongoDbClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - account := rs.Primary.Attributes["account_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.GetMongoDBDatabase(ctx, resourceGroup, account, name) - if err != nil { - return fmt.Errorf("Bad: 
Get on cosmosAccountsClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Cosmos database '%s' (account: '%s') does not exist", name, account) - } +func (t CosmosMongoDatabaseResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.MongodbDatabaseID(state.ID) + if err != nil { + return nil, err + } - return nil + resp, err := clients.Cosmos.MongoDbClient.GetMongoDBDatabase(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name) + if err != nil { + return nil, fmt.Errorf("reading Cosmos Mongo Database (%s): %+v", id.String(), err) } + + return utils.Bool(resp.ID != nil), nil } -func testAccAzureRMCosmosDbMongoDatabase_basic(data acceptance.TestData) string { +func (CosmosMongoDatabaseResource) basic(data acceptance.TestData) string { return fmt.Sprintf(` %[1]s @@ -152,10 +118,10 @@ resource "azurerm_cosmosdb_mongo_database" "test" { resource_group_name = azurerm_cosmosdb_account.test.resource_group_name account_name = azurerm_cosmosdb_account.test.name } -`, testAccAzureRMCosmosDBAccount_basic(data, documentdb.MongoDB, documentdb.Strong), data.RandomInteger) +`, CosmosDBAccountResource{}.basic(data, documentdb.MongoDB, documentdb.Strong), data.RandomInteger) } -func testAccAzureRMCosmosDbMongoDatabase_complete(data acceptance.TestData) string { +func (CosmosMongoDatabaseResource) complete(data acceptance.TestData) string { return fmt.Sprintf(` %[1]s @@ -165,10 +131,10 @@ resource "azurerm_cosmosdb_mongo_database" "test" { account_name = azurerm_cosmosdb_account.test.name throughput = 700 } -`, testAccAzureRMCosmosDBAccount_basic(data, documentdb.MongoDB, documentdb.Strong), data.RandomInteger) +`, CosmosDBAccountResource{}.basic(data, documentdb.MongoDB, documentdb.Strong), data.RandomInteger) } -func testAccAzureRMCosmosDbMongoDatabase_autoscale(data acceptance.TestData, maxThroughput int) string { +func (CosmosMongoDatabaseResource) autoscale(data acceptance.TestData, maxThroughput int) string { return fmt.Sprintf(` %[1]s @@ -180,5 +146,17 @@ resource "azurerm_cosmosdb_mongo_database" "test" { max_throughput = %[3]d } } -`, testAccAzureRMCosmosDBAccount_basic(data, documentdb.MongoDB, documentdb.Strong), data.RandomInteger, maxThroughput) +`, CosmosDBAccountResource{}.basic(data, documentdb.MongoDB, documentdb.Strong), data.RandomInteger, maxThroughput) +} + +func (CosmosMongoDatabaseResource) serverless(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_cosmosdb_mongo_database" "test" { + name = "acctest-%[2]d" + resource_group_name = azurerm_cosmosdb_account.test.resource_group_name + account_name = azurerm_cosmosdb_account.test.name +} +`, CosmosDBAccountResource{}.capabilities(data, documentdb.MongoDB, []string{"EnableServerless", "mongoEnableDocLevelTTL", "EnableMongo"}), data.RandomInteger) } diff --git a/azurerm/internal/services/cosmos/cosmosdb_sql_container_resource.go b/azurerm/internal/services/cosmos/cosmosdb_sql_container_resource.go index 8a9aa8d0ddee..a50958097bae 100644 --- a/azurerm/internal/services/cosmos/cosmosdb_sql_container_resource.go +++ b/azurerm/internal/services/cosmos/cosmosdb_sql_container_resource.go @@ -5,7 +5,7 @@ import ( "log" "time" - "github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2020-04-01/documentdb" + "github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb" "github.com/hashicorp/go-azure-helpers/response" 
"github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" @@ -20,12 +20,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmCosmosDbSQLContainer() *schema.Resource { +func resourceCosmosDbSQLContainer() *schema.Resource { return &schema.Resource{ - Create: resourceArmCosmosDbSQLContainerCreate, - Read: resourceArmCosmosDbSQLContainerRead, - Update: resourceArmCosmosDbSQLContainerUpdate, - Delete: resourceArmCosmosDbSQLContainerDelete, + Create: resourceCosmosDbSQLContainerCreate, + Read: resourceCosmosDbSQLContainerRead, + Update: resourceCosmosDbSQLContainerUpdate, + Delete: resourceCosmosDbSQLContainerDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -78,6 +78,13 @@ func resourceArmCosmosDbSQLContainer() *schema.Resource { ValidateFunc: validation.StringIsNotEmpty, }, + "partition_key_version": { + Type: schema.TypeInt, + Optional: true, + ForceNew: true, + ValidateFunc: validation.IntBetween(1, 2), + }, + "throughput": { Type: schema.TypeInt, Optional: true, @@ -117,7 +124,7 @@ func resourceArmCosmosDbSQLContainer() *schema.Resource { } } -func resourceArmCosmosDbSQLContainerCreate(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbSQLContainerCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.SqlClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -162,6 +169,10 @@ func resourceArmCosmosDbSQLContainerCreate(d *schema.ResourceData, meta interfac Paths: &[]string{partitionkeypaths}, Kind: documentdb.PartitionKindHash, } + + if partitionKeyVersion, ok := d.GetOk("partition_key_version"); ok { + db.SQLContainerCreateUpdateProperties.Resource.PartitionKey.Version = utils.Int32(int32(partitionKeyVersion.(int))) + } } if keys := expandCosmosSQLContainerUniqueKeys(d.Get("unique_key").(*schema.Set)); keys != nil { @@ -204,10 +215,10 @@ func resourceArmCosmosDbSQLContainerCreate(d *schema.ResourceData, meta interfac d.SetId(*resp.ID) - return resourceArmCosmosDbSQLContainerRead(d, meta) + return resourceCosmosDbSQLContainerRead(d, meta) } -func resourceArmCosmosDbSQLContainerUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbSQLContainerUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.SqlClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -219,7 +230,7 @@ func resourceArmCosmosDbSQLContainerUpdate(d *schema.ResourceData, meta interfac err = common.CheckForChangeFromAutoscaleAndManualThroughput(d) if err != nil { - return fmt.Errorf("Error updating Cosmos SQL Container %q (Account: %q, Database: %q): %+v", id.Name, id.Account, id.Database, err) + return fmt.Errorf("Error updating Cosmos SQL Container %q (Account: %q, Database: %q): %+v", id.ContainerName, id.DatabaseAccountName, id.SqlDatabaseName, err) } partitionkeypaths := d.Get("partition_key_path").(string) @@ -227,13 +238,13 @@ func resourceArmCosmosDbSQLContainerUpdate(d *schema.ResourceData, meta interfac indexingPolicy := common.ExpandAzureRmCosmosDbIndexingPolicy(d) err = common.ValidateAzureRmCosmosDbIndexingPolicy(indexingPolicy) if err != nil { - return fmt.Errorf("Error updating Cosmos SQL Container %q (Account: %q, Database: %q): %+v", id.Name, id.Account, id.Database, err) + return fmt.Errorf("Error updating Cosmos SQL Container %q (Account: %q, 
Database: %q): %+v", id.ContainerName, id.DatabaseAccountName, id.SqlDatabaseName, err) } db := documentdb.SQLContainerCreateUpdateParameters{ SQLContainerCreateUpdateProperties: &documentdb.SQLContainerCreateUpdateProperties{ Resource: &documentdb.SQLContainerResource{ - ID: &id.Name, + ID: &id.ContainerName, IndexingPolicy: indexingPolicy, }, Options: &documentdb.CreateUpdateOptions{}, @@ -245,6 +256,10 @@ func resourceArmCosmosDbSQLContainerUpdate(d *schema.ResourceData, meta interfac Paths: &[]string{partitionkeypaths}, Kind: documentdb.PartitionKindHash, } + + if partitionKeyVersion, ok := d.GetOk("partition_key_version"); ok { + db.SQLContainerCreateUpdateProperties.Resource.PartitionKey.Version = utils.Int32(int32(partitionKeyVersion.(int))) + } } if keys := expandCosmosSQLContainerUniqueKeys(d.Get("unique_key").(*schema.Set)); keys != nil { @@ -257,35 +272,36 @@ func resourceArmCosmosDbSQLContainerUpdate(d *schema.ResourceData, meta interfac db.SQLContainerCreateUpdateProperties.Resource.DefaultTTL = utils.Int32(int32(defaultTTL.(int))) } - future, err := client.CreateUpdateSQLContainer(ctx, id.ResourceGroup, id.Account, id.Database, id.Name, db) + future, err := client.CreateUpdateSQLContainer(ctx, id.ResourceGroup, id.DatabaseAccountName, id.SqlDatabaseName, id.ContainerName, db) if err != nil { - return fmt.Errorf("Error issuing create/update request for Cosmos SQL Container %q (Account: %q, Database: %q): %+v", id.Name, id.Account, id.Database, err) + return fmt.Errorf("Error issuing create/update request for Cosmos SQL Container %q (Account: %q, Database: %q): %+v", id.ContainerName, id.DatabaseAccountName, id.SqlDatabaseName, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting on create/update future for Cosmos SQL Container %q (Account: %q, Database: %q): %+v", id.Name, id.Account, id.Database, err) + return fmt.Errorf("Error waiting on create/update future for Cosmos SQL Container %q (Account: %q, Database: %q): %+v", id.ContainerName, id.DatabaseAccountName, id.SqlDatabaseName, err) } if common.HasThroughputChange(d) { throughputParameters := common.ExpandCosmosDBThroughputSettingsUpdateParameters(d) - throughputFuture, err := client.UpdateSQLContainerThroughput(ctx, id.ResourceGroup, id.Account, id.Database, id.Name, *throughputParameters) + throughputFuture, err := client.UpdateSQLContainerThroughput(ctx, id.ResourceGroup, id.DatabaseAccountName, id.SqlDatabaseName, id.ContainerName, *throughputParameters) if err != nil { if response.WasNotFound(throughputFuture.Response()) { return fmt.Errorf("Error setting Throughput for Cosmos SQL Container %q (Account: %q, Database: %q): %+v - "+ - "If the collection has not been created with an initial throughput, you cannot configure it later.", id.Name, id.Account, id.Database, err) + "If the collection has not been created with an initial throughput, you cannot configure it later.", id.ContainerName, id.DatabaseAccountName, id.SqlDatabaseName, err) } } if err = throughputFuture.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting on ThroughputUpdate future for Cosmos Container %q (Account: %q, Database: %q): %+v", id.Name, id.Account, id.Database, err) + return fmt.Errorf("Error waiting on ThroughputUpdate future for Cosmos Container %q (Account: %q, Database: %q): %+v", id.ContainerName, id.DatabaseAccountName, id.SqlDatabaseName, err) } } - return resourceArmCosmosDbSQLContainerRead(d, meta) + return 
resourceCosmosDbSQLContainerRead(d, meta) } -func resourceArmCosmosDbSQLContainerRead(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbSQLContainerRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.SqlClient + accountClient := meta.(*clients.Client).Cosmos.DatabaseClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -294,21 +310,21 @@ func resourceArmCosmosDbSQLContainerRead(d *schema.ResourceData, meta interface{ return err } - resp, err := client.GetSQLContainer(ctx, id.ResourceGroup, id.Account, id.Database, id.Name) + resp, err := client.GetSQLContainer(ctx, id.ResourceGroup, id.DatabaseAccountName, id.SqlDatabaseName, id.ContainerName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[INFO] Error reading Cosmos SQL Container %q (Account: %q) - removing from state", id.Database, id.Name) + log.Printf("[INFO] Error reading Cosmos SQL Container %q (Account: %q) - removing from state", id.SqlDatabaseName, id.ContainerName) d.SetId("") return nil } - return fmt.Errorf("Error reading Cosmos SQL Container %q (Account: %q): %+v", id.Database, id.Name, err) + return fmt.Errorf("Error reading Cosmos SQL Container %q (Account: %q): %+v", id.SqlDatabaseName, id.ContainerName, err) } - d.Set("name", id.Name) + d.Set("name", id.ContainerName) d.Set("resource_group_name", id.ResourceGroup) - d.Set("account_name", id.Account) - d.Set("database_name", id.Database) + d.Set("account_name", id.DatabaseAccountName) + d.Set("database_name", id.SqlDatabaseName) if props := resp.SQLContainerGetProperties; props != nil { if res := props.Resource; res != nil { @@ -320,6 +336,9 @@ func resourceArmCosmosDbSQLContainerRead(d *schema.ResourceData, meta interface{ d.Set("partition_key_path", (*paths)[0]) } } + if version := pk.Version; version != nil { + d.Set("partition_key_version", version) + } } if ukp := res.UniqueKeyPolicy; ukp != nil { @@ -338,22 +357,42 @@ func resourceArmCosmosDbSQLContainerRead(d *schema.ResourceData, meta interface{ } } - throughputResp, err := client.GetSQLContainerThroughput(ctx, id.ResourceGroup, id.Account, id.Database, id.Name) + accResp, err := accountClient.Get(ctx, id.ResourceGroup, id.DatabaseAccountName) if err != nil { - if !utils.ResponseWasNotFound(throughputResp.Response) { - return fmt.Errorf("Error reading Throughput on Cosmos SQL Container %s (Account: %q, Database: %q) ID: %v", id.Name, id.Account, id.Database, err) - } else { - d.Set("throughput", nil) - d.Set("autoscale_settings", nil) + return fmt.Errorf("reading CosmosDB Account %q (Resource Group %q): %+v", id.DatabaseAccountName, id.ResourceGroup, err) + } + + if accResp.ID == nil || *accResp.ID == "" { + return fmt.Errorf("cosmosDB Account %q (Resource Group %q) ID is empty or nil", id.DatabaseAccountName, id.ResourceGroup) + } + + if props := accResp.DatabaseAccountGetProperties; props != nil && props.Capabilities != nil { + serverless := false + for _, v := range *props.Capabilities { + if *v.Name == "EnableServerless" { + serverless = true + } + } + + if !serverless { + throughputResp, err := client.GetSQLContainerThroughput(ctx, id.ResourceGroup, id.DatabaseAccountName, id.SqlDatabaseName, id.ContainerName) + if err != nil { + if !utils.ResponseWasNotFound(throughputResp.Response) { + return fmt.Errorf("Error reading Throughput on Cosmos SQL Container %s (Account: %q, Database: %q) ID: %v", id.ContainerName, id.DatabaseAccountName, id.SqlDatabaseName, err) + } else { + 
d.Set("throughput", nil) + d.Set("autoscale_settings", nil) + } + } else { + common.SetResourceDataThroughputFromResponse(throughputResp, d) + } } - } else { - common.SetResourceDataThroughputFromResponse(throughputResp, d) } return nil } -func resourceArmCosmosDbSQLContainerDelete(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbSQLContainerDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.SqlClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() @@ -363,16 +402,16 @@ func resourceArmCosmosDbSQLContainerDelete(d *schema.ResourceData, meta interfac return err } - future, err := client.DeleteSQLContainer(ctx, id.ResourceGroup, id.Account, id.Database, id.Name) + future, err := client.DeleteSQLContainer(ctx, id.ResourceGroup, id.DatabaseAccountName, id.SqlDatabaseName, id.ContainerName) if err != nil { if !response.WasNotFound(future.Response()) { - return fmt.Errorf("Error deleting Cosmos SQL Container %q (Account: %q): %+v", id.Database, id.Name, err) + return fmt.Errorf("Error deleting Cosmos SQL Container %q (Account: %q): %+v", id.SqlDatabaseName, id.ContainerName, err) } } err = future.WaitForCompletionRef(ctx, client.Client) if err != nil { - return fmt.Errorf("Error waiting on delete future for Cosmos SQL Container %q (Account: %q): %+v", id.Database, id.Account, err) + return fmt.Errorf("Error waiting on delete future for Cosmos SQL Container %q (Account: %q): %+v", id.SqlDatabaseName, id.DatabaseAccountName, err) } return nil diff --git a/azurerm/internal/services/cosmos/cosmosdb_sql_container_resource_test.go b/azurerm/internal/services/cosmos/cosmosdb_sql_container_resource_test.go index 7bc96e4bf0f6..0ce47d05fec2 100644 --- a/azurerm/internal/services/cosmos/cosmosdb_sql_container_resource_test.go +++ b/azurerm/internal/services/cosmos/cosmosdb_sql_container_resource_test.go @@ -1,223 +1,197 @@ package cosmos_test import ( + "context" "fmt" - "net/http" "testing" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/terraform" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func TestAccAzureRMCosmosDbSqlContainer_basic(t *testing.T) { +type CosmosSqlContainerResource struct { +} + +func TestAccCosmosDbSqlContainer_basic(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_sql_container", "test") + r := CosmosSqlContainerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbSqlContainerDestroy, - Steps: []resource.TestStep{ - { - - Config: testAccAzureRMCosmosDbSqlContainer_basic(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbSqlContainerExists(data.ResourceName), - ), - }, - data.ImportStep(), + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), }, + data.ImportStep(), }) } -func 
TestAccAzureRMCosmosDbSqlContainer_complete(t *testing.T) { +func TestAccCosmosDbSqlContainer_basic_serverless(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_sql_container", "test") + r := CosmosSqlContainerResource{} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbSqlContainerDestroy, - Steps: []resource.TestStep{ - { - - Config: testAccAzureRMCosmosDbSqlContainer_complete(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbSqlContainerExists(data.ResourceName), - ), - }, - data.ImportStep(), + data.ResourceTest(t, r, []resource.TestStep{ + { + + Config: r.basic_serverless(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDbSqlContainer_update(t *testing.T) { +func TestAccCosmosDbSqlContainer_complete(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_sql_container", "test") + r := CosmosSqlContainerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbSqlContainerDestroy, - Steps: []resource.TestStep{ - { - - Config: testAccAzureRMCosmosDbSqlContainer_complete(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbSqlContainerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "default_ttl", "500"), - resource.TestCheckResourceAttr(data.ResourceName, "throughput", "600"), - ), - }, - data.ImportStep(), - { - - Config: testAccAzureRMCosmosDbSqlContainer_update(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbSqlContainerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "default_ttl", "1000"), - resource.TestCheckResourceAttr(data.ResourceName, "throughput", "400"), - ), - }, - data.ImportStep(), + Config: r.complete(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDbSqlContainer_autoscale(t *testing.T) { +func TestAccCosmosDbSqlContainer_update(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_sql_container", "test") + r := CosmosSqlContainerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbSqlContainerDestroy, - Steps: []resource.TestStep{ - { - - Config: testAccAzureRMCosmosDbSqlContainer_autoscale(data, 4000), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbSqlContainerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "autoscale_settings.0.max_throughput", "4000"), - ), - }, - data.ImportStep(), - { - - Config: testAccAzureRMCosmosDbSqlContainer_autoscale(data, 5000), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbSqlContainerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "autoscale_settings.0.max_throughput", "5000"), - ), - }, - data.ImportStep(), - { - - Config: testAccAzureRMCosmosDbSqlContainer_autoscale(data, 4000), - Check: 
resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbSqlContainerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "autoscale_settings.0.max_throughput", "4000"), - ), - }, - data.ImportStep(), + Config: r.complete(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("default_ttl").HasValue("500"), + check.That(data.ResourceName).Key("throughput").HasValue("600"), + ), }, + data.ImportStep(), + { + + Config: r.update(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("default_ttl").HasValue("1000"), + check.That(data.ResourceName).Key("throughput").HasValue("400"), + ), + }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDbSqlContainer_indexing_policy(t *testing.T) { +func TestAccCosmosDbSqlContainer_autoscale(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_sql_container", "test") + r := CosmosSqlContainerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbSqlContainerDestroy, - Steps: []resource.TestStep{ - { - - Config: testAccAzureRMCosmosDbSqlContainer_basic(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbSqlContainerExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - - Config: testAccAzureRMCosmosDbSqlContainer_indexing_policy(data, "/includedPath01/*", "/excludedPath01/?"), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbSqlContainerExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - - Config: testAccAzureRMCosmosDbSqlContainer_indexing_policy(data, "/includedPath02/*", "/excludedPath02/?"), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbSqlContainerExists(data.ResourceName), - ), - }, - data.ImportStep(), + Config: r.autoscale(data, 4000), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("autoscale_settings.0.max_throughput").HasValue("4000"), + ), + }, + data.ImportStep(), + { + + Config: r.autoscale(data, 5000), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("autoscale_settings.0.max_throughput").HasValue("5000"), + ), }, + data.ImportStep(), + { + + Config: r.autoscale(data, 4000), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("autoscale_settings.0.max_throughput").HasValue("4000"), + ), + }, + data.ImportStep(), }) } -func testCheckAzureRMCosmosDbSqlContainerDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Cosmos.SqlClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_cosmosdb_sql_container" { - continue - } - - name := rs.Primary.Attributes["name"] - account := rs.Primary.Attributes["account_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - database := rs.Primary.Attributes["database_name"] - - resp, err := client.GetSQLContainer(ctx, resourceGroup, account, database, name) - if err != nil { - if 
!utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Error checking destroy for Cosmos SQL Container %s (account %s) still exists:\n%v", name, account, err) - } - } - - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Cosmos SQL Container %s (account %s) still exists:\n%#v", name, account, resp) - } - } +func TestAccCosmosDbSqlContainer_indexing_policy(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cosmosdb_sql_container", "test") + r := CosmosSqlContainerResource{} - return nil -} + data.ResourceTest(t, r, []resource.TestStep{ + { -func testCheckAzureRMCosmosDbSqlContainerExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Cosmos.SqlClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } + Config: r.indexing_policy(data, "/includedPath01/*", "/excludedPath01/?"), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { - name := rs.Primary.Attributes["name"] - account := rs.Primary.Attributes["account_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - database := rs.Primary.Attributes["database_name"] + Config: r.indexing_policy(data, "/includedPath02/*", "/excludedPath02/?"), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} - resp, err := client.GetSQLContainer(ctx, resourceGroup, account, database, name) - if err != nil { - return fmt.Errorf("Bad: Get on cosmosAccountsClient: %+v", err) - } +func TestAccCosmosDbSqlContainer_partition_key_version(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cosmosdb_sql_container", "test") + r := CosmosSqlContainerResource{} - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Cosmos Container '%s' (account: '%s') does not exist", name, account) - } + data.ResourceTest(t, r, []resource.TestStep{ + { - return nil + Config: r.partition_key_version(data, 2), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("partition_key_version").HasValue("2"), + ), + }, + data.ImportStep(), + }) +} + +func (t CosmosSqlContainerResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.SqlContainerID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Cosmos.SqlClient.GetSQLContainer(ctx, id.ResourceGroup, id.DatabaseAccountName, id.SqlDatabaseName, id.ContainerName) + if err != nil { + return nil, fmt.Errorf("reading Cosmos SQL Container (%s): %+v", id.String(), err) } + + return utils.Bool(resp.ID != nil), nil } -func testAccAzureRMCosmosDbSqlContainer_basic(data acceptance.TestData) string { +func (CosmosSqlContainerResource) basic(data acceptance.TestData) string { return fmt.Sprintf(` %[1]s @@ -227,10 +201,23 @@ resource "azurerm_cosmosdb_sql_container" "test" { account_name = azurerm_cosmosdb_account.test.name database_name = 
azurerm_cosmosdb_sql_database.test.name } -`, testAccAzureRMCosmosDbSqlDatabase_basic(data), data.RandomInteger) +`, CosmosSqlDatabaseResource{}.basic(data), data.RandomInteger) } -func testAccAzureRMCosmosDbSqlContainer_complete(data acceptance.TestData) string { +func (CosmosSqlContainerResource) basic_serverless(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_cosmosdb_sql_container" "test" { + name = "acctest-CSQLC-%[2]d" + resource_group_name = azurerm_cosmosdb_account.test.resource_group_name + account_name = azurerm_cosmosdb_account.test.name + database_name = azurerm_cosmosdb_sql_database.test.name +} +`, CosmosSqlDatabaseResource{}.serverless(data), data.RandomInteger) +} + +func (CosmosSqlContainerResource) complete(data acceptance.TestData) string { return fmt.Sprintf(` %[1]s @@ -282,10 +269,10 @@ resource "azurerm_cosmosdb_sql_container" "test" { } } } -`, testAccAzureRMCosmosDbSqlDatabase_basic(data), data.RandomInteger) +`, CosmosSqlDatabaseResource{}.basic(data), data.RandomInteger) } -func testAccAzureRMCosmosDbSqlContainer_update(data acceptance.TestData) string { +func (CosmosSqlContainerResource) update(data acceptance.TestData) string { return fmt.Sprintf(` %[1]s @@ -338,10 +325,10 @@ resource "azurerm_cosmosdb_sql_container" "test" { } } } -`, testAccAzureRMCosmosDbSqlDatabase_basic(data), data.RandomInteger) +`, CosmosSqlDatabaseResource{}.basic(data), data.RandomInteger) } -func testAccAzureRMCosmosDbSqlContainer_autoscale(data acceptance.TestData, maxThroughput int) string { +func (CosmosSqlContainerResource) autoscale(data acceptance.TestData, maxThroughput int) string { return fmt.Sprintf(` %[1]s resource "azurerm_cosmosdb_sql_container" "test" { @@ -354,10 +341,10 @@ resource "azurerm_cosmosdb_sql_container" "test" { max_throughput = %[3]d } } -`, testAccAzureRMCosmosDbSqlDatabase_basic(data), data.RandomInteger, maxThroughput) +`, CosmosSqlDatabaseResource{}.basic(data), data.RandomInteger, maxThroughput) } -func testAccAzureRMCosmosDbSqlContainer_indexing_policy(data acceptance.TestData, includedPath, excludedPath string) string { +func (CosmosSqlContainerResource) indexing_policy(data acceptance.TestData, includedPath, excludedPath string) string { return fmt.Sprintf(` %[1]s @@ -405,5 +392,19 @@ resource "azurerm_cosmosdb_sql_container" "test" { } } } -`, testAccAzureRMCosmosDbSqlDatabase_basic(data), data.RandomInteger, includedPath, excludedPath) +`, CosmosSqlDatabaseResource{}.basic(data), data.RandomInteger, includedPath, excludedPath) +} + +func (CosmosSqlContainerResource) partition_key_version(data acceptance.TestData, version int) string { + return fmt.Sprintf(` +%[1]s +resource "azurerm_cosmosdb_sql_container" "test" { + name = "acctest-CSQLC-%[2]d" + resource_group_name = azurerm_cosmosdb_account.test.resource_group_name + account_name = azurerm_cosmosdb_account.test.name + database_name = azurerm_cosmosdb_sql_database.test.name + partition_key_path = "/definition/id" + partition_key_version = %[3]d +} +`, CosmosSqlDatabaseResource{}.basic(data), data.RandomInteger, version) } diff --git a/azurerm/internal/services/cosmos/cosmosdb_sql_database_resource.go b/azurerm/internal/services/cosmos/cosmosdb_sql_database_resource.go index c96020286c7a..685a87fc73b0 100644 --- a/azurerm/internal/services/cosmos/cosmosdb_sql_database_resource.go +++ b/azurerm/internal/services/cosmos/cosmosdb_sql_database_resource.go @@ -5,7 +5,7 @@ import ( "log" "time" - 
"github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2020-04-01/documentdb" + "github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb" "github.com/hashicorp/go-azure-helpers/response" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" @@ -19,12 +19,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmCosmosDbSQLDatabase() *schema.Resource { +func resourceCosmosDbSQLDatabase() *schema.Resource { return &schema.Resource{ - Create: resourceArmCosmosDbSQLDatabaseCreate, - Read: resourceArmCosmosDbSQLDatabaseRead, - Update: resourceArmCosmosDbSQLDatabaseUpdate, - Delete: resourceArmCosmosDbSQLDatabaseDelete, + Create: resourceCosmosDbSQLDatabaseCreate, + Read: resourceCosmosDbSQLDatabaseRead, + Update: resourceCosmosDbSQLDatabaseUpdate, + Delete: resourceCosmosDbSQLDatabaseDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -75,7 +75,7 @@ func resourceArmCosmosDbSQLDatabase() *schema.Resource { } } -func resourceArmCosmosDbSQLDatabaseCreate(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbSQLDatabaseCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.SqlClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -136,10 +136,10 @@ func resourceArmCosmosDbSQLDatabaseCreate(d *schema.ResourceData, meta interface d.SetId(*resp.ID) - return resourceArmCosmosDbSQLDatabaseRead(d, meta) + return resourceCosmosDbSQLDatabaseRead(d, meta) } -func resourceArmCosmosDbSQLDatabaseUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbSQLDatabaseUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.SqlClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -151,7 +151,7 @@ func resourceArmCosmosDbSQLDatabaseUpdate(d *schema.ResourceData, meta interface err = common.CheckForChangeFromAutoscaleAndManualThroughput(d) if err != nil { - return fmt.Errorf("Error updating Cosmos SQL Database %q (Account: %q) - %+v", id.Name, id.Account, err) + return fmt.Errorf("Error updating Cosmos SQL Database %q (Account: %q) - %+v", id.Name, id.DatabaseAccountName, err) } db := documentdb.SQLDatabaseCreateUpdateParameters{ @@ -163,35 +163,36 @@ func resourceArmCosmosDbSQLDatabaseUpdate(d *schema.ResourceData, meta interface }, } - future, err := client.CreateUpdateSQLDatabase(ctx, id.ResourceGroup, id.Account, id.Name, db) + future, err := client.CreateUpdateSQLDatabase(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name, db) if err != nil { - return fmt.Errorf("Error issuing create/update request for Cosmos SQL Database %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error issuing create/update request for Cosmos SQL Database %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting on create/update future for Cosmos SQL Database %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error waiting on create/update future for Cosmos SQL Database %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) } if common.HasThroughputChange(d) { throughputParameters := common.ExpandCosmosDBThroughputSettingsUpdateParameters(d) - throughputFuture, 
err := client.UpdateSQLDatabaseThroughput(ctx, id.ResourceGroup, id.Account, id.Name, *throughputParameters) + throughputFuture, err := client.UpdateSQLDatabaseThroughput(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name, *throughputParameters) if err != nil { if response.WasNotFound(throughputFuture.Response()) { return fmt.Errorf("Error setting Throughput for Cosmos SQL Database %q (Account: %q) %+v - "+ - "If the collection has not been created with an initial throughput, you cannot configure it later.", id.Name, id.Account, err) + "If the collection has not been created with an initial throughput, you cannot configure it later.", id.Name, id.DatabaseAccountName, err) } } if err = throughputFuture.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting on ThroughputUpdate future for Cosmos SQL Database %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error waiting on ThroughputUpdate future for Cosmos SQL Database %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) } } - return resourceArmCosmosDbSQLDatabaseRead(d, meta) + return resourceCosmosDbSQLDatabaseRead(d, meta) } -func resourceArmCosmosDbSQLDatabaseRead(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbSQLDatabaseRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.SqlClient + accountClient := meta.(*clients.Client).Cosmos.DatabaseClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -200,41 +201,61 @@ func resourceArmCosmosDbSQLDatabaseRead(d *schema.ResourceData, meta interface{} return err } - resp, err := client.GetSQLDatabase(ctx, id.ResourceGroup, id.Account, id.Name) + resp, err := client.GetSQLDatabase(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[INFO] Error reading Cosmos SQL Database %q (Account: %q) - removing from state", id.Name, id.Account) + log.Printf("[INFO] Error reading Cosmos SQL Database %q (Account: %q) - removing from state", id.Name, id.DatabaseAccountName) d.SetId("") return nil } - return fmt.Errorf("Error reading Cosmos SQL Database %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error reading Cosmos SQL Database %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) } d.Set("resource_group_name", id.ResourceGroup) - d.Set("account_name", id.Account) + d.Set("account_name", id.DatabaseAccountName) if props := resp.SQLDatabaseGetProperties; props != nil { if res := props.Resource; res != nil { d.Set("name", res.ID) } } - throughputResp, err := client.GetSQLDatabaseThroughput(ctx, id.ResourceGroup, id.Account, id.Name) + accResp, err := accountClient.Get(ctx, id.ResourceGroup, id.DatabaseAccountName) if err != nil { - if !utils.ResponseWasNotFound(throughputResp.Response) { - return fmt.Errorf("Error reading Throughput on Cosmos SQL Database %q (Account: %q) ID: %v", id.Name, id.Account, err) - } else { - d.Set("throughput", nil) - d.Set("autoscale_settings", nil) + return fmt.Errorf("reading CosmosDB Account %q (Resource Group %q): %+v", id.DatabaseAccountName, id.ResourceGroup, err) + } + + if accResp.ID == nil || *accResp.ID == "" { + return fmt.Errorf("cosmosDB Account %q (Resource Group %q) ID is empty or nil", id.DatabaseAccountName, id.ResourceGroup) + } + + if props := accResp.DatabaseAccountGetProperties; props != nil && props.Capabilities != nil { + serverless := false + for _, v := range 
*props.Capabilities { + if *v.Name == "EnableServerless" { + serverless = true + } + } + + if !serverless { + throughputResp, err := client.GetSQLDatabaseThroughput(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name) + if err != nil { + if !utils.ResponseWasNotFound(throughputResp.Response) { + return fmt.Errorf("Error reading Throughput on Cosmos SQL Database %q (Account: %q) ID: %v", id.Name, id.DatabaseAccountName, err) + } else { + d.Set("throughput", nil) + d.Set("autoscale_settings", nil) + } + } else { + common.SetResourceDataThroughputFromResponse(throughputResp, d) + } } - } else { - common.SetResourceDataThroughputFromResponse(throughputResp, d) } return nil } -func resourceArmCosmosDbSQLDatabaseDelete(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbSQLDatabaseDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.SqlClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() @@ -244,16 +265,16 @@ func resourceArmCosmosDbSQLDatabaseDelete(d *schema.ResourceData, meta interface return err } - future, err := client.DeleteSQLDatabase(ctx, id.ResourceGroup, id.Account, id.Name) + future, err := client.DeleteSQLDatabase(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name) if err != nil { if !response.WasNotFound(future.Response()) { - return fmt.Errorf("Error deleting Cosmos SQL Database %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error deleting Cosmos SQL Database %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) } } err = future.WaitForCompletionRef(ctx, client.Client) if err != nil { - return fmt.Errorf("Error waiting on delete future for Cosmos SQL Database %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error waiting on delete future for Cosmos SQL Database %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) } return nil diff --git a/azurerm/internal/services/cosmos/cosmosdb_sql_database_resource_test.go b/azurerm/internal/services/cosmos/cosmosdb_sql_database_resource_test.go index ffac4f85f9b5..f1428589b82f 100644 --- a/azurerm/internal/services/cosmos/cosmosdb_sql_database_resource_test.go +++ b/azurerm/internal/services/cosmos/cosmosdb_sql_database_resource_test.go @@ -1,162 +1,128 @@ package cosmos_test import ( + "context" "fmt" - "net/http" "testing" - "github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2020-04-01/documentdb" + "github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/terraform" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func TestAccAzureRMCosmosDbSqlDatabase_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cosmosdb_sql_database", "test") +type CosmosSqlDatabaseResource struct { +} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbSqlDatabaseDestroy, - Steps: []resource.TestStep{ - { - 
Config: testAccAzureRMCosmosDbSqlDatabase_basic(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbSqlDatabaseExists(data.ResourceName), - ), - }, - data.ImportStep(), +func TestAccCosmosDbSqlDatabase_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cosmosdb_sql_database", "test") + r := CosmosSqlDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDbSqlDatabase_update(t *testing.T) { +func TestAccCosmosDbSqlDatabase_update(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_sql_database", "test") + r := CosmosSqlDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbSqlDatabaseDestroy, - Steps: []resource.TestStep{ - { - - Config: testAccAzureRMCosmosDbSqlDatabase_throughput(data, 700), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbSqlDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "throughput", "700"), - ), - }, - data.ImportStep(), - { - - Config: testAccAzureRMCosmosDbSqlDatabase_throughput(data, 1700), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbSqlDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "throughput", "1700"), - ), - }, - data.ImportStep(), + Config: r.throughput(data, 700), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("throughput").HasValue("700"), + ), }, + data.ImportStep(), + { + + Config: r.throughput(data, 1700), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("throughput").HasValue("1700"), + ), + }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDbSqlDatabase_autoscale(t *testing.T) { +func TestAccCosmosDbSqlDatabase_autoscale(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_sql_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbSqlDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDbSqlDatabase_autoscale(data, 4000), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbSqlDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "autoscale_settings.0.max_throughput", "4000"), - ), - }, - data.ImportStep(), - { - - Config: testAccAzureRMCosmosDbSqlDatabase_autoscale(data, 5000), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbSqlDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "autoscale_settings.0.max_throughput", "5000"), - ), - }, - data.ImportStep(), - { - - Config: testAccAzureRMCosmosDbSqlDatabase_autoscale(data, 4000), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbSqlDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "autoscale_settings.0.max_throughput", "4000"), - ), - }, - data.ImportStep(), + r := 
CosmosSqlDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.autoscale(data, 4000), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("autoscale_settings.0.max_throughput").HasValue("4000"), + ), + }, + data.ImportStep(), + { + + Config: r.autoscale(data, 5000), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("autoscale_settings.0.max_throughput").HasValue("5000"), + ), }, + data.ImportStep(), + { + + Config: r.autoscale(data, 4000), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("autoscale_settings.0.max_throughput").HasValue("4000"), + ), + }, + data.ImportStep(), }) } -func testCheckAzureRMCosmosDbSqlDatabaseDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Cosmos.SqlClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_cosmosdb_sql_database" { - continue - } - - name := rs.Primary.Attributes["name"] - account := rs.Primary.Attributes["account_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.GetSQLDatabase(ctx, resourceGroup, account, name) - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Error checking destroy for Cosmos SQL Database %s (account %s) still exists:\n%v", name, account, err) - } - } - - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Cosmos SQL Database %s (account %s) still exists:\n%#v", name, account, resp) - } - } - - return nil +func TestAccCosmosDbSqlDatabase_serverless(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cosmosdb_sql_database", "test") + r := CosmosSqlDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.serverless(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) } -func testCheckAzureRMCosmosDbSqlDatabaseExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Cosmos.SqlClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - account := rs.Primary.Attributes["account_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.GetSQLDatabase(ctx, resourceGroup, account, name) - if err != nil { - return fmt.Errorf("Bad: Get on cosmosAccountsClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Cosmos database '%s' (account: '%s') does not exist", name, account) - } +func (t CosmosSqlDatabaseResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.SqlDatabaseID(state.ID) + if err != nil { + return nil, err + } - return nil + resp, err := clients.Cosmos.SqlClient.GetSQLDatabase(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name) + if err != nil { + return nil, fmt.Errorf("reading Cosmos 
SQL Database (%s): %+v", id.String(), err) } + + return utils.Bool(resp.ID != nil), nil } -func testAccAzureRMCosmosDbSqlDatabase_basic(data acceptance.TestData) string { +func (CosmosSqlDatabaseResource) basic(data acceptance.TestData) string { return fmt.Sprintf(` %[1]s @@ -165,10 +131,10 @@ resource "azurerm_cosmosdb_sql_database" "test" { resource_group_name = azurerm_cosmosdb_account.test.resource_group_name account_name = azurerm_cosmosdb_account.test.name } -`, testAccAzureRMCosmosDBAccount_basic(data, documentdb.GlobalDocumentDB, documentdb.Strong), data.RandomInteger) +`, CosmosDBAccountResource{}.basic(data, documentdb.GlobalDocumentDB, documentdb.Strong), data.RandomInteger) } -func testAccAzureRMCosmosDbSqlDatabase_throughput(data acceptance.TestData, throughput int) string { +func (CosmosSqlDatabaseResource) throughput(data acceptance.TestData, throughput int) string { return fmt.Sprintf(` %[1]s @@ -178,10 +144,10 @@ resource "azurerm_cosmosdb_sql_database" "test" { account_name = azurerm_cosmosdb_account.test.name throughput = %[3]d } -`, testAccAzureRMCosmosDBAccount_basic(data, documentdb.GlobalDocumentDB, documentdb.Strong), data.RandomInteger, throughput) +`, CosmosDBAccountResource{}.basic(data, documentdb.GlobalDocumentDB, documentdb.Strong), data.RandomInteger, throughput) } -func testAccAzureRMCosmosDbSqlDatabase_autoscale(data acceptance.TestData, maxThroughput int) string { +func (CosmosSqlDatabaseResource) autoscale(data acceptance.TestData, maxThroughput int) string { return fmt.Sprintf(` %[1]s @@ -193,5 +159,16 @@ resource "azurerm_cosmosdb_sql_database" "test" { max_throughput = %[3]d } } -`, testAccAzureRMCosmosDBAccount_basic(data, documentdb.GlobalDocumentDB, documentdb.Strong), data.RandomInteger, maxThroughput) +`, CosmosDBAccountResource{}.basic(data, documentdb.GlobalDocumentDB, documentdb.Strong), data.RandomInteger, maxThroughput) +} + +func (CosmosSqlDatabaseResource) serverless(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s +resource "azurerm_cosmosdb_sql_database" "test" { + name = "acctest-%[2]d" + resource_group_name = azurerm_cosmosdb_account.test.resource_group_name + account_name = azurerm_cosmosdb_account.test.name +} +`, CosmosDBAccountResource{}.capabilities(data, documentdb.GlobalDocumentDB, []string{"EnableServerless"}), data.RandomInteger) } diff --git a/azurerm/internal/services/cosmos/cosmosdb_sql_stored_procedure_resource.go b/azurerm/internal/services/cosmos/cosmosdb_sql_stored_procedure_resource.go index 55ef63b15b50..c0f1eeecd648 100644 --- a/azurerm/internal/services/cosmos/cosmosdb_sql_stored_procedure_resource.go +++ b/azurerm/internal/services/cosmos/cosmosdb_sql_stored_procedure_resource.go @@ -5,14 +5,13 @@ import ( "log" "time" - "github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2020-04-01/documentdb" + "github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/hashicorp/go-azure-helpers/response" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - azuread "github.com/terraform-providers/terraform-provider-azuread/azuread/helpers/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" @@ -22,12 +21,12 @@ import ( 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmCosmosDbSQLStoredProcedure() *schema.Resource { +func resourceCosmosDbSQLStoredProcedure() *schema.Resource { return &schema.Resource{ - Create: resourceArmCosmosDbSQLStoredProcedureCreate, - Read: resourceArmCosmosDbSQLStoredProcedureRead, - Update: resourceArmCosmosDbSQLStoredProcedureUpdate, - Delete: resourceArmCosmosDbSQLStoredProcedureDelete, + Create: resourceCosmosDbSQLStoredProcedureCreate, + Read: resourceCosmosDbSQLStoredProcedureRead, + Update: resourceCosmosDbSQLStoredProcedureUpdate, + Delete: resourceCosmosDbSQLStoredProcedureDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -45,7 +44,7 @@ func resourceArmCosmosDbSQLStoredProcedure() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: azuread.NoEmptyStrings, + ValidateFunc: validation.StringIsNotEmpty, }, "resource_group_name": azure.SchemaResourceGroupName(), @@ -60,7 +59,7 @@ func resourceArmCosmosDbSQLStoredProcedure() *schema.Resource { "body": { Type: schema.TypeString, Required: true, - ValidateFunc: azuread.NoEmptyStrings, + ValidateFunc: validation.StringIsNotEmpty, }, "container_name": { @@ -80,7 +79,7 @@ func resourceArmCosmosDbSQLStoredProcedure() *schema.Resource { } } -func resourceArmCosmosDbSQLStoredProcedureCreate(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbSQLStoredProcedureCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.SqlClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -131,23 +130,23 @@ func resourceArmCosmosDbSQLStoredProcedureCreate(d *schema.ResourceData, meta in d.SetId(*resp.ID) - return resourceArmCosmosDbSQLStoredProcedureRead(d, meta) + return resourceCosmosDbSQLStoredProcedureRead(d, meta) } -func resourceArmCosmosDbSQLStoredProcedureUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbSQLStoredProcedureUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.SqlClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.StoredProcedureID(d.Id()) + id, err := parse.SqlStoredProcedureID(d.Id()) if err != nil { return err } - containerName := id.Container - databaseName := id.Database - accountName := id.Account - name := id.Name + containerName := id.ContainerName + databaseName := id.SqlDatabaseName + accountName := id.DatabaseAccountName + name := id.StoredProcedureName storedProcParams := documentdb.SQLStoredProcedureCreateUpdateParameters{ SQLStoredProcedureCreateUpdateProperties: &documentdb.SQLStoredProcedureCreateUpdateProperties{ @@ -168,35 +167,35 @@ func resourceArmCosmosDbSQLStoredProcedureUpdate(d *schema.ResourceData, meta in return fmt.Errorf("Error waiting for update of SQL Stored Procedure %q (Container %q / Database %q / Account %q): %+v", name, containerName, databaseName, accountName, err) } - return resourceArmCosmosDbSQLStoredProcedureRead(d, meta) + return resourceCosmosDbSQLStoredProcedureRead(d, meta) } -func resourceArmCosmosDbSQLStoredProcedureRead(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbSQLStoredProcedureRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.SqlClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := 
parse.StoredProcedureID(d.Id()) + id, err := parse.SqlStoredProcedureID(d.Id()) if err != nil { return err } - resp, err := client.GetSQLStoredProcedure(ctx, id.ResourceGroup, id.Account, id.Database, id.Container, id.Name) + resp, err := client.GetSQLStoredProcedure(ctx, id.ResourceGroup, id.DatabaseAccountName, id.SqlDatabaseName, id.ContainerName, id.StoredProcedureName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[INFO] SQL Stored Procedure %q (Container %q / Database %q / Account %q) was not found - removing from state", id.Name, id.Container, id.Database, id.Account) + log.Printf("[INFO] SQL Stored Procedure %q (Container %q / Database %q / Account %q) was not found - removing from state", id.StoredProcedureName, id.ContainerName, id.SqlDatabaseName, id.DatabaseAccountName) d.SetId("") return nil } - return fmt.Errorf("Error retrieving SQL Stored Procedure %q (Container %q / Database %q / Account %q): %+v", id.Name, id.Container, id.Database, id.Account, err) + return fmt.Errorf("Error retrieving SQL Stored Procedure %q (Container %q / Database %q / Account %q): %+v", id.StoredProcedureName, id.ContainerName, id.SqlDatabaseName, id.DatabaseAccountName, err) } d.Set("resource_group_name", id.ResourceGroup) - d.Set("account_name", id.Account) - d.Set("database_name", id.Database) - d.Set("container_name", id.Container) - d.Set("name", id.Name) + d.Set("account_name", id.DatabaseAccountName) + d.Set("database_name", id.SqlDatabaseName) + d.Set("container_name", id.ContainerName) + d.Set("name", id.StoredProcedureName) if props := resp.SQLStoredProcedureGetProperties; props != nil { if resource := props.Resource; resource != nil { @@ -207,26 +206,26 @@ func resourceArmCosmosDbSQLStoredProcedureRead(d *schema.ResourceData, meta inte return nil } -func resourceArmCosmosDbSQLStoredProcedureDelete(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbSQLStoredProcedureDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.SqlClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.StoredProcedureID(d.Id()) + id, err := parse.SqlStoredProcedureID(d.Id()) if err != nil { return err } - future, err := client.DeleteSQLStoredProcedure(ctx, id.ResourceGroup, id.Account, id.Database, id.Container, id.Name) + future, err := client.DeleteSQLStoredProcedure(ctx, id.ResourceGroup, id.DatabaseAccountName, id.SqlDatabaseName, id.ContainerName, id.StoredProcedureName) if err != nil { if !response.WasNotFound(future.Response()) { - return fmt.Errorf("Error deleting SQL Stored Procedure %q (Container %q / Database %q / Account %q): %+v", id.Name, id.Container, id.Database, id.Account, err) + return fmt.Errorf("Error deleting SQL Stored Procedure %q (Container %q / Database %q / Account %q): %+v", id.StoredProcedureName, id.ContainerName, id.SqlDatabaseName, id.DatabaseAccountName, err) } } err = future.WaitForCompletionRef(ctx, client.Client) if err != nil { - return fmt.Errorf("Error waiting for deletion of SQL Stored Procedure %q (Container %q / Database %q / Account %q): %+v", id.Name, id.Container, id.Database, id.Account, err) + return fmt.Errorf("Error waiting for deletion of SQL Stored Procedure %q (Container %q / Database %q / Account %q): %+v", id.StoredProcedureName, id.ContainerName, id.SqlDatabaseName, id.DatabaseAccountName, err) } return nil diff --git a/azurerm/internal/services/cosmos/cosmosdb_sql_stored_procedure_resource_test.go 
b/azurerm/internal/services/cosmos/cosmosdb_sql_stored_procedure_resource_test.go index eff3998d6be3..2daa800d55ed 100644 --- a/azurerm/internal/services/cosmos/cosmosdb_sql_stored_procedure_resource_test.go +++ b/azurerm/internal/services/cosmos/cosmosdb_sql_stored_procedure_resource_test.go @@ -1,128 +1,80 @@ package cosmos_test import ( + "context" "fmt" - "net/http" "testing" - "github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2020-04-01/documentdb" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/parse" + + "github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/terraform" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func TestAccAzureRMCosmosDbSqlStoredProcedure_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cosmosdb_sql_stored_procedure", "test") +type CosmosSqlStoredProcedureResource struct { +} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbSqlStoredProcedureDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDbSqlStoredProcedure_basic(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbSqlStoredProcedureExists(data.ResourceName), - ), - }, - data.ImportStep(), +func TestAccCosmosDbSqlStoredProcedure_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cosmosdb_sql_stored_procedure", "test") + r := CosmosSqlStoredProcedureResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDbSqlStoredProcedure_update(t *testing.T) { +func TestAccCosmosDbSqlStoredProcedure_update(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_sql_stored_procedure", "test") + r := CosmosSqlStoredProcedureResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbSqlStoredProcedureDestroy, - Steps: []resource.TestStep{ - { - - Config: testAccAzureRMCosmosDbSqlStoredProcedure_basic(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbSqlStoredProcedureExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - - Config: testAccAzureRMCosmosDbSqlStoredProcedure_update(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbSqlStoredProcedureExists(data.ResourceName), - ), - }, - data.ImportStep(), + Config: r.update(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), }, + data.ImportStep(), }) } -func 
testCheckAzureRMCosmosDbSqlStoredProcedureDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Cosmos.SqlClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_cosmosdb_sql_stored_procedure" { - continue - } - - name := rs.Primary.Attributes["name"] - accountName := rs.Primary.Attributes["account_name"] - resourceGroupName := rs.Primary.Attributes["resource_group_name"] - databaseName := rs.Primary.Attributes["database_name"] - containerName := rs.Primary.Attributes["container_name"] - - resp, err := client.GetSQLStoredProcedure(ctx, resourceGroupName, accountName, databaseName, containerName, name) - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Error checking destroy for Cosmos SQL Stored Procedure %s (account %s) still exists:\n%v", name, accountName, err) - } - } - - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Cosmos SQL Stored Procedure %s (account %s) still exists:\n%#v", name, accountName, resp) - } +func (t CosmosSqlStoredProcedureResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.SqlStoredProcedureID(state.ID) + if err != nil { + return nil, err } - return nil -} - -func testCheckAzureRMCosmosDbSqlStoredProcedureExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Cosmos.SqlClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - accountName := rs.Primary.Attributes["account_name"] - resourceGroupName := rs.Primary.Attributes["resource_group_name"] - databaseName := rs.Primary.Attributes["database_name"] - containerName := rs.Primary.Attributes["container_name"] - - resp, err := client.GetSQLStoredProcedure(ctx, resourceGroupName, accountName, databaseName, containerName, name) - if err != nil { - return fmt.Errorf("Bad: Get on cosmosAccountsClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Cosmos Stored Procedure '%s' (account: '%s') does not exist", name, accountName) - } - - return nil + resp, err := clients.Cosmos.SqlClient.GetSQLStoredProcedure(ctx, id.ResourceGroup, id.DatabaseAccountName, id.SqlDatabaseName, id.ContainerName, id.StoredProcedureName) + if err != nil { + return nil, fmt.Errorf("reading Cosmos SQL Stored Procedure (%s): %+v", id.String(), err) } + + return utils.Bool(resp.ID != nil), nil } -func testAccAzureRMCosmosDbSqlStoredProcedure_base(data acceptance.TestData) string { +func (CosmosSqlStoredProcedureResource) base(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -165,9 +117,7 @@ resource "azurerm_cosmosdb_sql_container" "test" { `, data.Locations.Primary, data.RandomInteger, string(documentdb.GlobalDocumentDB), string(documentdb.Session)) } -func testAccAzureRMCosmosDbSqlStoredProcedure_basic(data acceptance.TestData) string { - template := testAccAzureRMCosmosDbSqlStoredProcedure_base(data) - +func (r CosmosSqlStoredProcedureResource) basic(data acceptance.TestData) string { return fmt.Sprintf(` %[1]s @@ -186,12 +136,10 @@ resource 
"azurerm_cosmosdb_sql_stored_procedure" "test" { } BODY } -`, template, data.RandomInteger) +`, r.base(data), data.RandomInteger) } -func testAccAzureRMCosmosDbSqlStoredProcedure_update(data acceptance.TestData) string { - template := testAccAzureRMCosmosDbSqlStoredProcedure_base(data) - +func (r CosmosSqlStoredProcedureResource) update(data acceptance.TestData) string { return fmt.Sprintf(` %[1]s @@ -210,5 +158,5 @@ resource "azurerm_cosmosdb_sql_stored_procedure" "test" { } BODY } -`, template, data.RandomInteger) +`, r.base(data), data.RandomInteger) } diff --git a/azurerm/internal/services/cosmos/cosmosdb_table_resource.go b/azurerm/internal/services/cosmos/cosmosdb_table_resource.go index 685c1df01c4e..a734a9ffabac 100644 --- a/azurerm/internal/services/cosmos/cosmosdb_table_resource.go +++ b/azurerm/internal/services/cosmos/cosmosdb_table_resource.go @@ -5,7 +5,7 @@ import ( "log" "time" - "github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2020-04-01/documentdb" + "github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb" "github.com/hashicorp/go-azure-helpers/response" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" @@ -19,12 +19,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmCosmosDbTable() *schema.Resource { +func resourceCosmosDbTable() *schema.Resource { return &schema.Resource{ - Create: resourceArmCosmosDbTableCreate, - Read: resourceArmCosmosDbTableRead, - Update: resourceArmCosmosDbTableUpdate, - Delete: resourceArmCosmosDbTableDelete, + Create: resourceCosmosDbTableCreate, + Read: resourceCosmosDbTableRead, + Update: resourceCosmosDbTableUpdate, + Delete: resourceCosmosDbTableDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -75,7 +75,7 @@ func resourceArmCosmosDbTable() *schema.Resource { } } -func resourceArmCosmosDbTableCreate(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbTableCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.TableClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -136,10 +136,10 @@ func resourceArmCosmosDbTableCreate(d *schema.ResourceData, meta interface{}) er d.SetId(*resp.ID) - return resourceArmCosmosDbTableRead(d, meta) + return resourceCosmosDbTableRead(d, meta) } -func resourceArmCosmosDbTableUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbTableUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.TableClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -151,7 +151,7 @@ func resourceArmCosmosDbTableUpdate(d *schema.ResourceData, meta interface{}) er err = common.CheckForChangeFromAutoscaleAndManualThroughput(d) if err != nil { - return fmt.Errorf("Error updating Cosmos Table %q (Account: %q) - %+v", id.Name, id.Account, err) + return fmt.Errorf("Error updating Cosmos Table %q (Account: %q) - %+v", id.Name, id.DatabaseAccountName, err) } db := documentdb.TableCreateUpdateParameters{ @@ -163,35 +163,36 @@ func resourceArmCosmosDbTableUpdate(d *schema.ResourceData, meta interface{}) er }, } - future, err := client.CreateUpdateTable(ctx, id.ResourceGroup, id.Account, id.Name, db) + future, err := client.CreateUpdateTable(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name, db) if 
err != nil { - return fmt.Errorf("Error issuing create/update request for Cosmos Table %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error issuing create/update request for Cosmos Table %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting on create/update future for Cosmos Table %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error waiting on create/update future for Cosmos Table %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) } if common.HasThroughputChange(d) { throughputParameters := common.ExpandCosmosDBThroughputSettingsUpdateParameters(d) - throughputFuture, err := client.UpdateTableThroughput(ctx, id.ResourceGroup, id.Account, id.Name, *throughputParameters) + throughputFuture, err := client.UpdateTableThroughput(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name, *throughputParameters) if err != nil { if response.WasNotFound(throughputFuture.Response()) { return fmt.Errorf("Error setting Throughput for Cosmos Table %q (Account: %q): %+v - "+ - "If the collection has not been created with an initial throughput, you cannot configure it later.", id.Name, id.Account, err) + "If the collection has not been created with an initial throughput, you cannot configure it later.", id.Name, id.DatabaseAccountName, err) } } if err = throughputFuture.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting on ThroughputUpdate future for Cosmos Table %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error waiting on ThroughputUpdate future for Cosmos Table %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) } } - return resourceArmCosmosDbTableRead(d, meta) + return resourceCosmosDbTableRead(d, meta) } -func resourceArmCosmosDbTableRead(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbTableRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.TableClient + accountClient := meta.(*clients.Client).Cosmos.DatabaseClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -200,41 +201,61 @@ func resourceArmCosmosDbTableRead(d *schema.ResourceData, meta interface{}) erro return err } - resp, err := client.GetTable(ctx, id.ResourceGroup, id.Account, id.Name) + resp, err := client.GetTable(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[INFO] Error reading Cosmos Table %q (Account: %q) - removing from state", id.Name, id.Account) + log.Printf("[INFO] Error reading Cosmos Table %q (Account: %q) - removing from state", id.Name, id.DatabaseAccountName) d.SetId("") return nil } - return fmt.Errorf("Error reading Cosmos Table %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error reading Cosmos Table %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) } d.Set("resource_group_name", id.ResourceGroup) - d.Set("account_name", id.Account) + d.Set("account_name", id.DatabaseAccountName) if props := resp.TableGetProperties; props != nil { if res := props.Resource; res != nil { d.Set("name", res.ID) } } - throughputResp, err := client.GetTableThroughput(ctx, id.ResourceGroup, id.Account, id.Name) + accResp, err := accountClient.Get(ctx, id.ResourceGroup, id.DatabaseAccountName) if err != nil { - if !utils.ResponseWasNotFound(throughputResp.Response) { - return 
fmt.Errorf("Error reading Throughput on Cosmos Table %q (Account: %q) ID: %v", id.Name, id.Account, err) - } else { - d.Set("throughput", nil) - d.Set("autoscale_settings", nil) + return fmt.Errorf("reading CosmosDB Account %q (Resource Group %q): %+v", id.DatabaseAccountName, id.ResourceGroup, err) + } + + if accResp.ID == nil || *accResp.ID == "" { + return fmt.Errorf("cosmosDB Account %q (Resource Group %q) ID is empty or nil", id.DatabaseAccountName, id.ResourceGroup) + } + + if props := accResp.DatabaseAccountGetProperties; props != nil && props.Capabilities != nil { + serverless := false + for _, v := range *props.Capabilities { + if *v.Name == "EnableServerless" { + serverless = true + } + } + + if !serverless { + throughputResp, err := client.GetTableThroughput(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name) + if err != nil { + if !utils.ResponseWasNotFound(throughputResp.Response) { + return fmt.Errorf("Error reading Throughput on Cosmos Table %q (Account: %q) ID: %v", id.Name, id.DatabaseAccountName, err) + } else { + d.Set("throughput", nil) + d.Set("autoscale_settings", nil) + } + } else { + common.SetResourceDataThroughputFromResponse(throughputResp, d) + } } - } else { - common.SetResourceDataThroughputFromResponse(throughputResp, d) } return nil } -func resourceArmCosmosDbTableDelete(d *schema.ResourceData, meta interface{}) error { +func resourceCosmosDbTableDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Cosmos.TableClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() @@ -244,16 +265,16 @@ func resourceArmCosmosDbTableDelete(d *schema.ResourceData, meta interface{}) er return err } - future, err := client.DeleteTable(ctx, id.ResourceGroup, id.Account, id.Name) + future, err := client.DeleteTable(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name) if err != nil { if !response.WasNotFound(future.Response()) { - return fmt.Errorf("Error deleting Cosmos Table %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error deleting Cosmos Table %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) } } err = future.WaitForCompletionRef(ctx, client.Client) if err != nil { - return fmt.Errorf("Error waiting on delete future for Cosmos Table %q (Account: %q): %+v", id.Name, id.Account, err) + return fmt.Errorf("Error waiting on delete future for Cosmos Table %q (Account: %q): %+v", id.Name, id.DatabaseAccountName, err) } return nil diff --git a/azurerm/internal/services/cosmos/cosmosdb_table_resource_test.go b/azurerm/internal/services/cosmos/cosmosdb_table_resource_test.go index 810211a7d089..ef1430e5c0b7 100644 --- a/azurerm/internal/services/cosmos/cosmosdb_table_resource_test.go +++ b/azurerm/internal/services/cosmos/cosmosdb_table_resource_test.go @@ -1,160 +1,126 @@ package cosmos_test import ( + "context" "fmt" - "net/http" "testing" - "github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2020-04-01/documentdb" + "github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/terraform" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func TestAccAzureRMCosmosDbTable_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cosmosdb_table", "test") +type CosmosTableResource struct { +} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbTableDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDbTable_basic(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbTableExists(data.ResourceName), - ), - }, - data.ImportStep(), +func TestAccCosmosDbTable_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cosmosdb_table", "test") + r := CosmosTableResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDbTable_update(t *testing.T) { +func TestAccCosmosDbTable_update(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_table", "test") + r := CosmosTableResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbTableDestroy, - Steps: []resource.TestStep{ - { - - Config: testAccAzureRMCosmosDbTable_throughput(data, 700), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbTableExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "throughput", "700"), - ), - }, - data.ImportStep(), - { - - Config: testAccAzureRMCosmosDbTable_throughput(data, 1700), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbTableExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "throughput", "1700"), - ), - }, - data.ImportStep(), + Config: r.throughput(data, 700), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("throughput").HasValue("700"), + ), }, + data.ImportStep(), + { + + Config: r.throughput(data, 1700), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("throughput").HasValue("1700"), + ), + }, + data.ImportStep(), }) } -func TestAccAzureRMCosmosDbTable_autoscale(t *testing.T) { +func TestAccCosmosDbTable_autoscale(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_cosmosdb_table", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCosmosDbTableDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCosmosDbTable_autoscale(data, 4000), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbTableExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "autoscale_settings.0.max_throughput", "4000"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosDbTable_autoscale(data, 5000), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbTableExists(data.ResourceName), - 
resource.TestCheckResourceAttr(data.ResourceName, "autoscale_settings.0.max_throughput", "5000"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCosmosDbTable_autoscale(data, 4000), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMCosmosDbTableExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "autoscale_settings.0.max_throughput", "4000"), - ), - }, - data.ImportStep(), + r := CosmosTableResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.autoscale(data, 4000), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("autoscale_settings.0.max_throughput").HasValue("4000"), + ), + }, + data.ImportStep(), + { + Config: r.autoscale(data, 5000), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("autoscale_settings.0.max_throughput").HasValue("5000"), + ), + }, + data.ImportStep(), + { + Config: r.autoscale(data, 4000), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("autoscale_settings.0.max_throughput").HasValue("4000"), + ), }, + data.ImportStep(), }) } -func testCheckAzureRMCosmosDbTableDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Cosmos.TableClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_cosmosdb_table" { - continue - } - - name := rs.Primary.Attributes["name"] - account := rs.Primary.Attributes["account_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.GetTable(ctx, resourceGroup, account, name) - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Error checking destroy for Cosmos Table %s (account %s) still exists:\n%v", name, account, err) - } - } - - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Cosmos Table %s (account %s) still exists:\n%#v", name, account, resp) - } - } - - return nil +func TestAccCosmosDbTable_serverless(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cosmosdb_table", "test") + r := CosmosTableResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.serverless(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) } -func testCheckAzureRMCosmosDbTableExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Cosmos.TableClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - account := rs.Primary.Attributes["account_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.GetTable(ctx, resourceGroup, account, name) - if err != nil { - return fmt.Errorf("Bad: Get on cosmosAccountsClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Cosmos Table '%s' (account: '%s') does not exist", name, account) - } +func (t CosmosTableResource) 
Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.TableID(state.ID) + if err != nil { + return nil, err + } - return nil + resp, err := clients.Cosmos.TableClient.GetTable(ctx, id.ResourceGroup, id.DatabaseAccountName, id.Name) + if err != nil { + return nil, fmt.Errorf("reading Cosmos Table (%s): %+v", id.String(), err) } + + return utils.Bool(resp.ID != nil), nil } -func testAccAzureRMCosmosDbTable_basic(data acceptance.TestData) string { +func (CosmosTableResource) basic(data acceptance.TestData) string { return fmt.Sprintf(` %[1]s @@ -163,10 +129,10 @@ resource "azurerm_cosmosdb_table" "test" { resource_group_name = azurerm_cosmosdb_account.test.resource_group_name account_name = azurerm_cosmosdb_account.test.name } -`, testAccAzureRMCosmosDBAccount_capabilities(data, documentdb.GlobalDocumentDB, []string{"EnableTable"}), data.RandomInteger) +`, CosmosDBAccountResource{}.capabilities(data, documentdb.GlobalDocumentDB, []string{"EnableTable"}), data.RandomInteger) } -func testAccAzureRMCosmosDbTable_throughput(data acceptance.TestData, throughput int) string { +func (CosmosTableResource) throughput(data acceptance.TestData, throughput int) string { return fmt.Sprintf(` %[1]s @@ -176,10 +142,10 @@ resource "azurerm_cosmosdb_table" "test" { account_name = azurerm_cosmosdb_account.test.name throughput = %[3]d } -`, testAccAzureRMCosmosDBAccount_capabilities(data, documentdb.GlobalDocumentDB, []string{"EnableTable"}), data.RandomInteger, throughput) +`, CosmosDBAccountResource{}.capabilities(data, documentdb.GlobalDocumentDB, []string{"EnableTable"}), data.RandomInteger, throughput) } -func testAccAzureRMCosmosDbTable_autoscale(data acceptance.TestData, maxThroughput int) string { +func (CosmosTableResource) autoscale(data acceptance.TestData, maxThroughput int) string { return fmt.Sprintf(` %[1]s @@ -191,5 +157,17 @@ resource "azurerm_cosmosdb_table" "test" { max_throughput = %[3]d } } -`, testAccAzureRMCosmosDBAccount_capabilities(data, documentdb.GlobalDocumentDB, []string{"EnableTable"}), data.RandomInteger, maxThroughput) +`, CosmosDBAccountResource{}.capabilities(data, documentdb.GlobalDocumentDB, []string{"EnableTable"}), data.RandomInteger, maxThroughput) +} + +func (CosmosTableResource) serverless(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_cosmosdb_table" "test" { + name = "acctest-%[2]d" + resource_group_name = azurerm_cosmosdb_account.test.resource_group_name + account_name = azurerm_cosmosdb_account.test.name +} +`, CosmosDBAccountResource{}.capabilities(data, documentdb.GlobalDocumentDB, []string{"EnableServerless", "EnableTable"}), data.RandomInteger) } diff --git a/azurerm/internal/services/cosmos/migration/gremlin_graph.go b/azurerm/internal/services/cosmos/migration/gremlin_graph.go index 9998abcdeab9..9e11a51b18c5 100644 --- a/azurerm/internal/services/cosmos/migration/gremlin_graph.go +++ b/azurerm/internal/services/cosmos/migration/gremlin_graph.go @@ -4,7 +4,7 @@ import ( "log" "strings" - "github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2020-04-01/documentdb" + "github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" diff --git a/azurerm/internal/services/cosmos/parse/account.go 
b/azurerm/internal/services/cosmos/parse/account.go deleted file mode 100644 index 24ac8a59a284..000000000000 --- a/azurerm/internal/services/cosmos/parse/account.go +++ /dev/null @@ -1,29 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type DatabaseAccountId struct { - ResourceGroup string - Name string -} - -func DatabaseAccountID(input string) (*DatabaseAccountId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Account ID %q: %+v", input, err) - } - - databaseAccount := DatabaseAccountId{ - ResourceGroup: id.ResourceGroup, - } - - if databaseAccount.Name, err = id.PopSegment("databaseAccounts"); err != nil { - return nil, err - } - - return &databaseAccount, nil -} diff --git a/azurerm/internal/services/cosmos/parse/account_test.go b/azurerm/internal/services/cosmos/parse/account_test.go deleted file mode 100644 index ed97a554849a..000000000000 --- a/azurerm/internal/services/cosmos/parse/account_test.go +++ /dev/null @@ -1,75 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestDatabaseAccountID(t *testing.T) { - testData := []struct { - Name string - Input string - Error bool - Expect *DatabaseAccountId - }{ - { - Name: "Empty", - Input: "", - Error: true, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Error: true, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Error: true, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Error: true, - }, - { - Name: "Missing Database Account Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", - Error: true, - }, - { - Name: "Account ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1", - Error: false, - Expect: &DatabaseAccountId{ - ResourceGroup: "resGroup1", - Name: "acc1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/DatabaseAccounts/acc1/", - Error: true, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := DatabaseAccountID(v.Input) - if err != nil { - if v.Error { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/cosmos/parse/cassandra_keyspace.go b/azurerm/internal/services/cosmos/parse/cassandra_keyspace.go index 5c11689bd712..a523e6340a3b 100644 --- a/azurerm/internal/services/cosmos/parse/cassandra_keyspace.go +++ b/azurerm/internal/services/cosmos/parse/cassandra_keyspace.go @@ -1,34 +1,75 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type CassandraKeyspaceId struct { - ResourceGroup string - Account 
string - Name string + SubscriptionId string + ResourceGroup string + DatabaseAccountName string + Name string +} + +func NewCassandraKeyspaceID(subscriptionId, resourceGroup, databaseAccountName, name string) CassandraKeyspaceId { + return CassandraKeyspaceId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + DatabaseAccountName: databaseAccountName, + Name: name, + } +} + +func (id CassandraKeyspaceId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Database Account Name %q", id.DatabaseAccountName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Cassandra Keyspace", segmentsStr) +} + +func (id CassandraKeyspaceId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DocumentDB/databaseAccounts/%s/cassandraKeyspaces/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.DatabaseAccountName, id.Name) } +// CassandraKeyspaceID parses a CassandraKeyspace ID into an CassandraKeyspaceId struct func CassandraKeyspaceID(input string) (*CassandraKeyspaceId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Cassandra Keyspace ID %q: %+v", input, err) + return nil, err + } + + resourceId := CassandraKeyspaceId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, } - cassandraKeyspace := CassandraKeyspaceId{ - ResourceGroup: id.ResourceGroup, + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - if cassandraKeyspace.Account, err = id.PopSegment("databaseAccounts"); err != nil { + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.DatabaseAccountName, err = id.PopSegment("databaseAccounts"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("cassandraKeyspaces"); err != nil { return nil, err } - if cassandraKeyspace.Name, err = id.PopSegment("cassandraKeyspaces"); err != nil { + if err := id.ValidateNoEmptySegments(input); err != nil { return nil, err } - return &cassandraKeyspace, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/cosmos/parse/cassandra_keyspace_test.go b/azurerm/internal/services/cosmos/parse/cassandra_keyspace_test.go index 36f1dba8a5f2..e29d92d58901 100644 --- a/azurerm/internal/services/cosmos/parse/cassandra_keyspace_test.go +++ b/azurerm/internal/services/cosmos/parse/cassandra_keyspace_test.go @@ -1,65 +1,104 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" ) +var _ resourceid.Formatter = CassandraKeyspaceId{} + +func TestCassandraKeyspaceIDFormatter(t *testing.T) { + actual := NewCassandraKeyspaceID("12345678-1234-9876-4563-123456789012", "resGroup1", "acc1", "keyspace1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/cassandraKeyspaces/keyspace1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + func TestCassandraKeyspaceID(t *testing.T) { testData := []struct { - Name string - Input string - Error bool - Expect *CassandraKeyspaceId + Input string + Error bool + Expected *CassandraKeyspaceId 
}{ + { - Name: "Empty", + // empty Input: "", Error: true, }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", + // missing DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/", Error: true, }, + { - Name: "Missing Database Account Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", + // missing value for DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", Error: true, }, + { - Name: "Missing Keyspace Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/cassandraKeyspaces/", + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/", Error: true, }, + { - Name: "Keyspace ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/cassandraKeyspaces/keyspace1", - Error: false, - Expect: &CassandraKeyspaceId{ - ResourceGroup: "resGroup1", - Account: "acc1", - Name: "keyspace1", + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/cassandraKeyspaces/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/cassandraKeyspaces/keyspace1", + Expected: &CassandraKeyspaceId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + DatabaseAccountName: "acc1", + Name: "keyspace1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/CassandraKeyspaces/keyspace1", + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DOCUMENTDB/DATABASEACCOUNTS/ACC1/CASSANDRAKEYSPACES/KEYSPACE1", Error: true, }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := CassandraKeyspaceID(v.Input) if err != nil { @@ -67,19 +106,23 @@ func TestCassandraKeyspaceID(t *testing.T) { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) + if v.Error { + t.Fatal("Expect an error but 
didn't get one") } - if actual.Account != v.Expect.Account { - t.Fatalf("Expected %q but got %q for Account", v.Expect.Account, actual.Account) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) } - - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.DatabaseAccountName != v.Expected.DatabaseAccountName { + t.Fatalf("Expected %q but got %q for DatabaseAccountName", v.Expected.DatabaseAccountName, actual.DatabaseAccountName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) } } } diff --git a/azurerm/internal/services/cosmos/parse/database_account.go b/azurerm/internal/services/cosmos/parse/database_account.go new file mode 100644 index 000000000000..6a73870ccb21 --- /dev/null +++ b/azurerm/internal/services/cosmos/parse/database_account.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type DatabaseAccountId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewDatabaseAccountID(subscriptionId, resourceGroup, name string) DatabaseAccountId { + return DatabaseAccountId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id DatabaseAccountId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Database Account", segmentsStr) +} + +func (id DatabaseAccountId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DocumentDB/databaseAccounts/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// DatabaseAccountID parses a DatabaseAccount ID into an DatabaseAccountId struct +func DatabaseAccountID(input string) (*DatabaseAccountId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := DatabaseAccountId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("databaseAccounts"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/cosmos/parse/database_account_test.go b/azurerm/internal/services/cosmos/parse/database_account_test.go new file mode 100644 index 000000000000..49949ab09cae --- /dev/null +++ b/azurerm/internal/services/cosmos/parse/database_account_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = DatabaseAccountId{} + +func TestDatabaseAccountIDFormatter(t *testing.T) { + actual := NewDatabaseAccountID("12345678-1234-9876-4563-123456789012", "resGroup1", "acc1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestDatabaseAccountID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *DatabaseAccountId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1", + Expected: &DatabaseAccountId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "acc1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DOCUMENTDB/DATABASEACCOUNTS/ACC1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := DatabaseAccountID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/cosmos/parse/gremlin_database.go b/azurerm/internal/services/cosmos/parse/gremlin_database.go index 843510a7c7bd..97ac72ed49e6 100644 --- a/azurerm/internal/services/cosmos/parse/gremlin_database.go +++ b/azurerm/internal/services/cosmos/parse/gremlin_database.go @@ -1,34 +1,75 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type GremlinDatabaseId struct { - ResourceGroup string - Account string - Name string + SubscriptionId string + ResourceGroup string + DatabaseAccountName string + Name string +} + +func NewGremlinDatabaseID(subscriptionId, resourceGroup, databaseAccountName, name string) 
GremlinDatabaseId { + return GremlinDatabaseId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + DatabaseAccountName: databaseAccountName, + Name: name, + } +} + +func (id GremlinDatabaseId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Database Account Name %q", id.DatabaseAccountName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Gremlin Database", segmentsStr) +} + +func (id GremlinDatabaseId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DocumentDB/databaseAccounts/%s/gremlinDatabases/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.DatabaseAccountName, id.Name) } +// GremlinDatabaseID parses a GremlinDatabase ID into an GremlinDatabaseId struct func GremlinDatabaseID(input string) (*GremlinDatabaseId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Gremlin Database ID %q: %+v", input, err) + return nil, err + } + + resourceId := GremlinDatabaseId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, } - gremlinDatabase := GremlinDatabaseId{ - ResourceGroup: id.ResourceGroup, + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - if gremlinDatabase.Account, err = id.PopSegment("databaseAccounts"); err != nil { + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.DatabaseAccountName, err = id.PopSegment("databaseAccounts"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("gremlinDatabases"); err != nil { return nil, err } - if gremlinDatabase.Name, err = id.PopSegment("gremlinDatabases"); err != nil { + if err := id.ValidateNoEmptySegments(input); err != nil { return nil, err } - return &gremlinDatabase, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/cosmos/parse/gremlin_database_test.go b/azurerm/internal/services/cosmos/parse/gremlin_database_test.go index 779ee7945b22..7ababdc268c8 100644 --- a/azurerm/internal/services/cosmos/parse/gremlin_database_test.go +++ b/azurerm/internal/services/cosmos/parse/gremlin_database_test.go @@ -1,65 +1,104 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" ) +var _ resourceid.Formatter = GremlinDatabaseId{} + +func TestGremlinDatabaseIDFormatter(t *testing.T) { + actual := NewGremlinDatabaseID("12345678-1234-9876-4563-123456789012", "resGroup1", "acc1", "database1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/gremlinDatabases/database1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + func TestGremlinDatabaseID(t *testing.T) { testData := []struct { - Name string - Input string - Error bool - Expect *GremlinDatabaseId + Input string + Error bool + Expected *GremlinDatabaseId }{ + { - Name: "Empty", + // empty Input: "", Error: true, }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + { - Name: "No Resource Groups Segment", - Input: 
"/subscriptions/00000000-0000-0000-0000-000000000000", + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", + // missing DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/", Error: true, }, + { - Name: "Missing Database Account Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", + // missing value for DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", Error: true, }, + { - Name: "Missing Gremlin Database Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/gremlinDatabases/", + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/", Error: true, }, + { - Name: "Gremlin Database ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/gremlinDatabases/database1", - Error: false, - Expect: &GremlinDatabaseId{ - ResourceGroup: "resGroup1", - Account: "acc1", - Name: "database1", + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/gremlinDatabases/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/gremlinDatabases/database1", + Expected: &GremlinDatabaseId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + DatabaseAccountName: "acc1", + Name: "database1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/GremlinDatabases/database1", + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DOCUMENTDB/DATABASEACCOUNTS/ACC1/GREMLINDATABASES/DATABASE1", Error: true, }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := GremlinDatabaseID(v.Input) if err != nil { @@ -67,19 +106,23 @@ func TestGremlinDatabaseID(t *testing.T) { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.Account != v.Expect.Account { - t.Fatalf("Expected %q but got %q for Account", v.Expect.Account, actual.Account) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", 
v.Expected.SubscriptionId, actual.SubscriptionId) } - - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.DatabaseAccountName != v.Expected.DatabaseAccountName { + t.Fatalf("Expected %q but got %q for DatabaseAccountName", v.Expected.DatabaseAccountName, actual.DatabaseAccountName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) } } } diff --git a/azurerm/internal/services/cosmos/parse/gremlin_graph.go b/azurerm/internal/services/cosmos/parse/gremlin_graph.go index 88c78982634e..983e73d4d766 100644 --- a/azurerm/internal/services/cosmos/parse/gremlin_graph.go +++ b/azurerm/internal/services/cosmos/parse/gremlin_graph.go @@ -1,39 +1,81 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type GremlinGraphId struct { - ResourceGroup string - Account string - Database string - Name string + SubscriptionId string + ResourceGroup string + DatabaseAccountName string + GremlinDatabaseName string + GraphName string +} + +func NewGremlinGraphID(subscriptionId, resourceGroup, databaseAccountName, gremlinDatabaseName, graphName string) GremlinGraphId { + return GremlinGraphId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + DatabaseAccountName: databaseAccountName, + GremlinDatabaseName: gremlinDatabaseName, + GraphName: graphName, + } +} + +func (id GremlinGraphId) String() string { + segments := []string{ + fmt.Sprintf("Graph Name %q", id.GraphName), + fmt.Sprintf("Gremlin Database Name %q", id.GremlinDatabaseName), + fmt.Sprintf("Database Account Name %q", id.DatabaseAccountName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Gremlin Graph", segmentsStr) +} + +func (id GremlinGraphId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DocumentDB/databaseAccounts/%s/gremlinDatabases/%s/graphs/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.DatabaseAccountName, id.GremlinDatabaseName, id.GraphName) } +// GremlinGraphID parses a GremlinGraph ID into an GremlinGraphId struct func GremlinGraphID(input string) (*GremlinGraphId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Gremlin Graph ID %q: %+v", input, err) + return nil, err } - gremlinGraph := GremlinGraphId{ - ResourceGroup: id.ResourceGroup, + resourceId := GremlinGraphId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, } - if gremlinGraph.Account, err = id.PopSegment("databaseAccounts"); err != nil { - return nil, err + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if gremlinGraph.Database, err = id.PopSegment("gremlinDatabases"); err != nil { + if resourceId.DatabaseAccountName, err = id.PopSegment("databaseAccounts"); err != nil { + return nil, err + } + if resourceId.GremlinDatabaseName, err 
= id.PopSegment("gremlinDatabases"); err != nil { + return nil, err + } + if resourceId.GraphName, err = id.PopSegment("graphs"); err != nil { return nil, err } - if gremlinGraph.Name, err = id.PopSegment("graphs"); err != nil { + if err := id.ValidateNoEmptySegments(input); err != nil { return nil, err } - return &gremlinGraph, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/cosmos/parse/gremlin_graph_test.go b/azurerm/internal/services/cosmos/parse/gremlin_graph_test.go index 24fb3fc360fe..690b89dbd81f 100644 --- a/azurerm/internal/services/cosmos/parse/gremlin_graph_test.go +++ b/azurerm/internal/services/cosmos/parse/gremlin_graph_test.go @@ -1,71 +1,117 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" ) +var _ resourceid.Formatter = GremlinGraphId{} + +func TestGremlinGraphIDFormatter(t *testing.T) { + actual := NewGremlinGraphID("12345678-1234-9876-4563-123456789012", "resGroup1", "acc1", "database1", "graph1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/gremlinDatabases/database1/graphs/graph1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + func TestGremlinGraphID(t *testing.T) { testData := []struct { - Name string - Input string - Error bool - Expect *GremlinGraphId + Input string + Error bool + Expected *GremlinGraphId }{ + { - Name: "Empty", + // empty Input: "", Error: true, }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + // missing DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/", Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", + // missing value for DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", Error: true, }, + + { + // missing GremlinDatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/", + Error: true, + }, + { - Name: "Missing Database Account Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", + // missing value for GremlinDatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/gremlinDatabases/", Error: true, }, + { - Name: "Missing Gremlin Database Value", - Input: 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/gremlinDatabases/", + // missing GraphName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/gremlinDatabases/database1/", Error: true, }, + { - Name: "Missing Gremlin Graph Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/gremlinDatabases/graphs", + // missing value for GraphName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/gremlinDatabases/database1/graphs/", Error: true, }, + { - Name: "Gremlin Graph ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/gremlinDatabases/database1/graphs/graph1", - Error: false, - Expect: &GremlinGraphId{ - ResourceGroup: "resGroup1", - Account: "acc1", - Database: "database1", - Name: "graph1", + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/gremlinDatabases/database1/graphs/graph1", + Expected: &GremlinGraphId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + DatabaseAccountName: "acc1", + GremlinDatabaseName: "database1", + GraphName: "graph1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/gremlinDatabases/database1/Graphs/graph1", + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DOCUMENTDB/DATABASEACCOUNTS/ACC1/GREMLINDATABASES/DATABASE1/GRAPHS/GRAPH1", Error: true, }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := GremlinGraphID(v.Input) if err != nil { @@ -73,23 +119,26 @@ func TestGremlinGraphID(t *testing.T) { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.Database != v.Expect.Database { - t.Fatalf("Expected %q but got %q for Database", v.Expect.Database, actual.Database) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) } - - if actual.Account != v.Expect.Account { - t.Fatalf("Expected %q but got %q for Account", v.Expect.Account, actual.Account) + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) } - - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) + if actual.DatabaseAccountName != v.Expected.DatabaseAccountName { + t.Fatalf("Expected %q but got %q for DatabaseAccountName", v.Expected.DatabaseAccountName, actual.DatabaseAccountName) + } + if actual.GremlinDatabaseName != v.Expected.GremlinDatabaseName { + t.Fatalf("Expected %q but got %q for 
GremlinDatabaseName", v.Expected.GremlinDatabaseName, actual.GremlinDatabaseName) + } + if actual.GraphName != v.Expected.GraphName { + t.Fatalf("Expected %q but got %q for GraphName", v.Expected.GraphName, actual.GraphName) } } } diff --git a/azurerm/internal/services/cosmos/parse/mongo_collection.go b/azurerm/internal/services/cosmos/parse/mongo_collection.go deleted file mode 100644 index 03e5b5b8c403..000000000000 --- a/azurerm/internal/services/cosmos/parse/mongo_collection.go +++ /dev/null @@ -1,39 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type MongoDbCollectionId struct { - ResourceGroup string - Account string - Database string - Name string -} - -func MongoDbCollectionID(input string) (*MongoDbCollectionId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse MongoDb Collection ID %q: %+v", input, err) - } - - mongodbCollection := MongoDbCollectionId{ - ResourceGroup: id.ResourceGroup, - } - - if mongodbCollection.Account, err = id.PopSegment("databaseAccounts"); err != nil { - return nil, err - } - - if mongodbCollection.Database, err = id.PopSegment("mongodbDatabases"); err != nil { - return nil, err - } - - if mongodbCollection.Name, err = id.PopSegment("collections"); err != nil { - return nil, err - } - - return &mongodbCollection, nil -} diff --git a/azurerm/internal/services/cosmos/parse/mongo_collection_test.go b/azurerm/internal/services/cosmos/parse/mongo_collection_test.go deleted file mode 100644 index d794009b988d..000000000000 --- a/azurerm/internal/services/cosmos/parse/mongo_collection_test.go +++ /dev/null @@ -1,95 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestMongoDbCollectionId(t *testing.T) { - testData := []struct { - Name string - Input string - Error bool - Expect *MongoDbCollectionId - }{ - { - Name: "Empty", - Input: "", - Error: true, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Error: true, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Error: true, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Error: true, - }, - { - Name: "Missing Database Account Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", - Error: true, - }, - { - Name: "Missing MongoDB Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/mongodbDatabases/", - Error: true, - }, - { - Name: "Missing Collection Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/mongodbDatabases/db1/collections/", - Error: true, - }, - { - Name: "MongoDB Collection ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/mongodbDatabases/db1/collections/coll1", - Error: false, - Expect: &MongoDbCollectionId{ - ResourceGroup: "resGroup1", - Account: "acc1", - Database: "db1", - Name: "coll1", - }, - }, - { - Name: "Wrong Casing", - Input: 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/MongodbDatabases/db1/Collections/coll1", - Error: true, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := MongoDbCollectionID(v.Input) - if err != nil { - if v.Error { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) - } - - if actual.Database != v.Expect.Database { - t.Fatalf("Expected %q but got %q for Database", v.Expect.Database, actual.Database) - } - - if actual.Account != v.Expect.Account { - t.Fatalf("Expected %q but got %q for Account", v.Expect.Account, actual.Account) - } - - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/cosmos/parse/mongo_database.go b/azurerm/internal/services/cosmos/parse/mongo_database.go deleted file mode 100644 index b046f935b526..000000000000 --- a/azurerm/internal/services/cosmos/parse/mongo_database.go +++ /dev/null @@ -1,34 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type MongoDbDatabaseId struct { - ResourceGroup string - Account string - Name string -} - -func MongoDbDatabaseID(input string) (*MongoDbDatabaseId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse MongoDb Database ID %q: %+v", input, err) - } - - mongodbDatabase := MongoDbDatabaseId{ - ResourceGroup: id.ResourceGroup, - } - - if mongodbDatabase.Account, err = id.PopSegment("databaseAccounts"); err != nil { - return nil, err - } - - if mongodbDatabase.Name, err = id.PopSegment("mongodbDatabases"); err != nil { - return nil, err - } - - return &mongodbDatabase, nil -} diff --git a/azurerm/internal/services/cosmos/parse/mongo_database_test.go b/azurerm/internal/services/cosmos/parse/mongo_database_test.go deleted file mode 100644 index 51807008ce9d..000000000000 --- a/azurerm/internal/services/cosmos/parse/mongo_database_test.go +++ /dev/null @@ -1,85 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestMongoDbDatabaseID(t *testing.T) { - testData := []struct { - Name string - Input string - Error bool - Expect *MongoDbDatabaseId - }{ - { - Name: "Empty", - Input: "", - Error: true, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Error: true, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Error: true, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Error: true, - }, - { - Name: "Missing Database Account Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", - Error: true, - }, - { - Name: "Missing MongoDB Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/mongodbDatabases/", - Error: true, - }, - { - Name: "MongoDB Database ID", - Input: 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/mongodbDatabases/db1", - Error: false, - Expect: &MongoDbDatabaseId{ - ResourceGroup: "resGroup1", - Account: "acc1", - Name: "db1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/MongodbDatabases/db1", - Error: true, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := MongoDbDatabaseID(v.Input) - if err != nil { - if v.Error { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) - } - - if actual.Account != v.Expect.Account { - t.Fatalf("Expected %q but got %q for Account", v.Expect.Account, actual.Account) - } - - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/cosmos/parse/mongodb_collection.go b/azurerm/internal/services/cosmos/parse/mongodb_collection.go new file mode 100644 index 000000000000..9551b431bcb9 --- /dev/null +++ b/azurerm/internal/services/cosmos/parse/mongodb_collection.go @@ -0,0 +1,81 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type MongodbCollectionId struct { + SubscriptionId string + ResourceGroup string + DatabaseAccountName string + MongodbDatabaseName string + CollectionName string +} + +func NewMongodbCollectionID(subscriptionId, resourceGroup, databaseAccountName, mongodbDatabaseName, collectionName string) MongodbCollectionId { + return MongodbCollectionId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + DatabaseAccountName: databaseAccountName, + MongodbDatabaseName: mongodbDatabaseName, + CollectionName: collectionName, + } +} + +func (id MongodbCollectionId) String() string { + segments := []string{ + fmt.Sprintf("Collection Name %q", id.CollectionName), + fmt.Sprintf("Mongodb Database Name %q", id.MongodbDatabaseName), + fmt.Sprintf("Database Account Name %q", id.DatabaseAccountName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Mongodb Collection", segmentsStr) +} + +func (id MongodbCollectionId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DocumentDB/databaseAccounts/%s/mongodbDatabases/%s/collections/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.DatabaseAccountName, id.MongodbDatabaseName, id.CollectionName) +} + +// MongodbCollectionID parses a MongodbCollection ID into an MongodbCollectionId struct +func MongodbCollectionID(input string) (*MongodbCollectionId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := MongodbCollectionId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } 
+ + if resourceId.DatabaseAccountName, err = id.PopSegment("databaseAccounts"); err != nil { + return nil, err + } + if resourceId.MongodbDatabaseName, err = id.PopSegment("mongodbDatabases"); err != nil { + return nil, err + } + if resourceId.CollectionName, err = id.PopSegment("collections"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/cosmos/parse/mongodb_collection_test.go b/azurerm/internal/services/cosmos/parse/mongodb_collection_test.go new file mode 100644 index 000000000000..e38dc6232b04 --- /dev/null +++ b/azurerm/internal/services/cosmos/parse/mongodb_collection_test.go @@ -0,0 +1,144 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = MongodbCollectionId{} + +func TestMongodbCollectionIDFormatter(t *testing.T) { + actual := NewMongodbCollectionID("12345678-1234-9876-4563-123456789012", "resGroup1", "acc1", "db1", "coll1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/mongodbDatabases/db1/collections/coll1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestMongodbCollectionID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *MongodbCollectionId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/", + Error: true, + }, + + { + // missing value for DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", + Error: true, + }, + + { + // missing MongodbDatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/", + Error: true, + }, + + { + // missing value for MongodbDatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/mongodbDatabases/", + Error: true, + }, + + { + // missing CollectionName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/mongodbDatabases/db1/", + Error: true, + }, + + { + // missing value for CollectionName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/mongodbDatabases/db1/collections/", + Error: true, + }, + + { + // valid + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/mongodbDatabases/db1/collections/coll1", + Expected: &MongodbCollectionId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + DatabaseAccountName: "acc1", + MongodbDatabaseName: "db1", + CollectionName: "coll1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DOCUMENTDB/DATABASEACCOUNTS/ACC1/MONGODBDATABASES/DB1/COLLECTIONS/COLL1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := MongodbCollectionID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.DatabaseAccountName != v.Expected.DatabaseAccountName { + t.Fatalf("Expected %q but got %q for DatabaseAccountName", v.Expected.DatabaseAccountName, actual.DatabaseAccountName) + } + if actual.MongodbDatabaseName != v.Expected.MongodbDatabaseName { + t.Fatalf("Expected %q but got %q for MongodbDatabaseName", v.Expected.MongodbDatabaseName, actual.MongodbDatabaseName) + } + if actual.CollectionName != v.Expected.CollectionName { + t.Fatalf("Expected %q but got %q for CollectionName", v.Expected.CollectionName, actual.CollectionName) + } + } +} diff --git a/azurerm/internal/services/cosmos/parse/mongodb_database.go b/azurerm/internal/services/cosmos/parse/mongodb_database.go new file mode 100644 index 000000000000..12ae894dcda1 --- /dev/null +++ b/azurerm/internal/services/cosmos/parse/mongodb_database.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type MongodbDatabaseId struct { + SubscriptionId string + ResourceGroup string + DatabaseAccountName string + Name string +} + +func NewMongodbDatabaseID(subscriptionId, resourceGroup, databaseAccountName, name string) MongodbDatabaseId { + return MongodbDatabaseId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + DatabaseAccountName: databaseAccountName, + Name: name, + } +} + +func (id MongodbDatabaseId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Database Account Name %q", id.DatabaseAccountName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Mongodb Database", segmentsStr) +} + +func (id MongodbDatabaseId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DocumentDB/databaseAccounts/%s/mongodbDatabases/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.DatabaseAccountName, id.Name) +} + +// MongodbDatabaseID parses a MongodbDatabase ID into an MongodbDatabaseId struct +func MongodbDatabaseID(input string) (*MongodbDatabaseId, error) { + id, err := azure.ParseAzureResourceID(input) + if err 
!= nil { + return nil, err + } + + resourceId := MongodbDatabaseId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.DatabaseAccountName, err = id.PopSegment("databaseAccounts"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("mongodbDatabases"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/cosmos/parse/mongodb_database_test.go b/azurerm/internal/services/cosmos/parse/mongodb_database_test.go new file mode 100644 index 000000000000..d382b5918e9a --- /dev/null +++ b/azurerm/internal/services/cosmos/parse/mongodb_database_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = MongodbDatabaseId{} + +func TestMongodbDatabaseIDFormatter(t *testing.T) { + actual := NewMongodbDatabaseID("12345678-1234-9876-4563-123456789012", "resGroup1", "acc1", "db1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/mongodbDatabases/db1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestMongodbDatabaseID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *MongodbDatabaseId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/", + Error: true, + }, + + { + // missing value for DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/mongodbDatabases/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/mongodbDatabases/db1", + Expected: &MongodbDatabaseId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + DatabaseAccountName: "acc1", + Name: "db1", + }, + }, + + { + // upper-cased + Input: 
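For orientation, this is the call-site pattern the regenerated types enable: parse the stored ID once, then address the API with named fields (note that `Account` has become `DatabaseAccountName`). The helper below is an illustrative sketch, not code from this diff, and must live inside the provider module to import the internal `parse` package:

```go
package cosmosexample

import (
	"fmt"

	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/parse"
)

// mongoDatabaseLookup is a stand-in for the arguments an API Get call would
// need; it is not a type from the provider.
type mongoDatabaseLookup struct {
	ResourceGroup string
	Account       string
	Database      string
}

// lookupFromStateID turns the raw ID kept in Terraform state into the fields
// an API client would be called with.
func lookupFromStateID(raw string) (*mongoDatabaseLookup, error) {
	id, err := parse.MongodbDatabaseID(raw)
	if err != nil {
		return nil, fmt.Errorf("parsing Mongo database ID %q: %+v", raw, err)
	}
	return &mongoDatabaseLookup{
		ResourceGroup: id.ResourceGroup,
		Account:       id.DatabaseAccountName, // previously id.Account
		Database:      id.Name,
	}, nil
}
```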
"/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DOCUMENTDB/DATABASEACCOUNTS/ACC1/MONGODBDATABASES/DB1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := MongodbDatabaseID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.DatabaseAccountName != v.Expected.DatabaseAccountName { + t.Fatalf("Expected %q but got %q for DatabaseAccountName", v.Expected.DatabaseAccountName, actual.DatabaseAccountName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/cosmos/parse/sql_container.go b/azurerm/internal/services/cosmos/parse/sql_container.go index aa353e37582c..03d23d745d4b 100644 --- a/azurerm/internal/services/cosmos/parse/sql_container.go +++ b/azurerm/internal/services/cosmos/parse/sql_container.go @@ -1,39 +1,81 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type SqlContainerId struct { - ResourceGroup string - Account string - Database string - Name string + SubscriptionId string + ResourceGroup string + DatabaseAccountName string + SqlDatabaseName string + ContainerName string +} + +func NewSqlContainerID(subscriptionId, resourceGroup, databaseAccountName, sqlDatabaseName, containerName string) SqlContainerId { + return SqlContainerId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + DatabaseAccountName: databaseAccountName, + SqlDatabaseName: sqlDatabaseName, + ContainerName: containerName, + } +} + +func (id SqlContainerId) String() string { + segments := []string{ + fmt.Sprintf("Container Name %q", id.ContainerName), + fmt.Sprintf("Sql Database Name %q", id.SqlDatabaseName), + fmt.Sprintf("Database Account Name %q", id.DatabaseAccountName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Sql Container", segmentsStr) +} + +func (id SqlContainerId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DocumentDB/databaseAccounts/%s/sqlDatabases/%s/containers/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.DatabaseAccountName, id.SqlDatabaseName, id.ContainerName) } +// SqlContainerID parses a SqlContainer ID into an SqlContainerId struct func SqlContainerID(input string) (*SqlContainerId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse SQL Container ID %q: %+v", input, err) + return nil, err } - sqlContainer := SqlContainerId{ - ResourceGroup: id.ResourceGroup, + resourceId := SqlContainerId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, } - if sqlContainer.Account, err = id.PopSegment("databaseAccounts"); err != nil { - return nil, err + if 
resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if sqlContainer.Database, err = id.PopSegment("sqlDatabases"); err != nil { + if resourceId.DatabaseAccountName, err = id.PopSegment("databaseAccounts"); err != nil { + return nil, err + } + if resourceId.SqlDatabaseName, err = id.PopSegment("sqlDatabases"); err != nil { + return nil, err + } + if resourceId.ContainerName, err = id.PopSegment("containers"); err != nil { return nil, err } - if sqlContainer.Name, err = id.PopSegment("containers"); err != nil { + if err := id.ValidateNoEmptySegments(input); err != nil { return nil, err } - return &sqlContainer, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/cosmos/parse/sql_container_test.go b/azurerm/internal/services/cosmos/parse/sql_container_test.go index 958b64cfbb52..d14fb4be9b73 100644 --- a/azurerm/internal/services/cosmos/parse/sql_container_test.go +++ b/azurerm/internal/services/cosmos/parse/sql_container_test.go @@ -1,71 +1,117 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" ) +var _ resourceid.Formatter = SqlContainerId{} + +func TestSqlContainerIDFormatter(t *testing.T) { + actual := NewSqlContainerID("12345678-1234-9876-4563-123456789012", "resGroup1", "acc1", "db1", "container1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1/containers/container1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + func TestSqlContainerID(t *testing.T) { testData := []struct { - Name string - Input string - Error bool - Expect *SqlContainerId + Input string + Error bool + Expected *SqlContainerId }{ + { - Name: "Empty", + // empty Input: "", Error: true, }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + // missing DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/", Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", + // missing value for DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", Error: true, }, + + { + // missing SqlDatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/", + Error: true, + }, + { - Name: "Missing Database Account Value", - Input: 
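One practical benefit of the new `String()` method on `SqlContainerId` is readable diagnostics: errors can reference the whole ID in plain English rather than a raw ARM path. A small sketch, illustrative only and again assuming a location inside the provider module:

```go
package cosmosexample

import (
	"fmt"

	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/parse"
)

// notFoundError shows String() being picked up by %s, producing something like:
//   Sql Container: (Container Name "container1" / Sql Database Name "db1" /
//   Database Account Name "acc1" / Resource Group "resGroup1") was not found
func notFoundError(rawID string) error {
	id, err := parse.SqlContainerID(rawID)
	if err != nil {
		return fmt.Errorf("parsing %q: %+v", rawID, err)
	}
	return fmt.Errorf("%s was not found", id)
}
```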
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", + // missing value for SqlDatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/", Error: true, }, + { - Name: "Missing SQL Database Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/", + // missing ContainerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1/", Error: true, }, + { - Name: "Missing SQL Container Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1/containers/", + // missing value for ContainerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1/containers/", Error: true, }, + { - Name: "SQL Container ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1/containers/container1", - Error: false, - Expect: &SqlContainerId{ - ResourceGroup: "resGroup1", - Account: "acc1", - Database: "db1", - Name: "container1", + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1/containers/container1", + Expected: &SqlContainerId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + DatabaseAccountName: "acc1", + SqlDatabaseName: "db1", + ContainerName: "container1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1/Containers/container1", + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DOCUMENTDB/DATABASEACCOUNTS/ACC1/SQLDATABASES/DB1/CONTAINERS/CONTAINER1", Error: true, }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := SqlContainerID(v.Input) if err != nil { @@ -73,23 +119,26 @@ func TestSqlContainerID(t *testing.T) { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.Database != v.Expect.Database { - t.Fatalf("Expected %q but got %q for Database", v.Expect.Database, actual.Database) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) } - - if actual.Account != v.Expect.Account { - t.Fatalf("Expected %q but got %q for Account", v.Expect.Account, actual.Account) + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) } - - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q 
for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) + if actual.DatabaseAccountName != v.Expected.DatabaseAccountName { + t.Fatalf("Expected %q but got %q for DatabaseAccountName", v.Expected.DatabaseAccountName, actual.DatabaseAccountName) + } + if actual.SqlDatabaseName != v.Expected.SqlDatabaseName { + t.Fatalf("Expected %q but got %q for SqlDatabaseName", v.Expected.SqlDatabaseName, actual.SqlDatabaseName) + } + if actual.ContainerName != v.Expected.ContainerName { + t.Fatalf("Expected %q but got %q for ContainerName", v.Expected.ContainerName, actual.ContainerName) } } } diff --git a/azurerm/internal/services/cosmos/parse/sql_database.go b/azurerm/internal/services/cosmos/parse/sql_database.go index 8fa75b5e6fce..679334007931 100644 --- a/azurerm/internal/services/cosmos/parse/sql_database.go +++ b/azurerm/internal/services/cosmos/parse/sql_database.go @@ -1,34 +1,75 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type SqlDatabaseId struct { - ResourceGroup string - Account string - Name string + SubscriptionId string + ResourceGroup string + DatabaseAccountName string + Name string +} + +func NewSqlDatabaseID(subscriptionId, resourceGroup, databaseAccountName, name string) SqlDatabaseId { + return SqlDatabaseId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + DatabaseAccountName: databaseAccountName, + Name: name, + } +} + +func (id SqlDatabaseId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Database Account Name %q", id.DatabaseAccountName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Sql Database", segmentsStr) +} + +func (id SqlDatabaseId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DocumentDB/databaseAccounts/%s/sqlDatabases/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.DatabaseAccountName, id.Name) } +// SqlDatabaseID parses a SqlDatabase ID into an SqlDatabaseId struct func SqlDatabaseID(input string) (*SqlDatabaseId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse SQL Database ID %q: %+v", input, err) + return nil, err + } + + resourceId := SqlDatabaseId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, } - sqlDatabase := SqlDatabaseId{ - ResourceGroup: id.ResourceGroup, + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - if sqlDatabase.Account, err = id.PopSegment("databaseAccounts"); err != nil { + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.DatabaseAccountName, err = id.PopSegment("databaseAccounts"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("sqlDatabases"); err != nil { return nil, err } - if sqlDatabase.Name, err = id.PopSegment("sqlDatabases"); err != nil { + if err := id.ValidateNoEmptySegments(input); err != nil { return nil, err } - return &sqlDatabase, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/cosmos/parse/sql_database_test.go b/azurerm/internal/services/cosmos/parse/sql_database_test.go index 808d63b0a1df..59edf72bb891 100644 --- 
a/azurerm/internal/services/cosmos/parse/sql_database_test.go +++ b/azurerm/internal/services/cosmos/parse/sql_database_test.go @@ -1,65 +1,104 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" ) +var _ resourceid.Formatter = SqlDatabaseId{} + +func TestSqlDatabaseIDFormatter(t *testing.T) { + actual := NewSqlDatabaseID("12345678-1234-9876-4563-123456789012", "resGroup1", "acc1", "db1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + func TestSqlDatabaseID(t *testing.T) { testData := []struct { - Name string - Input string - Error bool - Expect *SqlDatabaseId + Input string + Error bool + Expected *SqlDatabaseId }{ + { - Name: "Empty", + // empty Input: "", Error: true, }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", + // missing DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/", Error: true, }, + { - Name: "Missing Database Account Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", + // missing value for DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", Error: true, }, + { - Name: "Missing SQL Database Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/", + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/", Error: true, }, + { - Name: "SQL Database ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1", - Error: false, - Expect: &SqlDatabaseId{ - ResourceGroup: "resGroup1", - Account: "acc1", - Name: "db1", + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1", + Expected: &SqlDatabaseId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + DatabaseAccountName: "acc1", + 
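The `var _ resourceid.Formatter = SqlDatabaseId{}` line at the top of each regenerated test is a compile-time conformance check rather than a runtime test. A self-contained illustration of the idiom; the interface here is only a stand-in for the provider's `resourceid.Formatter`, which at minimum requires the `ID() string` method these types provide:

```go
package main

import "fmt"

// formatter stands in for the interface the generated tests assert against;
// the real one lives in azurerm/internal/resourceid.
type formatter interface {
	ID() string
}

type widgetID struct{ name string }

func (w widgetID) ID() string { return "/widgets/" + w.name }

// The blank assignment costs nothing at runtime, but the file stops compiling
// if widgetID ever loses or changes its ID() method.
var _ formatter = widgetID{}

func main() {
	fmt.Println(widgetID{name: "w1"}.ID())
}
```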
Name: "db1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/SqlDatabases/db1", + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DOCUMENTDB/DATABASEACCOUNTS/ACC1/SQLDATABASES/DB1", Error: true, }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := SqlDatabaseID(v.Input) if err != nil { @@ -67,19 +106,23 @@ func TestSqlDatabaseID(t *testing.T) { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.Account != v.Expect.Account { - t.Fatalf("Expected %q but got %q for Account", v.Expect.Account, actual.Account) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) } - - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.DatabaseAccountName != v.Expected.DatabaseAccountName { + t.Fatalf("Expected %q but got %q for DatabaseAccountName", v.Expected.DatabaseAccountName, actual.DatabaseAccountName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) } } } diff --git a/azurerm/internal/services/cosmos/parse/sql_stored_procedure.go b/azurerm/internal/services/cosmos/parse/sql_stored_procedure.go index 7e4a45999db1..1128abe3dbf6 100644 --- a/azurerm/internal/services/cosmos/parse/sql_stored_procedure.go +++ b/azurerm/internal/services/cosmos/parse/sql_stored_procedure.go @@ -1,44 +1,87 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) -type StoredProcedureId struct { - ResourceGroup string - Account string - Database string - Container string - Name string +type SqlStoredProcedureId struct { + SubscriptionId string + ResourceGroup string + DatabaseAccountName string + SqlDatabaseName string + ContainerName string + StoredProcedureName string +} + +func NewSqlStoredProcedureID(subscriptionId, resourceGroup, databaseAccountName, sqlDatabaseName, containerName, storedProcedureName string) SqlStoredProcedureId { + return SqlStoredProcedureId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + DatabaseAccountName: databaseAccountName, + SqlDatabaseName: sqlDatabaseName, + ContainerName: containerName, + StoredProcedureName: storedProcedureName, + } +} + +func (id SqlStoredProcedureId) String() string { + segments := []string{ + fmt.Sprintf("Stored Procedure Name %q", id.StoredProcedureName), + fmt.Sprintf("Container Name %q", id.ContainerName), + fmt.Sprintf("Sql Database Name %q", id.SqlDatabaseName), + fmt.Sprintf("Database Account Name %q", id.DatabaseAccountName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := 
strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Sql Stored Procedure", segmentsStr) +} + +func (id SqlStoredProcedureId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DocumentDB/databaseAccounts/%s/sqlDatabases/%s/containers/%s/storedProcedures/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.DatabaseAccountName, id.SqlDatabaseName, id.ContainerName, id.StoredProcedureName) } -func StoredProcedureID(input string) (*StoredProcedureId, error) { +// SqlStoredProcedureID parses a SqlStoredProcedure ID into an SqlStoredProcedureId struct +func SqlStoredProcedureID(input string) (*SqlStoredProcedureId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Stored Procedure ID %q: %+v", input, err) + return nil, err } - storedProcedure := StoredProcedureId{ - ResourceGroup: id.ResourceGroup, + resourceId := SqlStoredProcedureId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, } - if storedProcedure.Account, err = id.PopSegment("databaseAccounts"); err != nil { - return nil, err + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - if storedProcedure.Database, err = id.PopSegment("sqlDatabases"); err != nil { - return nil, err + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if storedProcedure.Container, err = id.PopSegment("containers"); err != nil { + if resourceId.DatabaseAccountName, err = id.PopSegment("databaseAccounts"); err != nil { + return nil, err + } + if resourceId.SqlDatabaseName, err = id.PopSegment("sqlDatabases"); err != nil { + return nil, err + } + if resourceId.ContainerName, err = id.PopSegment("containers"); err != nil { + return nil, err + } + if resourceId.StoredProcedureName, err = id.PopSegment("storedProcedures"); err != nil { return nil, err } - if storedProcedure.Name, err = id.PopSegment("storedProcedures"); err != nil { + if err := id.ValidateNoEmptySegments(input); err != nil { return nil, err } - return &storedProcedure, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/cosmos/parse/sql_stored_procedure_test.go b/azurerm/internal/services/cosmos/parse/sql_stored_procedure_test.go index dbb0723badc5..4ca9d167a1b6 100644 --- a/azurerm/internal/services/cosmos/parse/sql_stored_procedure_test.go +++ b/azurerm/internal/services/cosmos/parse/sql_stored_procedure_test.go @@ -1,105 +1,160 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" ) -func TestStoredProcedureID(t *testing.T) { +var _ resourceid.Formatter = SqlStoredProcedureId{} + +func TestSqlStoredProcedureIDFormatter(t *testing.T) { + actual := NewSqlStoredProcedureID("12345678-1234-9876-4563-123456789012", "resGroup1", "acc1", "db1", "container1", "sproc1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1/containers/container1/storedProcedures/sproc1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestSqlStoredProcedureID(t *testing.T) { testData := []struct { - Name string - Input string - Error bool - Expect *StoredProcedureId + Input string + Error bool + Expected 
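Because each generated ID type carries the full set of parent fields, child IDs can be derived from a parsed parent without string manipulation. A short sketch using the two types above (illustrative; it must live inside the provider module):

```go
package cosmosexample

import (
	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/parse"
)

// storedProcedureIDFor builds the ID of a stored procedure living inside an
// existing SQL container, reusing the parsed parent segments.
func storedProcedureIDFor(containerID, sprocName string) (string, error) {
	container, err := parse.SqlContainerID(containerID)
	if err != nil {
		return "", err
	}
	id := parse.NewSqlStoredProcedureID(
		container.SubscriptionId,
		container.ResourceGroup,
		container.DatabaseAccountName,
		container.SqlDatabaseName,
		container.ContainerName,
		sprocName,
	)
	return id.ID(), nil
}
```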
*SqlStoredProcedureId }{ + { - Name: "Empty", + // empty Input: "", Error: true, }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + // missing DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/", Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + // missing value for DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", + // missing SqlDatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/", Error: true, }, + { - Name: "Missing Database Account Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", + // missing value for SqlDatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/", Error: true, }, + { - Name: "Missing SQL Database Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/", + // missing ContainerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1/", Error: true, }, + { - Name: "Missing Container Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1/containers/", + // missing value for ContainerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1/containers/", Error: true, }, + { - Name: "Missing Stored Procedure Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1/containers/container1/storedProcedures/", + // missing StoredProcedureName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1/containers/container1/", Error: true, }, + { - Name: "Stored Procedure ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1/containers/container1/storedProcedures/sproc1", - Error: false, - Expect: &StoredProcedureId{ - ResourceGroup: "resGroup1", - Account: "acc1", - Database: "db1", - Container: "container1", - Name: "sproc1", + // missing value for StoredProcedureName + 
Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1/containers/container1/storedProcedures/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1/containers/container1/storedProcedures/sproc1", + Expected: &SqlStoredProcedureId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + DatabaseAccountName: "acc1", + SqlDatabaseName: "db1", + ContainerName: "container1", + StoredProcedureName: "sproc1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1/containers/container1/StoredProcedures/sproc1", + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DOCUMENTDB/DATABASEACCOUNTS/ACC1/SQLDATABASES/DB1/CONTAINERS/CONTAINER1/STOREDPROCEDURES/SPROC1", Error: true, }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) - actual, err := StoredProcedureID(v.Input) + actual, err := SqlStoredProcedureID(v.Input) if err != nil { if v.Error { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.Container != v.Expect.Container { - t.Fatalf("Expected %q but got %q for Container", v.Expect.Container, actual.Container) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) } - - if actual.Database != v.Expect.Database { - t.Fatalf("Expected %q but got %q for Database", v.Expect.Database, actual.Database) + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) } - - if actual.Account != v.Expect.Account { - t.Fatalf("Expected %q but got %q for Account", v.Expect.Account, actual.Account) + if actual.DatabaseAccountName != v.Expected.DatabaseAccountName { + t.Fatalf("Expected %q but got %q for DatabaseAccountName", v.Expected.DatabaseAccountName, actual.DatabaseAccountName) } - - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) + if actual.SqlDatabaseName != v.Expected.SqlDatabaseName { + t.Fatalf("Expected %q but got %q for SqlDatabaseName", v.Expected.SqlDatabaseName, actual.SqlDatabaseName) + } + if actual.ContainerName != v.Expected.ContainerName { + t.Fatalf("Expected %q but got %q for ContainerName", v.Expected.ContainerName, actual.ContainerName) + } + if actual.StoredProcedureName != v.Expected.StoredProcedureName { + t.Fatalf("Expected %q but got %q for StoredProcedureName", v.Expected.StoredProcedureName, actual.StoredProcedureName) } } } diff --git a/azurerm/internal/services/cosmos/parse/table.go b/azurerm/internal/services/cosmos/parse/table.go index c58e728b3740..17b726188f85 100644 --- a/azurerm/internal/services/cosmos/parse/table.go +++ b/azurerm/internal/services/cosmos/parse/table.go @@ 
-1,34 +1,75 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type TableId struct { - ResourceGroup string - Account string - Name string + SubscriptionId string + ResourceGroup string + DatabaseAccountName string + Name string +} + +func NewTableID(subscriptionId, resourceGroup, databaseAccountName, name string) TableId { + return TableId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + DatabaseAccountName: databaseAccountName, + Name: name, + } +} + +func (id TableId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Database Account Name %q", id.DatabaseAccountName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Table", segmentsStr) +} + +func (id TableId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DocumentDB/databaseAccounts/%s/tables/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.DatabaseAccountName, id.Name) } +// TableID parses a Table ID into an TableId struct func TableID(input string) (*TableId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Table ID %q: %+v", input, err) + return nil, err + } + + resourceId := TableId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, } - table := TableId{ - ResourceGroup: id.ResourceGroup, + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - if table.Account, err = id.PopSegment("databaseAccounts"); err != nil { + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.DatabaseAccountName, err = id.PopSegment("databaseAccounts"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("tables"); err != nil { return nil, err } - if table.Name, err = id.PopSegment("tables"); err != nil { + if err := id.ValidateNoEmptySegments(input); err != nil { return nil, err } - return &table, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/cosmos/parse/table_test.go b/azurerm/internal/services/cosmos/parse/table_test.go index 443057be11b2..5c9ee185615d 100644 --- a/azurerm/internal/services/cosmos/parse/table_test.go +++ b/azurerm/internal/services/cosmos/parse/table_test.go @@ -1,75 +1,104 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" ) +var _ resourceid.Formatter = TableId{} + +func TestTableIDFormatter(t *testing.T) { + actual := NewTableID("12345678-1234-9876-4563-123456789012", "resGroup1", "acc1", "table1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/tables/table1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + func TestTableID(t *testing.T) { testData := []struct { - Name string - Input string - Error bool - Expect *TableId + Input string + Error bool + Expected *TableId }{ + { - Name: "Empty", + // empty Input: "", Error: true, }, + { - Name: "No Resource Groups Segment", - 
Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + // missing SubscriptionId + Input: "/", Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + // missing value for SubscriptionId + Input: "/subscriptions/", Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", Error: true, }, + { - Name: "Missing Database Account Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", Error: true, }, + { - Name: "Missing Table Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/tables/", + // missing DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/", Error: true, }, + { - Name: "Table ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/tables/table1", - Error: false, - Expect: &TableId{ - ResourceGroup: "resGroup1", - Account: "acc1", - Name: "table1", - }, + // missing value for DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", + Error: true, }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/tables/", + Error: true, + }, + { - Name: "Existing 2015-04-08 SDK Table ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/apis/table/tables/table1", - Error: false, - Expect: &TableId{ - ResourceGroup: "resGroup1", - Account: "acc1", - Name: "table1", + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/tables/table1", + Expected: &TableId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + DatabaseAccountName: "acc1", + Name: "table1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/Tables/table1", + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DOCUMENTDB/DATABASEACCOUNTS/ACC1/TABLES/TABLE1", Error: true, }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := TableID(v.Input) if err != nil { @@ -77,19 +106,23 @@ func TestTableID(t *testing.T) { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", 
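Note what the regenerated table test drops: the hand-written suite accepted the legacy 2015-04-08 SDK form (`.../apis/table/tables/table1`), and that case no longer appears. The snippet below is a hedged probe, not an assertion, of how the stricter parser treats both forms (illustrative; run from inside the provider module):

```go
package main

import (
	"fmt"

	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/parse"
)

func main() {
	ids := []string{
		// canonical form covered by the new generated test
		"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/tables/table1",
		// legacy 2015-04-08 form that the old hand-written test accepted
		"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/apis/table/tables/table1",
	}
	for _, raw := range ids {
		if _, err := parse.TableID(raw); err != nil {
			fmt.Printf("rejected: %s\n  %v\n", raw, err)
			continue
		}
		fmt.Printf("accepted: %s\n", raw)
	}
}
```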
v.Expect.Name, actual.Name) + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.Account != v.Expect.Account { - t.Fatalf("Expected %q but got %q for Account", v.Expect.Account, actual.Account) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) } - - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.DatabaseAccountName != v.Expected.DatabaseAccountName { + t.Fatalf("Expected %q but got %q for DatabaseAccountName", v.Expected.DatabaseAccountName, actual.DatabaseAccountName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) } } } diff --git a/azurerm/internal/services/cosmos/registration.go b/azurerm/internal/services/cosmos/registration.go index 7cd09cad3646..0d0ad4f2a2c5 100644 --- a/azurerm/internal/services/cosmos/registration.go +++ b/azurerm/internal/services/cosmos/registration.go @@ -21,22 +21,22 @@ func (r Registration) WebsiteCategories() []string { // SupportedDataSources returns the supported Data Sources supported by this Service func (r Registration) SupportedDataSources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_cosmosdb_account": dataSourceArmCosmosDbAccount(), + "azurerm_cosmosdb_account": dataSourceCosmosDbAccount(), } } // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_cosmosdb_account": resourceArmCosmosDbAccount(), - "azurerm_cosmosdb_cassandra_keyspace": resourceArmCosmosDbCassandraKeyspace(), - "azurerm_cosmosdb_gremlin_database": resourceArmCosmosGremlinDatabase(), - "azurerm_cosmosdb_gremlin_graph": resourceArmCosmosDbGremlinGraph(), - "azurerm_cosmosdb_mongo_collection": resourceArmCosmosDbMongoCollection(), - "azurerm_cosmosdb_mongo_database": resourceArmCosmosDbMongoDatabase(), - "azurerm_cosmosdb_sql_container": resourceArmCosmosDbSQLContainer(), - "azurerm_cosmosdb_sql_database": resourceArmCosmosDbSQLDatabase(), - "azurerm_cosmosdb_sql_stored_procedure": resourceArmCosmosDbSQLStoredProcedure(), - "azurerm_cosmosdb_table": resourceArmCosmosDbTable(), + "azurerm_cosmosdb_account": resourceCosmosDbAccount(), + "azurerm_cosmosdb_cassandra_keyspace": resourceCosmosDbCassandraKeyspace(), + "azurerm_cosmosdb_gremlin_database": resourceCosmosGremlinDatabase(), + "azurerm_cosmosdb_gremlin_graph": resourceCosmosDbGremlinGraph(), + "azurerm_cosmosdb_mongo_collection": resourceCosmosDbMongoCollection(), + "azurerm_cosmosdb_mongo_database": resourceCosmosDbMongoDatabase(), + "azurerm_cosmosdb_sql_container": resourceCosmosDbSQLContainer(), + "azurerm_cosmosdb_sql_database": resourceCosmosDbSQLDatabase(), + "azurerm_cosmosdb_sql_stored_procedure": resourceCosmosDbSQLStoredProcedure(), + "azurerm_cosmosdb_table": resourceCosmosDbTable(), } } diff --git a/azurerm/internal/services/cosmos/resourceids.go b/azurerm/internal/services/cosmos/resourceids.go new file mode 100644 index 000000000000..c2e160d37212 --- /dev/null +++ b/azurerm/internal/services/cosmos/resourceids.go @@ -0,0 +1,12 @@ +package cosmos + +//go:generate 
go run ../../tools/generator-resource-id/main.go -path=./ -name=CassandraKeyspace -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/cassandraKeyspaces/keyspace1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=DatabaseAccount -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=GremlinDatabase -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/gremlinDatabases/database1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=GremlinGraph -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/gremlinDatabases/database1/graphs/graph1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=MongodbCollection -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/mongodbDatabases/db1/collections/coll1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=MongodbDatabase -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/mongodbDatabases/db1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=SqlContainer -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1/containers/container1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=SqlDatabase -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=SqlStoredProcedure -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1/containers/container1/storedProcedures/sproc1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Table -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/tables/table1 diff --git a/azurerm/internal/services/cosmos/validate/cassandra_keyspace_id.go b/azurerm/internal/services/cosmos/validate/cassandra_keyspace_id.go new file mode 100644 index 000000000000..c18f2b899dd5 --- /dev/null +++ b/azurerm/internal/services/cosmos/validate/cassandra_keyspace_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/parse" +) + +func CassandraKeyspaceID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.CassandraKeyspaceID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/cosmos/validate/cassandra_keyspace_id_test.go 
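The new `resourceids.go` is the single source for the generated code in this diff: each directive hands the repo's generator a canonical example ID, and the parse and validate files here line up one-to-one with those directives (all carrying the generated-file note). Purely as a hypothetical illustration of the pattern, covering one more ID would be a single additional directive in the same style; the resource name and path segments below are invented:

```go
//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=NotebookWorkspace -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/notebookWorkspaces/workspace1
```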
b/azurerm/internal/services/cosmos/validate/cassandra_keyspace_id_test.go new file mode 100644 index 000000000000..6f17280c248e --- /dev/null +++ b/azurerm/internal/services/cosmos/validate/cassandra_keyspace_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestCassandraKeyspaceID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/", + Valid: false, + }, + + { + // missing value for DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/cassandraKeyspaces/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/cassandraKeyspaces/keyspace1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DOCUMENTDB/DATABASEACCOUNTS/ACC1/CASSANDRAKEYSPACES/KEYSPACE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := CassandraKeyspaceID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/cosmos/validate/database_account_id.go b/azurerm/internal/services/cosmos/validate/database_account_id.go new file mode 100644 index 000000000000..67f7e2f6322f --- /dev/null +++ b/azurerm/internal/services/cosmos/validate/database_account_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/parse" +) + +func DatabaseAccountID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.DatabaseAccountID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/cosmos/validate/database_account_id_test.go b/azurerm/internal/services/cosmos/validate/database_account_id_test.go new file mode 100644 index 000000000000..2b0d049c77c5 --- /dev/null +++ 
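The generated validators share Terraform's `SchemaValidateFunc` shape, `func(interface{}, string) ([]string, []error)`, so they can be assigned directly to a schema attribute's `ValidateFunc`; they can also be called on their own, as in this small sketch (illustrative; inside the provider module, with an arbitrary key name):

```go
package main

import (
	"fmt"

	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/validate"
)

func main() {
	accountID := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1"

	// A well-formed ID should yield no warnings and no errors; a malformed or
	// wrongly-cased one surfaces the parse error instead.
	warnings, errs := validate.DatabaseAccountID(accountID, "cosmosdb_account_id")
	fmt.Printf("warnings: %v, errors: %v\n", warnings, errs)
}
```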
b/azurerm/internal/services/cosmos/validate/database_account_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestDatabaseAccountID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DOCUMENTDB/DATABASEACCOUNTS/ACC1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := DatabaseAccountID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/cosmos/validate/gremlin_database_id.go b/azurerm/internal/services/cosmos/validate/gremlin_database_id.go new file mode 100644 index 000000000000..47a45f8ec201 --- /dev/null +++ b/azurerm/internal/services/cosmos/validate/gremlin_database_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/parse" +) + +func GremlinDatabaseID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.GremlinDatabaseID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/cosmos/validate/gremlin_database_id_test.go b/azurerm/internal/services/cosmos/validate/gremlin_database_id_test.go new file mode 100644 index 000000000000..82452e3b9755 --- /dev/null +++ b/azurerm/internal/services/cosmos/validate/gremlin_database_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestGremlinDatabaseID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + 
Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/", + Valid: false, + }, + + { + // missing value for DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/gremlinDatabases/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/gremlinDatabases/database1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DOCUMENTDB/DATABASEACCOUNTS/ACC1/GREMLINDATABASES/DATABASE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := GremlinDatabaseID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/cosmos/validate/gremlin_graph_id.go b/azurerm/internal/services/cosmos/validate/gremlin_graph_id.go new file mode 100644 index 000000000000..65240b8be9cb --- /dev/null +++ b/azurerm/internal/services/cosmos/validate/gremlin_graph_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/parse" +) + +func GremlinGraphID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.GremlinGraphID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/cosmos/validate/gremlin_graph_id_test.go b/azurerm/internal/services/cosmos/validate/gremlin_graph_id_test.go new file mode 100644 index 000000000000..3ff10780047f --- /dev/null +++ b/azurerm/internal/services/cosmos/validate/gremlin_graph_id_test.go @@ -0,0 +1,100 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestGremlinGraphID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing DatabaseAccountName + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/", + Valid: false, + }, + + { + // missing value for DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", + Valid: false, + }, + + { + // missing GremlinDatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/", + Valid: false, + }, + + { + // missing value for GremlinDatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/gremlinDatabases/", + Valid: false, + }, + + { + // missing GraphName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/gremlinDatabases/database1/", + Valid: false, + }, + + { + // missing value for GraphName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/gremlinDatabases/database1/graphs/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/gremlinDatabases/database1/graphs/graph1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DOCUMENTDB/DATABASEACCOUNTS/ACC1/GREMLINDATABASES/DATABASE1/GRAPHS/GRAPH1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := GremlinGraphID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/cosmos/validate/mongodb_collection_id.go b/azurerm/internal/services/cosmos/validate/mongodb_collection_id.go new file mode 100644 index 000000000000..f1fc63f3cff4 --- /dev/null +++ b/azurerm/internal/services/cosmos/validate/mongodb_collection_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/parse" +) + +func MongodbCollectionID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.MongodbCollectionID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/cosmos/validate/mongodb_collection_id_test.go b/azurerm/internal/services/cosmos/validate/mongodb_collection_id_test.go new file mode 100644 index 000000000000..d5033e7e9e5c --- /dev/null +++ b/azurerm/internal/services/cosmos/validate/mongodb_collection_id_test.go @@ -0,0 +1,100 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestMongodbCollectionID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: 
false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/", + Valid: false, + }, + + { + // missing value for DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", + Valid: false, + }, + + { + // missing MongodbDatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/", + Valid: false, + }, + + { + // missing value for MongodbDatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/mongodbDatabases/", + Valid: false, + }, + + { + // missing CollectionName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/mongodbDatabases/db1/", + Valid: false, + }, + + { + // missing value for CollectionName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/mongodbDatabases/db1/collections/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/mongodbDatabases/db1/collections/coll1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DOCUMENTDB/DATABASEACCOUNTS/ACC1/MONGODBDATABASES/DB1/COLLECTIONS/COLL1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := MongodbCollectionID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/cosmos/validate/mongodb_database_id.go b/azurerm/internal/services/cosmos/validate/mongodb_database_id.go new file mode 100644 index 000000000000..92ebe2f51ef9 --- /dev/null +++ b/azurerm/internal/services/cosmos/validate/mongodb_database_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/parse" +) + +func MongodbDatabaseID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.MongodbDatabaseID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/cosmos/validate/mongodb_database_id_test.go b/azurerm/internal/services/cosmos/validate/mongodb_database_id_test.go new file mode 100644 index 000000000000..dc27263d07bc --- /dev/null +++ b/azurerm/internal/services/cosmos/validate/mongodb_database_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import 
"testing" + +func TestMongodbDatabaseID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/", + Valid: false, + }, + + { + // missing value for DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/mongodbDatabases/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/mongodbDatabases/db1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DOCUMENTDB/DATABASEACCOUNTS/ACC1/MONGODBDATABASES/DB1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := MongodbDatabaseID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/cosmos/validate/sql_container_id.go b/azurerm/internal/services/cosmos/validate/sql_container_id.go new file mode 100644 index 000000000000..25feb31514c7 --- /dev/null +++ b/azurerm/internal/services/cosmos/validate/sql_container_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/parse" +) + +func SqlContainerID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.SqlContainerID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/cosmos/validate/sql_container_id_test.go b/azurerm/internal/services/cosmos/validate/sql_container_id_test.go new file mode 100644 index 000000000000..76a70f6ff398 --- /dev/null +++ b/azurerm/internal/services/cosmos/validate/sql_container_id_test.go @@ -0,0 +1,100 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestSqlContainerID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing 
value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/", + Valid: false, + }, + + { + // missing value for DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", + Valid: false, + }, + + { + // missing SqlDatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/", + Valid: false, + }, + + { + // missing value for SqlDatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/", + Valid: false, + }, + + { + // missing ContainerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1/", + Valid: false, + }, + + { + // missing value for ContainerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1/containers/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1/containers/container1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DOCUMENTDB/DATABASEACCOUNTS/ACC1/SQLDATABASES/DB1/CONTAINERS/CONTAINER1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := SqlContainerID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/cosmos/validate/sql_database_id.go b/azurerm/internal/services/cosmos/validate/sql_database_id.go new file mode 100644 index 000000000000..8ee93ed7f04b --- /dev/null +++ b/azurerm/internal/services/cosmos/validate/sql_database_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/parse" +) + +func SqlDatabaseID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.SqlDatabaseID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/cosmos/validate/sql_database_id_test.go b/azurerm/internal/services/cosmos/validate/sql_database_id_test.go new file mode 100644 index 000000000000..1864eecac4f0 --- /dev/null +++ b/azurerm/internal/services/cosmos/validate/sql_database_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import 
"testing" + +func TestSqlDatabaseID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/", + Valid: false, + }, + + { + // missing value for DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DOCUMENTDB/DATABASEACCOUNTS/ACC1/SQLDATABASES/DB1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := SqlDatabaseID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/cosmos/validate/sql_stored_procedure_id.go b/azurerm/internal/services/cosmos/validate/sql_stored_procedure_id.go new file mode 100644 index 000000000000..12ade7f5a58f --- /dev/null +++ b/azurerm/internal/services/cosmos/validate/sql_stored_procedure_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/parse" +) + +func SqlStoredProcedureID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.SqlStoredProcedureID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/cosmos/validate/sql_stored_procedure_id_test.go b/azurerm/internal/services/cosmos/validate/sql_stored_procedure_id_test.go new file mode 100644 index 000000000000..566db6b2e619 --- /dev/null +++ b/azurerm/internal/services/cosmos/validate/sql_stored_procedure_id_test.go @@ -0,0 +1,112 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestSqlStoredProcedureID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", 
+ Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/", + Valid: false, + }, + + { + // missing value for DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", + Valid: false, + }, + + { + // missing SqlDatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/", + Valid: false, + }, + + { + // missing value for SqlDatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/", + Valid: false, + }, + + { + // missing ContainerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1/", + Valid: false, + }, + + { + // missing value for ContainerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1/containers/", + Valid: false, + }, + + { + // missing StoredProcedureName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1/containers/container1/", + Valid: false, + }, + + { + // missing value for StoredProcedureName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1/containers/container1/storedProcedures/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/sqlDatabases/db1/containers/container1/storedProcedures/sproc1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DOCUMENTDB/DATABASEACCOUNTS/ACC1/SQLDATABASES/DB1/CONTAINERS/CONTAINER1/STOREDPROCEDURES/SPROC1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := SqlStoredProcedureID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/cosmos/validate/table_id.go b/azurerm/internal/services/cosmos/validate/table_id.go new file mode 100644 index 000000000000..3f818da2c833 --- /dev/null +++ b/azurerm/internal/services/cosmos/validate/table_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/parse" +) + +func TableID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, 
fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.TableID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/cosmos/validate/table_id_test.go b/azurerm/internal/services/cosmos/validate/table_id_test.go new file mode 100644 index 000000000000..d969b89f2529 --- /dev/null +++ b/azurerm/internal/services/cosmos/validate/table_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestTableID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/", + Valid: false, + }, + + { + // missing value for DatabaseAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/tables/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DocumentDB/databaseAccounts/acc1/tables/table1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DOCUMENTDB/DATABASEACCOUNTS/ACC1/TABLES/TABLE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := TableID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/costmanagement/cost_management_export_resource_group_resource.go b/azurerm/internal/services/costmanagement/cost_management_export_resource_group_resource.go index 4c3a170e8aac..e3568df48ac1 100644 --- a/azurerm/internal/services/costmanagement/cost_management_export_resource_group_resource.go +++ b/azurerm/internal/services/costmanagement/cost_management_export_resource_group_resource.go @@ -19,12 +19,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmCostManagementExportResourceGroup() *schema.Resource { +func resourceCostManagementExportResourceGroup() *schema.Resource { return &schema.Resource{ - Create: resourceArmCostManagementExportResourceGroupCreateUpdate, - Read: resourceArmCostManagementExportResourceGroupRead, - Update: resourceArmCostManagementExportResourceGroupCreateUpdate, - Delete: resourceArmCostManagementExportResourceGroupDelete, + Create: 
resourceCostManagementExportResourceGroupCreateUpdate, + Read: resourceCostManagementExportResourceGroupRead, + Update: resourceCostManagementExportResourceGroupCreateUpdate, + Delete: resourceCostManagementExportResourceGroupDelete, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { _, err := parse.CostManagementExportResourceGroupID(id) return err @@ -140,7 +140,7 @@ func resourceArmCostManagementExportResourceGroup() *schema.Resource { } } -func resourceArmCostManagementExportResourceGroupCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceCostManagementExportResourceGroupCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).CostManagement.ExportClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -208,10 +208,10 @@ func resourceArmCostManagementExportResourceGroupCreateUpdate(d *schema.Resource d.SetId(id) - return resourceArmCostManagementExportResourceGroupRead(d, meta) + return resourceCostManagementExportResourceGroupRead(d, meta) } -func resourceArmCostManagementExportResourceGroupRead(d *schema.ResourceData, meta interface{}) error { +func resourceCostManagementExportResourceGroupRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).CostManagement.ExportClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -257,7 +257,7 @@ func resourceArmCostManagementExportResourceGroupRead(d *schema.ResourceData, me return nil } -func resourceArmCostManagementExportResourceGroupDelete(d *schema.ResourceData, meta interface{}) error { +func resourceCostManagementExportResourceGroupDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).CostManagement.ExportClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/costmanagement/cost_management_export_resource_group_resource_test.go b/azurerm/internal/services/costmanagement/cost_management_export_resource_group_resource_test.go new file mode 100644 index 000000000000..0d865e668f3a --- /dev/null +++ b/azurerm/internal/services/costmanagement/cost_management_export_resource_group_resource_test.go @@ -0,0 +1,165 @@ +package costmanagement_test + +import ( + "context" + "fmt" + "testing" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/costmanagement/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type CostManagementExportResourceGroupResource struct { +} + +func TestAccCostManagementExportResourceGroup_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_cost_management_export_resource_group", "test") + r := CostManagementExportResourceGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccCostManagementExportResourceGroup_update(t *testing.T) { + data := 
acceptance.BuildTestData(t, "azurerm_cost_management_export_resource_group", "test") + r := CostManagementExportResourceGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t CostManagementExportResourceGroupResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.CostManagementExportResourceGroupID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.CostManagement.ExportClient.Get(ctx, id.ResourceId, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Cost Management Export ResourceGroup %q (resource group: %q) does not exist", id.Name, id.ResourceId) + } + + return utils.Bool(resp.ExportProperties != nil), nil +} + +func (CostManagementExportResourceGroupResource) basic(data acceptance.TestData) string { + start := time.Now().AddDate(0, 1, 0).Format("2006-02") + end := time.Now().AddDate(0, 2, 0).Format("2006-02") + + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-cm-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "unlikely23exst2acct%s" + resource_group_name = azurerm_resource_group.test.name + + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_cost_management_export_resource_group" "test" { + name = "accrg%d" + resource_group_id = azurerm_resource_group.test.id + recurrence_type = "Monthly" + recurrence_period_start = "%s-18T00:00:00Z" + recurrence_period_end = "%s-18T00:00:00Z" + + delivery_info { + storage_account_id = azurerm_storage_account.test.id + container_name = "acctestcontainer" + root_folder_path = "/root" + } + + query { + type = "Usage" + time_frame = "TheLastMonth" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, start, end) +} + +func (CostManagementExportResourceGroupResource) update(data acceptance.TestData) string { + start := time.Now().AddDate(0, 3, 0).Format("2006-02") + end := time.Now().AddDate(0, 4, 0).Format("2006-02") + + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-cm-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "unlikely23exst2acct%s" + resource_group_name = azurerm_resource_group.test.name + + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_cost_management_export_resource_group" "test" { + name = "accrg%d" + resource_group_id = azurerm_resource_group.test.id + recurrence_type = "Monthly" + recurrence_period_start = "%s-18T00:00:00Z" + recurrence_period_end = "%s-18T00:00:00Z" + + delivery_info { + storage_account_id = azurerm_storage_account.test.id + container_name = "acctestcontainer" + root_folder_path = "/root/updated" + } + + query { + type = "Usage" + time_frame = "WeekToDate" + } +} +`, data.RandomInteger, data.Locations.Primary, 
data.RandomString, data.RandomInteger, start, end) +} diff --git a/azurerm/internal/services/costmanagement/registration.go b/azurerm/internal/services/costmanagement/registration.go index 7c394489234e..1dbf5cc08ec7 100644 --- a/azurerm/internal/services/costmanagement/registration.go +++ b/azurerm/internal/services/costmanagement/registration.go @@ -26,6 +26,6 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource { // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_cost_management_export_resource_group": resourceArmCostManagementExportResourceGroup(), + "azurerm_cost_management_export_resource_group": resourceCostManagementExportResourceGroup(), } } diff --git a/azurerm/internal/services/costmanagement/tests/cost_management_export_resource_group_resource_test.go b/azurerm/internal/services/costmanagement/tests/cost_management_export_resource_group_resource_test.go deleted file mode 100644 index dc90fbda6ff2..000000000000 --- a/azurerm/internal/services/costmanagement/tests/cost_management_export_resource_group_resource_test.go +++ /dev/null @@ -1,204 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/costmanagement/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMCostManagementExportResourceGroup_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cost_management_export_resource_group", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCostManagementExportResourceGroupDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCostManagementExportResourceGroup_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCostManagementExportResourceGroupExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMCostManagementExportResourceGroup_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_cost_management_export_resource_group", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCostManagementExportResourceGroupDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCostManagementExportResourceGroup_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCostManagementExportResourceGroupExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCostManagementExportResourceGroup_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCostManagementExportResourceGroupExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCostManagementExportResourceGroup_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCostManagementExportResourceGroupExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - 
-func testCheckAzureRMCostManagementExportResourceGroupExists(name string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).CostManagement.ExportClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[name] - if !ok { - return fmt.Errorf("Not found: %s", name) - } - - id, err := parse.CostManagementExportResourceGroupID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceId, id.Name) - if err != nil { - return fmt.Errorf("Bad: Get on costManagementExportResourceGroupClient: %+v", err) - } - - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Cost Management Export ResourceGroup %q (resource group: %q) does not exist", id.Name, id.ResourceId) - } - - return nil - } -} - -func testCheckAzureRMCostManagementExportResourceGroupDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).CostManagement.ExportClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_cost_management_export_resource_group" { - continue - } - - id, err := parse.CostManagementExportResourceGroupID(rs.Primary.ID) - if err != nil { - return err - } - resp, err := client.Get(ctx, id.ResourceId, id.Name) - - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Cost Management Export ResourceGroup still exists: %q", id.Name) - } - } - - return nil -} - -func testAccAzureRMCostManagementExportResourceGroup_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-cm-%d" - location = "%s" -} - -resource "azurerm_storage_account" "test" { - name = "unlikely23exst2acct%s" - resource_group_name = azurerm_resource_group.test.name - - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_cost_management_export_resource_group" "test" { - name = "accrg%d" - resource_group_id = azurerm_resource_group.test.id - recurrence_type = "Monthly" - recurrence_period_start = "2020-06-18T00:00:00Z" - recurrence_period_end = "2020-07-18T00:00:00Z" - - delivery_info { - storage_account_id = azurerm_storage_account.test.id - container_name = "acctestcontainer" - root_folder_path = "/root" - } - - query { - type = "Usage" - time_frame = "TheLastMonth" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) -} - -func testAccAzureRMCostManagementExportResourceGroup_update(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-cm-%d" - location = "%s" -} - -resource "azurerm_storage_account" "test" { - name = "unlikely23exst2acct%s" - resource_group_name = azurerm_resource_group.test.name - - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_cost_management_export_resource_group" "test" { - name = "accrg%d" - resource_group_id = azurerm_resource_group.test.id - recurrence_type = "Monthly" - recurrence_period_start = "2020-08-18T00:00:00Z" - recurrence_period_end = "2020-09-18T00:00:00Z" - 
- delivery_info { - storage_account_id = azurerm_storage_account.test.id - container_name = "acctestcontainer" - root_folder_path = "/root/updated" - } - - query { - type = "Usage" - time_frame = "WeekToDate" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) -} diff --git a/azurerm/internal/services/customproviders/custom_provider_resource.go b/azurerm/internal/services/customproviders/custom_provider_resource.go index 53979c213176..7b8b3f4ef423 100644 --- a/azurerm/internal/services/customproviders/custom_provider_resource.go +++ b/azurerm/internal/services/customproviders/custom_provider_resource.go @@ -18,14 +18,14 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmCustomProvider() *schema.Resource { +func resourceCustomProvider() *schema.Resource { return &schema.Resource{ - Create: resourceArmCustomProviderCreateUpdate, - Read: resourceArmCustomProviderRead, - Update: resourceArmCustomProviderCreateUpdate, - Delete: resourceArmCustomProviderDelete, + Create: resourceCustomProviderCreateUpdate, + Read: resourceCustomProviderRead, + Update: resourceCustomProviderCreateUpdate, + Delete: resourceCustomProviderDelete, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.CustomProviderID(id) + _, err := parse.ResourceProviderID(id) return err }), @@ -116,7 +116,7 @@ func resourceArmCustomProvider() *schema.Resource { } } -func resourceArmCustomProviderCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceCustomProviderCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).CustomProviders.CustomProviderClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -167,15 +167,15 @@ func resourceArmCustomProviderCreateUpdate(d *schema.ResourceData, meta interfac } d.SetId(*resp.ID) - return resourceArmCustomProviderRead(d, meta) + return resourceCustomProviderRead(d, meta) } -func resourceArmCustomProviderRead(d *schema.ResourceData, meta interface{}) error { +func resourceCustomProviderRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).CustomProviders.CustomProviderClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.CustomProviderID(d.Id()) + id, err := parse.ResourceProviderID(d.Id()) if err != nil { return err } @@ -210,12 +210,13 @@ func resourceArmCustomProviderRead(d *schema.ResourceData, meta interface{}) err return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmCustomProviderDelete(d *schema.ResourceData, meta interface{}) error { + +func resourceCustomProviderDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).CustomProviders.CustomProviderClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.CustomProviderID(d.Id()) + id, err := parse.ResourceProviderID(d.Id()) if err != nil { return err } diff --git a/azurerm/internal/services/customproviders/custom_provider_resource_test.go b/azurerm/internal/services/customproviders/custom_provider_resource_test.go new file mode 100644 index 000000000000..8c17d650b9f9 --- /dev/null +++ b/azurerm/internal/services/customproviders/custom_provider_resource_test.go @@ -0,0 +1,203 @@ +package customproviders_test + +import ( + "context" + "fmt" + "testing" + + 
"github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/customproviders/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type CustomProviderResource struct{} + +func TestAccCustomProvider_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_custom_provider", "test") + r := CustomProviderResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccCustomProvider_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_custom_provider", "test") + r := CustomProviderResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccCustomProvider_action(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_custom_provider", "test") + r := CustomProviderResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.action(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.actionUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.action(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (r CustomProviderResource) Exists(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ResourceProviderID(state.ID) + if err != nil { + return nil, err + } + resp, err := client.CustomProviders.CustomProviderClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return utils.Bool(false), nil + } + return nil, fmt.Errorf("retrieving Custom Provider %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + return utils.Bool(true), nil +} + +func (r CustomProviderResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} +resource "azurerm_resource_group" "test" { + name = "acctestRG-cp-%d" + location = "%s" +} +resource "azurerm_custom_provider" "test" { + name = "accTEst_saa%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + resource_type { + name = "dEf1" + endpoint = "https://testendpoint.com/" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r CustomProviderResource) complete(data acceptance.TestData) string { + return 
fmt.Sprintf(` +provider "azurerm" { + features {} +} +resource "azurerm_resource_group" "test" { + name = "acctestRG-cp-%d" + location = "%s" +} +resource "azurerm_custom_provider" "test" { + name = "accTEst_saa%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + resource_type { + name = "dEf1" + endpoint = "https://testendpoint.com/" + } + + action { + name = "dEf2" + endpoint = "https://example.com/" + } + + validation { + specification = "https://raw.githubusercontent.com/Azure/azure-custom-providers/master/CustomRPWithSwagger/Artifacts/Swagger/pingaction.json" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r CustomProviderResource) action(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} +resource "azurerm_resource_group" "test" { + name = "acctestRG-cp-%d" + location = "%s" +} +resource "azurerm_custom_provider" "test" { + name = "accTEst_saa%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + action { + name = "dEf1" + endpoint = "https://testendpoint.com/" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r CustomProviderResource) actionUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} +resource "azurerm_resource_group" "test" { + name = "acctestRG-cp-%d" + location = "%s" +} +resource "azurerm_custom_provider" "test" { + name = "accTEst_saa%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + action { + name = "dEf2" + endpoint = "https://example.com/" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/customproviders/parse/custom_provider.go b/azurerm/internal/services/customproviders/parse/custom_provider.go deleted file mode 100644 index 6cef8b90c2fc..000000000000 --- a/azurerm/internal/services/customproviders/parse/custom_provider.go +++ /dev/null @@ -1,33 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type CustomProviderId struct { - ResourceGroup string - Name string -} - -func CustomProviderID(input string) (*CustomProviderId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Custom Resource Provider ID %q: %+v", input, err) - } - - service := CustomProviderId{ - ResourceGroup: id.ResourceGroup, - } - - if service.Name, err = id.PopSegment("resourceproviders"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &service, nil -} diff --git a/azurerm/internal/services/customproviders/parse/custom_provider_test.go b/azurerm/internal/services/customproviders/parse/custom_provider_test.go deleted file mode 100644 index edaae769afb3..000000000000 --- a/azurerm/internal/services/customproviders/parse/custom_provider_test.go +++ /dev/null @@ -1,73 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestCustomResourceProviderId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *CustomProviderId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - 
Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Search Services Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.CustomProviders/resourceproviders/", - Expected: nil, - }, - { - Name: "Search Service ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.CustomProviders/resourceproviders/Provider1", - Expected: &CustomProviderId{ - Name: "Provider1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.CustomProviders/ResourceProviders/Service1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := CustomProviderID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/customproviders/parse/resource_provider.go b/azurerm/internal/services/customproviders/parse/resource_provider.go new file mode 100644 index 000000000000..d25280891b85 --- /dev/null +++ b/azurerm/internal/services/customproviders/parse/resource_provider.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ResourceProviderId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewResourceProviderID(subscriptionId, resourceGroup, name string) ResourceProviderId { + return ResourceProviderId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id ResourceProviderId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Resource Provider", segmentsStr) +} + +func (id ResourceProviderId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.CustomProviders/resourceproviders/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// ResourceProviderID parses a ResourceProvider ID into an ResourceProviderId struct +func ResourceProviderID(input string) (*ResourceProviderId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ResourceProviderId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("resourceproviders"); err != nil { + return 
nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/customproviders/parse/resource_provider_test.go b/azurerm/internal/services/customproviders/parse/resource_provider_test.go new file mode 100644 index 000000000000..7edd439838a8 --- /dev/null +++ b/azurerm/internal/services/customproviders/parse/resource_provider_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ResourceProviderId{} + +func TestResourceProviderIDFormatter(t *testing.T) { + actual := NewResourceProviderID("12345678-1234-9876-4563-123456789012", "resGroup1", "provider1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.CustomProviders/resourceproviders/provider1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestResourceProviderID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ResourceProviderId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.CustomProviders/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.CustomProviders/resourceproviders/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.CustomProviders/resourceproviders/provider1", + Expected: &ResourceProviderId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "provider1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.CUSTOMPROVIDERS/RESOURCEPROVIDERS/PROVIDER1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ResourceProviderID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/customproviders/registration.go b/azurerm/internal/services/customproviders/registration.go index 7bc5eb72f8a4..353f582ed4d2 
100644 --- a/azurerm/internal/services/customproviders/registration.go +++ b/azurerm/internal/services/customproviders/registration.go @@ -26,6 +26,6 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource { // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_custom_provider": resourceArmCustomProvider(), + "azurerm_custom_provider": resourceCustomProvider(), } } diff --git a/azurerm/internal/services/customproviders/resourceids.go b/azurerm/internal/services/customproviders/resourceids.go new file mode 100644 index 000000000000..360eb690ac59 --- /dev/null +++ b/azurerm/internal/services/customproviders/resourceids.go @@ -0,0 +1,3 @@ +package customproviders + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=ResourceProvider -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.CustomProviders/resourceproviders/provider1 diff --git a/azurerm/internal/services/customproviders/tests/custom_provider_resource_test.go b/azurerm/internal/services/customproviders/tests/custom_provider_resource_test.go deleted file mode 100644 index 5e3699df5f91..000000000000 --- a/azurerm/internal/services/customproviders/tests/custom_provider_resource_test.go +++ /dev/null @@ -1,252 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/customproviders/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMCustomProvider_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_custom_provider", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCustomProviderDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCustomProvider_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCustomProviderExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMCustomProvider_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_custom_provider", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCustomProviderDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCustomProvider_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCustomProviderExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCustomProvider_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCustomProviderExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCustomProvider_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCustomProviderExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMCustomProvider_action(t *testing.T) { - data := 
acceptance.BuildTestData(t, "azurerm_custom_provider", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMCustomProviderDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMCustomProvider_action(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCustomProviderExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCustomProvider_actionUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCustomProviderExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMCustomProvider_action(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMCustomProviderExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMCustomProviderExists(name string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).CustomProviders.CustomProviderClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[name] - if !ok { - return fmt.Errorf("Not found: %s", name) - } - - id, err := parse.CustomProviderID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.Name) - if err != nil { - return fmt.Errorf("Bad: Get on CustomProviderClient: %+v", err) - } - - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Custom Provider %q (resource group: %q) does not exist", id.Name, id.ResourceGroup) - } - - return nil - } -} - -func testCheckAzureRMCustomProviderDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).CustomProviders.CustomProviderClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_custom_provider" { - continue - } - - id, err := parse.CustomProviderID(rs.Primary.ID) - if err != nil { - return err - } - resp, err := client.Get(ctx, id.ResourceGroup, id.Name) - - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Custom Provider still exists: %q", id.Name) - } - } - - return nil -} - -func testAccAzureRMCustomProvider_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} -resource "azurerm_resource_group" "test" { - name = "acctestRG-cp-%d" - location = "%s" -} -resource "azurerm_custom_provider" "test" { - name = "accTEst_saa%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - resource_type { - name = "dEf1" - endpoint = "https://testendpoint.com/" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMCustomProvider_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} -resource "azurerm_resource_group" "test" { - name = "acctestRG-cp-%d" - location = "%s" -} -resource "azurerm_custom_provider" "test" { - name = "accTEst_saa%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - resource_type { - name = "dEf1" - endpoint = "https://testendpoint.com/" - } - - action { - name = "dEf2" - endpoint = "https://example.com/" - } 
- - validation { - specification = "https://raw.githubusercontent.com/Azure/azure-custom-providers/master/CustomRPWithSwagger/Artifacts/Swagger/pingaction.json" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMCustomProvider_action(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} -resource "azurerm_resource_group" "test" { - name = "acctestRG-cp-%d" - location = "%s" -} -resource "azurerm_custom_provider" "test" { - name = "accTEst_saa%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - action { - name = "dEf1" - endpoint = "https://testendpoint.com/" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMCustomProvider_actionUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} -resource "azurerm_resource_group" "test" { - name = "acctestRG-cp-%d" - location = "%s" -} -resource "azurerm_custom_provider" "test" { - name = "accTEst_saa%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - action { - name = "dEf2" - endpoint = "https://example.com/" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/customproviders/validate/custom_provider.go b/azurerm/internal/services/customproviders/validate/custom_provider_name.go similarity index 100% rename from azurerm/internal/services/customproviders/validate/custom_provider.go rename to azurerm/internal/services/customproviders/validate/custom_provider_name.go diff --git a/azurerm/internal/services/customproviders/validate/resource_provider_id.go b/azurerm/internal/services/customproviders/validate/resource_provider_id.go new file mode 100644 index 000000000000..5cbcb46e2713 --- /dev/null +++ b/azurerm/internal/services/customproviders/validate/resource_provider_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/customproviders/parse" +) + +func ResourceProviderID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ResourceProviderID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/customproviders/validate/resource_provider_id_test.go b/azurerm/internal/services/customproviders/validate/resource_provider_id_test.go new file mode 100644 index 000000000000..aa874d18081e --- /dev/null +++ b/azurerm/internal/services/customproviders/validate/resource_provider_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestResourceProviderID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for 
ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.CustomProviders/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.CustomProviders/resourceproviders/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.CustomProviders/resourceproviders/provider1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.CUSTOMPROVIDERS/RESOURCEPROVIDERS/PROVIDER1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ResourceProviderID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/databasemigration/database_migration_project_data_source.go b/azurerm/internal/services/databasemigration/database_migration_project_data_source.go index ad7aed0b9253..efa16b7abe4c 100644 --- a/azurerm/internal/services/databasemigration/database_migration_project_data_source.go +++ b/azurerm/internal/services/databasemigration/database_migration_project_data_source.go @@ -12,9 +12,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmDatabaseMigrationProject() *schema.Resource { +func dataSourceDatabaseMigrationProject() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmDatabaseMigrationProjectRead, + Read: dataSourceDatabaseMigrationProjectRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -52,7 +52,7 @@ func dataSourceArmDatabaseMigrationProject() *schema.Resource { } } -func dataSourceArmDatabaseMigrationProjectRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceDatabaseMigrationProjectRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DatabaseMigration.ProjectsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/databasemigration/database_migration_project_data_source_test.go b/azurerm/internal/services/databasemigration/database_migration_project_data_source_test.go new file mode 100644 index 000000000000..cb2d8f1c1aae --- /dev/null +++ b/azurerm/internal/services/databasemigration/database_migration_project_data_source_test.go @@ -0,0 +1,40 @@ +package databasemigration_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type DatabaseMigrationProjectDataSource struct { +} + +func TestAccDatabaseMigrationProjectDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_database_migration_project", "test") + r := DatabaseMigrationProjectDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("source_platform").HasValue("SQL"), + 
check.That(data.ResourceName).Key("target_platform").HasValue("SQLDB"), + ), + }, + }) +} + +func (DatabaseMigrationProjectDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_database_migration_project" "test" { + name = azurerm_database_migration_project.test.name + service_name = azurerm_database_migration_project.test.service_name + resource_group_name = azurerm_database_migration_project.test.resource_group_name +} +`, DatabaseMigrationProjectResource{}.basic(data)) +} diff --git a/azurerm/internal/services/databasemigration/database_migration_project_resource.go b/azurerm/internal/services/databasemigration/database_migration_project_resource.go index 4fc8020b5874..7db03fa14093 100644 --- a/azurerm/internal/services/databasemigration/database_migration_project_resource.go +++ b/azurerm/internal/services/databasemigration/database_migration_project_resource.go @@ -18,15 +18,15 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmDatabaseMigrationProject() *schema.Resource { +func resourceDatabaseMigrationProject() *schema.Resource { return &schema.Resource{ - Create: resourceArmDatabaseMigrationProjectCreateUpdate, - Read: resourceArmDatabaseMigrationProjectRead, - Update: resourceArmDatabaseMigrationProjectCreateUpdate, - Delete: resourceArmDatabaseMigrationProjectDelete, + Create: resourceDatabaseMigrationProjectCreateUpdate, + Read: resourceDatabaseMigrationProjectRead, + Update: resourceDatabaseMigrationProjectCreateUpdate, + Delete: resourceDatabaseMigrationProjectDelete, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.DatabaseMigrationProjectID(id) + _, err := parse.ProjectID(id) return err }), @@ -81,7 +81,7 @@ func resourceArmDatabaseMigrationProject() *schema.Resource { } } -func resourceArmDatabaseMigrationProjectCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceDatabaseMigrationProjectCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DatabaseMigration.ProjectsClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -129,31 +129,31 @@ func resourceArmDatabaseMigrationProjectCreateUpdate(d *schema.ResourceData, met } d.SetId(*resp.ID) - return resourceArmDatabaseMigrationProjectRead(d, meta) + return resourceDatabaseMigrationProjectRead(d, meta) } -func resourceArmDatabaseMigrationProjectRead(d *schema.ResourceData, meta interface{}) error { +func resourceDatabaseMigrationProjectRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DatabaseMigration.ProjectsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DatabaseMigrationProjectID(d.Id()) + id, err := parse.ProjectID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.Service, id.Name) + resp, err := client.Get(ctx, id.ResourceGroup, id.ServiceName, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { log.Printf("[INFO] Database Migration Project %q does not exist - removing from state", d.Id()) d.SetId("") return nil } - return fmt.Errorf("Error reading Database Migration Project (Project Name %q / Service Name %q / Group Name %q): %+v", id.Name, id.Service, id.ResourceGroup, err) + return fmt.Errorf("Error reading Database Migration Project (Project Name %q / Service Name %q / Group Name %q): %+v", id.Name, id.ServiceName, 
id.ResourceGroup, err) } d.Set("name", resp.Name) - d.Set("service_name", id.Service) + d.Set("service_name", id.ServiceName) d.Set("resource_group_name", id.ResourceGroup) location := "" @@ -170,19 +170,19 @@ func resourceArmDatabaseMigrationProjectRead(d *schema.ResourceData, meta interf return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmDatabaseMigrationProjectDelete(d *schema.ResourceData, meta interface{}) error { +func resourceDatabaseMigrationProjectDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DatabaseMigration.ProjectsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DatabaseMigrationProjectID(d.Id()) + id, err := parse.ProjectID(d.Id()) if err != nil { return err } deleteRunningTasks := false - if _, err := client.Delete(ctx, id.ResourceGroup, id.Service, id.Name, &deleteRunningTasks); err != nil { - return fmt.Errorf("Error deleting Database Migration Project (Project Name %q / Service Name %q / Group Name %q): %+v", id.Name, id.Service, id.ResourceGroup, err) + if _, err := client.Delete(ctx, id.ResourceGroup, id.ServiceName, id.Name, &deleteRunningTasks); err != nil { + return fmt.Errorf("Error deleting Database Migration Project (Project Name %q / Service Name %q / Group Name %q): %+v", id.Name, id.ServiceName, id.ResourceGroup, err) } return nil diff --git a/azurerm/internal/services/databasemigration/database_migration_project_resource_test.go b/azurerm/internal/services/databasemigration/database_migration_project_resource_test.go new file mode 100644 index 000000000000..66e3e7bcb8f4 --- /dev/null +++ b/azurerm/internal/services/databasemigration/database_migration_project_resource_test.go @@ -0,0 +1,159 @@ +package databasemigration_test + +import ( + "context" + "fmt" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/databasemigration/parse" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DatabaseMigrationProjectResource struct { +} + +func TestAccDatabaseMigrationProject_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_database_migration_project", "test") + r := DatabaseMigrationProjectResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("source_platform").HasValue("SQL"), + check.That(data.ResourceName).Key("target_platform").HasValue("SQLDB"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDatabaseMigrationProject_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_database_migration_project", "test") + r := DatabaseMigrationProjectResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("source_platform").HasValue("SQL"), + check.That(data.ResourceName).Key("target_platform").HasValue("SQLDB"), + 
check.That(data.ResourceName).Key("tags.name").HasValue("Test"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDatabaseMigrationProject_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_database_migration_project", "test") + r := DatabaseMigrationProjectResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccDatabaseMigrationProject_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_database_migration_project", "test") + r := DatabaseMigrationProjectResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.name").HasValue("Test"), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t DatabaseMigrationProjectResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ProjectID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.DatabaseMigration.ProjectsClient.Get(ctx, id.ResourceGroup, id.ServiceName, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Database Migration Project (Project Name %q / Service Name %q / Group Name %q): %+v", id.Name, id.ServiceName, id.ResourceGroup, err) + } + + return utils.Bool(resp.ProjectProperties != nil), nil +} + +func (DatabaseMigrationProjectResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_database_migration_project" "test" { + name = "acctestDbmsProject-%d" + service_name = azurerm_database_migration_service.test.name + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + source_platform = "SQL" + target_platform = "SQLDB" +} +`, DatabaseMigrationServiceResource{}.basic(data), data.RandomInteger) +} + +func (DatabaseMigrationProjectResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_database_migration_project" "test" { + name = "acctestDbmsProject-%d" + service_name = azurerm_database_migration_service.test.name + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + source_platform = "SQL" + target_platform = "SQLDB" + tags = { + name = "Test" + } +} +`, DatabaseMigrationServiceResource{}.basic(data), data.RandomInteger) +} + +func (DatabaseMigrationProjectResource) requiresImport(data acceptance.TestData) string { + template := DatabaseMigrationProjectResource{}.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_database_migration_project" "import" { + name = azurerm_database_migration_project.test.name + service_name = azurerm_database_migration_project.test.service_name + resource_group_name = azurerm_database_migration_project.test.resource_group_name + location = azurerm_database_migration_project.test.location + source_platform = azurerm_database_migration_project.test.source_platform + target_platform = azurerm_database_migration_project.test.target_platform +} +`, template) +} diff --git 
a/azurerm/internal/services/databasemigration/database_migration_service_data_source.go b/azurerm/internal/services/databasemigration/database_migration_service_data_source.go index 0ea63e668a5b..78bdcbde59a2 100644 --- a/azurerm/internal/services/databasemigration/database_migration_service_data_source.go +++ b/azurerm/internal/services/databasemigration/database_migration_service_data_source.go @@ -13,9 +13,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmDatabaseMigrationService() *schema.Resource { +func dataSourceDatabaseMigrationService() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmDatabaseMigrationServiceRead, + Read: dataSourceDatabaseMigrationServiceRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -47,7 +47,7 @@ func dataSourceArmDatabaseMigrationService() *schema.Resource { } } -func dataSourceArmDatabaseMigrationServiceRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceDatabaseMigrationServiceRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DatabaseMigration.ServicesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/databasemigration/database_migration_service_data_source_test.go b/azurerm/internal/services/databasemigration/database_migration_service_data_source_test.go new file mode 100644 index 000000000000..d611c88d5694 --- /dev/null +++ b/azurerm/internal/services/databasemigration/database_migration_service_data_source_test.go @@ -0,0 +1,39 @@ +package databasemigration_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type DatabaseMigrationServiceDataSource struct { +} + +func TestAccDatabaseMigrationServiceDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_database_migration_service", "test") + r := DatabaseMigrationServiceDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("resource_group_name").Exists(), + check.That(data.ResourceName).Key("sku_name").HasValue("Standard_1vCores"), + ), + }, + }) +} + +func (DatabaseMigrationServiceDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_database_migration_service" "test" { + resource_group_name = azurerm_database_migration_service.test.resource_group_name + name = azurerm_database_migration_service.test.name +} +`, DatabaseMigrationServiceResource{}.basic(data)) +} diff --git a/azurerm/internal/services/databasemigration/database_migration_service_resource.go b/azurerm/internal/services/databasemigration/database_migration_service_resource.go index 87bac87c0f79..5f074821572b 100644 --- a/azurerm/internal/services/databasemigration/database_migration_service_resource.go +++ b/azurerm/internal/services/databasemigration/database_migration_service_resource.go @@ -20,15 +20,15 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmDatabaseMigrationService() *schema.Resource { +func resourceDatabaseMigrationService() *schema.Resource { return &schema.Resource{ - Create: 
resourceArmDatabaseMigrationServiceCreate, - Read: resourceArmDatabaseMigrationServiceRead, - Update: resourceArmDatabaseMigrationServiceUpdate, - Delete: resourceArmDatabaseMigrationServiceDelete, + Create: resourceDatabaseMigrationServiceCreate, + Read: resourceDatabaseMigrationServiceRead, + Update: resourceDatabaseMigrationServiceUpdate, + Delete: resourceDatabaseMigrationServiceDelete, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.DatabaseMigrationServiceID(id) + _, err := parse.ServiceID(id) return err }), @@ -77,7 +77,7 @@ func resourceArmDatabaseMigrationService() *schema.Resource { } } -func resourceArmDatabaseMigrationServiceCreate(d *schema.ResourceData, meta interface{}) error { +func resourceDatabaseMigrationServiceCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DatabaseMigration.ServicesClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -132,15 +132,15 @@ func resourceArmDatabaseMigrationServiceCreate(d *schema.ResourceData, meta inte } d.SetId(*resp.ID) - return resourceArmDatabaseMigrationServiceRead(d, meta) + return resourceDatabaseMigrationServiceRead(d, meta) } -func resourceArmDatabaseMigrationServiceRead(d *schema.ResourceData, meta interface{}) error { +func resourceDatabaseMigrationServiceRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DatabaseMigration.ServicesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DatabaseMigrationServiceID(d.Id()) + id, err := parse.ServiceID(d.Id()) if err != nil { return err } @@ -175,12 +175,12 @@ func resourceArmDatabaseMigrationServiceRead(d *schema.ResourceData, meta interf return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmDatabaseMigrationServiceUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceDatabaseMigrationServiceUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DatabaseMigration.ServicesClient ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DatabaseMigrationServiceID(d.Id()) + id, err := parse.ServiceID(d.Id()) if err != nil { return err } @@ -197,15 +197,15 @@ func resourceArmDatabaseMigrationServiceUpdate(d *schema.ResourceData, meta inte return fmt.Errorf("Error waiting for update of Database Migration Service (Service Name %q / Group Name %q): %+v", id.Name, id.ResourceGroup, err) } - return resourceArmDatabaseMigrationServiceRead(d, meta) + return resourceDatabaseMigrationServiceRead(d, meta) } -func resourceArmDatabaseMigrationServiceDelete(d *schema.ResourceData, meta interface{}) error { +func resourceDatabaseMigrationServiceDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DatabaseMigration.ServicesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DatabaseMigrationServiceID(d.Id()) + id, err := parse.ServiceID(d.Id()) if err != nil { return err } diff --git a/azurerm/internal/services/databasemigration/database_migration_service_resource_test.go b/azurerm/internal/services/databasemigration/database_migration_service_resource_test.go new file mode 100644 index 000000000000..f1f695faf09e --- /dev/null +++ b/azurerm/internal/services/databasemigration/database_migration_service_resource_test.go @@ -0,0 +1,188 @@ +package databasemigration_test + +import 
( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/databasemigration/parse" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DatabaseMigrationServiceResource struct { +} + +func TestAccDatabaseMigrationService_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_database_migration_service", "test") + r := DatabaseMigrationServiceResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("subnet_id").Exists(), + check.That(data.ResourceName).Key("sku_name").HasValue("Standard_1vCores"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDatabaseMigrationService_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_database_migration_service", "test") + r := DatabaseMigrationServiceResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("subnet_id").Exists(), + check.That(data.ResourceName).Key("sku_name").HasValue("Standard_1vCores"), + check.That(data.ResourceName).Key("tags.name").HasValue("test"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDatabaseMigrationService_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_database_migration_service", "test") + r := DatabaseMigrationServiceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccDatabaseMigrationService_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_database_migration_service", "test") + r := DatabaseMigrationServiceResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.name").HasValue("test"), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t DatabaseMigrationServiceResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ServiceID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.DatabaseMigration.ServicesClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Database Migration Service (Service Name %q / Group Name %q): %+v", id.Name, id.ResourceGroup, err) + } + + return utils.Bool(resp.ServiceProperties != nil), nil +} + 
+func (DatabaseMigrationServiceResource) base(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-dbms-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestVnet-dbms-%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctestSubnet-dbms-%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.1.0/24" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (DatabaseMigrationServiceResource) basic(data acceptance.TestData) string { + template := DatabaseMigrationServiceResource{}.base(data) + + return fmt.Sprintf(` +%s + +resource "azurerm_database_migration_service" "test" { + name = "acctestDbms-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + subnet_id = azurerm_subnet.test.id + sku_name = "Standard_1vCores" +} +`, template, data.RandomInteger) +} + +func (DatabaseMigrationServiceResource) complete(data acceptance.TestData) string { + template := DatabaseMigrationServiceResource{}.base(data) + + return fmt.Sprintf(` +%s + +resource "azurerm_database_migration_service" "test" { + name = "acctestDbms-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + subnet_id = azurerm_subnet.test.id + sku_name = "Standard_1vCores" + tags = { + name = "test" + } +} +`, template, data.RandomInteger) +} + +func (DatabaseMigrationServiceResource) requiresImport(data acceptance.TestData) string { + template := DatabaseMigrationServiceResource{}.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_database_migration_service" "import" { + name = azurerm_database_migration_service.test.name + location = azurerm_database_migration_service.test.location + resource_group_name = azurerm_database_migration_service.test.resource_group_name + subnet_id = azurerm_database_migration_service.test.subnet_id + sku_name = azurerm_database_migration_service.test.sku_name +} +`, template) +} diff --git a/azurerm/internal/services/databasemigration/parse/datamigration_project.go b/azurerm/internal/services/databasemigration/parse/datamigration_project.go deleted file mode 100644 index d047ab962f44..000000000000 --- a/azurerm/internal/services/databasemigration/parse/datamigration_project.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type DatabaseMigrationProjectId struct { - ResourceGroup string - Service string - Name string -} - -func DatabaseMigrationProjectID(input string) (*DatabaseMigrationProjectId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Database Migration Project ID %q: %+v", input, err) - } - - project := DatabaseMigrationProjectId{ - ResourceGroup: id.ResourceGroup, - } - - if project.Service, err = id.PopSegment("services"); err != nil { - return nil, err - } - - if project.Name, err = id.PopSegment("projects"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &project, nil -} diff --git 
a/azurerm/internal/services/databasemigration/parse/datamigration_project_test.go b/azurerm/internal/services/databasemigration/parse/datamigration_project_test.go deleted file mode 100644 index 53b0505fd01c..000000000000 --- a/azurerm/internal/services/databasemigration/parse/datamigration_project_test.go +++ /dev/null @@ -1,89 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestDatabaseMigrationProjectID(t *testing.T) { - testData := []struct { - Name string - Input string - Error bool - Expect *DatabaseMigrationProjectId - }{ - { - Name: "Empty", - Input: "", - Error: true, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Error: true, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Error: true, - }, - { - Name: "No Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Error: true, - }, - { - Name: "Missing service name", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/services", - Error: true, - }, - { - Name: "No Projects Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/services/service1", - Error: true, - }, - { - Name: "Missing project name", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/services/service1/projects", - Error: true, - }, - { - Name: "Service name", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Datamigration/services/service1/projects/project1", - Expect: &DatabaseMigrationProjectId{ - ResourceGroup: "resGroup1", - Service: "service1", - Name: "project1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Datamigration/Services/service1/projects/project1", - Error: true, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := DatabaseMigrationProjectID(v.Input) - if err != nil { - if v.Error { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) - } - - if actual.Service != v.Expect.Service { - t.Fatalf("Expected %q but got %q for Service", v.Expect.Service, actual.Service) - } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) - } - } -} diff --git a/azurerm/internal/services/databasemigration/parse/datamigration_service.go b/azurerm/internal/services/databasemigration/parse/datamigration_service.go deleted file mode 100644 index afb7961865cd..000000000000 --- a/azurerm/internal/services/databasemigration/parse/datamigration_service.go +++ /dev/null @@ -1,33 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type DatabaseMigrationServiceId struct { - ResourceGroup string - Name string -} - -func DatabaseMigrationServiceID(input string) (*DatabaseMigrationServiceId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Database Migration Service ID %q: %+v", input, err) - } - - server := DatabaseMigrationServiceId{ - ResourceGroup: id.ResourceGroup, - } - - 
if server.Name, err = id.PopSegment("services"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &server, nil -} diff --git a/azurerm/internal/services/databasemigration/parse/datamigration_service_test.go b/azurerm/internal/services/databasemigration/parse/datamigration_service_test.go deleted file mode 100644 index 08b84daea08a..000000000000 --- a/azurerm/internal/services/databasemigration/parse/datamigration_service_test.go +++ /dev/null @@ -1,74 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestDatabaseMigrationServiceID(t *testing.T) { - testData := []struct { - Name string - Input string - Error bool - Expect *DatabaseMigrationServiceId - }{ - { - Name: "Empty", - Input: "", - Error: true, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Error: true, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Error: true, - }, - { - Name: "No Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Error: true, - }, - { - Name: "Missing service name", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/services", - Error: true, - }, - { - Name: "Service name", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Datamigration/services/service1", - Expect: &DatabaseMigrationServiceId{ - ResourceGroup: "resGroup1", - Name: "service1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Datamigration/Services/service1", - Error: true, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := DatabaseMigrationServiceID(v.Input) - if err != nil { - if v.Error { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) - } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) - } - } -} diff --git a/azurerm/internal/services/databasemigration/parse/project.go b/azurerm/internal/services/databasemigration/parse/project.go new file mode 100644 index 000000000000..1ce1b2d23838 --- /dev/null +++ b/azurerm/internal/services/databasemigration/parse/project.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ProjectId struct { + SubscriptionId string + ResourceGroup string + ServiceName string + Name string +} + +func NewProjectID(subscriptionId, resourceGroup, serviceName, name string) ProjectId { + return ProjectId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ServiceName: serviceName, + Name: name, + } +} + +func (id ProjectId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Service Name %q", id.ServiceName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Project", segmentsStr) +} + +func (id ProjectId) ID() string { + 
fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DataMigration/services/%s/projects/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ServiceName, id.Name) +} + +// ProjectID parses a Project ID into an ProjectId struct +func ProjectID(input string) (*ProjectId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ProjectId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ServiceName, err = id.PopSegment("services"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("projects"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/databasemigration/parse/project_test.go b/azurerm/internal/services/databasemigration/parse/project_test.go new file mode 100644 index 000000000000..915b8a22c148 --- /dev/null +++ b/azurerm/internal/services/databasemigration/parse/project_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ProjectId{} + +func TestProjectIDFormatter(t *testing.T) { + actual := NewProjectID("12345678-1234-9876-4563-123456789012", "resGroup1", "service1", "project1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataMigration/services/service1/projects/project1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestProjectID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ProjectId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataMigration/", + Error: true, + }, + + { + // missing value for ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataMigration/services/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataMigration/services/service1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataMigration/services/service1/projects/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataMigration/services/service1/projects/project1", + 
Expected: &ProjectId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + ServiceName: "service1", + Name: "project1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DATAMIGRATION/SERVICES/SERVICE1/PROJECTS/PROJECT1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ProjectID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ServiceName != v.Expected.ServiceName { + t.Fatalf("Expected %q but got %q for ServiceName", v.Expected.ServiceName, actual.ServiceName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/databasemigration/parse/service.go b/azurerm/internal/services/databasemigration/parse/service.go new file mode 100644 index 000000000000..c71abbeb8dbd --- /dev/null +++ b/azurerm/internal/services/databasemigration/parse/service.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ServiceId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewServiceID(subscriptionId, resourceGroup, name string) ServiceId { + return ServiceId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id ServiceId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Service", segmentsStr) +} + +func (id ServiceId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DataMigration/services/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// ServiceID parses a Service ID into an ServiceId struct +func ServiceID(input string) (*ServiceId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ServiceId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("services"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/databasemigration/parse/service_test.go b/azurerm/internal/services/databasemigration/parse/service_test.go new file mode 100644 index 000000000000..7fd0aea84557 --- /dev/null +++ 
b/azurerm/internal/services/databasemigration/parse/service_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ServiceId{} + +func TestServiceIDFormatter(t *testing.T) { + actual := NewServiceID("12345678-1234-9876-4563-123456789012", "resGroup1", "service1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataMigration/services/service1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestServiceID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ServiceId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataMigration/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataMigration/services/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataMigration/services/service1", + Expected: &ServiceId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "service1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DATAMIGRATION/SERVICES/SERVICE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ServiceID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/databasemigration/registration.go b/azurerm/internal/services/databasemigration/registration.go index 7a2c32aae2a3..6ef733b5ec82 100644 --- a/azurerm/internal/services/databasemigration/registration.go +++ b/azurerm/internal/services/databasemigration/registration.go @@ -21,16 +21,16 @@ func (r Registration) WebsiteCategories() []string { // SupportedDataSources returns the supported Data Sources supported by this Service func (r Registration) SupportedDataSources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_database_migration_service": 
dataSourceArmDatabaseMigrationService(), - "azurerm_database_migration_project": dataSourceArmDatabaseMigrationProject(), + "azurerm_database_migration_service": dataSourceDatabaseMigrationService(), + "azurerm_database_migration_project": dataSourceDatabaseMigrationProject(), } } // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { resources := map[string]*schema.Resource{ - "azurerm_database_migration_service": resourceArmDatabaseMigrationService(), - "azurerm_database_migration_project": resourceArmDatabaseMigrationProject(), + "azurerm_database_migration_service": resourceDatabaseMigrationService(), + "azurerm_database_migration_project": resourceDatabaseMigrationProject(), } return resources diff --git a/azurerm/internal/services/databasemigration/resourceids.go b/azurerm/internal/services/databasemigration/resourceids.go new file mode 100644 index 000000000000..0aa7280c161c --- /dev/null +++ b/azurerm/internal/services/databasemigration/resourceids.go @@ -0,0 +1,4 @@ +package databasemigration + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Project -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataMigration/services/service1/projects/project1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Service -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataMigration/services/service1 diff --git a/azurerm/internal/services/databasemigration/tests/database_migration_project_data_source_test.go b/azurerm/internal/services/databasemigration/tests/database_migration_project_data_source_test.go deleted file mode 100644 index cf3f20fcf62d..000000000000 --- a/azurerm/internal/services/databasemigration/tests/database_migration_project_data_source_test.go +++ /dev/null @@ -1,40 +0,0 @@ -package azurerm - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMDatabaseMigrationProject_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_database_migration_project", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceDatabaseMigrationProject_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "source_platform", "SQL"), - resource.TestCheckResourceAttr(data.ResourceName, "target_platform", "SQLDB"), - ), - }, - }, - }) -} - -func testAccDataSourceDatabaseMigrationProject_basic(data acceptance.TestData) string { - config := testAccAzureRMDatabaseMigrationProject_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_database_migration_project" "test" { - name = azurerm_database_migration_project.test.name - service_name = azurerm_database_migration_project.test.service_name - resource_group_name = azurerm_database_migration_project.test.resource_group_name -} -`, config) -} diff --git a/azurerm/internal/services/databasemigration/tests/database_migration_project_resource_test.go b/azurerm/internal/services/databasemigration/tests/database_migration_project_resource_test.go deleted file mode 100644 index e921e0f28d02..000000000000 --- 
a/azurerm/internal/services/databasemigration/tests/database_migration_project_resource_test.go +++ /dev/null @@ -1,210 +0,0 @@ -package azurerm - -import ( - "fmt" - "testing" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/databasemigration/parse" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMDatabaseMigrationProject_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_database_migration_project", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDatabaseMigrationProjectDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDatabaseMigrationProject_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDatabaseMigrationProjectExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "source_platform", "SQL"), - resource.TestCheckResourceAttr(data.ResourceName, "target_platform", "SQLDB"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDatabaseMigrationProject_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_database_migration_project", "test") - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDatabaseMigrationProjectDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDatabaseMigrationProject_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDatabaseMigrationProjectExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "source_platform", "SQL"), - resource.TestCheckResourceAttr(data.ResourceName, "target_platform", "SQLDB"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.name", "Test"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDatabaseMigrationProject_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_database_migration_project", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDatabaseMigrationProjectDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDatabaseMigrationProject_basic(data), - }, - data.RequiresImportErrorStep(testAccAzureRMDatabaseMigrationProject_requiresImport), - }, - }) -} - -func TestAccAzureRMDatabaseMigrationProject_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_database_migration_project", "test") - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDatabaseMigrationProjectDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDatabaseMigrationProject_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDatabaseMigrationProjectExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMDatabaseMigrationProject_complete(data), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMDatabaseMigrationProjectExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.name", "Test"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMDatabaseMigrationProject_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDatabaseMigrationProjectExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMDatabaseMigrationProjectExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Database Migration Project not found: %s", resourceName) - } - - id, err := parse.DatabaseMigrationProjectID(rs.Primary.ID) - if err != nil { - return err - } - - client := acceptance.AzureProvider.Meta().(*clients.Client).DatabaseMigration.ProjectsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - if resp, err := client.Get(ctx, id.ResourceGroup, id.Service, id.Name); err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Database Migration Project (Project Name %q / Service Name %q / Group Name %q) does not exist", id.Name, id.Service, id.ResourceGroup) - } - return fmt.Errorf("Bad: Get on ProjectsClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMDatabaseMigrationProjectDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).DatabaseMigration.ProjectsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_database_migration_project" { - continue - } - - id, err := parse.DatabaseMigrationProjectID(rs.Primary.ID) - if err != nil { - return err - } - - if resp, err := client.Get(ctx, id.ResourceGroup, id.Service, id.Name); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Get on ProjectsClient: %+v", err) - } - } - - return nil - } - - return nil -} - -func testAccAzureRMDatabaseMigrationProject_basic(data acceptance.TestData) string { - template := testAccAzureRMDatabaseMigrationService_basic(data) - - return fmt.Sprintf(` -%s - -resource "azurerm_database_migration_project" "test" { - name = "acctestDbmsProject-%d" - service_name = azurerm_database_migration_service.test.name - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - source_platform = "SQL" - target_platform = "SQLDB" -} -`, template, data.RandomInteger) -} - -func testAccAzureRMDatabaseMigrationProject_complete(data acceptance.TestData) string { - template := testAccAzureRMDatabaseMigrationService_basic(data) - - return fmt.Sprintf(` -%s - -resource "azurerm_database_migration_project" "test" { - name = "acctestDbmsProject-%d" - service_name = azurerm_database_migration_service.test.name - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - source_platform = "SQL" - target_platform = "SQLDB" - tags = { - name = "Test" - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMDatabaseMigrationProject_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMDatabaseMigrationProject_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_database_migration_project" "import" { - name = azurerm_database_migration_project.test.name - service_name = azurerm_database_migration_project.test.service_name - resource_group_name = 
azurerm_database_migration_project.test.resource_group_name - location = azurerm_database_migration_project.test.location - source_platform = azurerm_database_migration_project.test.source_platform - target_platform = azurerm_database_migration_project.test.target_platform -} -`, template) -} diff --git a/azurerm/internal/services/databasemigration/tests/database_migration_service_data_source_test.go b/azurerm/internal/services/databasemigration/tests/database_migration_service_data_source_test.go deleted file mode 100644 index 16f0b9fec590..000000000000 --- a/azurerm/internal/services/databasemigration/tests/database_migration_service_data_source_test.go +++ /dev/null @@ -1,39 +0,0 @@ -package azurerm - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMDatabaseMigrationService_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_database_migration_service", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceDatabaseMigrationService_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "resource_group_name"), - resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "Standard_1vCores"), - ), - }, - }, - }) -} - -func testAccDataSourceDatabaseMigrationService_basic(data acceptance.TestData) string { - config := testAccAzureRMDatabaseMigrationService_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_database_migration_service" "test" { - resource_group_name = azurerm_database_migration_service.test.resource_group_name - name = azurerm_database_migration_service.test.name -} -`, config) -} diff --git a/azurerm/internal/services/databasemigration/tests/database_migration_service_resource_test.go b/azurerm/internal/services/databasemigration/tests/database_migration_service_resource_test.go deleted file mode 100644 index af446d8e6c00..000000000000 --- a/azurerm/internal/services/databasemigration/tests/database_migration_service_resource_test.go +++ /dev/null @@ -1,237 +0,0 @@ -package azurerm - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/databasemigration/parse" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMDatabaseMigrationService_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_database_migration_service", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDatabaseMigrationServiceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDatabaseMigrationService_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDatabaseMigrationServiceExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "subnet_id"), - resource.TestCheckResourceAttr(data.ResourceName, "sku_name", 
"Standard_1vCores"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDatabaseMigrationService_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_database_migration_service", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDatabaseMigrationServiceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDatabaseMigrationService_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDatabaseMigrationServiceExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "subnet_id"), - resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "Standard_1vCores"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.name", "test"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDatabaseMigrationService_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_database_migration_service", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDatabaseMigrationServiceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDatabaseMigrationService_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDatabaseMigrationServiceExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMDatabaseMigrationService_requiresImport), - }, - }) -} - -func TestAccAzureRMDatabaseMigrationService_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_database_migration_service", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDatabaseMigrationServiceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDatabaseMigrationService_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDatabaseMigrationServiceExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMDatabaseMigrationService_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDatabaseMigrationServiceExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.name", "test"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMDatabaseMigrationService_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDatabaseMigrationServiceExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMDatabaseMigrationServiceExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Database Migration Service not found: %s", resourceName) - } - - id, err := parse.DatabaseMigrationServiceID(rs.Primary.ID) - if err != nil { - return err - } - - client := acceptance.AzureProvider.Meta().(*clients.Client).DatabaseMigration.ServicesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - if resp, err := client.Get(ctx, id.ResourceGroup, id.Name); err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Database Migration Service (Service Name %q / Group Name %q) does not exist", id.Name, id.ResourceGroup) - } - return fmt.Errorf("Bad: Get on ServicesClient: %+v", err) - } 
- - return nil - } -} - -func testCheckAzureRMDatabaseMigrationServiceDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).DatabaseMigration.ServicesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_database_migration_service" { - continue - } - - id, err := parse.DatabaseMigrationServiceID(rs.Primary.ID) - if err != nil { - return err - } - - if resp, err := client.Get(ctx, id.ResourceGroup, id.Name); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Get on ServicesClient: %+v", err) - } - } - - return nil - } - - return nil -} - -func testAccAzureRMDatabaseMigrationService_base(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-dbms-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctestVnet-dbms-%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctestSubnet-dbms-%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.1.0/24" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDatabaseMigrationService_basic(data acceptance.TestData) string { - template := testAccAzureRMDatabaseMigrationService_base(data) - - return fmt.Sprintf(` -%s - -resource "azurerm_database_migration_service" "test" { - name = "acctestDbms-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - subnet_id = azurerm_subnet.test.id - sku_name = "Standard_1vCores" -} -`, template, data.RandomInteger) -} - -func testAccAzureRMDatabaseMigrationService_complete(data acceptance.TestData) string { - template := testAccAzureRMDatabaseMigrationService_base(data) - - return fmt.Sprintf(` -%s - -resource "azurerm_database_migration_service" "test" { - name = "acctestDbms-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - subnet_id = azurerm_subnet.test.id - sku_name = "Standard_1vCores" - tags = { - name = "test" - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMDatabaseMigrationService_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMDatabaseMigrationService_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_database_migration_service" "import" { - name = azurerm_database_migration_service.test.name - location = azurerm_database_migration_service.test.location - resource_group_name = azurerm_database_migration_service.test.resource_group_name - subnet_id = azurerm_database_migration_service.test.subnet_id - sku_name = azurerm_database_migration_service.test.sku_name -} -`, template) -} diff --git a/azurerm/internal/services/databasemigration/validate/project_id.go b/azurerm/internal/services/databasemigration/validate/project_id.go new file mode 100644 index 000000000000..f0b47a7396a5 --- /dev/null +++ b/azurerm/internal/services/databasemigration/validate/project_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/databasemigration/parse" +) + +func ProjectID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ProjectID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/databasemigration/validate/project_id_test.go b/azurerm/internal/services/databasemigration/validate/project_id_test.go new file mode 100644 index 000000000000..ac706153baec --- /dev/null +++ b/azurerm/internal/services/databasemigration/validate/project_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestProjectID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataMigration/", + Valid: false, + }, + + { + // missing value for ServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataMigration/services/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataMigration/services/service1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataMigration/services/service1/projects/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataMigration/services/service1/projects/project1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DATAMIGRATION/SERVICES/SERVICE1/PROJECTS/PROJECT1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ProjectID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/databasemigration/validate/service_id.go b/azurerm/internal/services/databasemigration/validate/service_id.go new file mode 100644 index 000000000000..c00b892736cc --- /dev/null +++ b/azurerm/internal/services/databasemigration/validate/service_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/databasemigration/parse" +) + +func ServiceID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, 
fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ServiceID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/databasemigration/validate/service_id_test.go b/azurerm/internal/services/databasemigration/validate/service_id_test.go new file mode 100644 index 000000000000..b3a6be9e3a90 --- /dev/null +++ b/azurerm/internal/services/databasemigration/validate/service_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestServiceID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataMigration/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataMigration/services/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataMigration/services/service1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DATAMIGRATION/SERVICES/SERVICE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ServiceID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/databricks/databricks_workspace_data_source.go b/azurerm/internal/services/databricks/databricks_workspace_data_source.go index 51e78046ab6d..4a1d64f3cf30 100644 --- a/azurerm/internal/services/databricks/databricks_workspace_data_source.go +++ b/azurerm/internal/services/databricks/databricks_workspace_data_source.go @@ -7,11 +7,12 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmDatabricksWorkspace() *schema.Resource { +func dataSourceDatabricksWorkspace() *schema.Resource { return &schema.Resource{ Read: dataSourceDatabricksWorkspaceRead, @@ -41,6 +42,8 @@ func dataSourceArmDatabricksWorkspace() *schema.Resource { Type: schema.TypeString, Computed: true, }, + + "tags": tags.Schema(), }, } } @@ -72,5 +75,5 @@ func dataSourceDatabricksWorkspaceRead(d *schema.ResourceData, meta interface{}) d.Set("workspace_url", props.WorkspaceURL) } - return nil + return tags.FlattenAndSet(d, resp.Tags) } diff --git 
a/azurerm/internal/services/databricks/databricks_workspace_data_source_test.go b/azurerm/internal/services/databricks/databricks_workspace_data_source_test.go new file mode 100644 index 000000000000..38255b5ad0fb --- /dev/null +++ b/azurerm/internal/services/databricks/databricks_workspace_data_source_test.go @@ -0,0 +1,54 @@ +package databricks_test + +import ( + "fmt" + "regexp" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type DatabricksWorkspaceDataSource struct { +} + +func TestAccDatabricksWorkspaceDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_databricks_workspace", "test") + r := DatabricksWorkspaceDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + resource.TestMatchResourceAttr(data.ResourceName, "workspace_url", regexp.MustCompile("azuredatabricks.net")), + check.That(data.ResourceName).Key("workspace_id").Exists(), + ), + }, + }) +} + +func (DatabricksWorkspaceDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-databricks-%d" + location = "%s" +} + +resource "azurerm_databricks_workspace" "test" { + name = "acctestDBW-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "standard" +} + +data "azurerm_databricks_workspace" "test" { + name = azurerm_databricks_workspace.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/databricks/databricks_workspace_resource.go b/azurerm/internal/services/databricks/databricks_workspace_resource.go index 1b626c115780..acae6d1d93df 100644 --- a/azurerm/internal/services/databricks/databricks_workspace_resource.go +++ b/azurerm/internal/services/databricks/databricks_workspace_resource.go @@ -14,18 +14,19 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/databricks/parse" + resourcesParse "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/resource/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmDatabricksWorkspace() *schema.Resource { +func resourceDatabricksWorkspace() *schema.Resource { return &schema.Resource{ - Create: resourceArmDatabricksWorkspaceCreateUpdate, - Read: resourceArmDatabricksWorkspaceRead, - Update: resourceArmDatabricksWorkspaceCreateUpdate, - Delete: resourceArmDatabricksWorkspaceDelete, + Create: resourceDatabricksWorkspaceCreateUpdate, + Read: resourceDatabricksWorkspaceRead, + Update: resourceDatabricksWorkspaceCreateUpdate, + Delete: resourceDatabricksWorkspaceDelete, Timeouts: &schema.ResourceTimeout{ Create: 
schema.DefaultTimeout(30 * time.Minute), @@ -35,7 +36,7 @@ func resourceArmDatabricksWorkspace() *schema.Resource { }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.DatabricksWorkspaceID(id) + _, err := parse.WorkspaceID(id) return err }), @@ -125,7 +126,7 @@ func resourceArmDatabricksWorkspace() *schema.Resource { } } -func resourceArmDatabricksWorkspaceCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceDatabricksWorkspaceCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DataBricks.WorkspacesClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -200,21 +201,20 @@ func resourceArmDatabricksWorkspaceCreateUpdate(d *schema.ResourceData, meta int d.SetId(*read.ID) - return resourceArmDatabricksWorkspaceRead(d, meta) + return resourceDatabricksWorkspaceRead(d, meta) } -func resourceArmDatabricksWorkspaceRead(d *schema.ResourceData, meta interface{}) error { +func resourceDatabricksWorkspaceRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DataBricks.WorkspacesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DatabricksWorkspaceID(d.Id()) + id, err := parse.WorkspaceID(d.Id()) if err != nil { return err } resp, err := client.Get(ctx, id.ResourceGroup, id.Name) - if err != nil { if utils.ResponseWasNotFound(resp.Response) { log.Printf("[DEBUG] Databricks Workspace %q was not found in Resource Group %q - removing from state", id.Name, id.ResourceGroup) @@ -237,7 +237,7 @@ func resourceArmDatabricksWorkspaceRead(d *schema.ResourceData, meta interface{} } if props := resp.WorkspaceProperties; props != nil { - managedResourceGroupID, err := azure.ParseAzureResourceID(*props.ManagedResourceGroupID) + managedResourceGroupID, err := resourcesParse.ResourceGroupID(*props.ManagedResourceGroupID) if err != nil { return err } @@ -245,7 +245,7 @@ func resourceArmDatabricksWorkspaceRead(d *schema.ResourceData, meta interface{} d.Set("managed_resource_group_name", managedResourceGroupID.ResourceGroup) if err := d.Set("custom_parameters", flattenWorkspaceCustomParameters(props.Parameters)); err != nil { - return fmt.Errorf("Error setting `custom_parameters`: %+v", err) + return fmt.Errorf("setting `custom_parameters`: %+v", err) } d.Set("workspace_url", props.WorkspaceURL) @@ -255,12 +255,12 @@ func resourceArmDatabricksWorkspaceRead(d *schema.ResourceData, meta interface{} return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmDatabricksWorkspaceDelete(d *schema.ResourceData, meta interface{}) error { +func resourceDatabricksWorkspaceDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DataBricks.WorkspacesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DatabricksWorkspaceID(d.Id()) + id, err := parse.WorkspaceID(d.Id()) if err != nil { return err } @@ -360,7 +360,7 @@ func ValidateDatabricksWorkspaceName(i interface{}, k string) (warnings []string // 1) Cannot be empty if len(v) == 0 { errors = append(errors, fmt.Errorf("%q cannot be an empty string: %q", k, v)) - // Treating this as a special case and returning early to match Azure Portal behavior. + // Treating this as a special case and returning early to match Azure Portal behaviour. 
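// Illustrative sketch (not part of this change): validators with the
// func(interface{}, string) ([]string, []error) shape, such as ValidateDatabricksWorkspaceName
// above or the generated validate.ServiceID / validate.ProjectID helpers, are wired into a
// schema attribute via ValidateFunc. The package name and attribute below are hypothetical
// and only show that wiring.
package sketch

import (
	"github.com/hashicorp/terraform-plugin-sdk/helper/schema"

	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/databricks"
)

// workspaceNameSchema returns a name attribute validated with the workspace naming rules.
func workspaceNameSchema() *schema.Schema {
	return &schema.Schema{
		Type:         schema.TypeString,
		Required:     true,
		ForceNew:     true,
		ValidateFunc: databricks.ValidateDatabricksWorkspaceName,
	}
}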
return warnings, errors } diff --git a/azurerm/internal/services/databricks/databricks_workspace_resource_test.go b/azurerm/internal/services/databricks/databricks_workspace_resource_test.go new file mode 100644 index 000000000000..53cfbefb78d1 --- /dev/null +++ b/azurerm/internal/services/databricks/databricks_workspace_resource_test.go @@ -0,0 +1,370 @@ +package databricks_test + +import ( + "context" + "fmt" + "regexp" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/databricks" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/databricks/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DatabricksWorkspaceResource struct { +} + +func TestAzureRMDatabrickWorkspaceName(t *testing.T) { + const errEmpty = "cannot be an empty string" + const errMinLen = "must be at least 3 characters" + const errMaxLen = "must be no more than 30 characters" + const errAllowList = "can contain only alphanumeric characters, underscores, and hyphens" + + cases := []struct { + Name string + Input string + ExpectedErrors []string + }{ + // Happy paths: + { + Name: "Entire character allow-list", + Input: "aZ09_-", + }, + { + Name: "Minimum character length", + Input: "---", + }, + { + Name: "Maximum character length", + Input: "012345678901234567890123456789", // 30 chars + }, + + // Simple negative cases: + { + Name: "Introduce a non-allowed character", + Input: "aZ09_-$", // dollar sign + ExpectedErrors: []string{errAllowList}, + }, + { + Name: "Below minimum character length", + Input: "--", + ExpectedErrors: []string{errMinLen}, + }, + { + Name: "Above maximum character length", + Input: "0123456789012345678901234567890", // 31 chars + ExpectedErrors: []string{errMaxLen}, + }, + { + Name: "Specifically test for emptiness", + Input: "", + ExpectedErrors: []string{errEmpty}, + }, + + // Complex negative cases + { + Name: "Too short and non-allowed char", + Input: "*^", + ExpectedErrors: []string{errMinLen, errAllowList}, + }, + { + Name: "Too long and non-allowed char", + Input: "012345678901234567890123456789ß", + ExpectedErrors: []string{errMaxLen, errAllowList}, + }, + } + + errsContain := func(errors []error, text string) bool { + for _, err := range errors { + if strings.Contains(err.Error(), text) { + return true + } + } + return false + } + + t.Parallel() + for _, tc := range cases { + t.Run(tc.Name, func(t *testing.T) { + _, errors := databricks.ValidateDatabricksWorkspaceName(tc.Input, "azurerm_databricks_workspace.test.name") + + if len(errors) != len(tc.ExpectedErrors) { + t.Fatalf("Expected %d errors but got %d for %q: %v", len(tc.ExpectedErrors), len(errors), tc.Input, errors) + } + + for _, expectedError := range tc.ExpectedErrors { + if !errsContain(errors, expectedError) { + t.Fatalf("Errors did not contain expected error: %s", expectedError) + } + } + }) + } +} + +func TestAccDatabricksWorkspace_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_databricks_workspace", "test") + r := DatabricksWorkspaceResource{} + + data.ResourceTest(t, r, 
[]resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("managed_resource_group_id").Exists(), + resource.TestMatchResourceAttr(data.ResourceName, "workspace_url", regexp.MustCompile("azuredatabricks.net")), + check.That(data.ResourceName).Key("workspace_id").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDatabricksWorkspace_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_databricks_workspace", "test") + r := DatabricksWorkspaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccDatabricksWorkspace_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_databricks_workspace", "test") + r := DatabricksWorkspaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("managed_resource_group_id").Exists(), + check.That(data.ResourceName).Key("managed_resource_group_name").Exists(), + check.That(data.ResourceName).Key("custom_parameters.0.virtual_network_id").Exists(), + check.That(data.ResourceName).Key("tags.%").HasValue("2"), + check.That(data.ResourceName).Key("tags.Environment").HasValue("Production"), + check.That(data.ResourceName).Key("tags.Pricing").HasValue("Standard"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDatabricksWorkspace_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_databricks_workspace", "test") + r := DatabricksWorkspaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("managed_resource_group_id").Exists(), + check.That(data.ResourceName).Key("managed_resource_group_name").Exists(), + check.That(data.ResourceName).Key("custom_parameters.0.virtual_network_id").Exists(), + check.That(data.ResourceName).Key("tags.%").HasValue("2"), + check.That(data.ResourceName).Key("tags.Environment").HasValue("Production"), + check.That(data.ResourceName).Key("tags.Pricing").HasValue("Standard"), + ), + }, + data.ImportStep(), + { + Config: r.completeUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("managed_resource_group_id").Exists(), + check.That(data.ResourceName).Key("managed_resource_group_name").Exists(), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.Pricing").HasValue("Standard"), + ), + }, + data.ImportStep(), + }) +} + +func (DatabricksWorkspaceResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.WorkspaceID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.DataBricks.WorkspacesClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Analysis Services Server %q (resource group: %q): %+v", id.Name, id.ResourceGroup, err) + } + + return utils.Bool(resp.WorkspaceProperties != nil), nil +} + +func (DatabricksWorkspaceResource) basic(data 
acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-db-%d" + location = "%s" +} + +resource "azurerm_databricks_workspace" "test" { + name = "acctestDBW-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + sku = "standard" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (DatabricksWorkspaceResource) requiresImport(data acceptance.TestData) string { + template := DatabricksWorkspaceResource{}.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_databricks_workspace" "import" { + name = azurerm_databricks_workspace.test.name + resource_group_name = azurerm_databricks_workspace.test.resource_group_name + location = azurerm_databricks_workspace.test.location + sku = azurerm_databricks_workspace.test.sku +} +`, template) +} + +func (DatabricksWorkspaceResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-db-%[1]d" + + location = "%[2]s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctest-vnet-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + address_space = ["10.0.0.0/16"] +} + +resource "azurerm_subnet" "public" { + name = "acctest-sn-public-%[1]d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.1.0/24" + + delegation { + name = "acctest" + + service_delegation { + name = "Microsoft.Databricks/workspaces" + + actions = [ + "Microsoft.Network/virtualNetworks/subnets/join/action", + "Microsoft.Network/virtualNetworks/subnets/prepareNetworkPolicies/action", + "Microsoft.Network/virtualNetworks/subnets/unprepareNetworkPolicies/action", + ] + } + } +} + +resource "azurerm_subnet" "private" { + name = "acctest-sn-private-%[1]d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" + + delegation { + name = "acctest" + + service_delegation { + name = "Microsoft.Databricks/workspaces" + + actions = [ + "Microsoft.Network/virtualNetworks/subnets/join/action", + "Microsoft.Network/virtualNetworks/subnets/prepareNetworkPolicies/action", + "Microsoft.Network/virtualNetworks/subnets/unprepareNetworkPolicies/action", + ] + } + } +} + +resource "azurerm_network_security_group" "nsg" { + name = "acctest-nsg-private-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet_network_security_group_association" "public" { + subnet_id = azurerm_subnet.public.id + network_security_group_id = azurerm_network_security_group.nsg.id +} + +resource "azurerm_subnet_network_security_group_association" "private" { + subnet_id = azurerm_subnet.private.id + network_security_group_id = azurerm_network_security_group.nsg.id +} + +resource "azurerm_databricks_workspace" "test" { + name = "acctestDBW-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + sku = "standard" + managed_resource_group_name = "acctestRG-DBW-%[1]d-managed" + + custom_parameters { + no_public_ip = true + public_subnet_name = azurerm_subnet.public.name + private_subnet_name = azurerm_subnet.private.name + 
virtual_network_id = azurerm_virtual_network.test.id + } + + tags = { + Environment = "Production" + Pricing = "Standard" + } +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (DatabricksWorkspaceResource) completeUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-db-%d" + location = "%s" +} + +resource "azurerm_databricks_workspace" "test" { + name = "acctestDBW-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + sku = "standard" + managed_resource_group_name = "acctestRG-DBW-%d-managed" + + tags = { + Pricing = "Standard" + } + + custom_parameters { + no_public_ip = false + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/databricks/parse/databricks_workspace.go b/azurerm/internal/services/databricks/parse/databricks_workspace.go deleted file mode 100644 index 3f947913c500..000000000000 --- a/azurerm/internal/services/databricks/parse/databricks_workspace.go +++ /dev/null @@ -1,33 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type DatabricksWorkspaceId struct { - ResourceGroup string - Name string -} - -func DatabricksWorkspaceID(input string) (*DatabricksWorkspaceId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Databricks Workspace ID %q: %+v", input, err) - } - - workspace := DatabricksWorkspaceId{ - ResourceGroup: id.ResourceGroup, - } - - if workspace.Name, err = id.PopSegment("workspaces"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &workspace, nil -} diff --git a/azurerm/internal/services/databricks/parse/databricks_workspace_test.go b/azurerm/internal/services/databricks/parse/databricks_workspace_test.go deleted file mode 100644 index a5169505db1b..000000000000 --- a/azurerm/internal/services/databricks/parse/databricks_workspace_test.go +++ /dev/null @@ -1,73 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestDatabricksWorkspaceId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *DatabricksWorkspaceId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Workspaces Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Databricks/workspaces/", - Expected: nil, - }, - { - Name: "Databricks Workspace ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Databricks/workspaces/Workspace1", - Expected: &DatabricksWorkspaceId{ - Name: "Workspace1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Databricks/Workspaces/", - Expected: nil, - }, - } - - 
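// Illustrative note (not part of this change): the hand-written DatabricksWorkspaceID parser
// and its test deleted here are superseded by generated helpers. A service package opts in
// through a go:generate directive in resourceids.go (the line below mirrors the one this
// change adds for Databricks); running `go generate` over the package re-creates the files
// marked "generated via 'go:generate'", i.e. the parse/ helpers and, as seen for the
// databasemigration service in this change, the corresponding validate/ wrappers.
package databricks

//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Workspace -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Databricks/workspaces/workspace1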
for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := DatabricksWorkspaceID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/databricks/parse/workspace.go b/azurerm/internal/services/databricks/parse/workspace.go new file mode 100644 index 000000000000..49171494c3c0 --- /dev/null +++ b/azurerm/internal/services/databricks/parse/workspace.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type WorkspaceId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewWorkspaceID(subscriptionId, resourceGroup, name string) WorkspaceId { + return WorkspaceId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id WorkspaceId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Workspace", segmentsStr) +} + +func (id WorkspaceId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Databricks/workspaces/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// WorkspaceID parses a Workspace ID into an WorkspaceId struct +func WorkspaceID(input string) (*WorkspaceId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := WorkspaceId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("workspaces"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/databricks/parse/workspace_test.go b/azurerm/internal/services/databricks/parse/workspace_test.go new file mode 100644 index 000000000000..213f0c6b081b --- /dev/null +++ b/azurerm/internal/services/databricks/parse/workspace_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = WorkspaceId{} + +func TestWorkspaceIDFormatter(t *testing.T) { + actual := NewWorkspaceID("12345678-1234-9876-4563-123456789012", "resGroup1", "workspace1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Databricks/workspaces/workspace1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestWorkspaceID(t *testing.T) { + testData 
:= []struct { + Input string + Error bool + Expected *WorkspaceId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Databricks/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Databricks/workspaces/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Databricks/workspaces/workspace1", + Expected: &WorkspaceId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "workspace1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DATABRICKS/WORKSPACES/WORKSPACE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := WorkspaceID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/databricks/registration.go b/azurerm/internal/services/databricks/registration.go index e7ecf6aac761..0e0f5fde0858 100644 --- a/azurerm/internal/services/databricks/registration.go +++ b/azurerm/internal/services/databricks/registration.go @@ -21,13 +21,13 @@ func (r Registration) WebsiteCategories() []string { // SupportedDataSources returns the supported Data Sources supported by this Service func (r Registration) SupportedDataSources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_databricks_workspace": dataSourceArmDatabricksWorkspace(), + "azurerm_databricks_workspace": dataSourceDatabricksWorkspace(), } } // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_databricks_workspace": resourceArmDatabricksWorkspace(), + "azurerm_databricks_workspace": resourceDatabricksWorkspace(), } } diff --git a/azurerm/internal/services/databricks/resourceids.go b/azurerm/internal/services/databricks/resourceids.go new file mode 100644 index 000000000000..86f381b624ce --- /dev/null +++ b/azurerm/internal/services/databricks/resourceids.go @@ -0,0 +1,3 @@ +package databricks + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Workspace 
-id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Databricks/workspaces/workspace1 diff --git a/azurerm/internal/services/databricks/tests/databricks_workspace_data_source_test.go b/azurerm/internal/services/databricks/tests/databricks_workspace_data_source_test.go deleted file mode 100644 index 58b34542a3e2..000000000000 --- a/azurerm/internal/services/databricks/tests/databricks_workspace_data_source_test.go +++ /dev/null @@ -1,53 +0,0 @@ -package tests - -import ( - "fmt" - "regexp" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMDatabricksWorkspace_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_databricks_workspace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceDatabricksWorkspace_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestMatchResourceAttr(data.ResourceName, "workspace_url", regexp.MustCompile("azuredatabricks.net")), - resource.TestCheckResourceAttrSet(data.ResourceName, "workspace_id"), - ), - }, - }, - }) -} - -func testAccDataSourceDatabricksWorkspace_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-databricks-%d" - location = "%s" -} - -resource "azurerm_databricks_workspace" "test" { - name = "acctestDBW-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "standard" -} - -data "azurerm_databricks_workspace" "test" { - name = azurerm_databricks_workspace.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/databricks/tests/databricks_workspace_resource_test.go b/azurerm/internal/services/databricks/tests/databricks_workspace_resource_test.go deleted file mode 100644 index 3e39a1ded1bb..000000000000 --- a/azurerm/internal/services/databricks/tests/databricks_workspace_resource_test.go +++ /dev/null @@ -1,423 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "regexp" - "strings" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/databricks" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/databricks/parse" -) - -func TestAzureRMDatabrickWorkspaceName(t *testing.T) { - const errEmpty = "cannot be an empty string" - const errMinLen = "must be at least 3 characters" - const errMaxLen = "must be no more than 30 characters" - const errAllowList = "can contain only alphanumeric characters, underscores, and hyphens" - - cases := []struct { - Name string - Input string - ExpectedErrors []string - }{ - // Happy paths: - { - Name: "Entire character allow-list", - Input: "aZ09_-", - }, - { - Name: "Minimum character length", - Input: "---", - }, - { - Name: "Maximum character length", - 
Input: "012345678901234567890123456789", // 30 chars - }, - - // Simple negative cases: - { - Name: "Introduce a non-allowed character", - Input: "aZ09_-$", // dollar sign - ExpectedErrors: []string{errAllowList}, - }, - { - Name: "Below minimum character length", - Input: "--", - ExpectedErrors: []string{errMinLen}, - }, - { - Name: "Above maximum character length", - Input: "0123456789012345678901234567890", // 31 chars - ExpectedErrors: []string{errMaxLen}, - }, - { - Name: "Specifically test for emptiness", - Input: "", - ExpectedErrors: []string{errEmpty}, - }, - - // Complex negative cases - { - Name: "Too short and non-allowed char", - Input: "*^", - ExpectedErrors: []string{errMinLen, errAllowList}, - }, - { - Name: "Too long and non-allowed char", - Input: "012345678901234567890123456789ß", - ExpectedErrors: []string{errMaxLen, errAllowList}, - }, - } - - errsContain := func(errors []error, text string) bool { - for _, err := range errors { - if strings.Contains(err.Error(), text) { - return true - } - } - return false - } - - t.Parallel() - for _, tc := range cases { - t.Run(tc.Name, func(t *testing.T) { - _, errors := databricks.ValidateDatabricksWorkspaceName(tc.Input, "azurerm_databricks_workspace.test.name") - - if len(errors) != len(tc.ExpectedErrors) { - t.Fatalf("Expected %d errors but got %d for %q: %v", len(tc.ExpectedErrors), len(errors), tc.Input, errors) - } - - for _, expectedError := range tc.ExpectedErrors { - if !errsContain(errors, expectedError) { - t.Fatalf("Errors did not contain expected error: %s", expectedError) - } - } - }) - } -} - -func TestAccAzureRMDatabricksWorkspace_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_databricks_workspace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDatabricksWorkspaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDatabricksWorkspace_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDatabricksWorkspaceExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "managed_resource_group_id"), - resource.TestMatchResourceAttr(data.ResourceName, "workspace_url", regexp.MustCompile("azuredatabricks.net")), - resource.TestCheckResourceAttrSet(data.ResourceName, "workspace_id"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDatabricksWorkspace_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_databricks_workspace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDatabricksWorkspaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDatabricksWorkspace_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDatabricksWorkspaceExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMDatabricksWorkspace_requiresImport), - }, - }) -} - -func TestAccAzureRMDatabricksWorkspace_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_databricks_workspace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDatabricksWorkspaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDatabricksWorkspace_complete(data), - Check: 
resource.ComposeTestCheckFunc( - testCheckAzureRMDatabricksWorkspaceExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "managed_resource_group_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "managed_resource_group_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "custom_parameters.0.virtual_network_id"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.Environment", "Production"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.Pricing", "Standard"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDatabricksWorkspace_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_databricks_workspace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDatabricksWorkspaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDatabricksWorkspace_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDatabricksWorkspaceExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "managed_resource_group_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "managed_resource_group_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "custom_parameters.0.virtual_network_id"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.Environment", "Production"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.Pricing", "Standard"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMDatabricksWorkspace_completeUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDatabricksWorkspaceExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "managed_resource_group_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "managed_resource_group_name"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.Pricing", "Standard"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMDatabricksWorkspaceExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).DataBricks.WorkspacesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Bad: Not found: %s", resourceName) - } - - id, err := parse.DatabricksWorkspaceID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.Name) - if err != nil { - return fmt.Errorf("Bad: Getting Workspace: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Databricks Workspace %s (resource group: %s) does not exist", id.Name, id.ResourceGroup) - } - - return nil - } -} - -func testCheckAzureRMDatabricksWorkspaceDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).DataBricks.WorkspacesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_databricks_workspace" { - continue - } - - id, err := parse.DatabricksWorkspaceID(rs.Primary.ID) - if err != nil { - return err - } - - resp, 
err := conn.Get(ctx, id.ResourceGroup, id.Name) - - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Bad: Databricks Workspace still exists:\n%#v", resp.ID) - } - } - - return nil -} - -func testAccAzureRMDatabricksWorkspace_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-db-%d" - location = "%s" -} - -resource "azurerm_databricks_workspace" "test" { - name = "acctestDBW-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - sku = "standard" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMDatabricksWorkspace_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMDatabricksWorkspace_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_databricks_workspace" "import" { - name = azurerm_databricks_workspace.test.name - resource_group_name = azurerm_databricks_workspace.test.resource_group_name - location = azurerm_databricks_workspace.test.location - sku = azurerm_databricks_workspace.test.sku -} -`, template) -} - -func testAccAzureRMDatabricksWorkspace_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-db-%[1]d" - - location = "%[2]s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctest-vnet-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - address_space = ["10.0.0.0/16"] -} - -resource "azurerm_subnet" "public" { - name = "acctest-sn-public-%[1]d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.1.0/24" - - delegation { - name = "acctest" - - service_delegation { - name = "Microsoft.Databricks/workspaces" - - actions = [ - "Microsoft.Network/virtualNetworks/subnets/join/action", - "Microsoft.Network/virtualNetworks/subnets/prepareNetworkPolicies/action", - "Microsoft.Network/virtualNetworks/subnets/unprepareNetworkPolicies/action", - ] - } - } -} - -resource "azurerm_subnet" "private" { - name = "acctest-sn-private-%[1]d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.2.0/24" - - delegation { - name = "acctest" - - service_delegation { - name = "Microsoft.Databricks/workspaces" - - actions = [ - "Microsoft.Network/virtualNetworks/subnets/join/action", - "Microsoft.Network/virtualNetworks/subnets/prepareNetworkPolicies/action", - "Microsoft.Network/virtualNetworks/subnets/unprepareNetworkPolicies/action", - ] - } - } -} - -resource "azurerm_network_security_group" "nsg" { - name = "acctest-nsg-private-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet_network_security_group_association" "public" { - subnet_id = azurerm_subnet.public.id - network_security_group_id = azurerm_network_security_group.nsg.id -} - -resource "azurerm_subnet_network_security_group_association" "private" { - subnet_id = azurerm_subnet.private.id - network_security_group_id = azurerm_network_security_group.nsg.id -} - -resource "azurerm_databricks_workspace" "test" { - name = "acctestDBW-%[1]d" - resource_group_name = 
azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - sku = "standard" - managed_resource_group_name = "acctestRG-DBW-%[1]d-managed" - - custom_parameters { - no_public_ip = true - public_subnet_name = azurerm_subnet.public.name - private_subnet_name = azurerm_subnet.private.name - virtual_network_id = azurerm_virtual_network.test.id - } - - tags = { - Environment = "Production" - Pricing = "Standard" - } -} -`, data.RandomInteger, data.Locations.Primary) -} - -func testAccAzureRMDatabricksWorkspace_completeUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-db-%d" - location = "%s" -} - -resource "azurerm_databricks_workspace" "test" { - name = "acctestDBW-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - sku = "standard" - managed_resource_group_name = "acctestRG-DBW-%d-managed" - - tags = { - Pricing = "Standard" - } - - custom_parameters { - no_public_ip = false - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/databricks/validate/workspace_id.go b/azurerm/internal/services/databricks/validate/workspace_id.go new file mode 100644 index 000000000000..597793f7981b --- /dev/null +++ b/azurerm/internal/services/databricks/validate/workspace_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/databricks/parse" +) + +func WorkspaceID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.WorkspaceID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/databricks/validate/workspace_id_test.go b/azurerm/internal/services/databricks/validate/workspace_id_test.go new file mode 100644 index 000000000000..3df37b3a8f05 --- /dev/null +++ b/azurerm/internal/services/databricks/validate/workspace_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestWorkspaceID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Databricks/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Databricks/workspaces/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Databricks/workspaces/workspace1", + 
Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DATABRICKS/WORKSPACES/WORKSPACE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := WorkspaceID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/datafactory/data_factory.go b/azurerm/internal/services/datafactory/data_factory.go index d93c0d08754a..5039a21e4649 100644 --- a/azurerm/internal/services/datafactory/data_factory.go +++ b/azurerm/internal/services/datafactory/data_factory.go @@ -227,3 +227,37 @@ func serializeDataFactoryPipelineActivities(activities *[]datafactory.BasicActiv func suppressJsonOrderingDifference(_, old, new string, _ *schema.ResourceData) bool { return utils.NormalizeJson(old) == utils.NormalizeJson(new) } + +func expandAzureKeyVaultPassword(input []interface{}) *datafactory.AzureKeyVaultSecretReference { + if len(input) == 0 || input[0] == nil { + return nil + } + + config := input[0].(map[string]interface{}) + + return &datafactory.AzureKeyVaultSecretReference{ + SecretName: config["secret_name"].(string), + Store: &datafactory.LinkedServiceReference{ + Type: utils.String("LinkedServiceReference"), + ReferenceName: utils.String(config["linked_service_name"].(string)), + }, + } +} + +func flattenAzureKeyVaultPassword(secretReference *datafactory.AzureKeyVaultSecretReference) []interface{} { + if secretReference == nil { + return nil + } + + parameters := make(map[string]interface{}) + + if store := secretReference.Store; store != nil { + if store.ReferenceName != nil { + parameters["linked_service_name"] = *store.ReferenceName + } + } + + parameters["secret_name"] = secretReference.SecretName + + return []interface{}{parameters} +} diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_azure_blob_resource_test.go b/azurerm/internal/services/datafactory/data_factory_dataset_azure_blob_resource_test.go index 14cdef43dbe8..d611bb751712 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_azure_blob_resource_test.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_azure_blob_resource_test.go @@ -112,7 +112,6 @@ func testCheckAzureRMDataFactoryDatasetAzureBlobDestroy(s *terraform.State) erro dataFactoryName := rs.Primary.Attributes["data_factory_name"] resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") - if err != nil { return nil } diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_cosmosdb_sqlapi_resource_test.go b/azurerm/internal/services/datafactory/data_factory_dataset_cosmosdb_sqlapi_resource_test.go index 2b66e6dbb147..bc69c1008cae 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_cosmosdb_sqlapi_resource_test.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_cosmosdb_sqlapi_resource_test.go @@ -112,7 +112,6 @@ func testCheckAzureRMDataFactoryDatasetCosmosDbSQLAPIDestroy(s *terraform.State) dataFactoryName := rs.Primary.Attributes["data_factory_name"] resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") - if err != nil { return nil } diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_delimited_text_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_delimited_text_resource.go index d90bc1f855c5..12a049dfc288 100644 --- 
a/azurerm/internal/services/datafactory/data_factory_dataset_delimited_text_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_delimited_text_resource.go @@ -64,7 +64,7 @@ func resourceArmDataFactoryDatasetDelimitedText() *schema.Resource { Type: schema.TypeList, MaxItems: 1, Optional: true, - //ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "azure_blob_storage_location"}, + // ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "azure_blob_storage_location"}, ConflictsWith: []string{"azure_blob_storage_location"}, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ @@ -92,7 +92,7 @@ func resourceArmDataFactoryDatasetDelimitedText() *schema.Resource { Type: schema.TypeList, MaxItems: 1, Optional: true, - //ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "azure_blob_storage_location"}, + // ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "azure_blob_storage_location"}, ConflictsWith: []string{"http_server_location"}, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ @@ -519,6 +519,7 @@ func expandDataFactoryDatasetHttpServerLocation(d *schema.ResourceData) datafact } return httpServerLocation } + func expandDataFactoryDatasetAzureBlobStorageLocation(d *schema.ResourceData) datafactory.BasicDatasetLocation { props := d.Get("azure_blob_storage_location").([]interface{})[0].(map[string]interface{}) container := props["container"].(string) diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_delimited_text_resource_test.go b/azurerm/internal/services/datafactory/data_factory_dataset_delimited_text_resource_test.go index 76d1bc05b94d..92b715f37b1e 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_delimited_text_resource_test.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_delimited_text_resource_test.go @@ -57,7 +57,6 @@ func testCheckAzureRMDataFactoryDatasetDelimitedTextDestroy(s *terraform.State) dataFactoryName := rs.Primary.Attributes["data_factory_name"] resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") - if err != nil { return nil } diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_http_resource_test.go b/azurerm/internal/services/datafactory/data_factory_dataset_http_resource_test.go index 81fbbc269ddf..1a9c318e1476 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_http_resource_test.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_http_resource_test.go @@ -112,7 +112,6 @@ func testCheckAzureRMDataFactoryDatasetHTTPDestroy(s *terraform.State) error { dataFactoryName := rs.Primary.Attributes["data_factory_name"] resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") - if err != nil { return nil } diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_json_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_json_resource.go index ce56aa21a922..20a68d1c798d 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_json_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_json_resource.go @@ -64,7 +64,7 @@ func resourceArmDataFactoryDatasetJSON() *schema.Resource { Type: schema.TypeList, MaxItems: 1, Optional: true, - //ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "azure_blob_storage_location"}, + // ConflictsWith: 
[]string{"sftp_server_location", "file_server_location", "s3_location", "azure_blob_storage_location"}, ConflictsWith: []string{"azure_blob_storage_location"}, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ @@ -92,7 +92,7 @@ func resourceArmDataFactoryDatasetJSON() *schema.Resource { Type: schema.TypeList, MaxItems: 1, Optional: true, - //ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "azure_blob_storage_location"}, + // ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "azure_blob_storage_location"}, ConflictsWith: []string{"http_server_location"}, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_json_resource_test.go b/azurerm/internal/services/datafactory/data_factory_dataset_json_resource_test.go index 69c3cd0bc319..4b69d8a65dc9 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_json_resource_test.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_json_resource_test.go @@ -112,7 +112,6 @@ func testCheckAzureRMDataFactoryDatasetJSONDestroy(s *terraform.State) error { dataFactoryName := rs.Primary.Attributes["data_factory_name"] resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") - if err != nil { return nil } diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_mysql_resource_test.go b/azurerm/internal/services/datafactory/data_factory_dataset_mysql_resource_test.go index d97ffdc5a7e3..23475a3b0da2 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_mysql_resource_test.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_mysql_resource_test.go @@ -111,7 +111,6 @@ func testCheckAzureRMDataFactoryDatasetMySQLDestroy(s *terraform.State) error { dataFactoryName := rs.Primary.Attributes["data_factory_name"] resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") - if err != nil { return nil } diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_postgresql_resource_test.go b/azurerm/internal/services/datafactory/data_factory_dataset_postgresql_resource_test.go index e7062a6856fa..c3d70f807f01 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_postgresql_resource_test.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_postgresql_resource_test.go @@ -111,7 +111,6 @@ func testCheckAzureRMDataFactoryDatasetPostgreSQLDestroy(s *terraform.State) err dataFactoryName := rs.Primary.Attributes["data_factory_name"] resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") - if err != nil { return nil } diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_sql_server_table_resource_test.go b/azurerm/internal/services/datafactory/data_factory_dataset_sql_server_table_resource_test.go index 94bb472e5996..bb5eb9b28eda 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_sql_server_table_resource_test.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_sql_server_table_resource_test.go @@ -111,7 +111,6 @@ func testCheckAzureRMDataFactoryDatasetSQLServerTableDestroy(s *terraform.State) dataFactoryName := rs.Primary.Attributes["data_factory_name"] resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") - if err != nil { return nil } diff --git a/azurerm/internal/services/datafactory/data_factory_integration_runtime_managed_resource_test.go 
b/azurerm/internal/services/datafactory/data_factory_integration_runtime_managed_resource_test.go index a7b9b573298d..8b1cb62dd445 100644 --- a/azurerm/internal/services/datafactory/data_factory_integration_runtime_managed_resource_test.go +++ b/azurerm/internal/services/datafactory/data_factory_integration_runtime_managed_resource_test.go @@ -335,7 +335,6 @@ func testCheckAzureRMDataFactoryIntegrationRuntimeManagedDestroy(s *terraform.St resourceGroup := rs.Primary.Attributes["resource_group_name"] resp, err := client.Get(ctx, resourceGroup, factoryName, name, "") - if err != nil { return nil } diff --git a/azurerm/internal/services/datafactory/data_factory_integration_runtime_self_hosted_resource.go b/azurerm/internal/services/datafactory/data_factory_integration_runtime_self_hosted_resource.go index 3459bbe4cb9a..65a483da1b37 100644 --- a/azurerm/internal/services/datafactory/data_factory_integration_runtime_self_hosted_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_integration_runtime_self_hosted_resource.go @@ -28,7 +28,7 @@ func resourceArmDataFactoryIntegrationRuntimeSelfHosted() *schema.Resource { Delete: resourceArmDataFactoryIntegrationRuntimeSelfHostedDelete, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.DataFactoryIntegrationRuntimeID(id) + _, err := parse.IntegrationRuntimeID(id) return err }), @@ -156,12 +156,12 @@ func resourceArmDataFactoryIntegrationRuntimeSelfHostedRead(d *schema.ResourceDa ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DataFactoryIntegrationRuntimeID(d.Id()) + id, err := parse.IntegrationRuntimeID(d.Id()) if err != nil { return err } resourceGroup := id.ResourceGroup - factoryName := id.DataFactory + factoryName := id.FactoryName name := id.Name resp, err := client.Get(ctx, resourceGroup, factoryName, name, "") @@ -222,12 +222,12 @@ func resourceArmDataFactoryIntegrationRuntimeSelfHostedDelete(d *schema.Resource ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DataFactoryIntegrationRuntimeID(d.Id()) + id, err := parse.IntegrationRuntimeID(d.Id()) if err != nil { return err } resourceGroup := id.ResourceGroup - factoryName := id.DataFactory + factoryName := id.FactoryName name := id.Name response, err := client.Delete(ctx, resourceGroup, factoryName, name) diff --git a/azurerm/internal/services/datafactory/data_factory_integration_runtime_self_hosted_resource_test.go b/azurerm/internal/services/datafactory/data_factory_integration_runtime_self_hosted_resource_test.go index 02c78426c0ef..eb20d67fbcd8 100644 --- a/azurerm/internal/services/datafactory/data_factory_integration_runtime_self_hosted_resource_test.go +++ b/azurerm/internal/services/datafactory/data_factory_integration_runtime_self_hosted_resource_test.go @@ -229,18 +229,18 @@ func testCheckAzureRMDataFactoryIntegrationRuntimeSelfHostedExists(name string) if !ok { return fmt.Errorf("Not found: %s", name) } - id, err := parse.DataFactoryIntegrationRuntimeID(rs.Primary.ID) + id, err := parse.IntegrationRuntimeID(rs.Primary.ID) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.DataFactory, id.Name, "") + resp, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { return fmt.Errorf("Bad: Get on IntegrationRuntimesClient: %+v", err) } if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Data Factory Self-hosted Integration Runtime %q 
(Resource Group: %q, Data Factory %q) does not exist", id.Name, id.DataFactory, id.ResourceGroup) + return fmt.Errorf("Bad: Data Factory Self-hosted Integration Runtime %q (Resource Group: %q, Data Factory %q) does not exist", id.Name, id.FactoryName, id.ResourceGroup) } return nil @@ -256,12 +256,12 @@ func testCheckAzureRMDataFactoryIntegrationRuntimeSelfHostedDestroy(s *terraform continue } - id, err := parse.DataFactoryIntegrationRuntimeID(rs.Primary.ID) + id, err := parse.IntegrationRuntimeID(rs.Primary.ID) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.DataFactory, id.Name, "") + resp, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { return fmt.Errorf("Bad: Get on IntegrationRuntimesClient: %+v", err) } diff --git a/azurerm/internal/services/datafactory/data_factory_linked_service_azure_blob_storage_resource_test.go b/azurerm/internal/services/datafactory/data_factory_linked_service_azure_blob_storage_resource_test.go index d89ab5fb43bc..2ded2a686ef1 100644 --- a/azurerm/internal/services/datafactory/data_factory_linked_service_azure_blob_storage_resource_test.go +++ b/azurerm/internal/services/datafactory/data_factory_linked_service_azure_blob_storage_resource_test.go @@ -110,7 +110,6 @@ func testCheckAzureRMDataFactoryLinkedServiceAzureBlobStorageDestroy(s *terrafor dataFactoryName := rs.Primary.Attributes["data_factory_name"] resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") - if err != nil { return nil } diff --git a/azurerm/internal/services/datafactory/data_factory_linked_service_azure_file_storage_resource.go b/azurerm/internal/services/datafactory/data_factory_linked_service_azure_file_storage_resource.go index ce15240d9aa3..74a797880bbf 100644 --- a/azurerm/internal/services/datafactory/data_factory_linked_service_azure_file_storage_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_linked_service_azure_file_storage_resource.go @@ -59,6 +59,12 @@ func resourceArmDataFactoryLinkedServiceAzureFileStorage() *schema.Resource { ValidateFunc: validation.StringIsNotEmpty, }, + "file_share": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "host": { Type: schema.TypeString, Optional: true, @@ -144,8 +150,9 @@ func resourceArmDataFactoryLinkedServiceAzureFileStorageCreateUpdate(d *schema.R Value: utils.String(d.Get("connection_string").(string)), Type: datafactory.TypeSecureString, }, - Host: d.Get("host").(string), - UserID: d.Get("connection_string").(string), + FileShare: d.Get("file_share").(string), + Host: d.Get("host").(string), + UserID: d.Get("connection_string").(string), } password := d.Get("password").(string) @@ -252,6 +259,12 @@ func resourceArmDataFactoryLinkedServiceAzureFileStorageRead(d *schema.ResourceD } } + if props := fileStorage.AzureFileStorageLinkedServiceTypeProperties; props != nil { + if props.FileShare != nil { + d.Set("file_share", props.FileShare) + } + } + return nil } diff --git a/azurerm/internal/services/datafactory/data_factory_linked_service_azure_file_storage_resource_test.go b/azurerm/internal/services/datafactory/data_factory_linked_service_azure_file_storage_resource_test.go index 67d700f45606..d08894aaa29e 100644 --- a/azurerm/internal/services/datafactory/data_factory_linked_service_azure_file_storage_resource_test.go +++ b/azurerm/internal/services/datafactory/data_factory_linked_service_azure_file_storage_resource_test.go @@ -65,6 +65,26 @@ func 
TestAccAzureRMDataFactoryLinkedServiceAzureFileStorage_update(t *testing.T) }) } +func TestAccAzureRMDataFactoryLinkedServiceAzureFileStorage_file_share_name(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_linked_service_azure_file_storage", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataFactoryLinkedServiceAzureFileStorageDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataFactoryLinkedServiceAzureFileStorage_file_share_name(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataFactoryLinkedServiceAzureFileStorageExists(data.ResourceName), + resource.TestCheckResourceAttr(data.ResourceName, "file_share", "myshare"), + ), + }, + data.ImportStep("connection_string"), + }, + }) +} + func testCheckAzureRMDataFactoryLinkedServiceAzureFileStorageExists(name string) resource.TestCheckFunc { return func(s *terraform.State) error { client := acceptance.AzureProvider.Meta().(*clients.Client).DataFactory.LinkedServiceClient @@ -110,7 +130,6 @@ func testCheckAzureRMDataFactoryLinkedServiceAzureFileStorageDestroy(s *terrafor dataFactoryName := rs.Primary.Attributes["data_factory_name"] resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") - if err != nil { return nil } @@ -225,3 +244,30 @@ resource "azurerm_data_factory_linked_service_azure_file_storage" "test" { } `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) } + +func testAccAzureRMDataFactoryLinkedServiceAzureFileStorage_file_share_name(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-df-%d" + location = "%s" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdf%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_data_factory_linked_service_azure_file_storage" "test" { + name = "acctestlsblob%d" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + connection_string = "DefaultEndpointsProtocol=https;AccountName=foo;AccountKey=bar" + file_share = "myshare" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/datafactory/data_factory_linked_service_azure_function_resource_test.go b/azurerm/internal/services/datafactory/data_factory_linked_service_azure_function_resource_test.go index b018ff9697f7..eddf943b8d16 100644 --- a/azurerm/internal/services/datafactory/data_factory_linked_service_azure_function_resource_test.go +++ b/azurerm/internal/services/datafactory/data_factory_linked_service_azure_function_resource_test.go @@ -110,7 +110,6 @@ func testCheckAzureRMDataFactoryLinkedServiceAzureFunctionDestroy(s *terraform.S dataFactoryName := rs.Primary.Attributes["data_factory_name"] resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") - if err != nil { return nil } diff --git a/azurerm/internal/services/datafactory/data_factory_linked_service_azure_sql_database_resource.go b/azurerm/internal/services/datafactory/data_factory_linked_service_azure_sql_database_resource.go index 0e756098e025..0bac78effaa1 100644 --- a/azurerm/internal/services/datafactory/data_factory_linked_service_azure_sql_database_resource.go +++ 
b/azurerm/internal/services/datafactory/data_factory_linked_service_azure_sql_database_resource.go @@ -176,28 +176,28 @@ func resourceArmDataFactoryLinkedServiceAzureSQLDatabaseRead(d *schema.ResourceD ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DataFactoryLinkedServiceID(d.Id()) + id, err := parse.LinkedServiceID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.DataFactory, id.Name, "") + resp, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("Error retrieving Data Factory Linked Service AzureSQLDatabase %q (Data Factory %q / Resource Group %q): %+v", id.Name, id.DataFactory, id.ResourceGroup, err) + return fmt.Errorf("Error retrieving Data Factory Linked Service AzureSQLDatabase %q (Data Factory %q / Resource Group %q): %+v", id.Name, id.FactoryName, id.ResourceGroup, err) } d.Set("name", resp.Name) d.Set("resource_group_name", id.ResourceGroup) - d.Set("data_factory_name", id.DataFactory) + d.Set("data_factory_name", id.FactoryName) sql, ok := resp.Properties.AsAzureSQLDatabaseLinkedService() if !ok { - return fmt.Errorf("Error classifiying Data Factory Linked Service AzureSQLDatabase %q (Data Factory %q / Resource Group %q): Expected: %q Received: %q", id.Name, id.DataFactory, id.ResourceGroup, datafactory.TypeAzureSQLDatabase, *resp.Type) + return fmt.Errorf("Error classifiying Data Factory Linked Service AzureSQLDatabase %q (Data Factory %q / Resource Group %q): Expected: %q Received: %q", id.Name, id.FactoryName, id.ResourceGroup, datafactory.TypeAzureSQLDatabase, *resp.Type) } d.Set("additional_properties", sql.AdditionalProperties) @@ -227,15 +227,15 @@ func resourceArmDataFactoryLinkedServiceAzureSQLDatabaseDelete(d *schema.Resourc ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DataFactoryLinkedServiceID(d.Id()) + id, err := parse.LinkedServiceID(d.Id()) if err != nil { return err } - response, err := client.Delete(ctx, id.ResourceGroup, id.DataFactory, id.Name) + response, err := client.Delete(ctx, id.ResourceGroup, id.FactoryName, id.Name) if err != nil { if !utils.ResponseWasNotFound(response) { - return fmt.Errorf("deleting Data Factory Linked Service AzureSQLDatabase %q (Data Factory %q / Resource Group %q): %+v", id.Name, id.DataFactory, id.ResourceGroup, err) + return fmt.Errorf("deleting Data Factory Linked Service AzureSQLDatabase %q (Data Factory %q / Resource Group %q): %+v", id.Name, id.FactoryName, id.ResourceGroup, err) } } diff --git a/azurerm/internal/services/datafactory/data_factory_linked_service_azure_sql_database_resource_test.go b/azurerm/internal/services/datafactory/data_factory_linked_service_azure_sql_database_resource_test.go index 73e253be2b78..9d39a33ebd26 100644 --- a/azurerm/internal/services/datafactory/data_factory_linked_service_azure_sql_database_resource_test.go +++ b/azurerm/internal/services/datafactory/data_factory_linked_service_azure_sql_database_resource_test.go @@ -110,7 +110,6 @@ func testCheckAzureRMDataFactoryLinkedServiceAzureSQLDatabaseDestroy(s *terrafor dataFactoryName := rs.Primary.Attributes["data_factory_name"] resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") - if err != nil { return nil } diff --git a/azurerm/internal/services/datafactory/data_factory_linked_service_cosmosdb_resource_test.go 
b/azurerm/internal/services/datafactory/data_factory_linked_service_cosmosdb_resource_test.go index 7a7c5789be32..cad3a04b180a 100644 --- a/azurerm/internal/services/datafactory/data_factory_linked_service_cosmosdb_resource_test.go +++ b/azurerm/internal/services/datafactory/data_factory_linked_service_cosmosdb_resource_test.go @@ -30,6 +30,7 @@ func TestAccAzureRMDataFactoryLinkedServiceCosmosDb_basic(t *testing.T) { }, }) } + func TestAccAzureRMDataFactoryLinkedServiceCosmosDb_accountkey(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_data_factory_linked_service_cosmosdb", "test") @@ -48,6 +49,7 @@ func TestAccAzureRMDataFactoryLinkedServiceCosmosDb_accountkey(t *testing.T) { }, }) } + func TestAccAzureRMDataFactoryLinkedServiceCosmosDb_accountkey_update(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_data_factory_linked_service_cosmosdb", "test") @@ -155,7 +157,6 @@ func testCheckAzureRMDataFactoryLinkedServiceCosmosDbDestroy(s *terraform.State) dataFactoryName := rs.Primary.Attributes["data_factory_name"] resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") - if err != nil { return nil } @@ -193,6 +194,7 @@ resource "azurerm_data_factory_linked_service_cosmosdb" "test" { } `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) } + func testAccAzureRMDataFactoryLinkedServiceCosmosDb_accountkey(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { @@ -220,6 +222,7 @@ resource "azurerm_data_factory_linked_service_cosmosdb" "test" { } `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) } + func testAccAzureRMDataFactoryLinkedServiceCosmosDb_accountkey_update1(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { diff --git a/azurerm/internal/services/datafactory/data_factory_linked_service_data_lake_storage_gen2_resource.go b/azurerm/internal/services/datafactory/data_factory_linked_service_data_lake_storage_gen2_resource.go index 3440523b6c18..26175a709e70 100644 --- a/azurerm/internal/services/datafactory/data_factory_linked_service_data_lake_storage_gen2_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_linked_service_data_lake_storage_gen2_resource.go @@ -58,21 +58,35 @@ func resourceArmDataFactoryLinkedServiceDataLakeStorageGen2() *schema.Resource { ValidateFunc: validation.IsURLWithHTTPS, }, + "use_managed_identity": { + Type: schema.TypeBool, + Optional: true, + Default: false, + ConflictsWith: []string{"service_principal_key", "service_principal_id"}, + AtLeastOneOf: []string{"service_principal_key", "service_principal_id", "use_managed_identity"}, + }, + "service_principal_id": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.IsUUID, + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.IsUUID, + RequiredWith: []string{"service_principal_key"}, + ConflictsWith: []string{"use_managed_identity"}, + AtLeastOneOf: []string{"service_principal_key", "service_principal_id", "use_managed_identity"}, }, "service_principal_key": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringIsNotEmpty, + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + RequiredWith: []string{"service_principal_id"}, + ConflictsWith: []string{"use_managed_identity"}, + AtLeastOneOf: []string{"service_principal_key", "service_principal_id", "use_managed_identity"}, }, "tenant": { Type: schema.TypeString, - Required: true, + Optional: 
true, ValidateFunc: validation.StringIsNotEmpty, }, @@ -137,16 +151,25 @@ func resourceArmDataFactoryLinkedServiceDataLakeStorageGen2CreateUpdate(d *schem } } - secureString := datafactory.SecureString{ - Value: utils.String(d.Get("service_principal_key").(string)), - Type: datafactory.TypeSecureString, - } + var datalakeStorageGen2Properties *datafactory.AzureBlobFSLinkedServiceTypeProperties - datalakeStorageGen2Properties := &datafactory.AzureBlobFSLinkedServiceTypeProperties{ - URL: utils.String(d.Get("url").(string)), - ServicePrincipalID: utils.String(d.Get("service_principal_id").(string)), - Tenant: utils.String(d.Get("tenant").(string)), - ServicePrincipalKey: &secureString, + if d.Get("use_managed_identity").(bool) { + datalakeStorageGen2Properties = &datafactory.AzureBlobFSLinkedServiceTypeProperties{ + URL: utils.String(d.Get("url").(string)), + Tenant: utils.String(d.Get("tenant").(string)), + } + } else { + secureString := datafactory.SecureString{ + Value: utils.String(d.Get("service_principal_key").(string)), + Type: datafactory.TypeSecureString, + } + + datalakeStorageGen2Properties = &datafactory.AzureBlobFSLinkedServiceTypeProperties{ + URL: utils.String(d.Get("url").(string)), + ServicePrincipalID: utils.String(d.Get("service_principal_id").(string)), + Tenant: utils.String(d.Get("tenant").(string)), + ServicePrincipalKey: &secureString, + } } datalakeStorageGen2LinkedService := &datafactory.AzureBlobFSLinkedService{ @@ -222,6 +245,7 @@ func resourceArmDataFactoryLinkedServiceDataLakeStorageGen2Read(d *schema.Resour d.Set("data_factory_name", dataFactoryName) dataLakeStorageGen2, ok := resp.Properties.AsAzureBlobFSLinkedService() + if !ok { return fmt.Errorf("Error classifiying Data Factory Linked Service Data Lake Storage Gen2 %q (Data Factory %q / Resource Group %q): Expected: %q Received: %q", name, dataFactoryName, resourceGroup, datafactory.TypeAzureBlobFS, *resp.Type) } @@ -232,6 +256,9 @@ func resourceArmDataFactoryLinkedServiceDataLakeStorageGen2Read(d *schema.Resour if dataLakeStorageGen2.ServicePrincipalID != nil { d.Set("service_principal_id", dataLakeStorageGen2.ServicePrincipalID) + d.Set("use_managed_identity", false) + } else { + d.Set("use_managed_identity", true) } if dataLakeStorageGen2.URL != nil { diff --git a/azurerm/internal/services/datafactory/data_factory_linked_service_data_lake_storage_gen2_resource_test.go b/azurerm/internal/services/datafactory/data_factory_linked_service_data_lake_storage_gen2_resource_test.go index 4df4fcbd4f91..dc21ebef16ad 100644 --- a/azurerm/internal/services/datafactory/data_factory_linked_service_data_lake_storage_gen2_resource_test.go +++ b/azurerm/internal/services/datafactory/data_factory_linked_service_data_lake_storage_gen2_resource_test.go @@ -31,6 +31,25 @@ func TestAccAzureRMDataFactoryLinkedServiceDataLakeStorageGen2_basic(t *testing. 
}) } +func TestAccAzureRMDataFactoryLinkedServiceDataLakeStorageGen2_managed_id(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_linked_service_data_lake_storage_gen2", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataFactoryLinkedServiceDataLakeStorageGen2Destroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataFactoryLinkedServiceDataLakeStorageGen2_managed_id(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataFactoryLinkedServiceDataLakeStorageGen2Exists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + func TestAccAzureRMDataFactoryLinkedServiceDataLakeStorageGen2_update(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_data_factory_linked_service_data_lake_storage_gen2", "test") @@ -109,7 +128,6 @@ func testCheckAzureRMDataFactoryLinkedServiceDataLakeStorageGen2Destroy(s *terra dataFactoryName := rs.Primary.Attributes["data_factory_name"] resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") - if err != nil { return nil } @@ -154,6 +172,39 @@ resource "azurerm_data_factory_linked_service_data_lake_storage_gen2" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) } +func testAccAzureRMDataFactoryLinkedServiceDataLakeStorageGen2_managed_id(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-df-%d" + location = "%s" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdf%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + identity { + type = "SystemAssigned" + } +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_data_factory_linked_service_data_lake_storage_gen2" "test" { + name = "acctestDataLake%d" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + use_managed_identity = true + url = "https://test.azure.com" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + func testAccAzureRMDataFactoryLinkedServiceDataLakeStorageGen2_update1(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { diff --git a/azurerm/internal/services/datafactory/data_factory_linked_service_key_vault_resource.go b/azurerm/internal/services/datafactory/data_factory_linked_service_key_vault_resource.go index 6df8a4c37d98..54b684645033 100644 --- a/azurerm/internal/services/datafactory/data_factory_linked_service_key_vault_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_linked_service_key_vault_resource.go @@ -109,7 +109,7 @@ func resourceArmDataFactoryLinkedServiceKeyVaultCreateUpdate(d *schema.ResourceD dataFactoryName := d.Get("data_factory_name").(string) resourceGroup := d.Get("resource_group_name").(string) keyVaultIdRaw := d.Get("key_vault_id").(string) - _, err := keyVaultParse.KeyVaultID(keyVaultIdRaw) + _, err := keyVaultParse.VaultID(keyVaultIdRaw) if err != nil { return err } diff --git a/azurerm/internal/services/datafactory/data_factory_linked_service_key_vault_resource_test.go b/azurerm/internal/services/datafactory/data_factory_linked_service_key_vault_resource_test.go index 8bde9c13f619..e5ab7528ba39 100644 --- 
a/azurerm/internal/services/datafactory/data_factory_linked_service_key_vault_resource_test.go +++ b/azurerm/internal/services/datafactory/data_factory_linked_service_key_vault_resource_test.go @@ -109,7 +109,6 @@ func testCheckAzureRMDataFactoryLinkedServiceKeyVaultDestroy(s *terraform.State) dataFactoryName := rs.Primary.Attributes["data_factory_name"] resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") - if err != nil { return nil } diff --git a/azurerm/internal/services/datafactory/data_factory_linked_service_mysql_resource_test.go b/azurerm/internal/services/datafactory/data_factory_linked_service_mysql_resource_test.go index 768a439910a5..751baa5d1c0e 100644 --- a/azurerm/internal/services/datafactory/data_factory_linked_service_mysql_resource_test.go +++ b/azurerm/internal/services/datafactory/data_factory_linked_service_mysql_resource_test.go @@ -109,7 +109,6 @@ func testCheckAzureRMDataFactoryLinkedServiceMySQLDestroy(s *terraform.State) er dataFactoryName := rs.Primary.Attributes["data_factory_name"] resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") - if err != nil { return nil } diff --git a/azurerm/internal/services/datafactory/data_factory_linked_service_postgresql_resource_test.go b/azurerm/internal/services/datafactory/data_factory_linked_service_postgresql_resource_test.go index 8dc95456f7d4..c6f4bede5484 100644 --- a/azurerm/internal/services/datafactory/data_factory_linked_service_postgresql_resource_test.go +++ b/azurerm/internal/services/datafactory/data_factory_linked_service_postgresql_resource_test.go @@ -109,7 +109,6 @@ func testCheckAzureRMDataFactoryLinkedServicePostgreSQLDestroy(s *terraform.Stat dataFactoryName := rs.Primary.Attributes["data_factory_name"] resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") - if err != nil { return nil } diff --git a/azurerm/internal/services/datafactory/data_factory_linked_service_sftp_resource_test.go b/azurerm/internal/services/datafactory/data_factory_linked_service_sftp_resource_test.go index be649a8b3e91..866cd938fbe4 100644 --- a/azurerm/internal/services/datafactory/data_factory_linked_service_sftp_resource_test.go +++ b/azurerm/internal/services/datafactory/data_factory_linked_service_sftp_resource_test.go @@ -110,7 +110,6 @@ func testCheckAzureRMDataFactoryLinkedServiceSFTPDestroy(s *terraform.State) err dataFactoryName := rs.Primary.Attributes["data_factory_name"] resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") - if err != nil { return nil } diff --git a/azurerm/internal/services/datafactory/data_factory_linked_service_sql_server_resource_test.go b/azurerm/internal/services/datafactory/data_factory_linked_service_sql_server_resource_test.go index a52326b68520..01337025ac1d 100644 --- a/azurerm/internal/services/datafactory/data_factory_linked_service_sql_server_resource_test.go +++ b/azurerm/internal/services/datafactory/data_factory_linked_service_sql_server_resource_test.go @@ -92,7 +92,6 @@ func testCheckAzureRMDataFactoryLinkedServiceSQLServerDestroy(s *terraform.State dataFactoryName := rs.Primary.Attributes["data_factory_name"] resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") - if err != nil { return nil } diff --git a/azurerm/internal/services/datafactory/data_factory_linked_service_synapse_resource.go b/azurerm/internal/services/datafactory/data_factory_linked_service_synapse_resource.go new file mode 100644 index 000000000000..269ac08c0437 --- /dev/null +++ 
b/azurerm/internal/services/datafactory/data_factory_linked_service_synapse_resource.go @@ -0,0 +1,283 @@ +package datafactory + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/datafactory/mgmt/2018-06-01/datafactory" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datafactory/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmDataFactoryLinkedServiceSynapse() *schema.Resource { + return &schema.Resource{ + Create: resourceArmDataFactoryLinkedServiceSynapseCreateUpdate, + Read: resourceArmDataFactoryLinkedServiceSynapseRead, + Update: resourceArmDataFactoryLinkedServiceSynapseCreateUpdate, + Delete: resourceArmDataFactoryLinkedServiceSynapseDelete, + + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validateAzureRMDataFactoryLinkedServiceDatasetName, + }, + + "data_factory_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DataFactoryName(), + }, + + // There's a bug in the Azure API where this is returned in lower-case + // BUG: https://github.com/Azure/azure-rest-api-specs/issues/5788 + "resource_group_name": azure.SchemaResourceGroupNameDiffSuppress(), + + "connection_string": { + Type: schema.TypeString, + Required: true, + DiffSuppressFunc: azureRmDataFactoryLinkedServiceConnectionStringDiff, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "key_vault_password": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "linked_service_name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "secret_name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + }, + }, + + "description": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "integration_runtime_name": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "parameters": { + Type: schema.TypeMap, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + + "annotations": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + + "additional_properties": { + Type: schema.TypeMap, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringIsNotEmpty, 
+ }, + }, + }, + } +} + +func resourceArmDataFactoryLinkedServiceSynapseCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataFactory.LinkedServiceClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + dataFactoryName := d.Get("data_factory_name").(string) + resourceGroup := d.Get("resource_group_name").(string) + + if d.IsNewResource() { + existing, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("Error checking for presence of existing Data Factory Linked Service Synapse%q (Data Factory %q / Resource Group %q): %+v", name, dataFactoryName, resourceGroup, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_data_factory_linked_service_synapse", *existing.ID) + } + } + + password := d.Get("key_vault_password").([]interface{}) + + sqlDWLinkedService := &datafactory.AzureSQLDWLinkedService{ + Description: utils.String(d.Get("description").(string)), + AzureSQLDWLinkedServiceTypeProperties: &datafactory.AzureSQLDWLinkedServiceTypeProperties{ + ConnectionString: d.Get("connection_string").(string), + Password: expandAzureKeyVaultPassword(password), + }, + Type: datafactory.TypeAzureSQLDW, + } + + if v, ok := d.GetOk("parameters"); ok { + sqlDWLinkedService.Parameters = expandDataFactoryParameters(v.(map[string]interface{})) + } + + if v, ok := d.GetOk("integration_runtime_name"); ok { + sqlDWLinkedService.ConnectVia = expandDataFactoryLinkedServiceIntegrationRuntime(v.(string)) + } + + if v, ok := d.GetOk("additional_properties"); ok { + sqlDWLinkedService.AdditionalProperties = v.(map[string]interface{}) + } + + if v, ok := d.GetOk("annotations"); ok { + annotations := v.([]interface{}) + sqlDWLinkedService.Annotations = &annotations + } + + linkedService := datafactory.LinkedServiceResource{ + Properties: sqlDWLinkedService, + } + + if _, err := client.CreateOrUpdate(ctx, resourceGroup, dataFactoryName, name, linkedService, ""); err != nil { + return fmt.Errorf("Error creating/updating Data Factory Linked Service Synapse %q (Data Factory %q / Resource Group %q): %+v", name, dataFactoryName, resourceGroup, err) + } + + resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") + if err != nil { + return fmt.Errorf("Error retrieving Data Factory Linked Service Synapse %q (Data Factory %q / Resource Group %q): %+v", name, dataFactoryName, resourceGroup, err) + } + + if resp.ID == nil { + return fmt.Errorf("Cannot read Data Factory Linked Service Synapse %q (Data Factory %q / Resource Group %q): %+v", name, dataFactoryName, resourceGroup, err) + } + + d.SetId(*resp.ID) + + return resourceArmDataFactoryLinkedServiceSynapseRead(d, meta) +} + +func resourceArmDataFactoryLinkedServiceSynapseRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataFactory.LinkedServiceClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.LinkedServiceID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + d.SetId("") + return nil + } + + return fmt.Errorf("Error retrieving Data Factory Linked Service Synapse %q (Data Factory %q / Resource Group %q): %+v", id.Name, id.FactoryName, 
id.ResourceGroup, err) + } + + d.Set("name", id.Name) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("data_factory_name", id.FactoryName) + + sqlDW, ok := resp.Properties.AsAzureSQLDWLinkedService() + if !ok { + return fmt.Errorf("Error classifying Data Factory Linked Service Synapse %q (Data Factory %q / Resource Group %q): Expected: %q Received: %q", id.Name, id.FactoryName, id.ResourceGroup, datafactory.TypeAzureSQLDW, *resp.Type) + } + + d.Set("additional_properties", sqlDW.AdditionalProperties) + d.Set("description", sqlDW.Description) + + annotations := flattenDataFactoryAnnotations(sqlDW.Annotations) + if err := d.Set("annotations", annotations); err != nil { + return fmt.Errorf("Error setting `annotations`: %+v", err) + } + + parameters := flattenDataFactoryParameters(sqlDW.Parameters) + if err := d.Set("parameters", parameters); err != nil { + return fmt.Errorf("Error setting `parameters`: %+v", err) + } + + if connectVia := sqlDW.ConnectVia; connectVia != nil { + if connectVia.ReferenceName != nil { + d.Set("integration_runtime_name", connectVia.ReferenceName) + } + } + + if properties := sqlDW.AzureSQLDWLinkedServiceTypeProperties; properties != nil { + if properties.ConnectionString != nil { + if val, ok := properties.ConnectionString.(string); ok { + d.Set("connection_string", val) + } else { + d.Set("connection_string", "") + log.Printf("[DEBUG] Skipping connection string %q since it's not a string", val) + } + } + + if err := d.Set("key_vault_password", flattenAzureKeyVaultPassword(properties.Password)); err != nil { + return fmt.Errorf("setting `key_vault_password`: %+v", err) + } + } + + return nil +} + +func resourceArmDataFactoryLinkedServiceSynapseDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataFactory.LinkedServiceClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.LinkedServiceID(d.Id()) + if err != nil { + return err + } + + response, err := client.Delete(ctx, id.ResourceGroup, id.FactoryName, id.Name) + if err != nil { + if !utils.ResponseWasNotFound(response) { + return fmt.Errorf("Error deleting Data Factory Linked Service Synapse %q (Data Factory %q / Resource Group %q): %+v", id.Name, id.FactoryName, id.ResourceGroup, err) + } + } + + return nil +} diff --git a/azurerm/internal/services/datafactory/data_factory_linked_service_synapse_resource_test.go b/azurerm/internal/services/datafactory/data_factory_linked_service_synapse_resource_test.go new file mode 100644 index 000000000000..d3c7aad1dab3 --- /dev/null +++ b/azurerm/internal/services/datafactory/data_factory_linked_service_synapse_resource_test.go @@ -0,0 +1,221 @@ +package datafactory_test + +import ( + "fmt" + "net/http" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func TestAccDataFactoryLinkedServiceSynapse_ConnectionString(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_linked_service_synapse", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckDataFactoryLinkedServiceSynapseDestroy, + Steps: 
[]resource.TestStep{ + { + Config: testAccDataFactoryLinkedServiceSynapse_connection_string(data), + Check: resource.ComposeTestCheckFunc( + testCheckDataFactoryLinkedServiceSynapseExists(data.ResourceName), + resource.TestCheckResourceAttr(data.ResourceName, "parameters.%", "2"), + resource.TestCheckResourceAttr(data.ResourceName, "annotations.#", "3"), + resource.TestCheckResourceAttr(data.ResourceName, "additional_properties.%", "2"), + resource.TestCheckResourceAttr(data.ResourceName, "description", "test description"), + resource.TestCheckResourceAttrSet(data.ResourceName, "connection_string"), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccDataFactoryLinkedServiceSynapse_KeyVaultReference(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_linked_service_synapse", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckDataFactoryLinkedServiceSynapseDestroy, + Steps: []resource.TestStep{ + { + Config: testAccDataFactoryLinkedServiceSynapse_key_vault_reference(data), + Check: resource.ComposeTestCheckFunc( + testCheckDataFactoryLinkedServiceSynapseExists(data.ResourceName), + resource.TestCheckResourceAttr(data.ResourceName, "parameters.%", "2"), + resource.TestCheckResourceAttr(data.ResourceName, "annotations.#", "3"), + resource.TestCheckResourceAttr(data.ResourceName, "additional_properties.%", "2"), + resource.TestCheckResourceAttr(data.ResourceName, "description", "test description"), + resource.TestCheckResourceAttrSet(data.ResourceName, "connection_string"), + resource.TestCheckResourceAttrSet(data.ResourceName, "key_vault_password.0.linked_service_name"), + resource.TestCheckResourceAttr(data.ResourceName, "key_vault_password.0.secret_name", "secret"), + ), + }, + data.ImportStep(), + }, + }) +} + +func testCheckDataFactoryLinkedServiceSynapseExists(name string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).DataFactory.LinkedServiceClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + // Ensure we have enough information in state to look up in API + rs, ok := s.RootModule().Resources[name] + if !ok { + return fmt.Errorf("Not found: %s", name) + } + + name := rs.Primary.Attributes["name"] + resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] + dataFactoryName := rs.Primary.Attributes["data_factory_name"] + if !hasResourceGroup { + return fmt.Errorf("Bad: no resource group found in state for Data Factory: %s", name) + } + + resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") + if err != nil { + return fmt.Errorf("Bad: Get on dataFactoryLinkedServiceClient: %+v", err) + } + + if utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Bad: Data Factory Linked Service Synapse %q (data factory name: %q / resource group: %q) does not exist", name, dataFactoryName, resourceGroup) + } + + return nil + } +} + +func testCheckDataFactoryLinkedServiceSynapseDestroy(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).DataFactory.LinkedServiceClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + for _, rs := range s.RootModule().Resources { + if rs.Type != "azurerm_data_factory_linked_service_synapse" { + continue + } + + name := rs.Primary.Attributes["name"] + resourceGroup := 
rs.Primary.Attributes["resource_group_name"] + dataFactoryName := rs.Primary.Attributes["data_factory_name"] + + resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") + if err != nil { + return nil + } + + if resp.StatusCode != http.StatusNotFound { + return fmt.Errorf("Data Factory Linked Service Synapse still exists:\n%#v", resp.Properties) + } + } + + return nil +} + +func testAccDataFactoryLinkedServiceSynapse_connection_string(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-df-%d" + location = "%s" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdf%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_data_factory_linked_service_synapse" "test" { + name = "linksynapse" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + + connection_string = "Integrated Security=False;Data Source=test;Initial Catalog=test;User ID=test;Password=test" + + annotations = ["test1", "test2", "test3"] + description = "test description" + + parameters = { + foo = "test1" + bar = "test2" + } + + additional_properties = { + foo = "test1" + bar = "test2" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func testAccDataFactoryLinkedServiceSynapse_key_vault_reference(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" {} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-df-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "acctkv%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "standard" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdf%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_data_factory_linked_service_key_vault" "test" { + name = "linkkv" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + key_vault_id = azurerm_key_vault.test.id +} + +resource "azurerm_data_factory_linked_service_synapse" "test" { + name = "linksynapse" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + + connection_string = "Integrated Security=False;Data Source=test;Initial Catalog=test;User ID=test;" + key_vault_password { + linked_service_name = azurerm_data_factory_linked_service_key_vault.test.name + secret_name = "secret" + } + + annotations = ["test1", "test2", "test3"] + description = "test description" + + parameters = { + foo = "test1" + bar = "test2" + } + + additional_properties = { + foo = "test1" + bar = "test2" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/datafactory/data_factory_linked_service_web_resource.go b/azurerm/internal/services/datafactory/data_factory_linked_service_web_resource.go index 8b81da348f40..f9b4fbbc296a 100644 --- a/azurerm/internal/services/datafactory/data_factory_linked_service_web_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_linked_service_web_resource.go @@ -258,7 +258,7 @@ func 
resourceArmDataFactoryLinkedServiceWebRead(d *schema.ResourceData, meta int d.Set("authentication_type", basicProps.AuthenticationType) d.Set("url", basicProps.URL) d.Set("username", basicProps.Username) - //d.Set("password", basicProps.Password) + // d.Set("password", basicProps.Password) } } diff --git a/azurerm/internal/services/datafactory/data_factory_linked_service_web_resource_test.go b/azurerm/internal/services/datafactory/data_factory_linked_service_web_resource_test.go index fff2043b7480..4e3e66ed8f64 100644 --- a/azurerm/internal/services/datafactory/data_factory_linked_service_web_resource_test.go +++ b/azurerm/internal/services/datafactory/data_factory_linked_service_web_resource_test.go @@ -137,7 +137,6 @@ func testCheckAzureRMDataFactoryLinkedServiceWebDestroy(s *terraform.State) erro dataFactoryName := rs.Primary.Attributes["data_factory_name"] resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") - if err != nil { return nil } diff --git a/azurerm/internal/services/datafactory/data_factory_pipeline_resource_test.go b/azurerm/internal/services/datafactory/data_factory_pipeline_resource_test.go index d49e76f3bed1..f323fe6a25db 100644 --- a/azurerm/internal/services/datafactory/data_factory_pipeline_resource_test.go +++ b/azurerm/internal/services/datafactory/data_factory_pipeline_resource_test.go @@ -116,7 +116,6 @@ func testCheckAzureRMDataFactoryPipelineDestroy(s *terraform.State) error { resourceGroup := rs.Primary.Attributes["resource_group_name"] resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") - if err != nil { if !utils.ResponseWasNotFound(resp.Response) { return err diff --git a/azurerm/internal/services/datafactory/data_factory_resource_test.go b/azurerm/internal/services/datafactory/data_factory_resource_test.go index 9e17d3174d5b..ba9c03837811 100644 --- a/azurerm/internal/services/datafactory/data_factory_resource_test.go +++ b/azurerm/internal/services/datafactory/data_factory_resource_test.go @@ -231,7 +231,6 @@ func testCheckAzureRMDataFactoryDestroy(s *terraform.State) error { resourceGroup := rs.Primary.Attributes["resource_group_name"] resp, err := client.Get(ctx, resourceGroup, name, "") - if err != nil { return nil } diff --git a/azurerm/internal/services/datafactory/data_factory_trigger_schedule_resource_test.go b/azurerm/internal/services/datafactory/data_factory_trigger_schedule_resource_test.go index 141d74447dc2..49e851fc3245 100644 --- a/azurerm/internal/services/datafactory/data_factory_trigger_schedule_resource_test.go +++ b/azurerm/internal/services/datafactory/data_factory_trigger_schedule_resource_test.go @@ -105,7 +105,6 @@ func testCheckAzureRMDataFactoryTriggerScheduleDestroy(s *terraform.State) error dataFactoryName := rs.Primary.Attributes["data_factory_name"] resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") - if err != nil { return nil } diff --git a/azurerm/internal/services/datafactory/parse/data_factory_integration_runtime.go b/azurerm/internal/services/datafactory/parse/data_factory_integration_runtime.go deleted file mode 100644 index 3b6ab1d16e5c..000000000000 --- a/azurerm/internal/services/datafactory/parse/data_factory_integration_runtime.go +++ /dev/null @@ -1,34 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type DataFactoryIntegrationRuntimeId struct { - ResourceGroup string - Name string - DataFactory string -} - -func DataFactoryIntegrationRuntimeID(input string) 
(*DataFactoryIntegrationRuntimeId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Data Factory Integration Runtime ID %q: %+v", input, err) - } - - dataFactoryIntegrationRuntime := DataFactoryIntegrationRuntimeId{ - ResourceGroup: id.ResourceGroup, - } - - if dataFactoryIntegrationRuntime.DataFactory, err = id.PopSegment("factories"); err != nil { - return nil, err - } - - if dataFactoryIntegrationRuntime.Name, err = id.PopSegment("integrationruntimes"); err != nil { - return nil, err - } - - return &dataFactoryIntegrationRuntime, nil -} diff --git a/azurerm/internal/services/datafactory/parse/data_factory_integration_runtime_test.go b/azurerm/internal/services/datafactory/parse/data_factory_integration_runtime_test.go deleted file mode 100644 index 2be0460f7030..000000000000 --- a/azurerm/internal/services/datafactory/parse/data_factory_integration_runtime_test.go +++ /dev/null @@ -1,61 +0,0 @@ -package parse - -import "testing" - -func TestParseDataFactoryIntegrationRuntimeID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *DataFactoryIntegrationRuntimeId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Data Factory segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myGroup1/", - Expected: nil, - }, - { - Name: "No Integration Runtime name", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myGroup1/providers/Microsoft.DataFactory/factories/factory1/integrationruntimes/", - Expected: nil, - }, - { - Name: "Case incorrect in path element", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myGroup1/providers/microsoft.dataFactory/factories/factory1/Integrationruntimes/integrationRuntimeName", - Expected: nil, - }, - { - Name: "Valid", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myGroup1/providers/Microsoft.DataFactory/factories/factory1/integrationruntimes/integrationRuntimeName", - Expected: &DataFactoryIntegrationRuntimeId{ - ResourceGroup: "myGroup1", - Name: "integrationRuntimeName", - DataFactory: "factory1", - }, - }, - } - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := DataFactoryIntegrationRuntimeID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/datafactory/parse/data_factory_linked_service.go b/azurerm/internal/services/datafactory/parse/data_factory_linked_service.go deleted file mode 100644 index 1ce4caf6f9d4..000000000000 --- a/azurerm/internal/services/datafactory/parse/data_factory_linked_service.go +++ /dev/null @@ -1,34 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type DataFactoryLinkedServiceId struct { - ResourceGroup string - Name string - DataFactory string -} - -func DataFactoryLinkedServiceID(input string) (*DataFactoryLinkedServiceId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Data Factory Linked 
Service ID %q: %+v", input, err) - } - - dataFactoryIntegrationRuntime := DataFactoryLinkedServiceId{ - ResourceGroup: id.ResourceGroup, - } - - if dataFactoryIntegrationRuntime.DataFactory, err = id.PopSegment("factories"); err != nil { - return nil, err - } - - if dataFactoryIntegrationRuntime.Name, err = id.PopSegment("linkedservices"); err != nil { - return nil, err - } - - return &dataFactoryIntegrationRuntime, nil -} diff --git a/azurerm/internal/services/datafactory/parse/data_factory_linked_service_test.go b/azurerm/internal/services/datafactory/parse/data_factory_linked_service_test.go deleted file mode 100644 index 390f3b35d1f6..000000000000 --- a/azurerm/internal/services/datafactory/parse/data_factory_linked_service_test.go +++ /dev/null @@ -1,61 +0,0 @@ -package parse - -import "testing" - -func TestParseDataFactoryLinkedServiceID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *DataFactoryLinkedServiceId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Data Factory segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myGroup1/", - Expected: nil, - }, - { - Name: "No Integration Runtime name", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myGroup1/providers/Microsoft.DataFactory/factories/factory1/linkedservices/", - Expected: nil, - }, - { - Name: "Case incorrect in path element", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myGroup1/providers/microsoft.dataFactory/factories/factory1/Linkedservices/linkedService1", - Expected: nil, - }, - { - Name: "Valid", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myGroup1/providers/Microsoft.DataFactory/factories/factory1/linkedservices/linkedService1", - Expected: &DataFactoryLinkedServiceId{ - ResourceGroup: "myGroup1", - Name: "linkedService1", - DataFactory: "factory1", - }, - }, - } - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := DataFactoryLinkedServiceID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/datafactory/parse/integration_runtime.go b/azurerm/internal/services/datafactory/parse/integration_runtime.go new file mode 100644 index 000000000000..049af41b1bca --- /dev/null +++ b/azurerm/internal/services/datafactory/parse/integration_runtime.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type IntegrationRuntimeId struct { + SubscriptionId string + ResourceGroup string + FactoryName string + Name string +} + +func NewIntegrationRuntimeID(subscriptionId, resourceGroup, factoryName, name string) IntegrationRuntimeId { + return IntegrationRuntimeId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + FactoryName: factoryName, + Name: name, + } +} + +func (id IntegrationRuntimeId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Factory Name 
%q", id.FactoryName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Integration Runtime", segmentsStr) +} + +func (id IntegrationRuntimeId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DataFactory/factories/%s/integrationruntimes/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.FactoryName, id.Name) +} + +// IntegrationRuntimeID parses a IntegrationRuntime ID into an IntegrationRuntimeId struct +func IntegrationRuntimeID(input string) (*IntegrationRuntimeId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := IntegrationRuntimeId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.FactoryName, err = id.PopSegment("factories"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("integrationruntimes"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/datafactory/parse/integration_runtime_test.go b/azurerm/internal/services/datafactory/parse/integration_runtime_test.go new file mode 100644 index 000000000000..587d3149a5ab --- /dev/null +++ b/azurerm/internal/services/datafactory/parse/integration_runtime_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = IntegrationRuntimeId{} + +func TestIntegrationRuntimeIDFormatter(t *testing.T) { + actual := NewIntegrationRuntimeID("12345678-1234-9876-4563-123456789012", "resGroup1", "factory1", "runtime1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataFactory/factories/factory1/integrationruntimes/runtime1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestIntegrationRuntimeID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *IntegrationRuntimeId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing FactoryName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataFactory/", + Error: true, + }, + + { + // missing value for FactoryName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataFactory/factories/", + Error: true, + }, + + { + // missing Name + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataFactory/factories/factory1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataFactory/factories/factory1/integrationruntimes/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataFactory/factories/factory1/integrationruntimes/runtime1", + Expected: &IntegrationRuntimeId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FactoryName: "factory1", + Name: "runtime1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DATAFACTORY/FACTORIES/FACTORY1/INTEGRATIONRUNTIMES/RUNTIME1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := IntegrationRuntimeID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.FactoryName != v.Expected.FactoryName { + t.Fatalf("Expected %q but got %q for FactoryName", v.Expected.FactoryName, actual.FactoryName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/datafactory/parse/linked_service.go b/azurerm/internal/services/datafactory/parse/linked_service.go new file mode 100644 index 000000000000..ae8022e9542f --- /dev/null +++ b/azurerm/internal/services/datafactory/parse/linked_service.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type LinkedServiceId struct { + SubscriptionId string + ResourceGroup string + FactoryName string + Name string +} + +func NewLinkedServiceID(subscriptionId, resourceGroup, factoryName, name string) LinkedServiceId { + return LinkedServiceId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + FactoryName: factoryName, + Name: name, + } +} + +func (id LinkedServiceId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Factory Name %q", id.FactoryName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Linked Service", segmentsStr) +} + +func (id LinkedServiceId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DataFactory/factories/%s/linkedservices/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.FactoryName, id.Name) +} + +// LinkedServiceID parses a LinkedService ID into an LinkedServiceId struct +func LinkedServiceID(input string) (*LinkedServiceId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + 
resourceId := LinkedServiceId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.FactoryName, err = id.PopSegment("factories"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("linkedservices"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/datafactory/parse/linked_service_test.go b/azurerm/internal/services/datafactory/parse/linked_service_test.go new file mode 100644 index 000000000000..9eb4bc586e2b --- /dev/null +++ b/azurerm/internal/services/datafactory/parse/linked_service_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = LinkedServiceId{} + +func TestLinkedServiceIDFormatter(t *testing.T) { + actual := NewLinkedServiceID("12345678-1234-9876-4563-123456789012", "resGroup1", "factory1", "linkedService1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataFactory/factories/factory1/linkedservices/linkedService1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestLinkedServiceID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *LinkedServiceId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing FactoryName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataFactory/", + Error: true, + }, + + { + // missing value for FactoryName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataFactory/factories/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataFactory/factories/factory1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataFactory/factories/factory1/linkedservices/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataFactory/factories/factory1/linkedservices/linkedService1", + Expected: &LinkedServiceId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FactoryName: "factory1", + Name: "linkedService1", + }, + }, + + { + // upper-cased + Input: 
"/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DATAFACTORY/FACTORIES/FACTORY1/LINKEDSERVICES/LINKEDSERVICE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := LinkedServiceID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.FactoryName != v.Expected.FactoryName { + t.Fatalf("Expected %q but got %q for FactoryName", v.Expected.FactoryName, actual.FactoryName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/datafactory/registration.go b/azurerm/internal/services/datafactory/registration.go index 57c431e172a7..f71d9a5a528e 100644 --- a/azurerm/internal/services/datafactory/registration.go +++ b/azurerm/internal/services/datafactory/registration.go @@ -50,6 +50,7 @@ func (r Registration) SupportedResources() map[string]*schema.Resource { "azurerm_data_factory_linked_service_postgresql": resourceArmDataFactoryLinkedServicePostgreSQL(), "azurerm_data_factory_linked_service_sftp": resourceArmDataFactoryLinkedServiceSFTP(), "azurerm_data_factory_linked_service_sql_server": resourceArmDataFactoryLinkedServiceSQLServer(), + "azurerm_data_factory_linked_service_synapse": resourceArmDataFactoryLinkedServiceSynapse(), "azurerm_data_factory_linked_service_web": resourceArmDataFactoryLinkedServiceWeb(), "azurerm_data_factory_pipeline": resourceArmDataFactoryPipeline(), "azurerm_data_factory_trigger_schedule": resourceArmDataFactoryTriggerSchedule(), diff --git a/azurerm/internal/services/datafactory/resourceids.go b/azurerm/internal/services/datafactory/resourceids.go new file mode 100644 index 000000000000..feff550f04d7 --- /dev/null +++ b/azurerm/internal/services/datafactory/resourceids.go @@ -0,0 +1,4 @@ +package datafactory + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=IntegrationRuntime -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataFactory/factories/factory1/integrationruntimes/runtime1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=LinkedService -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataFactory/factories/factory1/linkedservices/linkedService1 diff --git a/azurerm/internal/services/datafactory/validate/integration_runtime_id.go b/azurerm/internal/services/datafactory/validate/integration_runtime_id.go new file mode 100644 index 000000000000..9d5b8283a15f --- /dev/null +++ b/azurerm/internal/services/datafactory/validate/integration_runtime_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datafactory/parse" +) + +func IntegrationRuntimeID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := 
input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.IntegrationRuntimeID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/datafactory/validate/integration_runtime_id_test.go b/azurerm/internal/services/datafactory/validate/integration_runtime_id_test.go new file mode 100644 index 000000000000..084407093ce6 --- /dev/null +++ b/azurerm/internal/services/datafactory/validate/integration_runtime_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestIntegrationRuntimeID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing FactoryName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataFactory/", + Valid: false, + }, + + { + // missing value for FactoryName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataFactory/factories/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataFactory/factories/factory1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataFactory/factories/factory1/integrationruntimes/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataFactory/factories/factory1/integrationruntimes/runtime1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DATAFACTORY/FACTORIES/FACTORY1/INTEGRATIONRUNTIMES/RUNTIME1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := IntegrationRuntimeID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/datafactory/validate/linked_service_id.go b/azurerm/internal/services/datafactory/validate/linked_service_id.go new file mode 100644 index 000000000000..91e45c67e0c5 --- /dev/null +++ b/azurerm/internal/services/datafactory/validate/linked_service_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datafactory/parse" +) + +func LinkedServiceID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.LinkedServiceID(v); err != nil { + errors = 
append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/datafactory/validate/linked_service_id_test.go b/azurerm/internal/services/datafactory/validate/linked_service_id_test.go new file mode 100644 index 000000000000..793e173e6064 --- /dev/null +++ b/azurerm/internal/services/datafactory/validate/linked_service_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestLinkedServiceID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing FactoryName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataFactory/", + Valid: false, + }, + + { + // missing value for FactoryName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataFactory/factories/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataFactory/factories/factory1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataFactory/factories/factory1/linkedservices/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataFactory/factories/factory1/linkedservices/linkedService1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DATAFACTORY/FACTORIES/FACTORY1/LINKEDSERVICES/LINKEDSERVICE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := LinkedServiceID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/datalake/data_lake_analytics_account_resource.go b/azurerm/internal/services/datalake/data_lake_analytics_account_resource.go index 737929a6bfc2..2cb9d45f60de 100644 --- a/azurerm/internal/services/datalake/data_lake_analytics_account_resource.go +++ b/azurerm/internal/services/datalake/data_lake_analytics_account_resource.go @@ -19,7 +19,7 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmDataLakeAnalyticsAccount() *schema.Resource { +func resourceDataLakeAnalyticsAccount() *schema.Resource { return &schema.Resource{ Create: resourceArmDateLakeAnalyticsAccountCreate, Read: resourceArmDateLakeAnalyticsAccountRead, @@ -42,7 +42,7 @@ func resourceArmDataLakeAnalyticsAccount() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.DataLakeAccountName(), + ValidateFunc: validate.AccountName(), }, "location": azure.SchemaLocation(), @@ -71,7 +71,7 @@ func resourceArmDataLakeAnalyticsAccount() 
*schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.DataLakeAccountName(), + ValidateFunc: validate.AccountName(), }, "tags": tags.Schema(), diff --git a/azurerm/internal/services/datalake/data_lake_analytics_account_resource_test.go b/azurerm/internal/services/datalake/data_lake_analytics_account_resource_test.go new file mode 100644 index 000000000000..de2910a72524 --- /dev/null +++ b/azurerm/internal/services/datalake/data_lake_analytics_account_resource_test.go @@ -0,0 +1,185 @@ +package datalake_test + +import ( + "context" + "fmt" + "strconv" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DataLakeAnalyticsAccountResource struct { +} + +func TestAccDataLakeAnalyticsAccount_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_lake_analytics_account", "test") + r := DataLakeAnalyticsAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tier").HasValue("Consumption"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataLakeAnalyticsAccount_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_lake_analytics_account", "test") + r := DataLakeAnalyticsAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccDataLakeAnalyticsAccount_tier(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_lake_analytics_account", "test") + r := DataLakeAnalyticsAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.tier(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tier").HasValue("Commitment_100AUHours"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataLakeAnalyticsAccount_withTags(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_lake_analytics_account", "test") + r := DataLakeAnalyticsAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("2"), + ), + }, + { + Config: r.withTagsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func (t DataLakeAnalyticsAccountResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + name := id.Path["accounts"] + + resp, err := 
clients.Datalake.AnalyticsAccountsClient.Get(ctx, id.ResourceGroup, name) + if err != nil { + return nil, fmt.Errorf("retrieving Data Lake Analytics Account %q (resource group: %q): %+v", name, id.ResourceGroup, err) + } + + return utils.Bool(resp.DataLakeAnalyticsAccountProperties != nil), nil +} + +func (DataLakeAnalyticsAccountResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_lake_analytics_account" "test" { + name = "acctest%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + default_store_account_name = azurerm_data_lake_store.test.name +} +`, DataLakeStoreResource{}.basic(data), strconv.Itoa(data.RandomInteger)[2:17]) +} + +func (r DataLakeAnalyticsAccountResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_lake_analytics_account" "import" { + name = azurerm_data_lake_analytics_account.test.name + resource_group_name = azurerm_data_lake_analytics_account.test.resource_group_name + location = azurerm_data_lake_analytics_account.test.location + default_store_account_name = azurerm_data_lake_analytics_account.test.default_store_account_name +} +`, DataLakeStoreResource{}.basic(data)) +} + +func (r DataLakeAnalyticsAccountResource) tier(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_lake_analytics_account" "test" { + name = "acctest%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + tier = "Commitment_100AUHours" + + default_store_account_name = azurerm_data_lake_store.test.name +} +`, DataLakeStoreResource{}.basic(data), strconv.Itoa(data.RandomInteger)[2:17]) +} + +func (r DataLakeAnalyticsAccountResource) withTags(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_lake_analytics_account" "test" { + name = "acctest%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + default_store_account_name = azurerm_data_lake_store.test.name + + tags = { + environment = "Production" + cost_center = "MSFT" + } +} +`, DataLakeStoreResource{}.basic(data), strconv.Itoa(data.RandomInteger)[2:17]) +} + +func (r DataLakeAnalyticsAccountResource) withTagsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_lake_analytics_account" "test" { + name = "acctest%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + default_store_account_name = azurerm_data_lake_store.test.name + + tags = { + environment = "staging" + } +} +`, DataLakeStoreResource{}.basic(data), strconv.Itoa(data.RandomInteger)[2:17]) +} diff --git a/azurerm/internal/services/datalake/data_lake_analytics_firewall_rule_resource.go b/azurerm/internal/services/datalake/data_lake_analytics_firewall_rule_resource.go index e48612e050ce..85249d2e2eab 100644 --- a/azurerm/internal/services/datalake/data_lake_analytics_firewall_rule_resource.go +++ b/azurerm/internal/services/datalake/data_lake_analytics_firewall_rule_resource.go @@ -17,7 +17,7 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmDataLakeAnalyticsFirewallRule() *schema.Resource { +func resourceDataLakeAnalyticsFirewallRule() *schema.Resource { return &schema.Resource{ Create: resourceArmDateLakeAnalyticsFirewallRuleCreateUpdate, Read: 
resourceArmDateLakeAnalyticsFirewallRuleRead, @@ -40,14 +40,14 @@ func resourceArmDataLakeAnalyticsFirewallRule() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.DataLakeFirewallRuleName(), + ValidateFunc: validate.FirewallRuleName(), }, "account_name": { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.DataLakeAccountName(), + ValidateFunc: validate.AccountName(), }, "resource_group_name": azure.SchemaResourceGroupName(), diff --git a/azurerm/internal/services/datalake/data_lake_analytics_firewall_rule_resource_test.go b/azurerm/internal/services/datalake/data_lake_analytics_firewall_rule_resource_test.go new file mode 100644 index 000000000000..f7cf209f1968 --- /dev/null +++ b/azurerm/internal/services/datalake/data_lake_analytics_firewall_rule_resource_test.go @@ -0,0 +1,169 @@ +package datalake_test + +import ( + "context" + "fmt" + "strconv" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DataLakeAnalyticsFirewallRuleResource struct { +} + +func TestAccDataLakeAnalyticsFirewallRule_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_lake_analytics_firewall_rule", "test") + r := DataLakeAnalyticsFirewallRuleResource{} + startIP := "1.1.1.1" + endIP := "2.2.2.2" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, startIP, endIP), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("start_ip_address").HasValue(startIP), + check.That(data.ResourceName).Key("end_ip_address").HasValue(endIP), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataLakeAnalyticsFirewallRule_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_lake_analytics_firewall_rule", "test") + r := DataLakeAnalyticsFirewallRuleResource{} + startIP := "1.1.1.1" + endIP := "2.2.2.2" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, startIP, endIP), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("start_ip_address").HasValue(startIP), + check.That(data.ResourceName).Key("end_ip_address").HasValue(endIP), + ), + }, + { + Config: r.requiresImport(data, startIP, endIP), + ExpectError: acceptance.RequiresImportError("azurerm_data_lake_analytics_firewall_rule"), + }, + }) +} + +func TestAccDataLakeAnalyticsFirewallRule_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_lake_analytics_firewall_rule", "test") + r := DataLakeAnalyticsFirewallRuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "1.1.1.1", "2.2.2.2"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("start_ip_address").HasValue("1.1.1.1"), + check.That(data.ResourceName).Key("end_ip_address").HasValue("2.2.2.2"), + ), + }, + { + Config: r.basic(data, "2.2.2.2", "3.3.3.3"), 
+ Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("start_ip_address").HasValue("2.2.2.2"), + check.That(data.ResourceName).Key("end_ip_address").HasValue("3.3.3.3"), + ), + }, + }) +} + +func TestAccDataLakeAnalyticsFirewallRule_azureServices(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_lake_analytics_firewall_rule", "test") + r := DataLakeAnalyticsFirewallRuleResource{} + azureServicesIP := "0.0.0.0" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, azureServicesIP, azureServicesIP), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("start_ip_address").HasValue(azureServicesIP), + check.That(data.ResourceName).Key("end_ip_address").HasValue(azureServicesIP), + ), + }, + data.ImportStep(), + }) +} + +func (t DataLakeAnalyticsFirewallRuleResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + accountName := id.Path["accounts"] + name := id.Path["firewallRules"] + + resp, err := clients.Datalake.AnalyticsFirewallRulesClient.Get(ctx, id.ResourceGroup, accountName, name) + if err != nil { + return nil, fmt.Errorf("retrieving Data Lake Analytics Firewall Rule %q (Account %q / Resource Group: %q): %v", name, accountName, id.ResourceGroup, err) + } + + return utils.Bool(resp.FirewallRuleProperties != nil), nil +} + +func (DataLakeAnalyticsFirewallRuleResource) basic(data acceptance.TestData, startIP, endIP string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%[1]d" + location = "%[2]s" +} + +resource "azurerm_data_lake_store" "test" { + name = "acctest%[3]s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location +} + +resource "azurerm_data_lake_analytics_account" "test" { + name = "acctest%[3]s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + default_store_account_name = azurerm_data_lake_store.test.name +} + +resource "azurerm_data_lake_analytics_firewall_rule" "test" { + name = "acctest%[3]s" + account_name = azurerm_data_lake_analytics_account.test.name + resource_group_name = azurerm_resource_group.test.name + start_ip_address = "%[4]s" + end_ip_address = "%[5]s" +} +`, data.RandomInteger, data.Locations.Primary, strconv.Itoa(data.RandomInteger)[10:17], startIP, endIP) +} + +func (DataLakeAnalyticsFirewallRuleResource) requiresImport(data acceptance.TestData, startIP, endIP string) string { + template := DataLakeAnalyticsFirewallRuleResource{}.basic(data, startIP, endIP) + return fmt.Sprintf(` +%s + +resource "azurerm_data_lake_analytics_firewall_rule" "import" { + name = azurerm_data_lake_analytics_firewall_rule.test.name + account_name = azurerm_data_lake_analytics_firewall_rule.test.account_name + resource_group_name = azurerm_data_lake_analytics_firewall_rule.test.resource_group_name + start_ip_address = azurerm_data_lake_analytics_firewall_rule.test.start_ip_address + end_ip_address = azurerm_data_lake_analytics_firewall_rule.test.end_ip_address +} +`, template) +} diff --git a/azurerm/internal/services/datalake/data_lake_store_data_source.go b/azurerm/internal/services/datalake/data_lake_store_data_source.go index 
4eb28727d36a..a755e37d45ae 100644 --- a/azurerm/internal/services/datalake/data_lake_store_data_source.go +++ b/azurerm/internal/services/datalake/data_lake_store_data_source.go @@ -12,7 +12,7 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmDataLakeStoreAccount() *schema.Resource { +func dataSourceDataLakeStoreAccount() *schema.Resource { return &schema.Resource{ Read: dataSourceArmDateLakeStoreAccountRead, diff --git a/azurerm/internal/services/datalake/data_lake_store_data_source_test.go b/azurerm/internal/services/datalake/data_lake_store_data_source_test.go new file mode 100644 index 000000000000..abc8e774e6dd --- /dev/null +++ b/azurerm/internal/services/datalake/data_lake_store_data_source_test.go @@ -0,0 +1,96 @@ +package datalake_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type DataLakeStoreDataSource struct { +} + +func TestAccDataLakeStoreDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_data_lake_store", "test") + r := DataLakeStoreDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("tier").HasValue("Consumption"), + ), + }, + }) +} + +func TestAccDataLakeStoreDataSource_tier(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_data_lake_store", "test") + r := DataLakeStoreDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.tier(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("tier").HasValue("Commitment_1TB"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.hello").HasValue("world"), + ), + }, + }) +} + +func (DataLakeStoreDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-datalake-%d" + location = "%s" +} + +resource "azurerm_data_lake_store" "test" { + name = "unlikely23exst2acct%s" + location = "%s" + resource_group_name = azurerm_resource_group.test.name +} + +data "azurerm_data_lake_store" "test" { + name = azurerm_data_lake_store.test.name + resource_group_name = azurerm_data_lake_store.test.resource_group_name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.Locations.Primary) +} + +func (DataLakeStoreDataSource) tier(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-datalake-%d" + location = "%s" +} + +resource "azurerm_data_lake_store" "test" { + name = "unlikely23exst2acct%s" + location = "%s" + tier = "Commitment_1TB" + resource_group_name = azurerm_resource_group.test.name + + tags = { + hello = "world" + } +} + +data "azurerm_data_lake_store" "test" { + name = azurerm_data_lake_store.test.name + resource_group_name = azurerm_data_lake_store.test.resource_group_name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.Locations.Primary) +} diff --git a/azurerm/internal/services/datalake/tests/data_lake_store_file_migration_resource_test.go 
b/azurerm/internal/services/datalake/data_lake_store_file_migration_resource_test.go similarity index 96% rename from azurerm/internal/services/datalake/tests/data_lake_store_file_migration_resource_test.go rename to azurerm/internal/services/datalake/data_lake_store_file_migration_resource_test.go index b21d2706f00b..34b6c67bb182 100644 --- a/azurerm/internal/services/datalake/tests/data_lake_store_file_migration_resource_test.go +++ b/azurerm/internal/services/datalake/data_lake_store_file_migration_resource_test.go @@ -1,4 +1,4 @@ -package tests +package datalake_test import ( "context" @@ -13,7 +13,7 @@ import ( // NOTE: this is intentionally an acceptance test (and we're not explicitly setting the env) // as we want to run this depending on the cloud we're in. -func TestAccAzureRMDataLakeStoreFileMigrateState(t *testing.T) { +func TestAccDataLakeStoreFileMigrateState(t *testing.T) { config := acceptance.GetAuthConfig(t) if config == nil { t.SkipNow() @@ -63,7 +63,6 @@ func TestAccAzureRMDataLakeStoreFileMigrateState(t *testing.T) { Attributes: tc.InputAttributes, } is, err := datalake.ResourceDataLakeStoreFileMigrateState(tc.StateVersion, is, client) - if err != nil { t.Fatalf("bad: %s, err: %#v", tn, err) } diff --git a/azurerm/internal/services/datalake/data_lake_store_file_resource.go b/azurerm/internal/services/datalake/data_lake_store_file_resource.go index 37a0f5e5052c..a84123eac467 100644 --- a/azurerm/internal/services/datalake/data_lake_store_file_resource.go +++ b/azurerm/internal/services/datalake/data_lake_store_file_resource.go @@ -20,11 +20,11 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmDataLakeStoreFile() *schema.Resource { +func resourceDataLakeStoreFile() *schema.Resource { return &schema.Resource{ - Create: resourceArmDataLakeStoreFileCreate, - Read: resourceArmDataLakeStoreFileRead, - Delete: resourceArmDataLakeStoreFileDelete, + Create: resourceDataLakeStoreFileCreate, + Read: resourceDataLakeStoreFileRead, + Delete: resourceDataLakeStoreFileDelete, MigrateState: ResourceDataLakeStoreFileMigrateState, SchemaVersion: 1, Importer: &schema.ResourceImporter{ @@ -61,7 +61,7 @@ func resourceArmDataLakeStoreFile() *schema.Resource { } } -func resourceArmDataLakeStoreFileCreate(d *schema.ResourceData, meta interface{}) error { +func resourceDataLakeStoreFileCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Datalake.StoreFilesClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -120,50 +120,50 @@ func resourceArmDataLakeStoreFileCreate(d *schema.ResourceData, meta interface{} } d.SetId(id) - return resourceArmDataLakeStoreFileRead(d, meta) + return resourceDataLakeStoreFileRead(d, meta) } -func resourceArmDataLakeStoreFileRead(d *schema.ResourceData, meta interface{}) error { +func resourceDataLakeStoreFileRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Datalake.StoreFilesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parseDataLakeStoreFileId(d.Id(), client.AdlsFileSystemDNSSuffix) + id, err := ParseDataLakeStoreFileId(d.Id(), client.AdlsFileSystemDNSSuffix) if err != nil { return err } - resp, err := client.GetFileStatus(ctx, id.storageAccountName, id.filePath, utils.Bool(true)) + resp, err := client.GetFileStatus(ctx, id.StorageAccountName, id.FilePath, utils.Bool(true)) if err != nil { if utils.ResponseWasNotFound(resp.Response) { 
- log.Printf("[WARN] Data Lake Store File %q was not found (Account %q)", id.filePath, id.storageAccountName) + log.Printf("[WARN] Data Lake Store File %q was not found (Account %q)", id.FilePath, id.StorageAccountName) d.SetId("") return nil } - return fmt.Errorf("Error making Read request on Azure Data Lake Store File %q (Account %q): %+v", id.filePath, id.storageAccountName, err) + return fmt.Errorf("Error making Read request on Azure Data Lake Store File %q (Account %q): %+v", id.FilePath, id.StorageAccountName, err) } - d.Set("account_name", id.storageAccountName) - d.Set("remote_file_path", id.filePath) + d.Set("account_name", id.StorageAccountName) + d.Set("remote_file_path", id.FilePath) return nil } -func resourceArmDataLakeStoreFileDelete(d *schema.ResourceData, meta interface{}) error { +func resourceDataLakeStoreFileDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Datalake.StoreFilesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parseDataLakeStoreFileId(d.Id(), client.AdlsFileSystemDNSSuffix) + id, err := ParseDataLakeStoreFileId(d.Id(), client.AdlsFileSystemDNSSuffix) if err != nil { return err } - resp, err := client.Delete(ctx, id.storageAccountName, id.filePath, utils.Bool(false)) + resp, err := client.Delete(ctx, id.StorageAccountName, id.FilePath, utils.Bool(false)) if err != nil { if !response.WasNotFound(resp.Response.Response) { - return fmt.Errorf("Error issuing delete request for Data Lake Store File %q (Account %q): %+v", id.filePath, id.storageAccountName, err) + return fmt.Errorf("Error issuing delete request for Data Lake Store File %q (Account %q): %+v", id.FilePath, id.StorageAccountName, err) } } @@ -171,11 +171,11 @@ func resourceArmDataLakeStoreFileDelete(d *schema.ResourceData, meta interface{} } type dataLakeStoreFileId struct { - storageAccountName string - filePath string + StorageAccountName string + FilePath string } -func parseDataLakeStoreFileId(input string, suffix string) (*dataLakeStoreFileId, error) { +func ParseDataLakeStoreFileId(input string, suffix string) (*dataLakeStoreFileId, error) { // Example: tomdevdls1.azuredatalakestore.net/test/example.txt // we add a scheme to the start of this so it parses correctly uri, err := url.Parse(fmt.Sprintf("https://%s", input)) @@ -186,11 +186,11 @@ func parseDataLakeStoreFileId(input string, suffix string) (*dataLakeStoreFileId // TODO: switch to pulling this from the Environment when it's available there // BUG: https://github.com/Azure/go-autorest/issues/312 replacement := fmt.Sprintf(".%s", suffix) - accountName := strings.Replace(uri.Host, replacement, "", -1) + accountName := strings.ReplaceAll(uri.Host, replacement, "") file := dataLakeStoreFileId{ - storageAccountName: accountName, - filePath: uri.Path, + StorageAccountName: accountName, + FilePath: uri.Path, } return &file, nil } diff --git a/azurerm/internal/services/datalake/data_lake_store_file_resource_test.go b/azurerm/internal/services/datalake/data_lake_store_file_resource_test.go new file mode 100644 index 000000000000..1bec609fdc9f --- /dev/null +++ b/azurerm/internal/services/datalake/data_lake_store_file_resource_test.go @@ -0,0 +1,191 @@ +package datalake_test + +import ( + "context" + "fmt" + "io/ioutil" + "math/rand" + "os" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datalake" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DataLakeStoreFileResource struct { +} + +func TestValidateDataLakeStoreRemoteFilePath(t *testing.T) { + cases := []struct { + Value string + Errors int + }{ + { + Value: "bad", + Errors: 1, + }, + { + Value: "/good/file/path", + Errors: 0, + }, + } + + for _, tc := range cases { + _, errors := datalake.ValidateDataLakeStoreRemoteFilePath()(tc.Value, "unittest") + + if len(errors) != tc.Errors { + t.Fatalf("Expected validateDataLakeStoreRemoteFilePath to trigger '%d' errors for '%s' - got '%d'", tc.Errors, tc.Value, len(errors)) + } + } +} + +func TestAccDataLakeStoreFile_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_lake_store_file", "test") + r := DataLakeStoreFileResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("local_file_path"), + }) +} + +func TestAccDataLakeStoreFile_largefiles(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_lake_store_file", "test") + r := DataLakeStoreFileResource{} + + // "large" in this context is anything greater than 4 megabytes + largeSize := 12 * 1024 * 1024 // 12 mb + bytes := make([]byte, largeSize) + rand.Read(bytes) // fill with random data + + tmpfile, err := ioutil.TempFile("", "azurerm-acc-datalake-file-large") + if err != nil { + t.Errorf("Unable to open a temporary file.") + } + defer os.Remove(tmpfile.Name()) + + if _, err := tmpfile.Write(bytes); err != nil { + t.Errorf("Unable to write to temporary file %q: %v", tmpfile.Name(), err) + } + if err := tmpfile.Close(); err != nil { + t.Errorf("Unable to close temporary file %q: %v", tmpfile.Name(), err) + } + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.largefiles(data, tmpfile.Name()), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("local_file_path"), + }) +} + +func TestAccDataLakeStoreFile_requiresimport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_lake_store_file", "test") + r := DataLakeStoreFileResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_data_lake_store_file"), + }, + }) +} + +func (t DataLakeStoreFileResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + client := clients.Datalake.StoreFilesClient + id, err := datalake.ParseDataLakeStoreFileId(state.ID, client.AdlsFileSystemDNSSuffix) + if err != nil { + return nil, err + } + + resp, err := client.GetFileStatus(ctx, id.StorageAccountName, id.FilePath, utils.Bool(true)) + if err != nil { + return nil, fmt.Errorf("retrieving Date Lake Store File Rule %q (Account %q): %v", id.FilePath, id.StorageAccountName, err) + } + + return utils.Bool(resp.FileStatus != nil), nil +} + +func 
(DataLakeStoreFileResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-datalake-%d" + location = "%s" +} + +resource "azurerm_data_lake_store" "test" { + name = "unlikely23exst2acct%s" + resource_group_name = azurerm_resource_group.test.name + location = "%s" + firewall_state = "Disabled" +} + +resource "azurerm_data_lake_store_file" "test" { + remote_file_path = "/test/application_gateway_test.cer" + account_name = azurerm_data_lake_store.test.name + local_file_path = "./testdata/application_gateway_test.cer" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.Locations.Primary) +} + +func (DataLakeStoreFileResource) largefiles(data acceptance.TestData, file string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-datalake-%d" + location = "%s" +} + +resource "azurerm_data_lake_store" "test" { + name = "unlikely23exst2acct%s" + resource_group_name = azurerm_resource_group.test.name + location = "%s" + firewall_state = "Disabled" +} + +resource "azurerm_data_lake_store_file" "test" { + remote_file_path = "/test/testAccAzureRMDataLakeStoreFile_largefiles.bin" + account_name = azurerm_data_lake_store.test.name + local_file_path = "%s" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.Locations.Primary, file) +} + +func (DataLakeStoreFileResource) requiresImport(data acceptance.TestData) string { + template := DataLakeStoreFileResource{}.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_data_lake_store_file" "import" { + remote_file_path = azurerm_data_lake_store_file.test.remote_file_path + account_name = azurerm_data_lake_store_file.test.account_name + local_file_path = "./testdata/application_gateway_test.cer" +} +`, template) +} diff --git a/azurerm/internal/services/datalake/data_lake_store_firewall_rule_resource.go b/azurerm/internal/services/datalake/data_lake_store_firewall_rule_resource.go index 5edb9f114a6d..8b86cce4614d 100644 --- a/azurerm/internal/services/datalake/data_lake_store_firewall_rule_resource.go +++ b/azurerm/internal/services/datalake/data_lake_store_firewall_rule_resource.go @@ -17,7 +17,7 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmDataLakeStoreFirewallRule() *schema.Resource { +func resourceDataLakeStoreFirewallRule() *schema.Resource { return &schema.Resource{ Create: resourceArmDateLakeStoreAccountFirewallRuleCreateUpdate, Read: resourceArmDateLakeStoreAccountFirewallRuleRead, @@ -39,14 +39,14 @@ func resourceArmDataLakeStoreFirewallRule() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.DataLakeFirewallRuleName(), + ValidateFunc: validate.FirewallRuleName(), }, "account_name": { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.DataLakeAccountName(), + ValidateFunc: validate.AccountName(), }, "resource_group_name": azure.SchemaResourceGroupName(), diff --git a/azurerm/internal/services/datalake/data_lake_store_firewall_rule_resource_test.go b/azurerm/internal/services/datalake/data_lake_store_firewall_rule_resource_test.go new file mode 100644 index 000000000000..a49cf2788d98 --- /dev/null +++ b/azurerm/internal/services/datalake/data_lake_store_firewall_rule_resource_test.go @@ -0,0 +1,161 @@ +package datalake_test + +import ( + "context" + "fmt" + 
"strconv" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DataLakeStoreFirewallRuleResource struct { +} + +func TestAccDataLakeStoreFirewallRule_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_lake_store_firewall_rule", "test") + r := DataLakeStoreFirewallRuleResource{} + startIP := "1.1.1.1" + endIP := "2.2.2.2" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, startIP, endIP), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("start_ip_address").HasValue(startIP), + check.That(data.ResourceName).Key("end_ip_address").HasValue(endIP), + ), + }, + data.ImportStep(), + }) +} + +// + +func TestAccDataLakeStoreFirewallRule_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_lake_store_firewall_rule", "test") + r := DataLakeStoreFirewallRuleResource{} + startIP := "1.1.1.1" + endIP := "2.2.2.2" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, startIP, endIP), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data, startIP, endIP), + ExpectError: acceptance.RequiresImportError("azurerm_data_lake_store_firewall_rule"), + }, + }) +} + +func TestAccDataLakeStoreFirewallRule_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_lake_store_firewall_rule", "test") + r := DataLakeStoreFirewallRuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "1.1.1.1", "2.2.2.2"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("start_ip_address").HasValue("1.1.1.1"), + check.That(data.ResourceName).Key("end_ip_address").HasValue("2.2.2.2"), + ), + }, + { + Config: r.basic(data, "2.2.2.2", "3.3.3.3"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("start_ip_address").HasValue("2.2.2.2"), + check.That(data.ResourceName).Key("end_ip_address").HasValue("3.3.3.3"), + ), + }, + }) +} + +func TestAccDataLakeStoreFirewallRule_azureServices(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_lake_store_firewall_rule", "test") + r := DataLakeStoreFirewallRuleResource{} + azureServicesIP := "0.0.0.0" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, azureServicesIP, azureServicesIP), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("start_ip_address").HasValue(azureServicesIP), + check.That(data.ResourceName).Key("end_ip_address").HasValue(azureServicesIP), + ), + }, + data.ImportStep(), + }) +} + +func (t DataLakeStoreFirewallRuleResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + 
return nil, err
+ }
+ resourceGroup := id.ResourceGroup
+ accountName := id.Path["accounts"]
+ name := id.Path["firewallRules"]
+
+ resp, err := clients.Datalake.StoreFirewallRulesClient.Get(ctx, resourceGroup, accountName, name)
+ if err != nil {
+ return nil, fmt.Errorf("retrieving Data Lake Store Firewall Rule %q (Account %q / Resource Group: %q): %v", name, accountName, resourceGroup, err)
+ }
+
+ return utils.Bool(resp.FirewallRuleProperties != nil), nil
+}
+
+func (DataLakeStoreFirewallRuleResource) basic(data acceptance.TestData, startIP, endIP string) string {
+ return fmt.Sprintf(`
+provider "azurerm" {
+ features {}
+}
+
+resource "azurerm_resource_group" "test" {
+ name = "acctestRG-datalake-%d"
+ location = "%s"
+}
+
+resource "azurerm_data_lake_store" "test" {
+ name = "acctest%s"
+ resource_group_name = azurerm_resource_group.test.name
+ location = azurerm_resource_group.test.location
+}
+
+resource "azurerm_data_lake_store_firewall_rule" "test" {
+ name = "acctest"
+ account_name = azurerm_data_lake_store.test.name
+ resource_group_name = azurerm_resource_group.test.name
+ start_ip_address = "%s"
+ end_ip_address = "%s"
+}
+`, data.RandomInteger, data.Locations.Primary, strconv.Itoa(data.RandomInteger)[2:17], startIP, endIP)
+}
+
+func (DataLakeStoreFirewallRuleResource) requiresImport(data acceptance.TestData, startIP, endIP string) string {
+ template := DataLakeStoreFirewallRuleResource{}.basic(data, startIP, endIP)
+ return fmt.Sprintf(`
+%s
+
+resource "azurerm_data_lake_store_firewall_rule" "import" {
+ name = azurerm_data_lake_store_firewall_rule.test.name
+ account_name = azurerm_data_lake_store_firewall_rule.test.account_name
+ resource_group_name = azurerm_data_lake_store_firewall_rule.test.resource_group_name
+ start_ip_address = azurerm_data_lake_store_firewall_rule.test.start_ip_address
+ end_ip_address = azurerm_data_lake_store_firewall_rule.test.end_ip_address
+}
+`, template)
+}
diff --git a/azurerm/internal/services/datalake/data_lake_store_resource.go b/azurerm/internal/services/datalake/data_lake_store_resource.go
index b7f7ea1f6acb..19c90e6a60bb 100644
--- a/azurerm/internal/services/datalake/data_lake_store_resource.go
+++ b/azurerm/internal/services/datalake/data_lake_store_resource.go
@@ -19,7 +19,7 @@ import (
 "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
 )
 
-func resourceArmDataLakeStore() *schema.Resource {
+func resourceDataLakeStore() *schema.Resource {
 return &schema.Resource{
 Create: resourceArmDateLakeStoreCreate,
 Read: resourceArmDateLakeStoreRead,
@@ -42,7 +42,7 @@ func resourceArmDataLakeStore() *schema.Resource {
 Type: schema.TypeString,
 Required: true,
 ForceNew: true,
- ValidateFunc: validate.DataLakeAccountName(),
+ ValidateFunc: validate.AccountName(),
 },
 
 "location": azure.SchemaLocation(),
diff --git a/azurerm/internal/services/datalake/data_lake_store_resource_test.go b/azurerm/internal/services/datalake/data_lake_store_resource_test.go
new file mode 100644
index 000000000000..763aa4773899
--- /dev/null
+++ b/azurerm/internal/services/datalake/data_lake_store_resource_test.go
@@ -0,0 +1,307 @@
+package datalake_test
+
+import (
+ "context"
+ "fmt"
+ "strconv"
+ "testing"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance"
+ 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DataLakeStoreResource struct { +} + +func TestAccDataLakeStore_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_lake_store", "test") + r := DataLakeStoreResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tier").HasValue("Consumption"), + check.That(data.ResourceName).Key("encryption_state").HasValue("Enabled"), + check.That(data.ResourceName).Key("encryption_type").HasValue("ServiceManaged"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataLakeStore_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_lake_store", "test") + r := DataLakeStoreResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_data_lake_store"), + }, + }) +} + +func TestAccDataLakeStore_tier(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_lake_store", "test") + r := DataLakeStoreResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.tier(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tier").HasValue("Commitment_1TB"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataLakeStore_encryptionDisabled(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_lake_store", "test") + r := DataLakeStoreResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.encryptionDisabled(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("encryption_state").HasValue("Disabled"), + check.That(data.ResourceName).Key("encryption_type").HasValue(""), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataLakeStore_firewallUpdate(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_lake_store", "test") + r := DataLakeStoreResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.firewall(data, "Enabled", "Enabled"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("firewall_state").HasValue("Enabled"), + check.That(data.ResourceName).Key("firewall_allow_azure_ips").HasValue("Enabled"), + ), + }, + { + Config: r.firewall(data, "Enabled", "Disabled"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("firewall_state").HasValue("Enabled"), + check.That(data.ResourceName).Key("firewall_allow_azure_ips").HasValue("Disabled"), + ), + }, + { + Config: r.firewall(data, "Disabled", "Enabled"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("firewall_state").HasValue("Disabled"), + check.That(data.ResourceName).Key("firewall_allow_azure_ips").HasValue("Enabled"), + ), + }, + { + Config: r.firewall(data, "Disabled", "Disabled"), + Check: 
resource.ComposeTestCheckFunc(
+ check.That(data.ResourceName).ExistsInAzure(r),
+ check.That(data.ResourceName).Key("firewall_state").HasValue("Disabled"),
+ check.That(data.ResourceName).Key("firewall_allow_azure_ips").HasValue("Disabled"),
+ ),
+ },
+ })
+}
+
+func TestAccDataLakeStore_withTags(t *testing.T) {
+ data := acceptance.BuildTestData(t, "azurerm_data_lake_store", "test")
+ r := DataLakeStoreResource{}
+
+ data.ResourceTest(t, r, []resource.TestStep{
+ {
+ Config: r.withTags(data),
+ Check: resource.ComposeTestCheckFunc(
+ check.That(data.ResourceName).ExistsInAzure(r),
+ check.That(data.ResourceName).Key("tags.%").HasValue("2"),
+ ),
+ },
+ {
+ Config: r.withTagsUpdate(data),
+ Check: resource.ComposeTestCheckFunc(
+ check.That(data.ResourceName).ExistsInAzure(r),
+ check.That(data.ResourceName).Key("tags.%").HasValue("1"),
+ ),
+ },
+ data.ImportStep(),
+ })
+}
+
+func (t DataLakeStoreResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) {
+ id, err := azure.ParseAzureResourceID(state.ID)
+ if err != nil {
+ return nil, err
+ }
+
+ name := id.Path["accounts"]
+
+ resp, err := clients.Datalake.StoreAccountsClient.Get(ctx, id.ResourceGroup, name)
+ if err != nil {
+ return nil, fmt.Errorf("retrieving Data Lake Store %q (resource group: %q): %+v", name, id.ResourceGroup, err)
+ }
+
+ return utils.Bool(resp.DataLakeStoreAccountProperties != nil), nil
+}
+
+func (DataLakeStoreResource) basic(data acceptance.TestData) string {
+ return fmt.Sprintf(`
+provider "azurerm" {
+ features {}
+}
+
+resource "azurerm_resource_group" "test" {
+ name = "acctestRG-datalake-%d"
+ location = "%s"
+}
+
+resource "azurerm_data_lake_store" "test" {
+ name = "acctest%s"
+ resource_group_name = azurerm_resource_group.test.name
+ location = azurerm_resource_group.test.location
+}
+`, data.RandomInteger, data.Locations.Primary, strconv.Itoa(data.RandomInteger)[2:17])
+}
+
+func (DataLakeStoreResource) requiresImport(data acceptance.TestData) string {
+ template := DataLakeStoreResource{}.basic(data)
+ return fmt.Sprintf(`
+%s
+
+resource "azurerm_data_lake_store" "import" {
+ name = azurerm_data_lake_store.test.name
+ resource_group_name = azurerm_data_lake_store.test.resource_group_name
+ location = azurerm_data_lake_store.test.location
+}
+`, template)
+}
+
+func (DataLakeStoreResource) tier(data acceptance.TestData) string {
+ return fmt.Sprintf(`
+provider "azurerm" {
+ features {}
+}
+
+resource "azurerm_resource_group" "test" {
+ name = "acctestRG-datalake-%d"
+ location = "%s"
+}
+
+resource "azurerm_data_lake_store" "test" {
+ name = "acctest%s"
+ resource_group_name = azurerm_resource_group.test.name
+ location = azurerm_resource_group.test.location
+ tier = "Commitment_1TB"
+}
+`, data.RandomInteger, data.Locations.Primary, strconv.Itoa(data.RandomInteger)[2:17])
+}
+
+func (DataLakeStoreResource) encryptionDisabled(data acceptance.TestData) string {
+ return fmt.Sprintf(`
+provider "azurerm" {
+ features {}
+}
+
+resource "azurerm_resource_group" "test" {
+ name = "acctestRG-datalake-%d"
+ location = "%s"
+}
+
+resource "azurerm_data_lake_store" "test" {
+ name = "acctest%s"
+ resource_group_name = azurerm_resource_group.test.name
+ location = azurerm_resource_group.test.location
+ encryption_state = "Disabled"
+}
+`, data.RandomInteger, data.Locations.Primary, strconv.Itoa(data.RandomInteger)[2:17])
+}
+
+func (DataLakeStoreResource) firewall(data acceptance.TestData, firewallState string, firewallAllowAzureIPs string) string {
+ 
return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-datalake-%d" + location = "%s" +} + +resource "azurerm_data_lake_store" "test" { + name = "acctest%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + firewall_state = "%s" + firewall_allow_azure_ips = "%s" +} +`, data.RandomInteger, data.Locations.Primary, strconv.Itoa(data.RandomInteger)[2:17], firewallState, firewallAllowAzureIPs) +} + +func (DataLakeStoreResource) withTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-datalake-%d" + location = "%s" +} + +resource "azurerm_data_lake_store" "test" { + name = "acctest%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + tags = { + environment = "Production" + cost_center = "MSFT" + } +} +`, data.RandomInteger, data.Locations.Primary, strconv.Itoa(data.RandomInteger)[2:17]) +} + +func (DataLakeStoreResource) withTagsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-datalake-%d" + location = "%s" +} + +resource "azurerm_data_lake_store" "test" { + name = "acctest%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + tags = { + environment = "staging" + } +} +`, data.RandomInteger, data.Locations.Primary, strconv.Itoa(data.RandomInteger)[2:17]) +} diff --git a/azurerm/internal/services/datalake/parse/account.go b/azurerm/internal/services/datalake/parse/account.go new file mode 100644 index 000000000000..357725aa5db2 --- /dev/null +++ b/azurerm/internal/services/datalake/parse/account.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type AccountId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewAccountID(subscriptionId, resourceGroup, name string) AccountId { + return AccountId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id AccountId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Account", segmentsStr) +} + +func (id AccountId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DataLakeStore/accounts/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// AccountID parses a Account ID into an AccountId struct +func AccountID(input string) (*AccountId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := AccountId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("accounts"); err != nil { + return nil, err + } + + if err := 
id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/datalake/parse/account_test.go b/azurerm/internal/services/datalake/parse/account_test.go new file mode 100644 index 000000000000..7a260eb51dcd --- /dev/null +++ b/azurerm/internal/services/datalake/parse/account_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = AccountId{} + +func TestAccountIDFormatter(t *testing.T) { + actual := NewAccountID("12345678-1234-9876-4563-123456789012", "resGroup1", "account1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataLakeStore/accounts/account1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestAccountID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *AccountId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataLakeStore/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataLakeStore/accounts/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataLakeStore/accounts/account1", + Expected: &AccountId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "account1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DATALAKESTORE/ACCOUNTS/ACCOUNT1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := AccountID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/datalake/parse/data_lake.go b/azurerm/internal/services/datalake/parse/data_lake.go deleted file mode 100644 index c29413d21353..000000000000 --- a/azurerm/internal/services/datalake/parse/data_lake.go +++ /dev/null @@ -1,33 +0,0 @@ -package parse - -import ( - "fmt" - - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type DataLakeStoreId struct { - Subscription string - ResourceGroup string - Name string -} - -func DataLakeStoreID(input string) (*DataLakeStoreId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("parsing Data Lake Store ID %q: %+v", input, err) - } - - dataLakeStore := DataLakeStoreId{ - ResourceGroup: id.ResourceGroup, - Subscription: id.SubscriptionID, - } - if dataLakeStore.Name, err = id.PopSegment("accounts"); err != nil { - return nil, err - } - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &dataLakeStore, nil -} diff --git a/azurerm/internal/services/datalake/parse/data_lake_test.go b/azurerm/internal/services/datalake/parse/data_lake_test.go deleted file mode 100644 index 17ddd99e26dd..000000000000 --- a/azurerm/internal/services/datalake/parse/data_lake_test.go +++ /dev/null @@ -1,75 +0,0 @@ -package parse - -import "testing" - -func TestDataLakeStoreID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *DataLakeStoreId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Account Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataLakeStore/accounts/", - Expected: nil, - }, - { - Name: "Data lake account ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataLakeStore/accounts/account1", - Expected: &DataLakeStoreId{ - Name: "account1", - ResourceGroup: "resGroup1", - Subscription: "00000000-0000-0000-0000-000000000000", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataLakeStore/Accounts/account1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q..", v.Name) - - actual, err := DataLakeStoreID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Subscription != v.Expected.Subscription { - t.Fatalf("Expected %q but got %q for Subscription", v.Expected.Subscription, actual.Subscription) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - } -} diff --git a/azurerm/internal/services/datalake/registration.go b/azurerm/internal/services/datalake/registration.go index fda187080739..7c58ba5146fb 100644 --- a/azurerm/internal/services/datalake/registration.go +++ b/azurerm/internal/services/datalake/registration.go @@ -21,15 +21,17 @@ func (r Registration) WebsiteCategories() []string { // SupportedDataSources returns the supported Data Sources supported by this Service func (r Registration) SupportedDataSources() map[string]*schema.Resource { return 
map[string]*schema.Resource{ - "azurerm_data_lake_store": dataSourceArmDataLakeStoreAccount()} + "azurerm_data_lake_store": dataSourceDataLakeStoreAccount(), + } } // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_data_lake_analytics_account": resourceArmDataLakeAnalyticsAccount(), - "azurerm_data_lake_analytics_firewall_rule": resourceArmDataLakeAnalyticsFirewallRule(), - "azurerm_data_lake_store_file": resourceArmDataLakeStoreFile(), - "azurerm_data_lake_store_firewall_rule": resourceArmDataLakeStoreFirewallRule(), - "azurerm_data_lake_store": resourceArmDataLakeStore()} + "azurerm_data_lake_analytics_account": resourceDataLakeAnalyticsAccount(), + "azurerm_data_lake_analytics_firewall_rule": resourceDataLakeAnalyticsFirewallRule(), + "azurerm_data_lake_store_file": resourceDataLakeStoreFile(), + "azurerm_data_lake_store_firewall_rule": resourceDataLakeStoreFirewallRule(), + "azurerm_data_lake_store": resourceDataLakeStore(), + } } diff --git a/azurerm/internal/services/datalake/resourceids.go b/azurerm/internal/services/datalake/resourceids.go new file mode 100644 index 000000000000..5b5e4afe413c --- /dev/null +++ b/azurerm/internal/services/datalake/resourceids.go @@ -0,0 +1,3 @@ +package datalake + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Account -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataLakeStore/accounts/account1 diff --git a/azurerm/internal/services/datalake/tests/testdata/application_gateway_test.cer b/azurerm/internal/services/datalake/testdata/application_gateway_test.cer similarity index 100% rename from azurerm/internal/services/datalake/tests/testdata/application_gateway_test.cer rename to azurerm/internal/services/datalake/testdata/application_gateway_test.cer diff --git a/azurerm/internal/services/datalake/tests/data_lake_analytics_account_resource_test.go b/azurerm/internal/services/datalake/tests/data_lake_analytics_account_resource_test.go deleted file mode 100644 index 029173dc677a..000000000000 --- a/azurerm/internal/services/datalake/tests/data_lake_analytics_account_resource_test.go +++ /dev/null @@ -1,241 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "strconv" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMDataLakeAnalyticsAccount_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_lake_analytics_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataLakeAnalyticsAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataLakeAnalyticsAccount_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataLakeAnalyticsAccountExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tier", "Consumption"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDataLakeAnalyticsAccount_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_lake_analytics_account", "test") - - 
resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataLakeAnalyticsAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataLakeAnalyticsAccount_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataLakeAnalyticsAccountExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMDataLakeAnalyticsAccount_requiresImport), - }, - }) -} - -func TestAccAzureRMDataLakeAnalyticsAccount_tier(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_lake_analytics_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataLakeAnalyticsAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataLakeAnalyticsAccount_tier(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataLakeAnalyticsAccountExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tier", "Commitment_100AUHours"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDataLakeAnalyticsAccount_withTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_lake_analytics_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataLakeAnalyticsAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataLakeAnalyticsAccount_withTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataLakeAnalyticsAccountExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - ), - }, - { - Config: testAccAzureRMDataLakeAnalyticsAccount_withTagsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataLakeAnalyticsAccountExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMDataLakeAnalyticsAccountExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Datalake.AnalyticsAccountsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - accountName := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for data lake store: %s", accountName) - } - - resp, err := conn.Get(ctx, resourceGroup, accountName) - if err != nil { - return fmt.Errorf("Bad: Get on dataLakeAnalyticsAccountClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Date Lake Analytics Account %q (resource group: %q) does not exist", accountName, resourceGroup) - } - - return nil - } -} - -func testCheckAzureRMDataLakeAnalyticsAccountDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Datalake.AnalyticsAccountsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - 
if rs.Type != "azurerm_data_lake_analytics_account" { - continue - } - - accountName := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, accountName) - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return nil - } - - return err - } - - return fmt.Errorf("Data Lake Analytics Account still exists:\n%#v", resp) - } - - return nil -} - -func testAccAzureRMDataLakeAnalyticsAccount_basic(data acceptance.TestData) string { - template := testAccAzureRMDataLakeStore_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_data_lake_analytics_account" "test" { - name = "acctest%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - default_store_account_name = azurerm_data_lake_store.test.name -} -`, template, strconv.Itoa(data.RandomInteger)[2:17]) -} - -func testAccAzureRMDataLakeAnalyticsAccount_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMDataLakeAnalyticsAccount_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_data_lake_analytics_account" "import" { - name = azurerm_data_lake_analytics_account.test.name - resource_group_name = azurerm_data_lake_analytics_account.test.resource_group_name - location = azurerm_data_lake_analytics_account.test.location - default_store_account_name = azurerm_data_lake_analytics_account.test.default_store_account_name -} -`, template) -} - -func testAccAzureRMDataLakeAnalyticsAccount_tier(data acceptance.TestData) string { - template := testAccAzureRMDataLakeStore_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_data_lake_analytics_account" "test" { - name = "acctest%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - tier = "Commitment_100AUHours" - - default_store_account_name = azurerm_data_lake_store.test.name -} -`, template, strconv.Itoa(data.RandomInteger)[2:17]) -} - -func testAccAzureRMDataLakeAnalyticsAccount_withTags(data acceptance.TestData) string { - template := testAccAzureRMDataLakeStore_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_data_lake_analytics_account" "test" { - name = "acctest%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - default_store_account_name = azurerm_data_lake_store.test.name - - tags = { - environment = "Production" - cost_center = "MSFT" - } -} -`, template, strconv.Itoa(data.RandomInteger)[2:17]) -} - -func testAccAzureRMDataLakeAnalyticsAccount_withTagsUpdate(data acceptance.TestData) string { - template := testAccAzureRMDataLakeStore_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_data_lake_analytics_account" "test" { - name = "acctest%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - default_store_account_name = azurerm_data_lake_store.test.name - - tags = { - environment = "staging" - } -} -`, template, strconv.Itoa(data.RandomInteger)[2:17]) -} diff --git a/azurerm/internal/services/datalake/tests/data_lake_analytics_firewall_rule_resource_test.go b/azurerm/internal/services/datalake/tests/data_lake_analytics_firewall_rule_resource_test.go deleted file mode 100644 index d5e1c1028473..000000000000 --- a/azurerm/internal/services/datalake/tests/data_lake_analytics_firewall_rule_resource_test.go +++ /dev/null @@ -1,222 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - 
"strconv" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMDataLakeAnalyticsFirewallRule_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_lake_analytics_firewall_rule", "test") - startIP := "1.1.1.1" - endIP := "2.2.2.2" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataLakeAnalyticsFirewallRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataLakeAnalyticsFirewallRule_basic(data, startIP, endIP), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataLakeAnalyticsFirewallRuleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "start_ip_address", startIP), - resource.TestCheckResourceAttr(data.ResourceName, "end_ip_address", endIP), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDataLakeAnalyticsFirewallRule_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_lake_analytics_firewall_rule", "test") - startIP := "1.1.1.1" - endIP := "2.2.2.2" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataLakeAnalyticsFirewallRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataLakeAnalyticsFirewallRule_basic(data, startIP, endIP), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataLakeAnalyticsFirewallRuleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "start_ip_address", startIP), - resource.TestCheckResourceAttr(data.ResourceName, "end_ip_address", endIP), - ), - }, - { - Config: testAccAzureRMDataLakeAnalyticsFirewallRule_requiresImport(data, startIP, endIP), - ExpectError: acceptance.RequiresImportError("azurerm_data_lake_analytics_firewall_rule"), - }, - }, - }) -} - -func TestAccAzureRMDataLakeAnalyticsFirewallRule_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_lake_analytics_firewall_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataLakeAnalyticsFirewallRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataLakeAnalyticsFirewallRule_basic(data, "1.1.1.1", "2.2.2.2"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataLakeAnalyticsFirewallRuleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "start_ip_address", "1.1.1.1"), - resource.TestCheckResourceAttr(data.ResourceName, "end_ip_address", "2.2.2.2"), - ), - }, - { - Config: testAccAzureRMDataLakeAnalyticsFirewallRule_basic(data, "2.2.2.2", "3.3.3.3"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataLakeAnalyticsFirewallRuleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "start_ip_address", "2.2.2.2"), - resource.TestCheckResourceAttr(data.ResourceName, "end_ip_address", "3.3.3.3"), - ), - }, - }, - }) -} - -func TestAccAzureRMDataLakeAnalyticsFirewallRule_azureServices(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_lake_analytics_firewall_rule", "test") 
- azureServicesIP := "0.0.0.0" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataLakeAnalyticsFirewallRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataLakeAnalyticsFirewallRule_basic(data, azureServicesIP, azureServicesIP), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataLakeAnalyticsFirewallRuleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "start_ip_address", azureServicesIP), - resource.TestCheckResourceAttr(data.ResourceName, "end_ip_address", azureServicesIP), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMDataLakeAnalyticsFirewallRuleExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Datalake.AnalyticsFirewallRulesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - firewallRuleName := rs.Primary.Attributes["name"] - accountName := rs.Primary.Attributes["account_name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for data lake store firewall rule: %s", firewallRuleName) - } - - resp, err := conn.Get(ctx, resourceGroup, accountName, firewallRuleName) - if err != nil { - return fmt.Errorf("Bad: Get on dataLakeAnalyticsFirewallRulesClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Date Lake Analytics Firewall Rule %q (Account %q / Resource Group: %q) does not exist", firewallRuleName, accountName, resourceGroup) - } - - return nil - } -} - -func testCheckAzureRMDataLakeAnalyticsFirewallRuleDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Datalake.AnalyticsFirewallRulesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_data_lake_analytics_firewall_rule" { - continue - } - - firewallRuleName := rs.Primary.Attributes["name"] - accountName := rs.Primary.Attributes["account_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, accountName, firewallRuleName) - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return nil - } - - return err - } - - return fmt.Errorf("Data Lake Analytics Firewall Rule still exists:\n%#v", resp) - } - - return nil -} - -func testAccAzureRMDataLakeAnalyticsFirewallRule_basic(data acceptance.TestData, startIP, endIP string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%[1]d" - location = "%[2]s" -} - -resource "azurerm_data_lake_store" "test" { - name = "acctest%[3]s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location -} - -resource "azurerm_data_lake_analytics_account" "test" { - name = "acctest%[3]s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - default_store_account_name = azurerm_data_lake_store.test.name -} - -resource 
"azurerm_data_lake_analytics_firewall_rule" "test" { - name = "acctest%[3]s" - account_name = azurerm_data_lake_analytics_account.test.name - resource_group_name = azurerm_resource_group.test.name - start_ip_address = "%[4]s" - end_ip_address = "%[5]s" -} -`, data.RandomInteger, data.Locations.Primary, strconv.Itoa(data.RandomInteger)[10:17], startIP, endIP) -} - -func testAccAzureRMDataLakeAnalyticsFirewallRule_requiresImport(data acceptance.TestData, startIP, endIP string) string { - template := testAccAzureRMDataLakeAnalyticsFirewallRule_basic(data, startIP, endIP) - return fmt.Sprintf(` -%s - -resource "azurerm_data_lake_analytics_firewall_rule" "import" { - name = azurerm_data_lake_analytics_firewall_rule.test.name - account_name = azurerm_data_lake_analytics_firewall_rule.test.account_name - resource_group_name = azurerm_data_lake_analytics_firewall_rule.test.resource_group_name - start_ip_address = azurerm_data_lake_analytics_firewall_rule.test.start_ip_address - end_ip_address = azurerm_data_lake_analytics_firewall_rule.test.end_ip_address -} -`, template) -} diff --git a/azurerm/internal/services/datalake/tests/data_lake_store_data_source_test.go b/azurerm/internal/services/datalake/tests/data_lake_store_data_source_test.go deleted file mode 100644 index 8d54974f1d34..000000000000 --- a/azurerm/internal/services/datalake/tests/data_lake_store_data_source_test.go +++ /dev/null @@ -1,99 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMDataLakeStore_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_data_lake_store", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceDataLakeStore_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataLakeStoreExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tier", "Consumption"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMDataLakeStore_tier(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_data_lake_store", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceDataLakeStore_tier(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "tier", "Commitment_1TB"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.hello", "world"), - ), - }, - }, - }) -} - -func testAccDataSourceDataLakeStore_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_data_lake_store" "test" { - name = "unlikely23exst2acct%s" - location = "%s" - resource_group_name = azurerm_resource_group.test.name -} - -data "azurerm_data_lake_store" "test" { - name = azurerm_data_lake_store.test.name - resource_group_name = azurerm_data_lake_store.test.resource_group_name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.Locations.Primary) -} - -func testAccDataSourceDataLakeStore_tier(data 
acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_data_lake_store" "test" { - name = "unlikely23exst2acct%s" - location = "%s" - tier = "Commitment_1TB" - resource_group_name = azurerm_resource_group.test.name - - tags = { - hello = "world" - } -} - -data "azurerm_data_lake_store" "test" { - name = azurerm_data_lake_store.test.name - resource_group_name = azurerm_data_lake_store.test.resource_group_name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.Locations.Primary) -} diff --git a/azurerm/internal/services/datalake/tests/data_lake_store_file_resource_test.go b/azurerm/internal/services/datalake/tests/data_lake_store_file_resource_test.go deleted file mode 100644 index cb50fa0e2627..000000000000 --- a/azurerm/internal/services/datalake/tests/data_lake_store_file_resource_test.go +++ /dev/null @@ -1,238 +0,0 @@ -package tests - -import ( - "fmt" - "io/ioutil" - "math/rand" - "net/http" - "os" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datalake" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestValidateAzureDataLakeStoreRemoteFilePath(t *testing.T) { - cases := []struct { - Value string - Errors int - }{ - { - Value: "bad", - Errors: 1, - }, - { - Value: "/good/file/path", - Errors: 0, - }, - } - - for _, tc := range cases { - _, errors := datalake.ValidateDataLakeStoreRemoteFilePath()(tc.Value, "unittest") - - if len(errors) != tc.Errors { - t.Fatalf("Expected validateDataLakeStoreRemoteFilePath to trigger '%d' errors for '%s' - got '%d'", tc.Errors, tc.Value, len(errors)) - } - } -} - -func TestAccAzureRMDataLakeStoreFile_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_lake_store_file", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataLakeStoreFileDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataLakeStoreFile_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataLakeStoreFileExists(data.ResourceName), - ), - }, - data.ImportStep("local_file_path"), - }, - }) -} - -func TestAccAzureRMDataLakeStoreFile_largefiles(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_lake_store_file", "test") - - // "large" in this context is anything greater than 4 megabytes - largeSize := 12 * 1024 * 1024 // 12 mb - bytes := make([]byte, largeSize) - rand.Read(bytes) // fill with random data - - tmpfile, err := ioutil.TempFile("", "azurerm-acc-datalake-file-large") - if err != nil { - t.Errorf("Unable to open a temporary file.") - } - defer os.Remove(tmpfile.Name()) - - if _, err := tmpfile.Write(bytes); err != nil { - t.Errorf("Unable to write to temporary file %q: %v", tmpfile.Name(), err) - } - if err := tmpfile.Close(); err != nil { - t.Errorf("Unable to close temporary file %q: %v", tmpfile.Name(), err) - } - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: 
acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataLakeStoreFileDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataLakeStoreFile_largefiles(data, tmpfile.Name()), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataLakeStoreFileExists(data.ResourceName), - ), - }, - data.ImportStep("local_file_path"), - }, - }) -} - -func TestAccAzureRMDataLakeStoreFile_requiresimport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_lake_store_file", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataLakeStoreFileDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataLakeStoreFile_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataLakeStoreFileExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMDataLakeStoreFile_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_data_lake_store_file"), - }, - }, - }) -} - -func testCheckAzureRMDataLakeStoreFileExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Datalake.StoreFilesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - remoteFilePath := rs.Primary.Attributes["remote_file_path"] - accountName := rs.Primary.Attributes["account_name"] - - resp, err := conn.GetFileStatus(ctx, accountName, remoteFilePath, utils.Bool(true)) - if err != nil { - return fmt.Errorf("Bad: Get on dataLakeStoreFileClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Date Lake Store File Rule %q (Account %q) does not exist", remoteFilePath, accountName) - } - - return nil - } -} - -func testCheckAzureRMDataLakeStoreFileDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Datalake.StoreFilesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_data_lake_store_file" { - continue - } - - remoteFilePath := rs.Primary.Attributes["remote_file_path"] - accountName := rs.Primary.Attributes["account_name"] - - resp, err := conn.GetFileStatus(ctx, accountName, remoteFilePath, utils.Bool(true)) - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return nil - } - - return err - } - - return fmt.Errorf("Data Lake Store File still exists:\n%#v", resp) - } - - return nil -} - -func testAccAzureRMDataLakeStoreFile_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_data_lake_store" "test" { - name = "unlikely23exst2acct%s" - resource_group_name = azurerm_resource_group.test.name - location = "%s" - firewall_state = "Disabled" -} - -resource "azurerm_data_lake_store_file" "test" { - remote_file_path = "/test/application_gateway_test.cer" - account_name = azurerm_data_lake_store.test.name - local_file_path = "./testdata/application_gateway_test.cer" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.Locations.Primary) -} - -func 
testAccAzureRMDataLakeStoreFile_largefiles(data acceptance.TestData, file string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_data_lake_store" "test" { - name = "unlikely23exst2acct%s" - resource_group_name = azurerm_resource_group.test.name - location = "%s" - firewall_state = "Disabled" -} - -resource "azurerm_data_lake_store_file" "test" { - remote_file_path = "/test/testAccAzureRMDataLakeStoreFile_largefiles.bin" - account_name = azurerm_data_lake_store.test.name - local_file_path = "%s" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.Locations.Primary, file) -} - -func testAccAzureRMDataLakeStoreFile_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMDataLakeStoreFile_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_data_lake_store_file" "import" { - remote_file_path = azurerm_data_lake_store_file.test.remote_file_path - account_name = azurerm_data_lake_store_file.test.account_name - local_file_path = "./testdata/application_gateway_test.cer" -} -`, template) -} diff --git a/azurerm/internal/services/datalake/tests/data_lake_store_firewall_rule_resource_test.go b/azurerm/internal/services/datalake/tests/data_lake_store_firewall_rule_resource_test.go deleted file mode 100644 index f57d5835c869..000000000000 --- a/azurerm/internal/services/datalake/tests/data_lake_store_firewall_rule_resource_test.go +++ /dev/null @@ -1,214 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "strconv" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMDataLakeStoreFirewallRule_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_lake_store_firewall_rule", "test") - startIP := "1.1.1.1" - endIP := "2.2.2.2" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataLakeStoreFirewallRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataLakeStoreFirewallRule_basic(data, startIP, endIP), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataLakeStoreFirewallRuleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "start_ip_address", startIP), - resource.TestCheckResourceAttr(data.ResourceName, "end_ip_address", endIP), - ), - }, - data.ImportStep(), - }, - }) -} - -// - -func TestAccAzureRMDataLakeStoreFirewallRule_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_lake_store_firewall_rule", "test") - startIP := "1.1.1.1" - endIP := "2.2.2.2" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataLakeStoreFirewallRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataLakeStoreFirewallRule_basic(data, startIP, endIP), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataLakeStoreFirewallRuleExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMDataLakeStoreFirewallRule_requiresImport(data, startIP, endIP), - ExpectError: 
acceptance.RequiresImportError("azurerm_data_lake_store_firewall_rule"), - }, - }, - }) -} - -func TestAccAzureRMDataLakeStoreFirewallRule_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_lake_store_firewall_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataLakeStoreFirewallRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataLakeStoreFirewallRule_basic(data, "1.1.1.1", "2.2.2.2"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataLakeStoreFirewallRuleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "start_ip_address", "1.1.1.1"), - resource.TestCheckResourceAttr(data.ResourceName, "end_ip_address", "2.2.2.2"), - ), - }, - { - Config: testAccAzureRMDataLakeStoreFirewallRule_basic(data, "2.2.2.2", "3.3.3.3"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataLakeStoreFirewallRuleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "start_ip_address", "2.2.2.2"), - resource.TestCheckResourceAttr(data.ResourceName, "end_ip_address", "3.3.3.3"), - ), - }, - }, - }) -} - -func TestAccAzureRMDataLakeStoreFirewallRule_azureServices(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_lake_store_firewall_rule", "test") - azureServicesIP := "0.0.0.0" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataLakeStoreFirewallRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataLakeStoreFirewallRule_basic(data, azureServicesIP, azureServicesIP), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataLakeStoreFirewallRuleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "start_ip_address", azureServicesIP), - resource.TestCheckResourceAttr(data.ResourceName, "end_ip_address", azureServicesIP), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMDataLakeStoreFirewallRuleExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Datalake.StoreFirewallRulesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - firewallRuleName := rs.Primary.Attributes["name"] - accountName := rs.Primary.Attributes["account_name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for data lake store firewall rule: %s", firewallRuleName) - } - - resp, err := conn.Get(ctx, resourceGroup, accountName, firewallRuleName) - if err != nil { - return fmt.Errorf("Bad: Get on dataLakeStoreFirewallRulesClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Date Lake Store Firewall Rule %q (Account %q / Resource Group: %q) does not exist", firewallRuleName, accountName, resourceGroup) - } - - return nil - } -} - -func testCheckAzureRMDataLakeStoreFirewallRuleDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Datalake.StoreFirewallRulesClient - ctx := 
acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_data_lake_store_firewall_rule" { - continue - } - - firewallRuleName := rs.Primary.Attributes["name"] - accountName := rs.Primary.Attributes["account_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, accountName, firewallRuleName) - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return nil - } - - return err - } - - return fmt.Errorf("Data Lake Store Firewall Rule still exists:\n%#v", resp) - } - - return nil -} - -func testAccAzureRMDataLakeStoreFirewallRule_basic(data acceptance.TestData, startIP, endIP string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_data_lake_store" "test" { - name = "acctest%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location -} - -resource "azurerm_data_lake_store_firewall_rule" "test" { - name = "acctest" - account_name = azurerm_data_lake_store.test.name - resource_group_name = azurerm_resource_group.test.name - start_ip_address = "%s" - end_ip_address = "%s" -} -`, data.RandomInteger, data.Locations.Primary, strconv.Itoa(data.RandomInteger)[2:17], startIP, endIP) -} - -func testAccAzureRMDataLakeStoreFirewallRule_requiresImport(data acceptance.TestData, startIP, endIP string) string { - template := testAccAzureRMDataLakeStoreFirewallRule_basic(data, startIP, endIP) - return fmt.Sprintf(` -%s - -resource "azurerm_data_lake_store_firewall_rule" "import" { - name = azurerm_data_lake_store_firewall_rule.test.name - account_name = azurerm_data_lake_store_firewall_rule.test.account_name - resource_group_name = azurerm_data_lake_store_firewall_rule.test.resource_group_name - start_ip_address = azurerm_data_lake_store_firewall_rule.test.start_ip_address - end_ip_address = azurerm_data_lake_store_firewall_rule.test.end_ip_address -} -`, template) -} diff --git a/azurerm/internal/services/datalake/tests/data_lake_store_resource_test.go b/azurerm/internal/services/datalake/tests/data_lake_store_resource_test.go deleted file mode 100644 index 317e5503d677..000000000000 --- a/azurerm/internal/services/datalake/tests/data_lake_store_resource_test.go +++ /dev/null @@ -1,365 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "strconv" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMDataLakeStore_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_lake_store", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataLakeStoreDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataLakeStore_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataLakeStoreExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tier", "Consumption"), - resource.TestCheckResourceAttr(data.ResourceName, "encryption_state", "Enabled"), - resource.TestCheckResourceAttr(data.ResourceName, 
"encryption_type", "ServiceManaged"), - ), - }, - data.ImportStep(), - }, - }) -} -func TestAccAzureRMDataLakeStore_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_lake_store", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataLakeStoreDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataLakeStore_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataLakeStoreExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMDataLakeStore_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_data_lake_store"), - }, - }, - }) -} - -func TestAccAzureRMDataLakeStore_tier(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_lake_store", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataLakeStoreDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataLakeStore_tier(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataLakeStoreExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tier", "Commitment_1TB"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDataLakeStore_encryptionDisabled(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_lake_store", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataLakeStoreDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataLakeStore_encryptionDisabled(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataLakeStoreExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "encryption_state", "Disabled"), - resource.TestCheckResourceAttr(data.ResourceName, "encryption_type", ""), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDataLakeStore_firewallUpdate(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_lake_store", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataLakeStoreDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataLakeStore_firewall(data, "Enabled", "Enabled"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataLakeStoreExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "firewall_state", "Enabled"), - resource.TestCheckResourceAttr(data.ResourceName, "firewall_allow_azure_ips", "Enabled"), - ), - }, - { - Config: testAccAzureRMDataLakeStore_firewall(data, "Enabled", "Disabled"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataLakeStoreExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "firewall_state", "Enabled"), - resource.TestCheckResourceAttr(data.ResourceName, "firewall_allow_azure_ips", "Disabled"), - ), - }, - { - Config: testAccAzureRMDataLakeStore_firewall(data, "Disabled", "Enabled"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataLakeStoreExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "firewall_state", "Disabled"), - resource.TestCheckResourceAttr(data.ResourceName, 
"firewall_allow_azure_ips", "Enabled"), - ), - }, - { - Config: testAccAzureRMDataLakeStore_firewall(data, "Disabled", "Disabled"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataLakeStoreExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "firewall_state", "Disabled"), - resource.TestCheckResourceAttr(data.ResourceName, "firewall_allow_azure_ips", "Disabled"), - ), - }, - }, - }) -} - -func TestAccAzureRMDataLakeStore_withTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_lake_store", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataLakeStoreDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataLakeStore_withTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataLakeStoreExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - ), - }, - { - Config: testAccAzureRMDataLakeStore_withTagsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataLakeStoreExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMDataLakeStoreExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Datalake.StoreAccountsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - accountName := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for data lake store: %s", accountName) - } - - resp, err := conn.Get(ctx, resourceGroup, accountName) - if err != nil { - return fmt.Errorf("Bad: Get on dataLakeStoreAccountClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Date Lake Store %q (resource group: %q) does not exist", accountName, resourceGroup) - } - - return nil - } -} - -func testCheckAzureRMDataLakeStoreDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Datalake.StoreAccountsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_data_lake_store" { - continue - } - - accountName := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, accountName) - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return nil - } - - return err - } - - return fmt.Errorf("Data Lake Store still exists:\n%#v", resp) - } - - return nil -} - -func testAccAzureRMDataLakeStore_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_data_lake_store" "test" { - name = "acctest%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location -} -`, data.RandomInteger, data.Locations.Primary, 
strconv.Itoa(data.RandomInteger)[2:17]) -} - -func testAccAzureRMDataLakeStore_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMDataLakeStore_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_data_lake_store" "import" { - name = azurerm_data_lake_store.test.name - resource_group_name = azurerm_data_lake_store.test.resource_group_name - location = azurerm_data_lake_store.test.location -} -`, template) -} - -func testAccAzureRMDataLakeStore_tier(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_data_lake_store" "test" { - name = "acctest%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - tier = "Commitment_1TB" -} -`, data.RandomInteger, data.Locations.Primary, strconv.Itoa(data.RandomInteger)[2:17]) -} - -func testAccAzureRMDataLakeStore_encryptionDisabled(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_data_lake_store" "test" { - name = "acctest%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - encryption_state = "Disabled" -} -`, data.RandomInteger, data.Locations.Primary, strconv.Itoa(data.RandomInteger)[2:17]) -} - -func testAccAzureRMDataLakeStore_firewall(data acceptance.TestData, firewallState string, firewallAllowAzureIPs string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_data_lake_store" "test" { - name = "acctest%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - firewall_state = "%s" - firewall_allow_azure_ips = "%s" -} -`, data.RandomInteger, data.Locations.Primary, strconv.Itoa(data.RandomInteger)[2:17], firewallState, firewallAllowAzureIPs) -} - -func testAccAzureRMDataLakeStore_withTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_data_lake_store" "test" { - name = "acctest%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - tags = { - environment = "Production" - cost_center = "MSFT" - } -} -`, data.RandomInteger, data.Locations.Primary, strconv.Itoa(data.RandomInteger)[2:17]) -} - -func testAccAzureRMDataLakeStore_withTagsUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_data_lake_store" "test" { - name = "acctest%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - tags = { - environment = "staging" - } -} -`, data.RandomInteger, data.Locations.Primary, strconv.Itoa(data.RandomInteger)[2:17]) -} diff --git a/azurerm/internal/services/datalake/validate/account_id.go b/azurerm/internal/services/datalake/validate/account_id.go new file mode 100644 index 000000000000..421aac1446bc --- /dev/null +++ b/azurerm/internal/services/datalake/validate/account_id.go @@ 
-0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datalake/parse" +) + +func AccountID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.AccountID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/datalake/validate/account_id_test.go b/azurerm/internal/services/datalake/validate/account_id_test.go new file mode 100644 index 000000000000..4f21fcf3a62c --- /dev/null +++ b/azurerm/internal/services/datalake/validate/account_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestAccountID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataLakeStore/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataLakeStore/accounts/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataLakeStore/accounts/account1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DATALAKESTORE/ACCOUNTS/ACCOUNT1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := AccountID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/datalake/validate/account_name.go b/azurerm/internal/services/datalake/validate/account_name.go new file mode 100644 index 000000000000..7252b683e98c --- /dev/null +++ b/azurerm/internal/services/datalake/validate/account_name.go @@ -0,0 +1,16 @@ +package validate + +import ( + "regexp" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" +) + +func AccountName() schema.SchemaValidateFunc { + // store and analytic account names are the same + return validation.StringMatch( + regexp.MustCompile(`\A([a-z0-9]{3,24})\z`), + "Name can only consist of lowercase letters and numbers and must be between 3 and 24 characters long", + ) +} diff --git a/azurerm/internal/services/datalake/validate/datalake.go b/azurerm/internal/services/datalake/validate/datalake.go deleted file mode 100644 index cce7711e7018..000000000000 --- a/azurerm/internal/services/datalake/validate/datalake.go +++ /dev/null @@ -1,23 +0,0 @@ -package 
validate - -import ( - "regexp" - - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" -) - -// store and analytic account names are the same -func DataLakeAccountName() schema.SchemaValidateFunc { - return validation.StringMatch( - regexp.MustCompile(`\A([a-z0-9]{3,24})\z`), - "Name can only consist of lowercase letters and numbers and must be between 3 and 24 characters long", - ) -} - -func DataLakeFirewallRuleName() schema.SchemaValidateFunc { - return validation.StringMatch( - regexp.MustCompile(`\A([-_a-zA-Z0-9]{3,50})\z`), - "Name can only consist of letters, numbers, underscores and hyphens and must be between 3 and 50 characters long", - ) -} diff --git a/azurerm/internal/services/datalake/validate/firewall_rule_name.go b/azurerm/internal/services/datalake/validate/firewall_rule_name.go new file mode 100644 index 000000000000..bbf6ccb3f5e9 --- /dev/null +++ b/azurerm/internal/services/datalake/validate/firewall_rule_name.go @@ -0,0 +1,15 @@ +package validate + +import ( + "regexp" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" +) + +func FirewallRuleName() schema.SchemaValidateFunc { + return validation.StringMatch( + regexp.MustCompile(`\A([-_a-zA-Z0-9]{3,50})\z`), + "Name can only consist of letters, numbers, underscores and hyphens and must be between 3 and 50 characters long", + ) +} diff --git a/azurerm/internal/services/datashare/data_share_account_data_source.go b/azurerm/internal/services/datashare/data_share_account_data_source.go index 2c8e6aa640af..c798fc47a66d 100644 --- a/azurerm/internal/services/datashare/data_share_account_data_source.go +++ b/azurerm/internal/services/datashare/data_share_account_data_source.go @@ -15,7 +15,7 @@ import ( func dataSourceDataShareAccount() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmDataShareAccountRead, + Read: dataSourceDataShareAccountRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -25,7 +25,7 @@ func dataSourceDataShareAccount() *schema.Resource { "name": { Type: schema.TypeString, Required: true, - ValidateFunc: validate.DataShareAccountName(), + ValidateFunc: validate.AccountName(), }, "resource_group_name": azure.SchemaResourceGroupNameForDataSource(), @@ -56,7 +56,7 @@ func dataSourceDataShareAccount() *schema.Resource { } } -func dataSourceArmDataShareAccountRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceDataShareAccountRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DataShare.AccountClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/datashare/data_share_account_data_source_test.go b/azurerm/internal/services/datashare/data_share_account_data_source_test.go new file mode 100644 index 000000000000..135cfabaf425 --- /dev/null +++ b/azurerm/internal/services/datashare/data_share_account_data_source_test.go @@ -0,0 +1,42 @@ +package datashare_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type DataShareAccountDataSource struct { +} + +func TestAccDataShareAccountDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, 
"data.azurerm_data_share_account", "test") + r := DataShareAccountDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.env").HasValue("Test"), + check.That(data.ResourceName).Key("identity.0.type").HasValue("SystemAssigned"), + check.That(data.ResourceName).Key("identity.0.principal_id").Exists(), + check.That(data.ResourceName).Key("identity.0.tenant_id").Exists(), + ), + }, + }) +} + +func (DataShareAccountDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_data_share_account" "test" { + name = azurerm_data_share_account.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, DataShareAccountResource{}.complete(data)) +} diff --git a/azurerm/internal/services/datashare/data_share_account_resource.go b/azurerm/internal/services/datashare/data_share_account_resource.go index c1828cac2c7b..bc96aed08103 100644 --- a/azurerm/internal/services/datashare/data_share_account_resource.go +++ b/azurerm/internal/services/datashare/data_share_account_resource.go @@ -20,12 +20,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmDataShareAccount() *schema.Resource { +func resourceDataShareAccount() *schema.Resource { return &schema.Resource{ - Create: resourceArmDataShareAccountCreate, - Read: resourceArmDataShareAccountRead, - Update: resourceArmDataShareAccountUpdate, - Delete: resourceArmDataShareAccountDelete, + Create: resourceDataShareAccountCreate, + Read: resourceDataShareAccountRead, + Update: resourceDataShareAccountUpdate, + Delete: resourceDataShareAccountDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -35,7 +35,7 @@ func resourceArmDataShareAccount() *schema.Resource { }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.DataShareAccountID(id) + _, err := parse.AccountID(id) return err }), @@ -44,7 +44,7 @@ func resourceArmDataShareAccount() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.DataShareAccountName(), + ValidateFunc: validate.AccountName(), }, "resource_group_name": azure.SchemaResourceGroupName(), @@ -82,7 +82,8 @@ func resourceArmDataShareAccount() *schema.Resource { }, } } -func resourceArmDataShareAccountCreate(d *schema.ResourceData, meta interface{}) error { + +func resourceDataShareAccountCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DataShare.AccountClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -127,15 +128,15 @@ func resourceArmDataShareAccountCreate(d *schema.ResourceData, meta interface{}) d.SetId(*resp.ID) - return resourceArmDataShareAccountRead(d, meta) + return resourceDataShareAccountRead(d, meta) } -func resourceArmDataShareAccountRead(d *schema.ResourceData, meta interface{}) error { +func resourceDataShareAccountRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DataShare.AccountClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DataShareAccountID(d.Id()) + id, err := parse.AccountID(d.Id()) if err != nil { return err } @@ -159,12 +160,12 @@ func resourceArmDataShareAccountRead(d *schema.ResourceData, meta interface{}) e return 
tags.FlattenAndSet(d, resp.Tags) } -func resourceArmDataShareAccountUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceDataShareAccountUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DataShare.AccountClient ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DataShareAccountID(d.Id()) + id, err := parse.AccountID(d.Id()) if err != nil { return err } @@ -180,15 +181,15 @@ func resourceArmDataShareAccountUpdate(d *schema.ResourceData, meta interface{}) return fmt.Errorf("updating DataShare Account %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } - return resourceArmDataShareAccountRead(d, meta) + return resourceDataShareAccountRead(d, meta) } -func resourceArmDataShareAccountDelete(d *schema.ResourceData, meta interface{}) error { +func resourceDataShareAccountDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DataShare.AccountClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DataShareAccountID(d.Id()) + id, err := parse.AccountID(d.Id()) if err != nil { return err } diff --git a/azurerm/internal/services/datashare/data_share_account_resource_test.go b/azurerm/internal/services/datashare/data_share_account_resource_test.go new file mode 100644 index 000000000000..26a19e6072ff --- /dev/null +++ b/azurerm/internal/services/datashare/data_share_account_resource_test.go @@ -0,0 +1,205 @@ +package datashare_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DataShareAccountResource struct { +} + +func TestAccDataShareAccount_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_account", "test") + r := DataShareAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("identity.0.principal_id").Exists(), + check.That(data.ResourceName).Key("identity.0.tenant_id").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataShareAccount_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_account", "test") + r := DataShareAccountResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccDataShareAccount_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_account", "test") + r := DataShareAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + 
check.That(data.ResourceName).Key("identity.0.principal_id").Exists(), + check.That(data.ResourceName).Key("identity.0.tenant_id").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataShareAccount_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_account", "test") + r := DataShareAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("identity.0.principal_id").Exists(), + check.That(data.ResourceName).Key("identity.0.tenant_id").Exists(), + ), + }, + data.ImportStep(), + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("identity.0.principal_id").Exists(), + check.That(data.ResourceName).Key("identity.0.tenant_id").Exists(), + ), + }, + data.ImportStep(), + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("identity.0.principal_id").Exists(), + check.That(data.ResourceName).Key("identity.0.tenant_id").Exists(), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("identity.0.principal_id").Exists(), + check.That(data.ResourceName).Key("identity.0.tenant_id").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func (t DataShareAccountResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.AccountID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.DataShare.AccountClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Data Share Account %q (resource group: %q): %+v", id.Name, id.ResourceGroup, err) + } + + return utils.Bool(resp.AccountProperties != nil), nil +} + +func (DataShareAccountResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-datashare-%d" + location = "%s" +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (r DataShareAccountResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_account" "test" { + name = "acctest-DSA-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + identity { + type = "SystemAssigned" + } +} +`, r.template(data), data.RandomInteger) +} + +func (r DataShareAccountResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_account" "import" { + name = azurerm_data_share_account.test.name + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + identity { + type = "SystemAssigned" + } +} +`, r.basic(data)) +} + +func (r DataShareAccountResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_account" "test" { + name = "acctest-DSA-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + identity { + type = "SystemAssigned" + } + + tags = { + env = "Test" + } +} +`, r.template(data), data.RandomInteger) 
+} + +func (r DataShareAccountResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_account" "test" { + name = "acctest-DSA-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + identity { + type = "SystemAssigned" + } + + tags = { + env = "Stage" + } +} +`, r.template(data), data.RandomInteger) +} diff --git a/azurerm/internal/services/datashare/data_share_data_source.go b/azurerm/internal/services/datashare/data_share_data_source.go new file mode 100644 index 000000000000..10c46c9f82a3 --- /dev/null +++ b/azurerm/internal/services/datashare/data_share_data_source.go @@ -0,0 +1,156 @@ +package datashare + +import ( + "fmt" + "time" + + "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func dataSourceDataShare() *schema.Resource { + return &schema.Resource{ + Read: dataSourceDataShareRead, + + Timeouts: &schema.ResourceTimeout{ + Read: schema.DefaultTimeout(5 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.ShareName(), + }, + + "account_id": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.AccountID, + }, + + "kind": { + Type: schema.TypeString, + Computed: true, + }, + + "description": { + Type: schema.TypeString, + Computed: true, + }, + + "snapshot_schedule": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Computed: true, + }, + + "recurrence": { + Type: schema.TypeString, + Computed: true, + }, + + "start_time": { + Type: schema.TypeString, + Computed: true, + }, + }, + }, + }, + + "terms": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} + +func dataSourceDataShareRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.SharesClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + syncClient := meta.(*clients.Client).DataShare.SynchronizationClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + accountId, err := parse.AccountID(d.Get("account_id").(string)) + if err != nil { + return err + } + + resp, err := client.Get(ctx, accountId.ResourceGroup, accountId.Name, name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("DataShare %q (Account %q / Resource Group %q) was not found", name, accountId.Name, accountId.ResourceGroup) + } + return fmt.Errorf("retrieving DataShare %q (Account %q / Resource Group %q): %+v", name, accountId.Name, accountId.ResourceGroup, err) + } + + dataShareId := parse.NewShareID(subscriptionId, accountId.ResourceGroup, accountId.Name, name).ID() + d.SetId(dataShareId) + + d.Set("name", name) + d.Set("account_id", accountId.ID()) + + if props := resp.ShareProperties; props != nil { + 
d.Set("description", props.Description) + d.Set("kind", string(props.ShareKind)) + d.Set("terms", props.Terms) + } + + settings := make([]datashare.ScheduledSynchronizationSetting, 0) + syncIterator, err := syncClient.ListByShareComplete(ctx, accountId.ResourceGroup, accountId.Name, name, "") + if err != nil { + return fmt.Errorf("listing Snapshot Schedules for Data Share %q (Account %q / Resource Group %q): %+v", name, accountId.Name, accountId.ResourceGroup, err) + } + for syncIterator.NotDone() { + item, ok := syncIterator.Value().AsScheduledSynchronizationSetting() + if ok && item != nil { + settings = append(settings, *item) + } + + if err := syncIterator.NextWithContext(ctx); err != nil { + return fmt.Errorf("retrieving next Snapshot Schedule: %+v", err) + } + } + + if err := d.Set("snapshot_schedule", flattenDataShareDataSourceSnapshotSchedule(settings)); err != nil { + return fmt.Errorf("setting `snapshot_schedule`: %+v", err) + } + + return nil +} + +func flattenDataShareDataSourceSnapshotSchedule(input []datashare.ScheduledSynchronizationSetting) []interface{} { + output := make([]interface{}, 0) + + for _, sync := range input { + name := "" + if sync.Name != nil { + name = *sync.Name + } + + startTime := "" + if sync.SynchronizationTime != nil && !sync.SynchronizationTime.IsZero() { + startTime = sync.SynchronizationTime.Format(time.RFC3339) + } + + output = append(output, map[string]interface{}{ + "name": name, + "recurrence": string(sync.RecurrenceInterval), + "start_time": startTime, + }) + } + + return output +} diff --git a/azurerm/internal/services/datashare/data_share_data_source_test.go b/azurerm/internal/services/datashare/data_share_data_source_test.go new file mode 100644 index 000000000000..ec43562517eb --- /dev/null +++ b/azurerm/internal/services/datashare/data_share_data_source_test.go @@ -0,0 +1,68 @@ +package datashare_test + +import ( + "fmt" + "testing" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type DataShareDataSource struct { +} + +func TestAccDataShareDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_data_share", "test") + r := DataShareDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("account_id").Exists(), + check.That(data.ResourceName).Key("kind").Exists(), + ), + }, + }) +} + +func TestAccDataShareDataSource_snapshotSchedule(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_data_share", "test") + r := DataShareDataSource{} + startTime := time.Now().Add(time.Hour * 7).Format(time.RFC3339) + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.snapshotSchedule(data, startTime), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("snapshot_schedule.0.name").Exists(), + check.That(data.ResourceName).Key("snapshot_schedule.0.recurrence").Exists(), + check.That(data.ResourceName).Key("snapshot_schedule.0.start_time").Exists(), + ), + }, + }) +} + +func (DataShareDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_data_share" "test" { + name = azurerm_data_share.test.name + account_id = azurerm_data_share_account.test.id +} +`, DataShareResource{}.basic(data)) +} + +func (DataShareDataSource) 
snapshotSchedule(data acceptance.TestData, startTime string) string { + return fmt.Sprintf(` +%s + +data "azurerm_data_share" "test" { + name = azurerm_data_share.test.name + account_id = azurerm_data_share_account.test.id +} +`, DataShareResource{}.snapshotSchedule(data, startTime)) +} diff --git a/azurerm/internal/services/datashare/data_share_dataset_blob_storage_data_source.go b/azurerm/internal/services/datashare/data_share_dataset_blob_storage_data_source.go new file mode 100644 index 000000000000..7747cdd4907f --- /dev/null +++ b/azurerm/internal/services/datashare/data_share_dataset_blob_storage_data_source.go @@ -0,0 +1,144 @@ +package datashare + +import ( + "fmt" + "time" + + "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/helper" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" +) + +func dataSourceDataShareDatasetBlobStorage() *schema.Resource { + return &schema.Resource{ + Read: dataSourceDataShareDatasetBlobStorageRead, + + Timeouts: &schema.ResourceTimeout{ + Read: schema.DefaultTimeout(5 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.DataSetName(), + }, + + "data_share_id": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.ShareID, + }, + + "container_name": { + Type: schema.TypeString, + Computed: true, + }, + + "storage_account": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Computed: true, + }, + + "resource_group_name": { + Type: schema.TypeString, + Computed: true, + }, + + "subscription_id": { + Type: schema.TypeString, + Computed: true, + }, + }, + }, + }, + + "file_path": { + Type: schema.TypeString, + Computed: true, + }, + + "folder_path": { + Type: schema.TypeString, + Computed: true, + }, + + "display_name": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} + +func dataSourceDataShareDatasetBlobStorageRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + shareID := d.Get("data_share_id").(string) + shareId, err := parse.ShareID(shareID) + if err != nil { + return err + } + + respModel, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) + if err != nil { + return fmt.Errorf("retrieving DataShare Blob Storage DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + + respId := helper.GetAzurermDataShareDataSetId(respModel.Value) + if respId == nil || *respId == "" { + return fmt.Errorf("empty or nil ID returned for reading DataShare Blob Storage DataSet %q (Resource Group %q / accountName %q / shareName %q)", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) + } + + d.SetId(*respId) + d.Set("name", 
name) + d.Set("data_share_id", shareID) + + switch resp := respModel.Value.(type) { + case datashare.BlobDataSet: + if props := resp.BlobProperties; props != nil { + d.Set("container_name", props.ContainerName) + if err := d.Set("storage_account", flattenAzureRmDataShareDataSetBlobStorageAccount(props.StorageAccountName, props.ResourceGroup, props.SubscriptionID)); err != nil { + return fmt.Errorf("setting `storage_account`: %+v", err) + } + d.Set("file_path", props.FilePath) + d.Set("display_name", props.DataSetID) + } + + case datashare.BlobFolderDataSet: + if props := resp.BlobFolderProperties; props != nil { + d.Set("container_name", props.ContainerName) + if err := d.Set("storage_account", flattenAzureRmDataShareDataSetBlobStorageAccount(props.StorageAccountName, props.ResourceGroup, props.SubscriptionID)); err != nil { + return fmt.Errorf("setting `storage_account`: %+v", err) + } + d.Set("folder_path", props.Prefix) + d.Set("display_name", props.DataSetID) + } + + case datashare.BlobContainerDataSet: + if props := resp.BlobContainerProperties; props != nil { + d.Set("container_name", props.ContainerName) + if err := d.Set("storage_account", flattenAzureRmDataShareDataSetBlobStorageAccount(props.StorageAccountName, props.ResourceGroup, props.SubscriptionID)); err != nil { + return fmt.Errorf("setting `storage_account`: %+v", err) + } + d.Set("display_name", props.DataSetID) + } + + default: + return fmt.Errorf("data share dataset %q (Resource Group %q / accountName %q / shareName %q) is not a blob storage dataset", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) + } + + return nil +} diff --git a/azurerm/internal/services/datashare/data_share_dataset_blob_storage_data_source_test.go b/azurerm/internal/services/datashare/data_share_dataset_blob_storage_data_source_test.go new file mode 100644 index 000000000000..048a8473eaa8 --- /dev/null +++ b/azurerm/internal/services/datashare/data_share_dataset_blob_storage_data_source_test.go @@ -0,0 +1,44 @@ +package datashare_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type DataShareDatasetBlobStorageDataSource struct { +} + +func TestAccDataShareDatasetBlobStorageDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_data_share_dataset_blob_storage", "test") + r := DataShareDatasetBlobStorageDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(DataShareDataSetBlobStorageResource{}), + check.That(data.ResourceName).Key("container_name").Exists(), + check.That(data.ResourceName).Key("storage_account.0.name").Exists(), + check.That(data.ResourceName).Key("storage_account.0.resource_group_name").Exists(), + check.That(data.ResourceName).Key("storage_account.0.subscription_id").Exists(), + check.That(data.ResourceName).Key("file_path").Exists(), + check.That(data.ResourceName).Key("display_name").Exists(), + ), + }, + }) +} + +func (DataShareDatasetBlobStorageDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_data_share_dataset_blob_storage" "test" { + name = azurerm_data_share_dataset_blob_storage.test.name + data_share_id = azurerm_data_share_dataset_blob_storage.test.data_share_id +} +`, 
DataShareDataSetBlobStorageResource{}.basicFile(data)) +} diff --git a/azurerm/internal/services/datashare/data_share_dataset_blob_storage_resource.go b/azurerm/internal/services/datashare/data_share_dataset_blob_storage_resource.go new file mode 100644 index 000000000000..33f5e6fec017 --- /dev/null +++ b/azurerm/internal/services/datashare/data_share_dataset_blob_storage_resource.go @@ -0,0 +1,300 @@ +package datashare + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + azValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/helper" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/storage" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceDataShareDataSetBlobStorage() *schema.Resource { + return &schema.Resource{ + Create: resourceDataShareDataSetBlobStorageCreate, + Read: resourceDataShareDataSetBlobStorageRead, + Delete: resourceDataShareDataSetBlobStorageDelete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.DataSetID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DataSetName(), + }, + + "data_share_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.ShareID, + }, + + "container_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azValidate.StorageContainerName, + }, + + "storage_account": { + Type: schema.TypeList, + Required: true, + ForceNew: true, + MaxItems: 1, + MinItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: storage.ValidateStorageAccountName, + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "subscription_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.IsUUID, + }, + }, + }, + }, + + "file_path": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + ConflictsWith: []string{"folder_path"}, + }, + + "folder_path": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + ConflictsWith: []string{"file_path"}, + }, 
+
+			"display_name": {
+				Type:     schema.TypeString,
+				Computed: true,
+			},
+		},
+	}
+}
+
+func resourceDataShareDataSetBlobStorageCreate(d *schema.ResourceData, meta interface{}) error {
+	client := meta.(*clients.Client).DataShare.DataSetClient
+	ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d)
+	defer cancel()
+
+	name := d.Get("name").(string)
+	shareId, err := parse.ShareID(d.Get("data_share_id").(string))
+	if err != nil {
+		return err
+	}
+
+	existing, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name)
+	if err != nil {
+		if !utils.ResponseWasNotFound(existing.Response) {
+			return fmt.Errorf("checking for presence of existing DataShare Blob Storage DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err)
+		}
+	}
+	existingId := helper.GetAzurermDataShareDataSetId(existing.Value)
+	if existingId != nil && *existingId != "" {
+		return tf.ImportAsExistsError("azurerm_data_share_dataset_blob_storage", *existingId)
+	}
+
+	var dataSet datashare.BasicDataSet
+	if filePath, ok := d.GetOk("file_path"); ok {
+		dataSet = datashare.BlobDataSet{
+			Kind: datashare.KindBlob,
+			BlobProperties: &datashare.BlobProperties{
+				ContainerName:      utils.String(d.Get("container_name").(string)),
+				StorageAccountName: utils.String(d.Get("storage_account.0.name").(string)),
+				ResourceGroup:      utils.String(d.Get("storage_account.0.resource_group_name").(string)),
+				SubscriptionID:     utils.String(d.Get("storage_account.0.subscription_id").(string)),
+				FilePath:           utils.String(filePath.(string)),
+			},
+		}
+	} else if folderPath, ok := d.GetOk("folder_path"); ok {
+		dataSet = datashare.BlobFolderDataSet{
+			Kind: datashare.KindBlobFolder,
+			BlobFolderProperties: &datashare.BlobFolderProperties{
+				ContainerName:      utils.String(d.Get("container_name").(string)),
+				StorageAccountName: utils.String(d.Get("storage_account.0.name").(string)),
+				ResourceGroup:      utils.String(d.Get("storage_account.0.resource_group_name").(string)),
+				SubscriptionID:     utils.String(d.Get("storage_account.0.subscription_id").(string)),
+				Prefix:             utils.String(folderPath.(string)),
+			},
+		}
+	} else {
+		dataSet = datashare.BlobContainerDataSet{
+			Kind: datashare.KindContainer,
+			BlobContainerProperties: &datashare.BlobContainerProperties{
+				ContainerName:      utils.String(d.Get("container_name").(string)),
+				StorageAccountName: utils.String(d.Get("storage_account.0.name").(string)),
+				ResourceGroup:      utils.String(d.Get("storage_account.0.resource_group_name").(string)),
+				SubscriptionID:     utils.String(d.Get("storage_account.0.subscription_id").(string)),
+			},
+		}
+	}
+
+	if _, err := client.Create(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name, dataSet); err != nil {
+		return fmt.Errorf("creating DataShare Blob Storage DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err)
+	}
+
+	resp, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name)
+	if err != nil {
+		return fmt.Errorf("retrieving DataShare Blob Storage DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err)
+	}
+
+	respId := helper.GetAzurermDataShareDataSetId(resp.Value)
+	if respId == nil || *respId == "" {
+		return fmt.Errorf("empty or nil ID returned for DataShare Blob Storage DataSet %q (Resource Group %q / accountName %q / shareName %q)", name,
shareId.ResourceGroup, shareId.AccountName, shareId.Name) + } + + d.SetId(*respId) + return resourceDataShareDataSetBlobStorageRead(d, meta) +} + +func resourceDataShareDataSetBlobStorageRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + shareClient := meta.(*clients.Client).DataShare.SharesClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DataSetID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[INFO] DataShare %q does not exist - removing from state", d.Id()) + d.SetId("") + return nil + } + return fmt.Errorf("retrieving DataShare Blob Storage DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) + } + + d.Set("name", id.Name) + shareResp, err := shareClient.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName) + if err != nil { + return fmt.Errorf("retrieving DataShare %q (Resource Group %q / accountName %q): %+v", id.ShareName, id.ResourceGroup, id.AccountName, err) + } + if shareResp.ID == nil || *shareResp.ID == "" { + return fmt.Errorf("empty or nil ID returned for DataShare %q (Resource Group %q / accountName %q)", id.ShareName, id.ResourceGroup, id.AccountName) + } + + d.Set("data_share_id", shareResp.ID) + + switch resp := resp.Value.(type) { + case datashare.BlobDataSet: + if props := resp.BlobProperties; props != nil { + d.Set("container_name", props.ContainerName) + if err := d.Set("storage_account", flattenAzureRmDataShareDataSetBlobStorageAccount(props.StorageAccountName, props.ResourceGroup, props.SubscriptionID)); err != nil { + return fmt.Errorf("setting `storage_account`: %+v", err) + } + d.Set("file_path", props.FilePath) + d.Set("display_name", props.DataSetID) + } + + case datashare.BlobFolderDataSet: + if props := resp.BlobFolderProperties; props != nil { + d.Set("container_name", props.ContainerName) + if err := d.Set("storage_account", flattenAzureRmDataShareDataSetBlobStorageAccount(props.StorageAccountName, props.ResourceGroup, props.SubscriptionID)); err != nil { + return fmt.Errorf("setting `storage_account`: %+v", err) + } + d.Set("folder_path", props.Prefix) + d.Set("display_name", props.DataSetID) + } + + case datashare.BlobContainerDataSet: + if props := resp.BlobContainerProperties; props != nil { + d.Set("container_name", props.ContainerName) + if err := d.Set("storage_account", flattenAzureRmDataShareDataSetBlobStorageAccount(props.StorageAccountName, props.ResourceGroup, props.SubscriptionID)); err != nil { + return fmt.Errorf("setting `storage_account`: %+v", err) + } + d.Set("display_name", props.DataSetID) + } + + default: + return fmt.Errorf("data share dataset %q (Resource Group %q / accountName %q / shareName %q) is not a blob storage dataset", id.Name, id.ResourceGroup, id.AccountName, id.ShareName) + } + + return nil +} + +func resourceDataShareDataSetBlobStorageDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DataSetID(d.Id()) + if err != nil { + return err + } + + future, err := client.Delete(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name) + if err != nil { + return 
fmt.Errorf("deleting DataShare Blob Storage DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for deletion of DataShare Blob Storage DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) + } + return nil +} + +func flattenAzureRmDataShareDataSetBlobStorageAccount(strName, strRG, strSubs *string) []interface{} { + var name, rg, subs string + if strName != nil { + name = *strName + } + + if strRG != nil { + rg = *strRG + } + + if strSubs != nil { + subs = *strSubs + } + + return []interface{}{ + map[string]interface{}{ + "name": name, + "resource_group_name": rg, + "subscription_id": subs, + }, + } +} diff --git a/azurerm/internal/services/datashare/data_share_dataset_blob_storage_resource_test.go b/azurerm/internal/services/datashare/data_share_dataset_blob_storage_resource_test.go new file mode 100644 index 000000000000..bb975910dd6e --- /dev/null +++ b/azurerm/internal/services/datashare/data_share_dataset_blob_storage_resource_test.go @@ -0,0 +1,243 @@ +package datashare_test + +import ( + "context" + "fmt" + "os" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DataShareDataSetBlobStorageResource struct { +} + +func TestAccDataShareDataSetBlobStorage_basicFile(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_blob_storage", "test") + r := DataShareDataSetBlobStorageResource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basicFile(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("display_name").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataShareDataSetBlobStorage_basicFolder(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_blob_storage", "test") + r := DataShareDataSetBlobStorageResource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basicFolder(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("display_name").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataShareDataSetBlobStorage_basicContainer(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_blob_storage", "test") + r := DataShareDataSetBlobStorageResource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basicContainer(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("display_name").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataShareDataSetBlobStorage_requiresImport(t *testing.T) { + data := 
acceptance.BuildTestData(t, "azurerm_data_share_dataset_blob_storage", "test")
+	r := DataShareDataSetBlobStorageResource{}
+
+	data.DataSourceTest(t, []resource.TestStep{
+		{
+			Config: r.basicFile(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		data.RequiresImportErrorStep(r.requiresImport),
+	})
+}
+
+func (t DataShareDataSetBlobStorageResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) {
+	id, err := parse.DataSetID(state.ID)
+	if err != nil {
+		return nil, err
+	}
+
+	resp, err := clients.DataShare.DataSetClient.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name)
+	if err != nil {
+		return nil, fmt.Errorf("retrieving Data Share Data Set %q (resource group: %q): %+v", id.Name, id.ResourceGroup, err)
+	}
+
+	switch resp := resp.Value.(type) {
+	case datashare.BlobDataSet:
+		return utils.Bool(resp.BlobProperties != nil), nil
+
+	case datashare.BlobFolderDataSet:
+		return utils.Bool(resp.BlobFolderProperties != nil), nil
+
+	case datashare.BlobContainerDataSet:
+		return utils.Bool(resp.BlobContainerProperties != nil), nil
+	}
+
+	return nil, fmt.Errorf("Data Share Data %q (Resource Group %q / accountName %q / shareName %q) is not a blob storage dataset", id.Name, id.ResourceGroup, id.AccountName, id.ShareName)
+}
+
+func (DataShareDataSetBlobStorageResource) template(data acceptance.TestData) string {
+	return fmt.Sprintf(`
+provider "azurerm" {
+  features {}
+}
+
+provider "azuread" {
+}
+
+resource "azurerm_resource_group" "test" {
+  name     = "acctestRG-datashare-%[1]d"
+  location = "%[2]s"
+}
+
+resource "azurerm_data_share_account" "test" {
+  name                = "acctest-DSA-%[1]d"
+  location            = azurerm_resource_group.test.location
+  resource_group_name = azurerm_resource_group.test.name
+  identity {
+    type = "SystemAssigned"
+  }
+}
+
+resource "azurerm_data_share" "test" {
+  name       = "acctest_DS_%[1]d"
+  account_id = azurerm_data_share_account.test.id
+  kind       = "CopyBased"
+}
+
+resource "azurerm_storage_account" "test" {
+  name                     = "acctest%[3]d"
+  resource_group_name      = azurerm_resource_group.test.name
+  location                 = azurerm_resource_group.test.location
+  account_tier             = "Standard"
+  account_replication_type = "RAGRS"
+  allow_blob_public_access = true
+}
+
+resource "azurerm_storage_container" "test" {
+  name                  = "acctest-sc-%[1]d"
+  storage_account_name  = azurerm_storage_account.test.name
+  container_access_type = "container"
+}
+
+data "azuread_service_principal" "test" {
+  display_name = azurerm_data_share_account.test.name
+}
+
+resource "azurerm_role_assignment" "test" {
+  scope                = azurerm_storage_account.test.id
+  role_definition_name = "Storage Blob Data Reader"
+  principal_id         = data.azuread_service_principal.test.object_id
+}
+`, data.RandomInteger, data.Locations.Primary, data.RandomIntOfLength(12))
+}
+
+func (r DataShareDataSetBlobStorageResource) basicFile(data acceptance.TestData) string {
+	return fmt.Sprintf(`
+%[1]s
+
+resource "azurerm_data_share_dataset_blob_storage" "test" {
+  name           = "acctest-DSDSBS-file-%[2]d"
+  data_share_id  = azurerm_data_share.test.id
+  container_name = azurerm_storage_container.test.name
+  storage_account {
+    name                = azurerm_storage_account.test.name
+    resource_group_name = azurerm_storage_account.test.resource_group_name
+    subscription_id     = "%[3]s"
+  }
+  file_path = "myfile.txt"
+  depends_on = [
+    azurerm_role_assignment.test,
+  ]
+}
+`, r.template(data), data.RandomInteger, os.Getenv("ARM_SUBSCRIPTION_ID"))
+}
+
+func (r
DataShareDataSetBlobStorageResource) basicFolder(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_data_share_dataset_blob_storage" "test" { + name = "acctest-DSDSBS-folder-%[2]d" + data_share_id = azurerm_data_share.test.id + container_name = azurerm_storage_container.test.name + storage_account { + name = azurerm_storage_account.test.name + resource_group_name = azurerm_storage_account.test.resource_group_name + subscription_id = "%[3]s" + } + folder_path = "test" + depends_on = [ + azurerm_role_assignment.test, + ] +} +`, r.template(data), data.RandomInteger, os.Getenv("ARM_SUBSCRIPTION_ID")) +} + +func (r DataShareDataSetBlobStorageResource) basicContainer(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_data_share_dataset_blob_storage" "test" { + name = "acctest-DSDSBS-folder-%[2]d" + data_share_id = azurerm_data_share.test.id + container_name = azurerm_storage_container.test.name + storage_account { + name = azurerm_storage_account.test.name + resource_group_name = azurerm_storage_account.test.resource_group_name + subscription_id = "%[3]s" + } + depends_on = [ + azurerm_role_assignment.test, + ] +} +`, r.template(data), data.RandomInteger, os.Getenv("ARM_SUBSCRIPTION_ID")) +} + +func (r DataShareDataSetBlobStorageResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_dataset_blob_storage" "import" { + name = azurerm_data_share_dataset_blob_storage.test.name + data_share_id = azurerm_data_share.test.id + container_name = azurerm_data_share_dataset_blob_storage.test.container_name + storage_account { + name = azurerm_data_share_dataset_blob_storage.test.storage_account.0.name + resource_group_name = azurerm_data_share_dataset_blob_storage.test.storage_account.0.resource_group_name + subscription_id = azurerm_data_share_dataset_blob_storage.test.storage_account.0.subscription_id + } +} +`, r.basicFile(data)) +} diff --git a/azurerm/internal/services/datashare/data_share_dataset_data_lake_gen1_data_source.go b/azurerm/internal/services/datashare/data_share_dataset_data_lake_gen1_data_source.go new file mode 100644 index 000000000000..66b816d72bed --- /dev/null +++ b/azurerm/internal/services/datashare/data_share_dataset_data_lake_gen1_data_source.go @@ -0,0 +1,111 @@ +package datashare + +import ( + "fmt" + "time" + + "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/helper" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" +) + +func dataSourceDataShareDatasetDataLakeGen1() *schema.Resource { + return &schema.Resource{ + Read: dataSourceDataShareDatasetDataLakeGen1Read, + + Timeouts: &schema.ResourceTimeout{ + Read: schema.DefaultTimeout(5 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.DataSetName(), + }, + + "data_share_id": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.ShareID, + }, + + "data_lake_store_id": { + 
Type: schema.TypeString, + Computed: true, + }, + + "folder_path": { + Type: schema.TypeString, + Computed: true, + }, + + "file_name": { + Type: schema.TypeString, + Computed: true, + }, + + "display_name": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} + +func dataSourceDataShareDatasetDataLakeGen1Read(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + shareID := d.Get("data_share_id").(string) + shareId, err := parse.ShareID(shareID) + if err != nil { + return err + } + + respModel, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) + if err != nil { + return fmt.Errorf("retrieving DataShare Data Lake Gen1 DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + + respId := helper.GetAzurermDataShareDataSetId(respModel.Value) + if respId == nil || *respId == "" { + return fmt.Errorf("empty or nil ID returned for DataShare Data Lake Gen1 DataSet %q (Resource Group %q / accountName %q / shareName %q)", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) + } + + d.SetId(*respId) + d.Set("name", name) + d.Set("data_share_id", shareID) + + switch resp := respModel.Value.(type) { + case datashare.ADLSGen1FileDataSet: + if props := resp.ADLSGen1FileProperties; props != nil { + if props.SubscriptionID != nil && props.ResourceGroup != nil && props.AccountName != nil { + d.Set("data_lake_store_id", fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DataLakeStore/accounts/%s", *props.SubscriptionID, *props.ResourceGroup, *props.AccountName)) + } + d.Set("folder_path", props.FolderPath) + d.Set("file_name", props.FileName) + d.Set("display_name", props.DataSetID) + } + + case datashare.ADLSGen1FolderDataSet: + if props := resp.ADLSGen1FolderProperties; props != nil { + if props.SubscriptionID != nil && props.ResourceGroup != nil && props.AccountName != nil { + d.Set("data_lake_store_id", fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DataLakeStore/accounts/%s", *props.SubscriptionID, *props.ResourceGroup, *props.AccountName)) + } + d.Set("folder_path", props.FolderPath) + d.Set("display_name", props.DataSetID) + } + + default: + return fmt.Errorf("data share dataset %q (Resource Group %q / accountName %q / shareName %q) is not a datalake store gen1 dataset", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) + } + + return nil +} diff --git a/azurerm/internal/services/datashare/data_share_dataset_data_lake_gen1_data_source_test.go b/azurerm/internal/services/datashare/data_share_dataset_data_lake_gen1_data_source_test.go new file mode 100644 index 000000000000..35bbd0dbd50b --- /dev/null +++ b/azurerm/internal/services/datashare/data_share_dataset_data_lake_gen1_data_source_test.go @@ -0,0 +1,40 @@ +package datashare_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type DataShareDatasetDataLakeGen1DataSource struct { +} + +func TestAccDataShareDatasetDataLakeGen1DataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, 
"data.azurerm_data_share_dataset_data_lake_gen1", "test") + r := DataShareDatasetDataLakeGen1DataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("data_lake_store_id").Exists(), + check.That(data.ResourceName).Key("file_name").Exists(), + check.That(data.ResourceName).Key("display_name").Exists(), + ), + }, + }) +} + +func (DataShareDatasetDataLakeGen1DataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_data_share_dataset_data_lake_gen1" "test" { + name = azurerm_data_share_dataset_data_lake_gen1.test.name + data_share_id = azurerm_data_share_dataset_data_lake_gen1.test.data_share_id +} +`, DataShareDataSetDataLakeGen1Resource{}.basicFile(data)) +} diff --git a/azurerm/internal/services/datashare/data_share_dataset_data_lake_gen1_resource.go b/azurerm/internal/services/datashare/data_share_dataset_data_lake_gen1_resource.go new file mode 100644 index 000000000000..c928af853666 --- /dev/null +++ b/azurerm/internal/services/datashare/data_share_dataset_data_lake_gen1_resource.go @@ -0,0 +1,231 @@ +package datashare + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + dataLakeParse "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datalake/parse" + dataLakeValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datalake/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/helper" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceDataShareDataSetDataLakeGen1() *schema.Resource { + return &schema.Resource{ + Create: resourceDataShareDataSetDataLakeGen1Create, + Read: resourceDataShareDataSetDataLakeGen1Read, + Delete: resourceDataShareDataSetDataLakeGen1Delete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.DataSetID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DataSetName(), + }, + + "data_share_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.ShareID, + }, + + "data_lake_store_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: dataLakeValidate.AccountID, + }, + + "folder_path": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, 
+
+			"file_name": {
+				Type:         schema.TypeString,
+				Optional:     true,
+				ForceNew:     true,
+				ValidateFunc: validation.StringIsNotEmpty,
+			},
+
+			"display_name": {
+				Type:     schema.TypeString,
+				Computed: true,
+			},
+		},
+	}
+}
+
+func resourceDataShareDataSetDataLakeGen1Create(d *schema.ResourceData, meta interface{}) error {
+	client := meta.(*clients.Client).DataShare.DataSetClient
+	ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d)
+	defer cancel()
+
+	name := d.Get("name").(string)
+	shareId, err := parse.ShareID(d.Get("data_share_id").(string))
+	if err != nil {
+		return err
+	}
+
+	existing, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name)
+	if err != nil {
+		if !utils.ResponseWasNotFound(existing.Response) {
+			return fmt.Errorf("checking for presence of existing DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err)
+		}
+	}
+	existingId := helper.GetAzurermDataShareDataSetId(existing.Value)
+	if existingId != nil && *existingId != "" {
+		return tf.ImportAsExistsError("azurerm_data_share_dataset_data_lake_gen1", *existingId)
+	}
+
+	dataLakeStoreId, err := dataLakeParse.AccountID(d.Get("data_lake_store_id").(string))
+	if err != nil {
+		return err
+	}
+
+	var dataSet datashare.BasicDataSet
+
+	if fileName, ok := d.GetOk("file_name"); ok {
+		dataSet = datashare.ADLSGen1FileDataSet{
+			Kind: datashare.KindAdlsGen1File,
+			ADLSGen1FileProperties: &datashare.ADLSGen1FileProperties{
+				AccountName:    utils.String(dataLakeStoreId.Name),
+				ResourceGroup:  utils.String(dataLakeStoreId.ResourceGroup),
+				SubscriptionID: utils.String(dataLakeStoreId.SubscriptionId),
+				FolderPath:     utils.String(d.Get("folder_path").(string)),
+				FileName:       utils.String(fileName.(string)),
+			},
+		}
+	} else {
+		dataSet = datashare.ADLSGen1FolderDataSet{
+			Kind: datashare.KindAdlsGen1Folder,
+			ADLSGen1FolderProperties: &datashare.ADLSGen1FolderProperties{
+				AccountName:    utils.String(dataLakeStoreId.Name),
+				ResourceGroup:  utils.String(dataLakeStoreId.ResourceGroup),
+				SubscriptionID: utils.String(dataLakeStoreId.SubscriptionId),
+				FolderPath:     utils.String(d.Get("folder_path").(string)),
+			},
+		}
+	}
+
+	if _, err := client.Create(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name, dataSet); err != nil {
+		return fmt.Errorf("creating/updating DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err)
+	}
+
+	resp, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name)
+	if err != nil {
+		return fmt.Errorf("retrieving DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err)
+	}
+
+	respId := helper.GetAzurermDataShareDataSetId(resp.Value)
+	if respId == nil || *respId == "" {
+		return fmt.Errorf("empty or nil ID returned for DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q)", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name)
+	}
+
+	d.SetId(*respId)
+	return resourceDataShareDataSetDataLakeGen1Read(d, meta)
+}
+
+func resourceDataShareDataSetDataLakeGen1Read(d *schema.ResourceData, meta interface{}) error {
+	client := meta.(*clients.Client).DataShare.DataSetClient
+	shareClient := meta.(*clients.Client).DataShare.SharesClient
+	ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d)
+	defer cancel()
+
+
id, err := parse.DataSetID(d.Id())
+	if err != nil {
+		return err
+	}
+
+	resp, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name)
+	if err != nil {
+		if utils.ResponseWasNotFound(resp.Response) {
+			log.Printf("[INFO] DataShare %q does not exist - removing from state", d.Id())
+			d.SetId("")
+			return nil
+		}
+		return fmt.Errorf("retrieving DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err)
+	}
+	d.Set("name", id.Name)
+	shareResp, err := shareClient.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName)
+	if err != nil {
+		return fmt.Errorf("retrieving DataShare %q (Resource Group %q / accountName %q): %+v", id.ShareName, id.ResourceGroup, id.AccountName, err)
+	}
+	if shareResp.ID == nil || *shareResp.ID == "" {
+		return fmt.Errorf("reading ID of DataShare %q (Resource Group %q / accountName %q): ID is empty", id.ShareName, id.ResourceGroup, id.AccountName)
+	}
+	d.Set("data_share_id", shareResp.ID)
+
+	switch resp := resp.Value.(type) {
+	case datashare.ADLSGen1FileDataSet:
+		if props := resp.ADLSGen1FileProperties; props != nil {
+			if props.SubscriptionID != nil && props.ResourceGroup != nil && props.AccountName != nil {
+				d.Set("data_lake_store_id", fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DataLakeStore/accounts/%s", *props.SubscriptionID, *props.ResourceGroup, *props.AccountName))
+			}
+			d.Set("folder_path", props.FolderPath)
+			d.Set("file_name", props.FileName)
+			d.Set("display_name", props.DataSetID)
+		}
+
+	case datashare.ADLSGen1FolderDataSet:
+		if props := resp.ADLSGen1FolderProperties; props != nil {
+			if props.SubscriptionID != nil && props.ResourceGroup != nil && props.AccountName != nil {
+				d.Set("data_lake_store_id", fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DataLakeStore/accounts/%s", *props.SubscriptionID, *props.ResourceGroup, *props.AccountName))
+			}
+			d.Set("folder_path", props.FolderPath)
+			d.Set("display_name", props.DataSetID)
+		}
+
+	default:
+		return fmt.Errorf("data share dataset %q (Resource Group %q / accountName %q / shareName %q) is not a datalake store gen1 dataset", id.Name, id.ResourceGroup, id.AccountName, id.ShareName)
+	}
+
+	return nil
+}
+
+func resourceDataShareDataSetDataLakeGen1Delete(d *schema.ResourceData, meta interface{}) error {
+	client := meta.(*clients.Client).DataShare.DataSetClient
+	ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d)
+	defer cancel()
+
+	id, err := parse.DataSetID(d.Id())
+	if err != nil {
+		return err
+	}
+
+	future, err := client.Delete(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name)
+	if err != nil {
+		return fmt.Errorf("deleting DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err)
+	}
+
+	if err = future.WaitForCompletionRef(ctx, client.Client); err != nil {
+		return fmt.Errorf("waiting for deletion of DataShare Data Lake Gen1 DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err)
+	}
+
+	return nil
+}
diff --git a/azurerm/internal/services/datashare/data_share_dataset_data_lake_gen1_resource_test.go b/azurerm/internal/services/datashare/data_share_dataset_data_lake_gen1_resource_test.go
new file mode 100644
index 000000000000..93571c124466
--- /dev/null
+++ b/azurerm/internal/services/datashare/data_share_dataset_data_lake_gen1_resource_test.go
@@
-0,0 +1,191 @@ +package datashare_test + +import ( + "context" + "fmt" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DataShareDataSetDataLakeGen1Resource struct { +} + +func TestAccDataShareDataSetDataLakeGen1_basicFile(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_data_lake_gen1", "test") + r := DataShareDataSetDataLakeGen1Resource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicFile(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("display_name").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataShareDataSetDataLakeGen1_basicFolder(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_data_lake_gen1", "test") + r := DataShareDataSetDataLakeGen1Resource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicFolder(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("display_name").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataShareDataSetDataLakeGen1_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_data_lake_gen1", "test") + r := DataShareDataSetDataLakeGen1Resource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicFile(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (t DataShareDataSetDataLakeGen1Resource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.DataSetID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.DataShare.DataSetClient.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Data Share Data Set %q (resource group: %q): %+v", id.Name, id.ResourceGroup, err) + } + + switch resp := resp.Value.(type) { + case datashare.ADLSGen1FileDataSet: + return utils.Bool(resp.ADLSGen1FileProperties != nil), nil + + case datashare.ADLSGen1FolderDataSet: + return utils.Bool(resp.ADLSGen1FolderProperties != nil), nil + } + + return nil, fmt.Errorf("Data Share Data %q (Resource Group %q / accountName %q / shareName %q) is not a datalake store gen1 dataset", id.Name, id.ResourceGroup, id.AccountName, id.ShareName) +} + +func (DataShareDataSetDataLakeGen1Resource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +provider "azuread" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-datashare-%[1]d" + location = "%[2]s" +} + +resource "azurerm_data_share_account" "test" { + name = "acctest-DSA-%[1]d" + location = 
azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + identity { + type = "SystemAssigned" + } + + tags = { + env = "Test" + } +} + +resource "azurerm_data_share" "test" { + name = "acctest_DS_%[1]d" + account_id = azurerm_data_share_account.test.id + kind = "CopyBased" +} + +resource "azurerm_data_lake_store" "test" { + name = "acctestdls%[3]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + firewall_state = "Disabled" +} + +resource "azurerm_data_lake_store_file" "test" { + account_name = azurerm_data_lake_store.test.name + local_file_path = "./testdata/application_gateway_test.cer" + remote_file_path = "/test/application_gateway_test.cer" +} + +data "azuread_service_principal" "test" { + display_name = azurerm_data_share_account.test.name +} + +resource "azurerm_role_assignment" "test" { + scope = azurerm_data_lake_store.test.id + role_definition_name = "Owner" + principal_id = data.azuread_service_principal.test.object_id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomIntOfLength(12)) +} + +func (r DataShareDataSetDataLakeGen1Resource) basicFile(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_dataset_data_lake_gen1" "test" { + name = "acctest-DSDL1-%d" + data_share_id = azurerm_data_share.test.id + data_lake_store_id = azurerm_data_lake_store.test.id + file_name = "application_gateway_test.cer" + folder_path = "test" + depends_on = [ + azurerm_role_assignment.test, + ] +} +`, r.template(data), data.RandomInteger) +} + +func (r DataShareDataSetDataLakeGen1Resource) basicFolder(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_dataset_data_lake_gen1" "test" { + name = "acctest-DSDL1-%d" + data_share_id = azurerm_data_share.test.id + data_lake_store_id = azurerm_data_lake_store.test.id + folder_path = "test" + depends_on = [ + azurerm_role_assignment.test, + ] +} +`, r.template(data), data.RandomInteger) +} + +func (r DataShareDataSetDataLakeGen1Resource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s +resource "azurerm_data_share_dataset_data_lake_gen1" "import" { + name = azurerm_data_share_dataset_data_lake_gen1.test.name + data_share_id = azurerm_data_share.test.id + data_lake_store_id = azurerm_data_share_dataset_data_lake_gen1.test.data_lake_store_id + folder_path = azurerm_data_share_dataset_data_lake_gen1.test.folder_path +} +`, r.basicFile(data)) +} diff --git a/azurerm/internal/services/datashare/data_share_dataset_data_lake_gen2_data_source.go b/azurerm/internal/services/datashare/data_share_dataset_data_lake_gen2_data_source.go new file mode 100644 index 000000000000..c4c78bcf4a0a --- /dev/null +++ b/azurerm/internal/services/datashare/data_share_dataset_data_lake_gen2_data_source.go @@ -0,0 +1,120 @@ +package datashare + +import ( + "fmt" + "time" + + "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/helper" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" +) + +func dataSourceDataShareDatasetDataLakeGen2() *schema.Resource { + return &schema.Resource{ + Read: dataSourceDataShareDatasetDataLakeGen2Read, + + Timeouts: &schema.ResourceTimeout{ + Read: schema.DefaultTimeout(5 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.DataSetName(), + }, + + "share_id": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.ShareID, + }, + + "storage_account_id": { + Type: schema.TypeString, + Computed: true, + }, + + "file_system_name": { + Type: schema.TypeString, + Computed: true, + }, + + "file_path": { + Type: schema.TypeString, + Computed: true, + }, + + "folder_path": { + Type: schema.TypeString, + Computed: true, + }, + + "display_name": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} + +func dataSourceDataShareDatasetDataLakeGen2Read(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + shareID := d.Get("share_id").(string) + shareId, err := parse.ShareID(shareID) + if err != nil { + return err + } + + respModel, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) + if err != nil { + return fmt.Errorf("retrieving DataShare Data Lake Gen2 DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + + respId := helper.GetAzurermDataShareDataSetId(respModel.Value) + if respId == nil || *respId == "" { + return fmt.Errorf("empty or nil ID returned for DataShare Data Lake Gen2 DataSet %q (Resource Group %q / accountName %q / shareName %q)", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) + } + + d.SetId(*respId) + d.Set("name", name) + d.Set("share_id", shareID) + + switch resp := respModel.Value.(type) { + case datashare.ADLSGen2FileDataSet: + if props := resp.ADLSGen2FileProperties; props != nil { + d.Set("storage_account_id", fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Storage/storageAccounts/%s", *props.SubscriptionID, *props.ResourceGroup, *props.StorageAccountName)) + d.Set("file_system_name", props.FileSystem) + d.Set("file_path", props.FilePath) + d.Set("display_name", props.DataSetID) + } + + case datashare.ADLSGen2FolderDataSet: + if props := resp.ADLSGen2FolderProperties; props != nil { + d.Set("storage_account_id", fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Storage/storageAccounts/%s", *props.SubscriptionID, *props.ResourceGroup, *props.StorageAccountName)) + d.Set("file_system_name", props.FileSystem) + d.Set("folder_path", props.FolderPath) + d.Set("display_name", props.DataSetID) + } + + case datashare.ADLSGen2FileSystemDataSet: + if props := resp.ADLSGen2FileSystemProperties; props != nil { + d.Set("storage_account_id", fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Storage/storageAccounts/%s", *props.SubscriptionID, *props.ResourceGroup, *props.StorageAccountName)) + d.Set("file_system_name", props.FileSystem) + d.Set("display_name", props.DataSetID) + } + + default: + return fmt.Errorf("data share dataset %q (Resource Group %q / accountName %q / shareName %q) is not a datalake store gen2 dataset", name, shareId.ResourceGroup, 
shareId.AccountName, shareId.Name) + } + + return nil +} diff --git a/azurerm/internal/services/datashare/data_share_dataset_data_lake_gen2_data_source_test.go b/azurerm/internal/services/datashare/data_share_dataset_data_lake_gen2_data_source_test.go new file mode 100644 index 000000000000..c588d579b0ca --- /dev/null +++ b/azurerm/internal/services/datashare/data_share_dataset_data_lake_gen2_data_source_test.go @@ -0,0 +1,41 @@ +package datashare_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type DataShareDatasetDataLakeGen2DataSource struct { +} + +func TestAccDataShareDatasetDataLakeGen2DataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_data_share_dataset_data_lake_gen2", "test") + r := DataShareDatasetDataLakeGen2DataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("storage_account_id").Exists(), + check.That(data.ResourceName).Key("file_system_name").Exists(), + check.That(data.ResourceName).Key("file_path").Exists(), + check.That(data.ResourceName).Key("display_name").Exists(), + ), + }, + }) +} + +func (DataShareDatasetDataLakeGen2DataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_data_share_dataset_data_lake_gen2" "test" { + name = azurerm_data_share_dataset_data_lake_gen2.test.name + share_id = azurerm_data_share_dataset_data_lake_gen2.test.share_id +} +`, DataShareDataSetDataLakeGen2Resource{}.basicFile(data)) +} diff --git a/azurerm/internal/services/datashare/data_share_dataset_data_lake_gen2_resource.go b/azurerm/internal/services/datashare/data_share_dataset_data_lake_gen2_resource.go new file mode 100644 index 000000000000..1bf7bfbfea21 --- /dev/null +++ b/azurerm/internal/services/datashare/data_share_dataset_data_lake_gen2_resource.go @@ -0,0 +1,255 @@ +package datashare + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/helper" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" + storageParsers "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/storage/parse" + storageValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/storage/validate" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceDataShareDataSetDataLakeGen2() *schema.Resource { + return &schema.Resource{ + Create: resourceDataShareDataSetDataLakeGen2Create, + Read: 
resourceDataShareDataSetDataLakeGen2Read,
+		Delete: resourceDataShareDataSetDataLakeGen2Delete,
+
+		Timeouts: &schema.ResourceTimeout{
+			Create: schema.DefaultTimeout(30 * time.Minute),
+			Read:   schema.DefaultTimeout(5 * time.Minute),
+			Delete: schema.DefaultTimeout(30 * time.Minute),
+		},
+
+		Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error {
+			_, err := parse.DataSetID(id)
+			return err
+		}),
+
+		Schema: map[string]*schema.Schema{
+			"name": {
+				Type:         schema.TypeString,
+				Required:     true,
+				ForceNew:     true,
+				ValidateFunc: validate.DataSetName(),
+			},
+
+			"share_id": {
+				Type:         schema.TypeString,
+				Required:     true,
+				ForceNew:     true,
+				ValidateFunc: validate.ShareID,
+			},
+
+			"storage_account_id": {
+				Type:         schema.TypeString,
+				Required:     true,
+				ForceNew:     true,
+				ValidateFunc: storageValidate.StorageAccountID,
+			},
+
+			"file_system_name": {
+				Type:         schema.TypeString,
+				Required:     true,
+				ForceNew:     true,
+				ValidateFunc: validation.StringIsNotEmpty,
+			},
+
+			"file_path": {
+				Type:          schema.TypeString,
+				Optional:      true,
+				ForceNew:      true,
+				ValidateFunc:  validation.StringIsNotEmpty,
+				ConflictsWith: []string{"folder_path"},
+			},
+
+			"folder_path": {
+				Type:          schema.TypeString,
+				Optional:      true,
+				ForceNew:      true,
+				ValidateFunc:  validation.StringIsNotEmpty,
+				ConflictsWith: []string{"file_path"},
+			},
+
+			"display_name": {
+				Type:     schema.TypeString,
+				Computed: true,
+			},
+		},
+	}
+}
+
+func resourceDataShareDataSetDataLakeGen2Create(d *schema.ResourceData, meta interface{}) error {
+	client := meta.(*clients.Client).DataShare.DataSetClient
+	ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d)
+	defer cancel()
+
+	name := d.Get("name").(string)
+	shareId, err := parse.ShareID(d.Get("share_id").(string))
+	if err != nil {
+		return err
+	}
+
+	existing, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name)
+	if err != nil {
+		if !utils.ResponseWasNotFound(existing.Response) {
+			return fmt.Errorf("checking for presence of existing DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err)
+		}
+	}
+	existingId := helper.GetAzurermDataShareDataSetId(existing.Value)
+	if existingId != nil && *existingId != "" {
+		return tf.ImportAsExistsError("azurerm_data_share_dataset_data_lake_gen2", *existingId)
+	}
+
+	strId, err := storageParsers.StorageAccountID(d.Get("storage_account_id").(string))
+	if err != nil {
+		return err
+	}
+
+	var dataSet datashare.BasicDataSet
+
+	if filePath, ok := d.GetOk("file_path"); ok {
+		dataSet = datashare.ADLSGen2FileDataSet{
+			Kind: datashare.KindAdlsGen2File,
+			ADLSGen2FileProperties: &datashare.ADLSGen2FileProperties{
+				StorageAccountName: utils.String(strId.Name),
+				ResourceGroup:      utils.String(strId.ResourceGroup),
+				SubscriptionID:     utils.String(strId.SubscriptionId),
+				FileSystem:         utils.String(d.Get("file_system_name").(string)),
+				FilePath:           utils.String(filePath.(string)),
+			},
+		}
+	} else if folderPath, ok := d.GetOk("folder_path"); ok {
+		dataSet = datashare.ADLSGen2FolderDataSet{
+			Kind: datashare.KindAdlsGen2Folder,
+			ADLSGen2FolderProperties: &datashare.ADLSGen2FolderProperties{
+				StorageAccountName: utils.String(strId.Name),
+				ResourceGroup:      utils.String(strId.ResourceGroup),
+				SubscriptionID:     utils.String(strId.SubscriptionId),
+				FileSystem:         utils.String(d.Get("file_system_name").(string)),
+				FolderPath:         utils.String(folderPath.(string)),
+			},
+		}
+	} else {
+		dataSet =
datashare.ADLSGen2FileSystemDataSet{ + Kind: datashare.KindAdlsGen2FileSystem, + ADLSGen2FileSystemProperties: &datashare.ADLSGen2FileSystemProperties{ + StorageAccountName: utils.String(strId.Name), + ResourceGroup: utils.String(strId.ResourceGroup), + SubscriptionID: utils.String(strId.SubscriptionId), + FileSystem: utils.String(d.Get("file_system_name").(string)), + }, + } + } + + if _, err := client.Create(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name, dataSet); err != nil { + return fmt.Errorf("creating DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + + resp, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) + if err != nil { + return fmt.Errorf("retrieving DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + + respId := helper.GetAzurermDataShareDataSetId(resp.Value) + if respId == nil || *respId == "" { + return fmt.Errorf("empty or nil ID returned for DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q)", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) + } + + d.SetId(*respId) + return resourceDataShareDataSetDataLakeGen2Read(d, meta) +} + +func resourceDataShareDataSetDataLakeGen2Read(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + shareClient := meta.(*clients.Client).DataShare.SharesClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DataSetID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[INFO] DataShare %q does not exist - removing from state", d.Id()) + d.SetId("") + return nil + } + return fmt.Errorf("retrieving DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) + } + d.Set("name", id.Name) + shareResp, err := shareClient.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName) + if err != nil { + return fmt.Errorf("retrieving DataShare %q (Resource Group %q / accountName %q): %+v", id.ShareName, id.ResourceGroup, id.AccountName, err) + } + if shareResp.ID == nil || *shareResp.ID == "" { + return fmt.Errorf("empty or nil ID returned for DataShare %q (Resource Group %q / accountName %q)", id.ShareName, id.ResourceGroup, id.AccountName) + } + d.Set("share_id", shareResp.ID) + + switch resp := resp.Value.(type) { + case datashare.ADLSGen2FileDataSet: + if props := resp.ADLSGen2FileProperties; props != nil { + d.Set("storage_account_id", fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Storage/storageAccounts/%s", *props.SubscriptionID, *props.ResourceGroup, *props.StorageAccountName)) + d.Set("file_system_name", props.FileSystem) + d.Set("file_path", props.FilePath) + d.Set("display_name", props.DataSetID) + } + + case datashare.ADLSGen2FolderDataSet: + if props := resp.ADLSGen2FolderProperties; props != nil { + d.Set("storage_account_id", fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Storage/storageAccounts/%s", *props.SubscriptionID, *props.ResourceGroup, *props.StorageAccountName)) + d.Set("file_system_name", props.FileSystem) + d.Set("folder_path", 
props.FolderPath) + d.Set("display_name", props.DataSetID) + } + + case datashare.ADLSGen2FileSystemDataSet: + if props := resp.ADLSGen2FileSystemProperties; props != nil { + d.Set("storage_account_id", fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Storage/storageAccounts/%s", *props.SubscriptionID, *props.ResourceGroup, *props.StorageAccountName)) + d.Set("file_system_name", props.FileSystem) + d.Set("display_name", props.DataSetID) + } + + default: + return fmt.Errorf("data share dataset %q (Resource Group %q / accountName %q / shareName %q) is not a datalake store gen2 dataset", id.Name, id.ResourceGroup, id.AccountName, id.ShareName) + } + + return nil +} + +func resourceDataShareDataSetDataLakeGen2Delete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DataSetID(d.Id()) + if err != nil { + return err + } + + future, err := client.Delete(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name) + if err != nil { + return fmt.Errorf("deleting DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for deletion of DataShare Data Lake Gen2 DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) + } + + return nil +} diff --git a/azurerm/internal/services/datashare/data_share_dataset_data_lake_gen2_resource_test.go b/azurerm/internal/services/datashare/data_share_dataset_data_lake_gen2_resource_test.go new file mode 100644 index 000000000000..5c45fef91470 --- /dev/null +++ b/azurerm/internal/services/datashare/data_share_dataset_data_lake_gen2_resource_test.go @@ -0,0 +1,226 @@ +package datashare_test + +import ( + "context" + "fmt" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DataShareDataSetDataLakeGen2Resource struct { +} + +func TestAccDataShareDataSetDataLakeGen2_basicFile(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_data_lake_gen2", "test") + r := DataShareDataSetDataLakeGen2Resource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicFile(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("display_name").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataShareDataSetDataLakeGen2_basicFolder(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_data_lake_gen2", "test") + r := DataShareDataSetDataLakeGen2Resource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicFolder(data), + Check: 
resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("display_name").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataShareDataSetDataLakeGen2File_basicSystem(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_data_lake_gen2", "test") + r := DataShareDataSetDataLakeGen2Resource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicSystem(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("display_name").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataShareDataLakeGen2DataSet_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_data_lake_gen2", "test") + r := DataShareDataSetDataLakeGen2Resource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicFile(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (t DataShareDataSetDataLakeGen2Resource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.DataSetID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.DataShare.DataSetClient.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Data Share Data Set %q (resource group: %q): %+v", id.Name, id.ResourceGroup, err) + } + + switch resp := resp.Value.(type) { + case datashare.ADLSGen2FileDataSet: + return utils.Bool(resp.ADLSGen2FileProperties != nil), nil + + case datashare.ADLSGen2FolderDataSet: + return utils.Bool(resp.ADLSGen2FolderProperties != nil), nil + + case datashare.ADLSGen2FileSystemDataSet: + return utils.Bool(resp.ADLSGen2FileSystemProperties != nil), nil + } + + return nil, fmt.Errorf("Data Share Data %q (Resource Group %q / accountName %q / shareName %q) is not a datalake store gen2 dataset", id.Name, id.ResourceGroup, id.AccountName, id.ShareName) +} + +func (DataShareDataSetDataLakeGen2Resource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +provider "azuread" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-datashare-%[1]d" + location = "%[2]s" +} + +resource "azurerm_data_share_account" "test" { + name = "acctest-dsa-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + identity { + type = "SystemAssigned" + } +} + +resource "azurerm_data_share" "test" { + name = "acctest_ds_%[1]d" + account_id = azurerm_data_share_account.test.id + kind = "CopyBased" +} + +resource "azurerm_storage_account" "test" { + name = "accteststr%[3]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_kind = "BlobStorage" + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_data_lake_gen2_filesystem" "test" { + name = "acctest-%[1]d" + storage_account_id = azurerm_storage_account.test.id +} + +data "azuread_service_principal" "test" { + display_name = azurerm_data_share_account.test.name +} + +resource "azurerm_role_assignment" "test" { + scope = azurerm_storage_account.test.id + role_definition_name = "Storage Blob Data Reader" + principal_id = 
data.azuread_service_principal.test.object_id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomIntOfLength(12)) +} + +func (r DataShareDataSetDataLakeGen2Resource) basicFile(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_dataset_data_lake_gen2" "test" { + name = "acctest-dlds-%d" + share_id = azurerm_data_share.test.id + storage_account_id = azurerm_storage_account.test.id + file_system_name = azurerm_storage_data_lake_gen2_filesystem.test.name + file_path = "myfile.txt" + depends_on = [ + azurerm_role_assignment.test, + ] +} +`, r.template(data), data.RandomInteger) +} + +func (r DataShareDataSetDataLakeGen2Resource) basicFolder(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_dataset_data_lake_gen2" "test" { + name = "acctest-dlds-%d" + share_id = azurerm_data_share.test.id + storage_account_id = azurerm_storage_account.test.id + file_system_name = azurerm_storage_data_lake_gen2_filesystem.test.name + folder_path = "test" + depends_on = [ + azurerm_role_assignment.test, + ] +} +`, r.template(data), data.RandomInteger) +} + +func (r DataShareDataSetDataLakeGen2Resource) basicSystem(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_dataset_data_lake_gen2" "test" { + name = "acctest-dlds-%d" + share_id = azurerm_data_share.test.id + storage_account_id = azurerm_storage_account.test.id + file_system_name = azurerm_storage_data_lake_gen2_filesystem.test.name + depends_on = [ + azurerm_role_assignment.test, + ] +} +`, r.template(data), data.RandomInteger) +} + +func (r DataShareDataSetDataLakeGen2Resource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_dataset_data_lake_gen2" "import" { + name = azurerm_data_share_dataset_data_lake_gen2.test.name + share_id = azurerm_data_share.test.id + storage_account_id = azurerm_data_share_dataset_data_lake_gen2.test.storage_account_id + file_system_name = azurerm_data_share_dataset_data_lake_gen2.test.file_system_name + file_path = azurerm_data_share_dataset_data_lake_gen2.test.file_path +} +`, r.basicFile(data)) +} diff --git a/azurerm/internal/services/datashare/data_share_dataset_kusto_cluster_data_source.go b/azurerm/internal/services/datashare/data_share_dataset_kusto_cluster_data_source.go new file mode 100644 index 000000000000..07499348b770 --- /dev/null +++ b/azurerm/internal/services/datashare/data_share_dataset_kusto_cluster_data_source.go @@ -0,0 +1,91 @@ +package datashare + +import ( + "fmt" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/helper" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" +) + +func dataSourceDataShareDatasetKustoCluster() *schema.Resource { + return &schema.Resource{ + Read: dataSourceDataShareDatasetKustoClusterRead, + + Timeouts: &schema.ResourceTimeout{ + Read: schema.DefaultTimeout(5 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.DataSetName(), + }, + + "share_id": 
{ + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.ShareID, + }, + + "kusto_cluster_id": { + Type: schema.TypeString, + Computed: true, + }, + + "display_name": { + Type: schema.TypeString, + Computed: true, + }, + + "kusto_cluster_location": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} + +func dataSourceDataShareDatasetKustoClusterRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + shareID := d.Get("share_id").(string) + shareId, err := parse.ShareID(shareID) + if err != nil { + return err + } + + respModel, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) + if err != nil { + return fmt.Errorf("retrieving DataShare Kusto Cluster DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + + respId := helper.GetAzurermDataShareDataSetId(respModel.Value) + if respId == nil || *respId == "" { + return fmt.Errorf("empty or nil ID returned for DataShare Kusto Cluster DataSet %q (Resource Group %q / accountName %q / shareName %q)", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) + } + + d.SetId(*respId) + d.Set("name", name) + d.Set("share_id", shareID) + + resp, ok := respModel.Value.AsKustoClusterDataSet() + if !ok { + return fmt.Errorf("dataShare %q (Resource Group %q / accountName %q / shareName %q) is not kusto cluster dataset", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) + } + if props := resp.KustoClusterDataSetProperties; props != nil { + d.Set("kusto_cluster_id", props.KustoClusterResourceID) + d.Set("display_name", props.DataSetID) + d.Set("kusto_cluster_location", props.Location) + } + + return nil +} diff --git a/azurerm/internal/services/datashare/data_share_dataset_kusto_cluster_data_source_test.go b/azurerm/internal/services/datashare/data_share_dataset_kusto_cluster_data_source_test.go new file mode 100644 index 000000000000..13ad26e0b0d6 --- /dev/null +++ b/azurerm/internal/services/datashare/data_share_dataset_kusto_cluster_data_source_test.go @@ -0,0 +1,40 @@ +package datashare_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type DataShareKustoClusterDatasetDataSource struct { +} + +func TestAccDataShareKustoClusterDatasetDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_data_share_dataset_kusto_cluster", "test") + r := DataShareKustoClusterDatasetDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("kusto_cluster_id").Exists(), + check.That(data.ResourceName).Key("display_name").Exists(), + check.That(data.ResourceName).Key("kusto_cluster_location").Exists(), + ), + }, + }) +} + +func (DataShareKustoClusterDatasetDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_data_share_dataset_kusto_cluster" "test" { + name = azurerm_data_share_dataset_kusto_cluster.test.name + share_id = azurerm_data_share_dataset_kusto_cluster.test.share_id +} +`, 
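+		// The data source config is layered on top of the resource test's basic
+		// config, so it reads the Kusto Cluster data set created by that configuration.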
ShareKustoClusterDataSetResource{}.basic(data)) +} diff --git a/azurerm/internal/services/datashare/data_share_dataset_kusto_cluster_resource.go b/azurerm/internal/services/datashare/data_share_dataset_kusto_cluster_resource.go new file mode 100644 index 000000000000..a67b81d51472 --- /dev/null +++ b/azurerm/internal/services/datashare/data_share_dataset_kusto_cluster_resource.go @@ -0,0 +1,185 @@ +package datashare + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/helper" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceDataShareDataSetKustoCluster() *schema.Resource { + return &schema.Resource{ + Create: resourceDataShareDataSetKustoClusterCreate, + Read: resourceDataShareDataSetKustoClusterRead, + Delete: resourceDataShareDataSetKustoClusterDelete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.DataSetID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DataSetName(), + }, + + "share_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.ShareID, + }, + + "kusto_cluster_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateResourceID, + }, + + "display_name": { + Type: schema.TypeString, + Computed: true, + }, + + "kusto_cluster_location": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} + +func resourceDataShareDataSetKustoClusterCreate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + shareId, err := parse.ShareID(d.Get("share_id").(string)) + if err != nil { + return err + } + + existingModel, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) + if err != nil { + if !utils.ResponseWasNotFound(existingModel.Response) { + return fmt.Errorf("checking for presence of existing DataShare Kusto Cluster DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + } + existingId := helper.GetAzurermDataShareDataSetId(existingModel.Value) + if existingId != nil && *existingId != "" { + return 
tf.ImportAsExistsError("azurerm_data_share_dataset_kusto_cluster", *existingId) + } + + dataSet := datashare.KustoClusterDataSet{ + Kind: datashare.KindKustoCluster, + KustoClusterDataSetProperties: &datashare.KustoClusterDataSetProperties{ + KustoClusterResourceID: utils.String(d.Get("kusto_cluster_id").(string)), + }, + } + + if _, err := client.Create(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name, dataSet); err != nil { + return fmt.Errorf("creating DataShare Kusto Cluster DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + + respModel, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) + if err != nil { + return fmt.Errorf("retrieving DataShare Kusto Cluster DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + + respId := helper.GetAzurermDataShareDataSetId(respModel.Value) + if respId == nil || *respId == "" { + return fmt.Errorf("empty or nil ID returned for DataShare Kusto Cluster DataSet %q (Resource Group %q / accountName %q / shareName %q)", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) + } + + d.SetId(*respId) + return resourceDataShareDataSetKustoClusterRead(d, meta) +} + +func resourceDataShareDataSetKustoClusterRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + shareClient := meta.(*clients.Client).DataShare.SharesClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DataSetID(d.Id()) + if err != nil { + return err + } + + respModel, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name) + if err != nil { + if utils.ResponseWasNotFound(respModel.Response) { + log.Printf("[INFO] DataShare %q does not exist - removing from state", d.Id()) + d.SetId("") + return nil + } + return fmt.Errorf("retrieving DataShare Kusto Cluster DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) + } + + d.Set("name", id.Name) + shareResp, err := shareClient.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName) + if err != nil { + return fmt.Errorf("retrieving DataShare %q (Resource Group %q / accountName %q): %+v", id.ShareName, id.ResourceGroup, id.AccountName, err) + } + if shareResp.ID == nil || *shareResp.ID == "" { + return fmt.Errorf("empty or nil ID returned for DataShare %q (Resource Group %q / accountName %q)", id.ShareName, id.ResourceGroup, id.AccountName) + } + + d.Set("share_id", shareResp.ID) + + resp, ok := respModel.Value.AsKustoClusterDataSet() + if !ok { + return fmt.Errorf("dataShare dataset %q (Resource Group %q / accountName %q / shareName %q) is not kusto cluster dataset", id.Name, id.ResourceGroup, id.AccountName, id.ShareName) + } + if props := resp.KustoClusterDataSetProperties; props != nil { + d.Set("kusto_cluster_id", props.KustoClusterResourceID) + d.Set("display_name", props.DataSetID) + d.Set("kusto_cluster_location", props.Location) + } + + return nil +} + +func resourceDataShareDataSetKustoClusterDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DataSetID(d.Id()) + if err != nil { + return err + } + + 
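+	// Delete is a long-running operation; the request is issued below and the
+	// returned future is awaited so the data set is fully removed before returning.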
future, err := client.Delete(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name) + if err != nil { + return fmt.Errorf("deleting DataShare Kusto Cluster DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for deletion of DataShare Kusto Cluster DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) + } + + return nil +} diff --git a/azurerm/internal/services/datashare/data_share_dataset_kusto_cluster_resource_test.go b/azurerm/internal/services/datashare/data_share_dataset_kusto_cluster_resource_test.go new file mode 100644 index 000000000000..0573cc921668 --- /dev/null +++ b/azurerm/internal/services/datashare/data_share_dataset_kusto_cluster_resource_test.go @@ -0,0 +1,142 @@ +package datashare_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ShareKustoClusterDataSetResource struct { +} + +func TestAccDataShareKustoClusterDataSet_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_kusto_cluster", "test") + r := ShareKustoClusterDataSetResource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("display_name").Exists(), + check.That(data.ResourceName).Key("kusto_cluster_location").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataShareKustoClusterDataSet_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_kusto_cluster", "test") + r := ShareKustoClusterDataSetResource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (t ShareKustoClusterDataSetResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.DataSetID(state.ID) + if err != nil { + return nil, err + } + + respRaw, err := clients.DataShare.DataSetClient.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Data Share Data Set %q (resource group: %q): %+v", id.Name, id.ResourceGroup, err) + } + + resp, ok := respRaw.Value.AsKustoClusterDataSet() + if !ok { + return nil, fmt.Errorf("Data Share Data Set %q (Resource Group %q / accountName %q) is not Kusto Database DataSet", id.ShareName, id.ResourceGroup, id.AccountName) + } + + return utils.Bool(resp.KustoClusterDataSetProperties != nil), nil +} + +func (ShareKustoClusterDataSetResource) template(data acceptance.TestData) string { + return 
fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-datashare-%[1]d" + location = "%[2]s" +} + +resource "azurerm_data_share_account" "test" { + name = "acctest-DSA-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + identity { + type = "SystemAssigned" + } +} + +resource "azurerm_data_share" "test" { + name = "acctest_DS_%[1]d" + account_id = azurerm_data_share_account.test.id + kind = "InPlace" +} + +resource "azurerm_kusto_cluster" "test" { + name = "acctestkc%[3]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku { + name = "Dev(No SLA)_Standard_D11_v2" + capacity = 1 + } +} + +resource "azurerm_role_assignment" "test" { + scope = azurerm_kusto_cluster.test.id + role_definition_name = "Contributor" + principal_id = azurerm_data_share_account.test.identity.0.principal_id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomIntOfLength(12)) +} + +func (r ShareKustoClusterDataSetResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_dataset_kusto_cluster" "test" { + name = "acctest-DSKC-%d" + share_id = azurerm_data_share.test.id + kusto_cluster_id = azurerm_kusto_cluster.test.id + depends_on = [ + azurerm_role_assignment.test, + ] +} +`, r.template(data), data.RandomInteger) +} + +func (r ShareKustoClusterDataSetResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_dataset_kusto_cluster" "import" { + name = azurerm_data_share_dataset_kusto_cluster.test.name + share_id = azurerm_data_share.test.id + kusto_cluster_id = azurerm_kusto_cluster.test.id +} +`, r.basic(data)) +} diff --git a/azurerm/internal/services/datashare/data_share_dataset_kusto_database_data_source.go b/azurerm/internal/services/datashare/data_share_dataset_kusto_database_data_source.go new file mode 100644 index 000000000000..2f3ebac7e310 --- /dev/null +++ b/azurerm/internal/services/datashare/data_share_dataset_kusto_database_data_source.go @@ -0,0 +1,91 @@ +package datashare + +import ( + "fmt" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/helper" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" +) + +func dataSourceDataShareDatasetKustoDatabase() *schema.Resource { + return &schema.Resource{ + Read: dataSourceDataShareDatasetKustoDatabaseRead, + + Timeouts: &schema.ResourceTimeout{ + Read: schema.DefaultTimeout(5 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.DataSetName(), + }, + + "share_id": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.ShareID, + }, + + "kusto_database_id": { + Type: schema.TypeString, + Computed: true, + }, + + "display_name": { + Type: schema.TypeString, + Computed: true, + }, + + "kusto_cluster_location": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} + +func 
dataSourceDataShareDatasetKustoDatabaseRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + shareID := d.Get("share_id").(string) + shareId, err := parse.ShareID(shareID) + if err != nil { + return err + } + + respModel, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) + if err != nil { + return fmt.Errorf("retrieving DataShare Kusto Database DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + + respId := helper.GetAzurermDataShareDataSetId(respModel.Value) + if respId == nil || *respId == "" { + return fmt.Errorf("empty or nil ID returned for DataShare Kusto Database DataSet %q (Resource Group %q / accountName %q / shareName %q)", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) + } + + d.SetId(*respId) + d.Set("name", name) + d.Set("share_id", shareID) + + resp, ok := respModel.Value.AsKustoDatabaseDataSet() + if !ok { + return fmt.Errorf("dataShare %q (Resource Group %q / accountName %q / shareName %q) is not kusto database dataset", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) + } + if props := resp.KustoDatabaseDataSetProperties; props != nil { + d.Set("kusto_database_id", props.KustoDatabaseResourceID) + d.Set("display_name", props.DataSetID) + d.Set("kusto_cluster_location", props.Location) + } + + return nil +} diff --git a/azurerm/internal/services/datashare/data_share_dataset_kusto_database_data_source_test.go b/azurerm/internal/services/datashare/data_share_dataset_kusto_database_data_source_test.go new file mode 100644 index 000000000000..b93357301575 --- /dev/null +++ b/azurerm/internal/services/datashare/data_share_dataset_kusto_database_data_source_test.go @@ -0,0 +1,40 @@ +package datashare_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type DataShareDatasetKustoDatabaseDataSource struct { +} + +func TestAccDataShareDatasetKustoDatabaseDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_data_share_dataset_kusto_database", "test") + r := DataShareDatasetKustoDatabaseDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("kusto_database_id").Exists(), + check.That(data.ResourceName).Key("display_name").Exists(), + check.That(data.ResourceName).Key("kusto_cluster_location").Exists(), + ), + }, + }) +} + +func (DataShareDatasetKustoDatabaseDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_data_share_dataset_kusto_database" "test" { + name = azurerm_data_share_dataset_kusto_database.test.name + share_id = azurerm_data_share_dataset_kusto_database.test.share_id +} +`, DataShareDataSetKustoDatabaseResource{}.basic(data)) +} diff --git a/azurerm/internal/services/datashare/data_share_dataset_kusto_database_resource.go b/azurerm/internal/services/datashare/data_share_dataset_kusto_database_resource.go new file mode 100644 index 000000000000..619e77c1a470 --- /dev/null +++ 
b/azurerm/internal/services/datashare/data_share_dataset_kusto_database_resource.go @@ -0,0 +1,186 @@ +package datashare + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/helper" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceDataShareDataSetKustoDatabase() *schema.Resource { + return &schema.Resource{ + Create: resourceDataShareDataSetKustoDatabaseCreate, + Read: resourceDataShareDataSetKustoDatabaseRead, + Delete: resourceDataShareDataSetKustoDatabaseDelete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.DataSetID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DataSetName(), + }, + + "share_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.ShareID, + }, + + "kusto_database_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateResourceID, + }, + + "display_name": { + Type: schema.TypeString, + Computed: true, + }, + + "kusto_cluster_location": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} + +func resourceDataShareDataSetKustoDatabaseCreate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + shareId, err := parse.ShareID(d.Get("share_id").(string)) + if err != nil { + return err + } + + existing, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for present of existing DataShare Kusto Database DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + } + existingId := helper.GetAzurermDataShareDataSetId(existing.Value) + if existingId != nil && *existingId != "" { + return tf.ImportAsExistsError("azurerm_data_share_dataset_kusto_database", *existingId) + } + + dataSet := datashare.KustoDatabaseDataSet{ + Kind: datashare.KindKustoDatabase, + KustoDatabaseDataSetProperties: &datashare.KustoDatabaseDataSetProperties{ + KustoDatabaseResourceID: utils.String(d.Get("kusto_database_id").(string)), + }, + } + + if _, err := 
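+	// Create only submits the data set; the Get call that follows reads it back
+	// so the returned data set ID can be used as the Terraform resource ID.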
client.Create(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name, dataSet); err != nil { + return fmt.Errorf("creating DataShare Kusto Database DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + + resp, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) + if err != nil { + return fmt.Errorf("retrieving DataShare Kusto Database DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) + } + + respId := helper.GetAzurermDataShareDataSetId(resp.Value) + if respId == nil || *respId == "" { + return fmt.Errorf("empty or nil ID returned for DataShare Kusto Database DataSet %q (Resource Group %q / accountName %q / shareName %q)", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) + } + + d.SetId(*respId) + + return resourceDataShareDataSetKustoDatabaseRead(d, meta) +} + +func resourceDataShareDataSetKustoDatabaseRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + shareClient := meta.(*clients.Client).DataShare.SharesClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DataSetID(d.Id()) + if err != nil { + return err + } + + respModel, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name) + if err != nil { + if utils.ResponseWasNotFound(respModel.Response) { + log.Printf("[INFO] DataShare %q does not exist - removing from state", d.Id()) + d.SetId("") + return nil + } + return fmt.Errorf("retrieving DataShare Kusto Database DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) + } + + d.Set("name", id.Name) + shareResp, err := shareClient.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName) + if err != nil { + return fmt.Errorf("retrieving DataShare %q (Resource Group %q / accountName %q): %+v", id.ShareName, id.ResourceGroup, id.AccountName, err) + } + if shareResp.ID == nil || *shareResp.ID == "" { + return fmt.Errorf("empty or nil ID returned for DataShare %q (Resource Group %q / accountName %q)", id.ShareName, id.ResourceGroup, id.AccountName) + } + + d.Set("share_id", shareResp.ID) + + resp, ok := respModel.Value.AsKustoDatabaseDataSet() + if !ok { + return fmt.Errorf("dataShare %q (Resource Group %q / accountName %q) is not Kusto Database DataSet", id.ShareName, id.ResourceGroup, id.AccountName) + } + if props := resp.KustoDatabaseDataSetProperties; props != nil { + d.Set("kusto_database_id", props.KustoDatabaseResourceID) + d.Set("display_name", props.DataSetID) + d.Set("kusto_cluster_location", props.Location) + } + + return nil +} + +func resourceDataShareDataSetKustoDatabaseDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.DataSetClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DataSetID(d.Id()) + if err != nil { + return err + } + + future, err := client.Delete(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name) + if err != nil { + return fmt.Errorf("deleting DataShare Kusto Database DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != 
nil { + return fmt.Errorf("waiting for deletion of DataShare Kusto Database DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) + } + + return nil +} diff --git a/azurerm/internal/services/datashare/data_share_dataset_kusto_database_resource_test.go b/azurerm/internal/services/datashare/data_share_dataset_kusto_database_resource_test.go new file mode 100644 index 000000000000..1b181b8cecb6 --- /dev/null +++ b/azurerm/internal/services/datashare/data_share_dataset_kusto_database_resource_test.go @@ -0,0 +1,148 @@ +package datashare_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DataShareDataSetKustoDatabaseResource struct { +} + +func TestAccDataShareDataSetKustoDatabase_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_kusto_database", "test") + r := DataShareDataSetKustoDatabaseResource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("display_name").Exists(), + check.That(data.ResourceName).Key("kusto_cluster_location").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataShareDataSetKustoDatabase_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_kusto_database", "test") + r := DataShareDataSetKustoDatabaseResource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (t DataShareDataSetKustoDatabaseResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.DataSetID(state.ID) + if err != nil { + return nil, err + } + + respRaw, err := clients.DataShare.DataSetClient.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Data Share Data Set %q (resource group: %q): %+v", id.Name, id.ResourceGroup, err) + } + + resp, ok := respRaw.Value.AsKustoDatabaseDataSet() + if !ok { + return nil, fmt.Errorf("Data Share Data Set %q (Resource Group %q / accountName %q) is not Kusto Database DataSet", id.ShareName, id.ResourceGroup, id.AccountName) + } + + return utils.Bool(resp.KustoDatabaseDataSetProperties != nil), nil +} + +func (DataShareDataSetKustoDatabaseResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-datashare-%[1]d" + location = "%[2]s" +} + +resource "azurerm_data_share_account" "test" { + name = "acctest-DSA-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + identity { 
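+    # The role assignment further down grants this system-assigned identity
+    # Contributor on the Kusto cluster before the data set is created.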
+ type = "SystemAssigned" + } +} + +resource "azurerm_data_share" "test" { + name = "acctest_DS_%[1]d" + account_id = azurerm_data_share_account.test.id + kind = "InPlace" +} + +resource "azurerm_kusto_cluster" "test" { + name = "acctestkc%[3]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku { + name = "Dev(No SLA)_Standard_D11_v2" + capacity = 1 + } +} + +resource "azurerm_kusto_database" "test" { + name = "acctestKD-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_name = azurerm_kusto_cluster.test.name +} + +resource "azurerm_role_assignment" "test" { + scope = azurerm_kusto_cluster.test.id + role_definition_name = "Contributor" + principal_id = azurerm_data_share_account.test.identity.0.principal_id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomIntOfLength(12)) +} + +func (r DataShareDataSetKustoDatabaseResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_dataset_kusto_database" "test" { + name = "acctest-DSKD-%d" + share_id = azurerm_data_share.test.id + kusto_database_id = azurerm_kusto_database.test.id + depends_on = [ + azurerm_role_assignment.test, + ] +} +`, r.template(data), data.RandomInteger) +} + +func (r DataShareDataSetKustoDatabaseResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_dataset_kusto_database" "import" { + name = azurerm_data_share_dataset_kusto_database.test.name + share_id = azurerm_data_share.test.id + kusto_database_id = azurerm_kusto_database.test.id +} +`, r.basic(data)) +} diff --git a/azurerm/internal/services/datashare/data_share_resource.go b/azurerm/internal/services/datashare/data_share_resource.go new file mode 100644 index 000000000000..8d51dfe08dd3 --- /dev/null +++ b/azurerm/internal/services/datashare/data_share_resource.go @@ -0,0 +1,305 @@ +package datashare + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" + "github.com/Azure/go-autorest/autorest/date" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/suppress" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceDataShare() *schema.Resource { + return &schema.Resource{ + Create: resourceDataShareCreateUpdate, + Read: resourceDataShareRead, + Update: resourceDataShareCreateUpdate, + Delete: resourceDataShareDelete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: 
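+		// Imported IDs are validated as Data Share IDs before the import is accepted.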
azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.ShareID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.ShareName(), + }, + + "account_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.AccountID, + }, + + "kind": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringInSlice([]string{ + string(datashare.CopyBased), + string(datashare.InPlace), + }, false), + }, + + "description": { + Type: schema.TypeString, + Optional: true, + }, + + "snapshot_schedule": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.SnapshotScheduleName(), + }, + + "recurrence": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{ + string(datashare.Day), + string(datashare.Hour), + }, false), + }, + + "start_time": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.IsRFC3339Time, + DiffSuppressFunc: suppress.RFC3339Time, + }, + }, + }, + }, + + "terms": { + Type: schema.TypeString, + Optional: true, + }, + }, + } +} + +func resourceDataShareCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.SharesClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + syncClient := meta.(*clients.Client).DataShare.SynchronizationClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + accountId, err := parse.AccountID(d.Get("account_id").(string)) + if err != nil { + return err + } + + resourceId := parse.NewShareID(subscriptionId, accountId.ResourceGroup, accountId.Name, name).ID() + if d.IsNewResource() { + existing, err := client.Get(ctx, accountId.ResourceGroup, accountId.Name, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for present of existing DataShare %q (Resource Group %q / accountName %q): %+v", name, accountId.ResourceGroup, accountId.Name, err) + } + } + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_data_share", resourceId) + } + } + + share := datashare.Share{ + ShareProperties: &datashare.ShareProperties{ + ShareKind: datashare.ShareKind(d.Get("kind").(string)), + Description: utils.String(d.Get("description").(string)), + Terms: utils.String(d.Get("terms").(string)), + }, + } + + if _, err := client.Create(ctx, accountId.ResourceGroup, accountId.Name, name, share); err != nil { + return fmt.Errorf("creating Data Share %q (Account %q / Resource Group %q): %+v", name, accountId.Name, accountId.ResourceGroup, err) + } + + d.SetId(resourceId) + + if d.HasChange("snapshot_schedule") { + // only one dependent sync setting is allowed in one data share + o, _ := d.GetChange("snapshot_schedule") + if origins := o.([]interface{}); len(origins) > 0 { + origin := origins[0].(map[string]interface{}) + if originName, ok := origin["name"].(string); ok && originName != "" { + future, err := syncClient.Delete(ctx, accountId.ResourceGroup, accountId.Name, name, originName) + if err != nil { + return fmt.Errorf("deleting DataShare %q snapshot schedule (Resource Group %q / accountName %q): %+v", name, accountId.ResourceGroup, 
accountId.Name, err) + } + if err = future.WaitForCompletionRef(ctx, syncClient.Client); err != nil { + return fmt.Errorf("waiting for DataShare %q snapshot schedule (Resource Group %q / accountName %q) to be deleted: %+v", name, accountId.ResourceGroup, accountId.Name, err) + } + } + } + } + + if snapshotSchedule := expandAzureRmDataShareSnapshotSchedule(d.Get("snapshot_schedule").([]interface{})); snapshotSchedule != nil { + if _, err := syncClient.Create(ctx, accountId.ResourceGroup, accountId.Name, name, d.Get("snapshot_schedule.0.name").(string), snapshotSchedule); err != nil { + return fmt.Errorf("creating DataShare %q snapshot schedule (Resource Group %q / accountName %q): %+v", name, accountId.ResourceGroup, accountId.Name, err) + } + } + + return resourceDataShareRead(d, meta) +} + +func resourceDataShareRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.SharesClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + syncClient := meta.(*clients.Client).DataShare.SynchronizationClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.ShareID(d.Id()) + if err != nil { + return err + } + + dataShare, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.Name) + if err != nil { + if utils.ResponseWasNotFound(dataShare.Response) { + log.Printf("[INFO] DataShare %q does not exist - removing from state", d.Id()) + d.SetId("") + return nil + } + return fmt.Errorf("retrieving DataShare %q (Resource Group %q / accountName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, err) + } + + accountId := parse.NewAccountID(subscriptionId, id.ResourceGroup, id.AccountName) + + d.Set("name", id.Name) + d.Set("account_id", accountId.ID()) + + if props := dataShare.ShareProperties; props != nil { + d.Set("kind", props.ShareKind) + d.Set("description", props.Description) + d.Set("terms", props.Terms) + } + + settings := make([]datashare.ScheduledSynchronizationSetting, 0) + syncIterator, err := syncClient.ListByShareComplete(ctx, id.ResourceGroup, id.AccountName, id.Name, "") + if err != nil { + return fmt.Errorf("listing Snapshot Schedules for Data Share %q (Account %q / Resource Group %q): %+v", id.Name, id.AccountName, id.ResourceGroup, err) + } + for syncIterator.NotDone() { + item, ok := syncIterator.Value().AsScheduledSynchronizationSetting() + if ok && item != nil { + settings = append(settings, *item) + } + + if err := syncIterator.NextWithContext(ctx); err != nil { + return fmt.Errorf("retrieving next Snapshot Schedule: %+v", err) + } + } + if err := d.Set("snapshot_schedule", flattenAzureRmDataShareSnapshotSchedule(settings)); err != nil { + return fmt.Errorf("setting `snapshot_schedule`: %+v", err) + } + + return nil +} + +func resourceDataShareDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.SharesClient + syncClient := meta.(*clients.Client).DataShare.SynchronizationClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.ShareID(d.Id()) + if err != nil { + return err + } + + // sync setting will not automatically be deleted after the data share is deleted + if _, ok := d.GetOk("snapshot_schedule"); ok { + syncFuture, err := syncClient.Delete(ctx, id.ResourceGroup, id.AccountName, id.Name, d.Get("snapshot_schedule.0.name").(string)) + if err != nil { + return fmt.Errorf("deleting DataShare %q snapshot schedule (Resource Group %q / 
accountName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, err) + } + if err = syncFuture.WaitForCompletionRef(ctx, syncClient.Client); err != nil { + return fmt.Errorf("waiting for DataShare %q snapshot schedule (Resource Group %q / accountName %q) to be deleted: %+v", id.Name, id.ResourceGroup, id.AccountName, err) + } + } + + future, err := client.Delete(ctx, id.ResourceGroup, id.AccountName, id.Name) + if err != nil { + return fmt.Errorf("deleting DataShare %q (Resource Group %q / accountName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for DataShare %q (Resource Group %q / accountName %q) to be deleted: %+v", id.Name, id.ResourceGroup, id.AccountName, err) + } + + return nil +} + +func expandAzureRmDataShareSnapshotSchedule(input []interface{}) *datashare.ScheduledSynchronizationSetting { + if len(input) == 0 { + return nil + } + + snapshotSchedule := input[0].(map[string]interface{}) + + startTime, _ := time.Parse(time.RFC3339, snapshotSchedule["start_time"].(string)) + + return &datashare.ScheduledSynchronizationSetting{ + Kind: datashare.KindBasicSynchronizationSettingKindScheduleBased, + ScheduledSynchronizationSettingProperties: &datashare.ScheduledSynchronizationSettingProperties{ + RecurrenceInterval: datashare.RecurrenceInterval(snapshotSchedule["recurrence"].(string)), + SynchronizationTime: &date.Time{Time: startTime}, + }, + } +} + +func flattenAzureRmDataShareSnapshotSchedule(input []datashare.ScheduledSynchronizationSetting) []interface{} { + output := make([]interface{}, 0) + + for _, setting := range input { + name := "" + if setting.Name != nil { + name = *setting.Name + } + + startTime := "" + if setting.SynchronizationTime != nil && !setting.SynchronizationTime.IsZero() { + startTime = setting.SynchronizationTime.Format(time.RFC3339) + } + + output = append(output, map[string]interface{}{ + "name": name, + "recurrence": string(setting.RecurrenceInterval), + "start_time": startTime, + }) + } + + return output +} diff --git a/azurerm/internal/services/datashare/data_share_resource_test.go b/azurerm/internal/services/datashare/data_share_resource_test.go new file mode 100644 index 000000000000..b0459981c64a --- /dev/null +++ b/azurerm/internal/services/datashare/data_share_resource_test.go @@ -0,0 +1,258 @@ +package datashare_test + +import ( + "context" + "fmt" + "testing" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DataShareResource struct { +} + +func TestAccDataShare_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share", "test") + r := DataShareResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataShare_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share", 
"test") + r := DataShareResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccDataShare_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share", "test") + r := DataShareResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataShare_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share", "test") + r := DataShareResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataShare_snapshotSchedule(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share", "test") + r := DataShareResource{} + startTime := time.Now().Add(time.Hour * 7).Format(time.RFC3339) + startTime2 := time.Now().Add(time.Hour * 8).Format(time.RFC3339) + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.snapshotSchedule(data, startTime), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.snapshotScheduleUpdated(data, startTime2), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t DataShareResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ShareID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.DataShare.SharesClient.Get(ctx, id.ResourceGroup, id.AccountName, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Data Share %q (resource group: %q): %+v", id.Name, id.ResourceGroup, err) + } + + return utils.Bool(resp.ShareProperties != nil), nil +} + +func (DataShareResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-datashare-%d" + location = "%s" +} + +resource "azurerm_data_share_account" "test" { + name = "acctest-dsa-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + identity { + type = "SystemAssigned" + } + + tags = { + env = "Test" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r DataShareResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_share" "test" { + name = "acctest_ds_%d" + account_id = 
azurerm_data_share_account.test.id + kind = "CopyBased" +} +`, r.template(data), data.RandomInteger) +} + +func (r DataShareResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_share" "import" { + name = azurerm_data_share.test.name + account_id = azurerm_data_share_account.test.id + kind = azurerm_data_share.test.kind +} +`, r.basic(data)) +} + +func (r DataShareResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_share" "test" { + name = "acctest_ds_%d" + account_id = azurerm_data_share_account.test.id + kind = "CopyBased" + description = "share desc" + terms = "share terms" +} +`, r.template(data), data.RandomInteger) +} + +func (r DataShareResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_share" "test" { + name = "acctest_ds_%d" + account_id = azurerm_data_share_account.test.id + kind = "CopyBased" + description = "share desc 2" + terms = "share terms 2" +} +`, r.template(data), data.RandomInteger) +} + +func (r DataShareResource) snapshotSchedule(data acceptance.TestData, startTime string) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_data_share" "test" { + name = "acctest_ds_%[2]d" + account_id = azurerm_data_share_account.test.id + kind = "CopyBased" + + snapshot_schedule { + name = "acctest-ss-%[2]d" + recurrence = "Day" + start_time = "%[3]s" + } +} +`, r.template(data), data.RandomInteger, startTime) +} + +func (r DataShareResource) snapshotScheduleUpdated(data acceptance.TestData, startTime string) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_data_share" "test" { + name = "acctest_ds_%[2]d" + account_id = azurerm_data_share_account.test.id + kind = "CopyBased" + + snapshot_schedule { + name = "acctest-ss2-%[2]d" + recurrence = "Hour" + start_time = "%[3]s" + } +} +`, r.template(data), data.RandomInteger, startTime) +} diff --git a/azurerm/internal/services/datashare/data_source_data_share.go b/azurerm/internal/services/datashare/data_source_data_share.go deleted file mode 100644 index 860491b8424a..000000000000 --- a/azurerm/internal/services/datashare/data_source_data_share.go +++ /dev/null @@ -1,139 +0,0 @@ -package datashare - -import ( - "fmt" - "log" - "time" - - "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func dataSourceDataShare() *schema.Resource { - return &schema.Resource{ - Read: dataSourceArmDataShareRead, - - Timeouts: &schema.ResourceTimeout{ - Read: schema.DefaultTimeout(5 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validate.DatashareName(), - }, - - "account_id": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validate.DatashareAccountID, - }, - - "kind": { - Type: schema.TypeString, - Computed: true, - }, - - "description": { - Type: schema.TypeString, - Computed: true, - }, - - "snapshot_schedule": { - Type: 
schema.TypeList, - Computed: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Computed: true, - }, - - "recurrence": { - Type: schema.TypeString, - Computed: true, - }, - - "start_time": { - Type: schema.TypeString, - Computed: true, - }, - }, - }, - }, - - "terms": { - Type: schema.TypeString, - Computed: true, - }, - }, - } -} - -func dataSourceArmDataShareRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).DataShare.SharesClient - syncClient := meta.(*clients.Client).DataShare.SynchronizationClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := d.Get("name").(string) - accountID := d.Get("account_id").(string) - accountId, err := parse.DataShareAccountID(accountID) - if err != nil { - return err - } - - resp, err := client.Get(ctx, accountId.ResourceGroup, accountId.Name, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[INFO] DataShare %q does not exist - removing from state", d.Id()) - d.SetId("") - return nil - } - return fmt.Errorf("retrieving DataShare %q (Resource Group %q / accountName %q): %+v", name, accountId.ResourceGroup, accountId.Name, err) - } - - if resp.ID == nil || *resp.ID == "" { - return fmt.Errorf("reading DataShare %q (Resource Group %q / accountName %q): ID is empty", name, accountId.ResourceGroup, accountId.Name) - } - - d.SetId(*resp.ID) - d.Set("name", name) - d.Set("account_id", accountID) - if props := resp.ShareProperties; props != nil { - d.Set("kind", props.ShareKind) - d.Set("description", props.Description) - d.Set("terms", props.Terms) - } - - if syncIterator, err := syncClient.ListByShareComplete(ctx, accountId.ResourceGroup, accountId.Name, name, ""); syncIterator.NotDone() { - if err != nil { - return fmt.Errorf("listing DataShare %q snapshot schedule (Resource Group %q / accountName %q): %+v", name, accountId.ResourceGroup, accountId.Name, err) - } - if syncName := syncIterator.Value().(datashare.ScheduledSynchronizationSetting).Name; syncName != nil && *syncName != "" { - syncResp, err := syncClient.Get(ctx, accountId.ResourceGroup, accountId.Name, name, *syncName) - if err != nil { - return fmt.Errorf("reading DataShare %q snapshot schedule (Resource Group %q / accountName %q): %+v", name, accountId.ResourceGroup, accountId.Name, err) - } - if schedule := syncResp.Value.(datashare.ScheduledSynchronizationSetting); schedule.ID != nil && *schedule.ID != "" { - if err := d.Set("snapshot_schedule", flattenAzureRmDataShareSnapshotSchedule(&schedule)); err != nil { - return fmt.Errorf("setting `snapshot_schedule`: %+v", err) - } - } - } - if err := syncIterator.NextWithContext(ctx); err != nil { - return fmt.Errorf("listing DataShare %q snapshot schedule (Resource Group %q / accountName %q): %+v", name, accountId.ResourceGroup, accountId.Name, err) - } - if syncIterator.NotDone() { - return fmt.Errorf("more than one DataShare %q snapshot schedule (Resource Group %q / accountName %q) is returned", name, accountId.ResourceGroup, accountId.Name) - } - } - - return nil -} diff --git a/azurerm/internal/services/datashare/data_source_data_share_dataset_blob_storage.go b/azurerm/internal/services/datashare/data_source_data_share_dataset_blob_storage.go deleted file mode 100644 index 6ea6a98d36f7..000000000000 --- a/azurerm/internal/services/datashare/data_source_data_share_dataset_blob_storage.go +++ /dev/null @@ -1,144 +0,0 @@ -package datashare - -import ( - "fmt" - 
"time" - - "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/helper" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" -) - -func dataSourceDataShareDatasetBlobStorage() *schema.Resource { - return &schema.Resource{ - Read: dataSourceArmDataShareDatasetBlobStorageRead, - - Timeouts: &schema.ResourceTimeout{ - Read: schema.DefaultTimeout(5 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validate.DatashareDataSetName(), - }, - - "data_share_id": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validate.DataShareID, - }, - - "container_name": { - Type: schema.TypeString, - Computed: true, - }, - - "storage_account": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Computed: true, - }, - - "resource_group_name": { - Type: schema.TypeString, - Computed: true, - }, - - "subscription_id": { - Type: schema.TypeString, - Computed: true, - }, - }, - }, - }, - - "file_path": { - Type: schema.TypeString, - Computed: true, - }, - - "folder_path": { - Type: schema.TypeString, - Computed: true, - }, - - "display_name": { - Type: schema.TypeString, - Computed: true, - }, - }, - } -} - -func dataSourceArmDataShareDatasetBlobStorageRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).DataShare.DataSetClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := d.Get("name").(string) - shareID := d.Get("data_share_id").(string) - shareId, err := parse.DataShareID(shareID) - if err != nil { - return err - } - - respModel, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) - if err != nil { - return fmt.Errorf("retrieving DataShare Blob Storage DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) - } - - respId := helper.GetAzurermDataShareDataSetId(respModel.Value) - if respId == nil || *respId == "" { - return fmt.Errorf("empty or nil ID returned for reading DataShare Blob Storage DataSet %q (Resource Group %q / accountName %q / shareName %q)", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) - } - - d.SetId(*respId) - d.Set("name", name) - d.Set("data_share_id", shareID) - - switch resp := respModel.Value.(type) { - case datashare.BlobDataSet: - if props := resp.BlobProperties; props != nil { - d.Set("container_name", props.ContainerName) - if err := d.Set("storage_account", flattenAzureRmDataShareDataSetBlobStorageAccount(props.StorageAccountName, props.ResourceGroup, props.SubscriptionID)); err != nil { - return fmt.Errorf("setting `storage_account`: %+v", err) - } - d.Set("file_path", props.FilePath) - d.Set("display_name", props.DataSetID) - } - - case datashare.BlobFolderDataSet: - if props := resp.BlobFolderProperties; props != nil { - d.Set("container_name", 
props.ContainerName) - if err := d.Set("storage_account", flattenAzureRmDataShareDataSetBlobStorageAccount(props.StorageAccountName, props.ResourceGroup, props.SubscriptionID)); err != nil { - return fmt.Errorf("setting `storage_account`: %+v", err) - } - d.Set("folder_path", props.Prefix) - d.Set("display_name", props.DataSetID) - } - - case datashare.BlobContainerDataSet: - if props := resp.BlobContainerProperties; props != nil { - d.Set("container_name", props.ContainerName) - if err := d.Set("storage_account", flattenAzureRmDataShareDataSetBlobStorageAccount(props.StorageAccountName, props.ResourceGroup, props.SubscriptionID)); err != nil { - return fmt.Errorf("setting `storage_account`: %+v", err) - } - d.Set("display_name", props.DataSetID) - } - - default: - return fmt.Errorf("data share dataset %q (Resource Group %q / accountName %q / shareName %q) is not a blob storage dataset", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) - } - - return nil -} diff --git a/azurerm/internal/services/datashare/data_source_data_share_dataset_data_lake_gen1.go b/azurerm/internal/services/datashare/data_source_data_share_dataset_data_lake_gen1.go deleted file mode 100644 index 5e957ce0bb8f..000000000000 --- a/azurerm/internal/services/datashare/data_source_data_share_dataset_data_lake_gen1.go +++ /dev/null @@ -1,111 +0,0 @@ -package datashare - -import ( - "fmt" - "time" - - "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/helper" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" -) - -func dataSourceDataShareDatasetDataLakeGen1() *schema.Resource { - return &schema.Resource{ - Read: dataSourceArmDataShareDatasetDataLakeGen1Read, - - Timeouts: &schema.ResourceTimeout{ - Read: schema.DefaultTimeout(5 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validate.DatashareDataSetName(), - }, - - "data_share_id": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validate.DataShareID, - }, - - "data_lake_store_id": { - Type: schema.TypeString, - Computed: true, - }, - - "folder_path": { - Type: schema.TypeString, - Computed: true, - }, - - "file_name": { - Type: schema.TypeString, - Computed: true, - }, - - "display_name": { - Type: schema.TypeString, - Computed: true, - }, - }, - } -} - -func dataSourceArmDataShareDatasetDataLakeGen1Read(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).DataShare.DataSetClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := d.Get("name").(string) - shareID := d.Get("data_share_id").(string) - shareId, err := parse.DataShareID(shareID) - if err != nil { - return err - } - - respModel, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) - if err != nil { - return fmt.Errorf("retrieving DataShare Data Lake Gen1 DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, 
shareId.Name, err) - } - - respId := helper.GetAzurermDataShareDataSetId(respModel.Value) - if respId == nil || *respId == "" { - return fmt.Errorf("empty or nil ID returned for DataShare Data Lake Gen1 DataSet %q (Resource Group %q / accountName %q / shareName %q)", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) - } - - d.SetId(*respId) - d.Set("name", name) - d.Set("data_share_id", shareID) - - switch resp := respModel.Value.(type) { - case datashare.ADLSGen1FileDataSet: - if props := resp.ADLSGen1FileProperties; props != nil { - if props.SubscriptionID != nil && props.ResourceGroup != nil && props.AccountName != nil { - d.Set("data_lake_store_id", fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DataLakeStore/accounts/%s", *props.SubscriptionID, *props.ResourceGroup, *props.AccountName)) - } - d.Set("folder_path", props.FolderPath) - d.Set("file_name", props.FileName) - d.Set("display_name", props.DataSetID) - } - - case datashare.ADLSGen1FolderDataSet: - if props := resp.ADLSGen1FolderProperties; props != nil { - if props.SubscriptionID != nil && props.ResourceGroup != nil && props.AccountName != nil { - d.Set("data_lake_store_id", fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DataLakeStore/accounts/%s", *props.SubscriptionID, *props.ResourceGroup, *props.AccountName)) - } - d.Set("folder_path", props.FolderPath) - d.Set("display_name", props.DataSetID) - } - - default: - return fmt.Errorf("data share dataset %q (Resource Group %q / accountName %q / shareName %q) is not a datalake store gen1 dataset", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) - } - - return nil -} diff --git a/azurerm/internal/services/datashare/data_source_data_share_dataset_data_lake_gen2.go b/azurerm/internal/services/datashare/data_source_data_share_dataset_data_lake_gen2.go deleted file mode 100644 index d5331a3be1b8..000000000000 --- a/azurerm/internal/services/datashare/data_source_data_share_dataset_data_lake_gen2.go +++ /dev/null @@ -1,120 +0,0 @@ -package datashare - -import ( - "fmt" - "time" - - "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/helper" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" -) - -func dataSourceDataShareDatasetDataLakeGen2() *schema.Resource { - return &schema.Resource{ - Read: dataSourceArmDataShareDatasetDataLakeGen2Read, - - Timeouts: &schema.ResourceTimeout{ - Read: schema.DefaultTimeout(5 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validate.DatashareDataSetName(), - }, - - "share_id": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validate.DataShareID, - }, - - "storage_account_id": { - Type: schema.TypeString, - Computed: true, - }, - - "file_system_name": { - Type: schema.TypeString, - Computed: true, - }, - - "file_path": { - Type: schema.TypeString, - Computed: true, - }, - - "folder_path": { - Type: schema.TypeString, - Computed: true, - }, - - "display_name": { - Type: 
schema.TypeString, - Computed: true, - }, - }, - } -} - -func dataSourceArmDataShareDatasetDataLakeGen2Read(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).DataShare.DataSetClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := d.Get("name").(string) - shareID := d.Get("share_id").(string) - shareId, err := parse.DataShareID(shareID) - if err != nil { - return err - } - - respModel, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) - if err != nil { - return fmt.Errorf("retrieving DataShare Data Lake Gen2 DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) - } - - respId := helper.GetAzurermDataShareDataSetId(respModel.Value) - if respId == nil || *respId == "" { - return fmt.Errorf("empty or nil ID returned for DataShare Data Lake Gen2 DataSet %q (Resource Group %q / accountName %q / shareName %q)", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) - } - - d.SetId(*respId) - d.Set("name", name) - d.Set("share_id", shareID) - - switch resp := respModel.Value.(type) { - case datashare.ADLSGen2FileDataSet: - if props := resp.ADLSGen2FileProperties; props != nil { - d.Set("storage_account_id", fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Storage/storageAccounts/%s", *props.SubscriptionID, *props.ResourceGroup, *props.StorageAccountName)) - d.Set("file_system_name", props.FileSystem) - d.Set("file_path", props.FilePath) - d.Set("display_name", props.DataSetID) - } - - case datashare.ADLSGen2FolderDataSet: - if props := resp.ADLSGen2FolderProperties; props != nil { - d.Set("storage_account_id", fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Storage/storageAccounts/%s", *props.SubscriptionID, *props.ResourceGroup, *props.StorageAccountName)) - d.Set("file_system_name", props.FileSystem) - d.Set("folder_path", props.FolderPath) - d.Set("display_name", props.DataSetID) - } - - case datashare.ADLSGen2FileSystemDataSet: - if props := resp.ADLSGen2FileSystemProperties; props != nil { - d.Set("storage_account_id", fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Storage/storageAccounts/%s", *props.SubscriptionID, *props.ResourceGroup, *props.StorageAccountName)) - d.Set("file_system_name", props.FileSystem) - d.Set("display_name", props.DataSetID) - } - - default: - return fmt.Errorf("data share dataset %q (Resource Group %q / accountName %q / shareName %q) is not a datalake store gen2 dataset", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) - } - - return nil -} diff --git a/azurerm/internal/services/datashare/data_source_data_share_dataset_kusto_cluster.go b/azurerm/internal/services/datashare/data_source_data_share_dataset_kusto_cluster.go deleted file mode 100644 index 90e9099bb66b..000000000000 --- a/azurerm/internal/services/datashare/data_source_data_share_dataset_kusto_cluster.go +++ /dev/null @@ -1,91 +0,0 @@ -package datashare - -import ( - "fmt" - "time" - - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/helper" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" -) - -func dataSourceDataShareDatasetKustoCluster() *schema.Resource { - return &schema.Resource{ - Read: dataSourceArmDataShareDatasetKustoClusterRead, - - Timeouts: &schema.ResourceTimeout{ - Read: schema.DefaultTimeout(5 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validate.DatashareDataSetName(), - }, - - "share_id": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validate.DataShareID, - }, - - "kusto_cluster_id": { - Type: schema.TypeString, - Computed: true, - }, - - "display_name": { - Type: schema.TypeString, - Computed: true, - }, - - "kusto_cluster_location": { - Type: schema.TypeString, - Computed: true, - }, - }, - } -} - -func dataSourceArmDataShareDatasetKustoClusterRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).DataShare.DataSetClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := d.Get("name").(string) - shareID := d.Get("share_id").(string) - shareId, err := parse.DataShareID(shareID) - if err != nil { - return err - } - - respModel, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) - if err != nil { - return fmt.Errorf("retrieving DataShare Kusto Cluster DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) - } - - respId := helper.GetAzurermDataShareDataSetId(respModel.Value) - if respId == nil || *respId == "" { - return fmt.Errorf("empty or nil ID returned for DataShare Kusto Cluster DataSet %q (Resource Group %q / accountName %q / shareName %q)", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) - } - - d.SetId(*respId) - d.Set("name", name) - d.Set("share_id", shareID) - - resp, ok := respModel.Value.AsKustoClusterDataSet() - if !ok { - return fmt.Errorf("dataShare %q (Resource Group %q / accountName %q / shareName %q) is not kusto cluster dataset", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) - } - if props := resp.KustoClusterDataSetProperties; props != nil { - d.Set("kusto_cluster_id", props.KustoClusterResourceID) - d.Set("display_name", props.DataSetID) - d.Set("kusto_cluster_location", props.Location) - } - - return nil -} diff --git a/azurerm/internal/services/datashare/parse/account.go b/azurerm/internal/services/datashare/parse/account.go new file mode 100644 index 000000000000..bceb9ecdb31d --- /dev/null +++ b/azurerm/internal/services/datashare/parse/account.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type AccountId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewAccountID(subscriptionId, resourceGroup, name string) AccountId { + return AccountId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id AccountId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Account", 
segmentsStr) +} + +func (id AccountId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DataShare/accounts/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// AccountID parses a Account ID into an AccountId struct +func AccountID(input string) (*AccountId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := AccountId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("accounts"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/datashare/parse/account_test.go b/azurerm/internal/services/datashare/parse/account_test.go new file mode 100644 index 000000000000..9274ff00f69e --- /dev/null +++ b/azurerm/internal/services/datashare/parse/account_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = AccountId{} + +func TestAccountIDFormatter(t *testing.T) { + actual := NewAccountID("12345678-1234-9876-4563-123456789012", "resGroup1", "account1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestAccountID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *AccountId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1", + Expected: &AccountId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "account1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DATASHARE/ACCOUNTS/ACCOUNT1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := AccountID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + 
t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/datashare/parse/data_set.go b/azurerm/internal/services/datashare/parse/data_set.go new file mode 100644 index 000000000000..8e4b0e28285f --- /dev/null +++ b/azurerm/internal/services/datashare/parse/data_set.go @@ -0,0 +1,81 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type DataSetId struct { + SubscriptionId string + ResourceGroup string + AccountName string + ShareName string + Name string +} + +func NewDataSetID(subscriptionId, resourceGroup, accountName, shareName, name string) DataSetId { + return DataSetId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + AccountName: accountName, + ShareName: shareName, + Name: name, + } +} + +func (id DataSetId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Share Name %q", id.ShareName), + fmt.Sprintf("Account Name %q", id.AccountName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Data Set", segmentsStr) +} + +func (id DataSetId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DataShare/accounts/%s/shares/%s/dataSets/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.AccountName, id.ShareName, id.Name) +} + +// DataSetID parses a DataSet ID into an DataSetId struct +func DataSetID(input string) (*DataSetId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := DataSetId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.AccountName, err = id.PopSegment("accounts"); err != nil { + return nil, err + } + if resourceId.ShareName, err = id.PopSegment("shares"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("dataSets"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/datashare/parse/data_set_test.go b/azurerm/internal/services/datashare/parse/data_set_test.go new file mode 100644 index 000000000000..d948d7c26645 --- /dev/null +++ b/azurerm/internal/services/datashare/parse/data_set_test.go @@ -0,0 +1,144 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = DataSetId{} + +func TestDataSetIDFormatter(t *testing.T) { + 
actual := NewDataSetID("12345678-1234-9876-4563-123456789012", "resGroup1", "account1", "share1", "dataSet1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/shares/share1/dataSets/dataSet1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestDataSetID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *DataSetId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing AccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/", + Error: true, + }, + + { + // missing value for AccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/", + Error: true, + }, + + { + // missing ShareName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/", + Error: true, + }, + + { + // missing value for ShareName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/shares/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/shares/share1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/shares/share1/dataSets/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/shares/share1/dataSets/dataSet1", + Expected: &DataSetId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + AccountName: "account1", + ShareName: "share1", + Name: "dataSet1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DATASHARE/ACCOUNTS/ACCOUNT1/SHARES/SHARE1/DATASETS/DATASET1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := DataSetID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.AccountName != v.Expected.AccountName { + t.Fatalf("Expected %q but got %q for AccountName", v.Expected.AccountName, actual.AccountName) + } + if actual.ShareName != v.Expected.ShareName { + 
t.Fatalf("Expected %q but got %q for ShareName", v.Expected.ShareName, actual.ShareName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/datashare/parse/data_share.go b/azurerm/internal/services/datashare/parse/data_share.go deleted file mode 100644 index 88331cbd53dd..000000000000 --- a/azurerm/internal/services/datashare/parse/data_share.go +++ /dev/null @@ -1,91 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type DataShareAccountId struct { - ResourceGroup string - Name string -} - -type DataShareId struct { - ResourceGroup string - AccountName string - Name string -} - -type DataShareDataSetId struct { - ResourceGroup string - AccountName string - ShareName string - Name string -} - -func DataShareAccountID(input string) (*DataShareAccountId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("parsing DataShareAccount ID %q: %+v", input, err) - } - - dataShareAccount := DataShareAccountId{ - ResourceGroup: id.ResourceGroup, - } - if dataShareAccount.Name, err = id.PopSegment("accounts"); err != nil { - return nil, err - } - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &dataShareAccount, nil -} - -func DataShareID(input string) (*DataShareId, error) { - var id, err = azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("unable to parse DataShare ID %q: %+v", input, err) - } - - DataShare := DataShareId{ - ResourceGroup: id.ResourceGroup, - } - if DataShare.AccountName, err = id.PopSegment("accounts"); err != nil { - return nil, err - } - if DataShare.Name, err = id.PopSegment("shares"); err != nil { - return nil, err - } - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &DataShare, nil -} - -func DataShareDataSetID(input string) (*DataShareDataSetId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse DataShareDataSet ID %q: %+v", input, err) - } - - dataShareDataSet := DataShareDataSetId{ - ResourceGroup: id.ResourceGroup, - } - if dataShareDataSet.AccountName, err = id.PopSegment("accounts"); err != nil { - return nil, err - } - if dataShareDataSet.ShareName, err = id.PopSegment("shares"); err != nil { - return nil, err - } - if dataShareDataSet.Name, err = id.PopSegment("dataSets"); err != nil { - return nil, err - } - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &dataShareDataSet, nil -} diff --git a/azurerm/internal/services/datashare/parse/data_share_test.go b/azurerm/internal/services/datashare/parse/data_share_test.go deleted file mode 100644 index 6c8c6e96d3ee..000000000000 --- a/azurerm/internal/services/datashare/parse/data_share_test.go +++ /dev/null @@ -1,251 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestDataShareAccountID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *DataShareAccountId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - 
Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Account Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/", - Expected: nil, - }, - { - Name: "Datashare account ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1", - Expected: &DataShareAccountId{ - Name: "account1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataShare/Accounts/account1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q..", v.Name) - - actual, err := DataShareAccountID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - } -} - -func TestDataShareID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *DataShareId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Account Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/", - Expected: nil, - }, - { - Name: "Missing Share", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/", - Expected: nil, - }, - { - Name: "Missing Share Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/shares/", - Expected: nil, - }, - { - Name: "Data Share ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/shares/share1", - Expected: &DataShareId{ - Name: "share1", - AccountName: "account1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/Shares/share1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q..", v.Name) - - actual, err := DataShareID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.AccountName != v.Expected.AccountName { - t.Fatalf("Expected %q but got %q for account name", v.Expected.AccountName, actual.AccountName) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) - } - - if actual.Name != v.Expected.Name { - 
t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - } -} - -func TestDataShareDataSetID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *DataShareDataSetId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Account Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/", - Expected: nil, - }, - { - Name: "Missing Share", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/", - Expected: nil, - }, - { - Name: "Missing Share Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/shares/", - Expected: nil, - }, - { - Name: "Missing DataSet", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/shares/share1", - Expected: nil, - }, - { - Name: "Missing DataSet Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/shares/share1/dataSets", - Expected: nil, - }, - { - Name: "DataSet ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/shares/share1/dataSets/dataSet1", - Expected: &DataShareDataSetId{ - Name: "dataSet1", - AccountName: "account1", - ResourceGroup: "resGroup1", - ShareName: "share1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/shares/share1/DataSets/dataSet1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q..", v.Name) - - actual, err := DataShareDataSetID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.ShareName != v.Expected.ShareName { - t.Fatalf("Expected %q but got %q for account name", v.Expected.ShareName, actual.ShareName) - } - - if actual.AccountName != v.Expected.AccountName { - t.Fatalf("Expected %q but got %q for account name", v.Expected.AccountName, actual.AccountName) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - } -} diff --git a/azurerm/internal/services/datashare/parse/share.go b/azurerm/internal/services/datashare/parse/share.go new file mode 100644 index 000000000000..60cbd8b8d001 --- /dev/null +++ b/azurerm/internal/services/datashare/parse/share.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ShareId struct { + SubscriptionId string + ResourceGroup string + AccountName string + Name string +} + +func NewShareID(subscriptionId, resourceGroup, accountName, name string) ShareId { + return ShareId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + AccountName: accountName, + Name: name, + } +} + +func (id ShareId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Account Name %q", id.AccountName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Share", segmentsStr) +} + +func (id ShareId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DataShare/accounts/%s/shares/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.AccountName, id.Name) +} + +// ShareID parses a Share ID into an ShareId struct +func ShareID(input string) (*ShareId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ShareId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.AccountName, err = id.PopSegment("accounts"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("shares"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/datashare/parse/share_test.go b/azurerm/internal/services/datashare/parse/share_test.go new file mode 100644 index 000000000000..3cdd1fbfde2c --- /dev/null +++ b/azurerm/internal/services/datashare/parse/share_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ShareId{} + +func TestShareIDFormatter(t *testing.T) { + actual := NewShareID("12345678-1234-9876-4563-123456789012", "resGroup1", "account1", "share1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/shares/share1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestShareID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ShareId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing AccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/", + Error: true, + }, + + { + // missing value for AccountName + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/shares/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/shares/share1", + Expected: &ShareId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + AccountName: "account1", + Name: "share1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DATASHARE/ACCOUNTS/ACCOUNT1/SHARES/SHARE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ShareID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.AccountName != v.Expected.AccountName { + t.Fatalf("Expected %q but got %q for AccountName", v.Expected.AccountName, actual.AccountName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/datashare/registration.go b/azurerm/internal/services/datashare/registration.go index 699f7c3c8c4b..c3d8eda4eb80 100644 --- a/azurerm/internal/services/datashare/registration.go +++ b/azurerm/internal/services/datashare/registration.go @@ -25,17 +25,19 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource { "azurerm_data_share_dataset_data_lake_gen1": dataSourceDataShareDatasetDataLakeGen1(), "azurerm_data_share_dataset_data_lake_gen2": dataSourceDataShareDatasetDataLakeGen2(), "azurerm_data_share_dataset_kusto_cluster": dataSourceDataShareDatasetKustoCluster(), + "azurerm_data_share_dataset_kusto_database": dataSourceDataShareDatasetKustoDatabase(), } } // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_data_share_account": resourceArmDataShareAccount(), - "azurerm_data_share": resourceArmDataShare(), - "azurerm_data_share_dataset_blob_storage": resourceArmDataShareDataSetBlobStorage(), - "azurerm_data_share_dataset_data_lake_gen1": resourceArmDataShareDataSetDataLakeGen1(), - "azurerm_data_share_dataset_data_lake_gen2": resourceArmDataShareDataSetDataLakeGen2(), - "azurerm_data_share_dataset_kusto_cluster": resourceArmDataShareDataSetKustoCluster(), + "azurerm_data_share_account": resourceDataShareAccount(), + "azurerm_data_share": resourceDataShare(), + "azurerm_data_share_dataset_blob_storage": resourceDataShareDataSetBlobStorage(), + 
"azurerm_data_share_dataset_data_lake_gen1": resourceDataShareDataSetDataLakeGen1(), + "azurerm_data_share_dataset_data_lake_gen2": resourceDataShareDataSetDataLakeGen2(), + "azurerm_data_share_dataset_kusto_cluster": resourceDataShareDataSetKustoCluster(), + "azurerm_data_share_dataset_kusto_database": resourceDataShareDataSetKustoDatabase(), } } diff --git a/azurerm/internal/services/datashare/resource_arm_data_share.go b/azurerm/internal/services/datashare/resource_arm_data_share.go deleted file mode 100644 index ffa55de8c553..000000000000 --- a/azurerm/internal/services/datashare/resource_arm_data_share.go +++ /dev/null @@ -1,316 +0,0 @@ -package datashare - -import ( - "fmt" - "log" - "time" - - "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" - "github.com/Azure/go-autorest/autorest/date" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" - azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/suppress" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmDataShare() *schema.Resource { - return &schema.Resource{ - Create: resourceArmDataShareCreateUpdate, - Read: resourceArmDataShareRead, - Update: resourceArmDataShareCreateUpdate, - Delete: resourceArmDataShareDelete, - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.DataShareID(id) - return err - }), - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validate.DatashareName(), - }, - - "account_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validate.DatashareAccountID, - }, - - "kind": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringInSlice([]string{ - string(datashare.CopyBased), - string(datashare.InPlace), - }, false), - }, - - "description": { - Type: schema.TypeString, - Optional: true, - }, - - "snapshot_schedule": { - Type: schema.TypeList, - Optional: true, - MaxItems: 1, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validate.DataShareSyncName(), - }, - - "recurrence": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringInSlice([]string{ - string(datashare.Day), - string(datashare.Hour), - }, false), - }, - - "start_time": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.IsRFC3339Time, - DiffSuppressFunc: suppress.RFC3339Time, - }, - }, - }, - }, - - "terms": { - Type: schema.TypeString, - Optional: true, - 
}, - }, - } -} -func resourceArmDataShareCreateUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).DataShare.SharesClient - syncClient := meta.(*clients.Client).DataShare.SynchronizationClient - ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := d.Get("name").(string) - accountId, err := parse.DataShareAccountID(d.Get("account_id").(string)) - if err != nil { - return err - } - - if d.IsNewResource() { - existing, err := client.Get(ctx, accountId.ResourceGroup, accountId.Name, name) - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for present of existing DataShare %q (Resource Group %q / accountName %q): %+v", name, accountId.ResourceGroup, accountId.Name, err) - } - } - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_data_share", *existing.ID) - } - } - - share := datashare.Share{ - ShareProperties: &datashare.ShareProperties{ - ShareKind: datashare.ShareKind(d.Get("kind").(string)), - Description: utils.String(d.Get("description").(string)), - Terms: utils.String(d.Get("terms").(string)), - }, - } - - if _, err := client.Create(ctx, accountId.ResourceGroup, accountId.Name, name, share); err != nil { - return fmt.Errorf("creating DataShare %q (Resource Group %q / accountName %q): %+v", name, accountId.ResourceGroup, accountId.Name, err) - } - - resp, err := client.Get(ctx, accountId.ResourceGroup, accountId.Name, name) - if err != nil { - return fmt.Errorf("retrieving DataShare %q (Resource Group %q / accountName %q): %+v", name, accountId.ResourceGroup, accountId.Name, err) - } - - if resp.ID == nil || *resp.ID == "" { - return fmt.Errorf("reading DataShare %q (Resource Group %q / accountName %q): ID is empty", name, accountId.ResourceGroup, accountId.Name) - } - - d.SetId(*resp.ID) - - if d.HasChange("snapshot_schedule") { - // only one dependent sync setting is allowed in one data share - o, _ := d.GetChange("snapshot_schedule") - if origins := o.([]interface{}); len(origins) > 0 { - origin := origins[0].(map[string]interface{}) - if originName, ok := origin["name"].(string); ok && originName != "" { - syncFuture, err := syncClient.Delete(ctx, accountId.ResourceGroup, accountId.Name, name, originName) - if err != nil { - return fmt.Errorf("deleting DataShare %q snapshot schedule (Resource Group %q / accountName %q): %+v", name, accountId.ResourceGroup, accountId.Name, err) - } - if err = syncFuture.WaitForCompletionRef(ctx, syncClient.Client); err != nil { - return fmt.Errorf("waiting for DataShare %q snapshot schedule (Resource Group %q / accountName %q) to be deleted: %+v", name, accountId.ResourceGroup, accountId.Name, err) - } - } - } - } - - if snapshotSchedule := expandAzureRmDataShareSnapshotSchedule(d.Get("snapshot_schedule").([]interface{})); snapshotSchedule != nil { - if _, err := syncClient.Create(ctx, accountId.ResourceGroup, accountId.Name, name, d.Get("snapshot_schedule.0.name").(string), snapshotSchedule); err != nil { - return fmt.Errorf("creating DataShare %q snapshot schedule (Resource Group %q / accountName %q): %+v", name, accountId.ResourceGroup, accountId.Name, err) - } - } - - return resourceArmDataShareRead(d, meta) -} - -func resourceArmDataShareRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).DataShare.SharesClient - accountClient := meta.(*clients.Client).DataShare.AccountClient - syncClient := 
meta.(*clients.Client).DataShare.SynchronizationClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.DataShareID(d.Id()) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.Name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[INFO] DataShare %q does not exist - removing from state", d.Id()) - d.SetId("") - return nil - } - return fmt.Errorf("retrieving DataShare %q (Resource Group %q / accountName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, err) - } - - accountResp, err := accountClient.Get(ctx, id.ResourceGroup, id.AccountName) - if err != nil { - return fmt.Errorf("retrieving DataShare Account %q (Resource Group %q): %+v", id.AccountName, id.ResourceGroup, err) - } - if accountResp.ID == nil || *accountResp.ID == "" { - return fmt.Errorf("reading DataShare Account %q (Resource Group %q): ID is empty", id.AccountName, id.ResourceGroup) - } - - d.Set("name", id.Name) - d.Set("account_id", accountResp.ID) - - if props := resp.ShareProperties; props != nil { - d.Set("kind", props.ShareKind) - d.Set("description", props.Description) - d.Set("terms", props.Terms) - } - - if syncIterator, err := syncClient.ListByShareComplete(ctx, id.ResourceGroup, id.AccountName, id.Name, ""); syncIterator.NotDone() { - if err != nil { - return fmt.Errorf("listing DataShare %q snapshot schedule (Resource Group %q / accountName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, err) - } - if syncName := syncIterator.Value().(datashare.ScheduledSynchronizationSetting).Name; syncName != nil && *syncName != "" { - syncResp, err := syncClient.Get(ctx, id.ResourceGroup, id.AccountName, id.Name, *syncName) - if err != nil { - return fmt.Errorf("reading DataShare %q snapshot schedule (Resource Group %q / accountName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, err) - } - if schedule := syncResp.Value.(datashare.ScheduledSynchronizationSetting); schedule.ID != nil && *schedule.ID != "" { - if err := d.Set("snapshot_schedule", flattenAzureRmDataShareSnapshotSchedule(&schedule)); err != nil { - return fmt.Errorf("setting `snapshot_schedule`: %+v", err) - } - } - } - if err := syncIterator.NextWithContext(ctx); err != nil { - return fmt.Errorf("listing DataShare %q snapshot schedule (Resource Group %q / accountName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, err) - } - if syncIterator.NotDone() { - return fmt.Errorf("more than one DataShare %q snapshot schedule (Resource Group %q / accountName %q) is returned", id.Name, id.ResourceGroup, id.AccountName) - } - } - - return nil -} - -func resourceArmDataShareDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).DataShare.SharesClient - syncClient := meta.(*clients.Client).DataShare.SynchronizationClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.DataShareID(d.Id()) - if err != nil { - return err - } - - // sync setting will not automatically be deleted after the data share is deleted - if _, ok := d.GetOk("snapshot_schedule"); ok { - syncFuture, err := syncClient.Delete(ctx, id.ResourceGroup, id.AccountName, id.Name, d.Get("snapshot_schedule.0.name").(string)) - if err != nil { - return fmt.Errorf("deleting DataShare %q snapshot schedule (Resource Group %q / accountName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, err) - } - if err = syncFuture.WaitForCompletionRef(ctx, 
syncClient.Client); err != nil { - return fmt.Errorf("waiting for DataShare %q snapshot schedule (Resource Group %q / accountName %q) to be deleted: %+v", id.Name, id.ResourceGroup, id.AccountName, err) - } - } - - future, err := client.Delete(ctx, id.ResourceGroup, id.AccountName, id.Name) - if err != nil { - return fmt.Errorf("deleting DataShare %q (Resource Group %q / accountName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("waiting for DataShare %q (Resource Group %q / accountName %q) to be deleted: %+v", id.Name, id.ResourceGroup, id.AccountName, err) - } - - return nil -} - -func expandAzureRmDataShareSnapshotSchedule(input []interface{}) *datashare.ScheduledSynchronizationSetting { - if len(input) == 0 { - return nil - } - - snapshotSchedule := input[0].(map[string]interface{}) - - startTime, _ := time.Parse(time.RFC3339, snapshotSchedule["start_time"].(string)) - - return &datashare.ScheduledSynchronizationSetting{ - Kind: datashare.KindBasicSynchronizationSettingKindScheduleBased, - ScheduledSynchronizationSettingProperties: &datashare.ScheduledSynchronizationSettingProperties{ - RecurrenceInterval: datashare.RecurrenceInterval(snapshotSchedule["recurrence"].(string)), - SynchronizationTime: &date.Time{Time: startTime}, - }, - } -} - -func flattenAzureRmDataShareSnapshotSchedule(sync *datashare.ScheduledSynchronizationSetting) []interface{} { - if sync == nil { - return []interface{}{} - } - - var startTime string - if sync.SynchronizationTime != nil && !sync.SynchronizationTime.IsZero() { - startTime = sync.SynchronizationTime.Format(time.RFC3339) - } - - return []interface{}{ - map[string]interface{}{ - "name": sync.Name, - "recurrence": string(sync.RecurrenceInterval), - "start_time": startTime, - }, - } -} diff --git a/azurerm/internal/services/datashare/resource_arm_data_share_dataset_blob_storage.go b/azurerm/internal/services/datashare/resource_arm_data_share_dataset_blob_storage.go deleted file mode 100644 index 6e030c3ec699..000000000000 --- a/azurerm/internal/services/datashare/resource_arm_data_share_dataset_blob_storage.go +++ /dev/null @@ -1,294 +0,0 @@ -package datashare - -import ( - "fmt" - "log" - "time" - - "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - azValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/helper" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/storage" - azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func 
resourceArmDataShareDataSetBlobStorage() *schema.Resource { - return &schema.Resource{ - Create: resourceArmDataShareDataSetBlobStorageCreate, - Read: resourceArmDataShareDataSetBlobStorageRead, - Delete: resourceArmDataShareDataSetBlobStorageDelete, - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.DataShareDataSetID(id) - return err - }), - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validate.DatashareDataSetName(), - }, - - "data_share_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validate.DataShareID, - }, - - "container_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azValidate.StorageContainerName, - }, - - "storage_account": { - Type: schema.TypeList, - Required: true, - ForceNew: true, - MaxItems: 1, - MinItems: 1, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: storage.ValidateArmStorageAccountName, - }, - - "resource_group_name": azure.SchemaResourceGroupName(), - - "subscription_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.IsUUID, - }, - }, - }, - }, - - "file_path": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - ConflictsWith: []string{"folder_path"}, - }, - - "folder_path": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - ConflictsWith: []string{"file_path"}, - }, - - "display_name": { - Type: schema.TypeString, - Computed: true, - }, - }, - } -} -func resourceArmDataShareDataSetBlobStorageCreate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).DataShare.DataSetClient - ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := d.Get("name").(string) - shareId, err := parse.DataShareID(d.Get("data_share_id").(string)) - if err != nil { - return err - } - - existing, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for present of existing DataShare Blob Storage DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) - } - } - existingId := helper.GetAzurermDataShareDataSetId(existing.Value) - if existingId != nil && *existingId != "" { - return tf.ImportAsExistsError("azurerm_data_share_dataset_blob_storage", *existingId) - } - - var dataSet datashare.BasicDataSet - if filePath, ok := d.GetOk("file_path"); ok { - dataSet = datashare.BlobDataSet{ - Kind: datashare.KindBlob, - BlobProperties: &datashare.BlobProperties{ - ContainerName: utils.String(d.Get("container_name").(string)), - StorageAccountName: utils.String(d.Get("storage_account.0.name").(string)), - ResourceGroup: utils.String(d.Get("storage_account.0.resource_group_name").(string)), - SubscriptionID: utils.String(d.Get("storage_account.0.subscription_id").(string)), - FilePath: utils.String(filePath.(string)), - }, - } - } else if folderPath, 
ok := d.GetOk("folder_path"); ok { - dataSet = datashare.BlobFolderDataSet{ - Kind: datashare.KindBlobFolder, - BlobFolderProperties: &datashare.BlobFolderProperties{ - ContainerName: utils.String(d.Get("container_name").(string)), - StorageAccountName: utils.String(d.Get("storage_account.0.name").(string)), - ResourceGroup: utils.String(d.Get("storage_account.0.resource_group_name").(string)), - SubscriptionID: utils.String(d.Get("storage_account.0.subscription_id").(string)), - Prefix: utils.String(folderPath.(string)), - }, - } - } else { - dataSet = datashare.BlobContainerDataSet{ - Kind: datashare.KindContainer, - BlobContainerProperties: &datashare.BlobContainerProperties{ - ContainerName: utils.String(d.Get("container_name").(string)), - StorageAccountName: utils.String(d.Get("storage_account.0.name").(string)), - ResourceGroup: utils.String(d.Get("storage_account.0.resource_group_name").(string)), - SubscriptionID: utils.String(d.Get("storage_account.0.subscription_id").(string)), - }, - } - } - - if _, err := client.Create(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name, dataSet); err != nil { - return fmt.Errorf("creating DataShare Blob Storage DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) - } - - resp, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) - if err != nil { - return fmt.Errorf("retrieving DataShare Blob Storage DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) - } - - respId := helper.GetAzurermDataShareDataSetId(resp.Value) - if respId == nil || *respId == "" { - return fmt.Errorf("empty or nil ID returned for DataShare Blob Storage DataSet %q (Resource Group %q / accountName %q / shareName %q)", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) - } - - d.SetId(*respId) - return resourceArmDataShareDataSetBlobStorageRead(d, meta) -} - -func resourceArmDataShareDataSetBlobStorageRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).DataShare.DataSetClient - shareClient := meta.(*clients.Client).DataShare.SharesClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.DataShareDataSetID(d.Id()) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[INFO] DataShare %q does not exist - removing from state", d.Id()) - d.SetId("") - return nil - } - return fmt.Errorf("retrieving DataShare Blob Storage DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) - } - - d.Set("name", id.Name) - shareResp, err := shareClient.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName) - if err != nil { - return fmt.Errorf("retrieving DataShare %q (Resource Group %q / accountName %q): %+v", id.ShareName, id.ResourceGroup, id.AccountName, err) - } - if shareResp.ID == nil || *shareResp.ID == "" { - return fmt.Errorf("empty or nil ID returned for DataShare %q (Resource Group %q / accountName %q)", id.ShareName, id.ResourceGroup, id.AccountName) - } - - d.Set("data_share_id", shareResp.ID) - - switch resp := resp.Value.(type) { - case datashare.BlobDataSet: - if props := resp.BlobProperties; props != nil { - 
d.Set("container_name", props.ContainerName) - if err := d.Set("storage_account", flattenAzureRmDataShareDataSetBlobStorageAccount(props.StorageAccountName, props.ResourceGroup, props.SubscriptionID)); err != nil { - return fmt.Errorf("setting `storage_account`: %+v", err) - } - d.Set("file_path", props.FilePath) - d.Set("display_name", props.DataSetID) - } - - case datashare.BlobFolderDataSet: - if props := resp.BlobFolderProperties; props != nil { - d.Set("container_name", props.ContainerName) - if err := d.Set("storage_account", flattenAzureRmDataShareDataSetBlobStorageAccount(props.StorageAccountName, props.ResourceGroup, props.SubscriptionID)); err != nil { - return fmt.Errorf("setting `storage_account`: %+v", err) - } - d.Set("folder_path", props.Prefix) - d.Set("display_name", props.DataSetID) - } - - case datashare.BlobContainerDataSet: - if props := resp.BlobContainerProperties; props != nil { - d.Set("container_name", props.ContainerName) - if err := d.Set("storage_account", flattenAzureRmDataShareDataSetBlobStorageAccount(props.StorageAccountName, props.ResourceGroup, props.SubscriptionID)); err != nil { - return fmt.Errorf("setting `storage_account`: %+v", err) - } - d.Set("display_name", props.DataSetID) - } - - default: - return fmt.Errorf("data share dataset %q (Resource Group %q / accountName %q / shareName %q) is not a blob storage dataset", id.Name, id.ResourceGroup, id.AccountName, id.ShareName) - } - - return nil -} - -func resourceArmDataShareDataSetBlobStorageDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).DataShare.DataSetClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.DataShareDataSetID(d.Id()) - if err != nil { - return err - } - - if _, err := client.Delete(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name); err != nil { - return fmt.Errorf("deleting DataShare Blob Storage DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) - } - return nil -} - -func flattenAzureRmDataShareDataSetBlobStorageAccount(strName, strRG, strSubs *string) []interface{} { - var name, rg, subs string - if strName != nil { - name = *strName - } - - if strRG != nil { - rg = *strRG - } - - if strSubs != nil { - subs = *strSubs - } - - return []interface{}{ - map[string]interface{}{ - "name": name, - "resource_group_name": rg, - "subscription_id": subs, - }, - } -} diff --git a/azurerm/internal/services/datashare/resource_arm_data_share_dataset_data_lake_gen1.go b/azurerm/internal/services/datashare/resource_arm_data_share_dataset_data_lake_gen1.go deleted file mode 100644 index 77df25952780..000000000000 --- a/azurerm/internal/services/datashare/resource_arm_data_share_dataset_data_lake_gen1.go +++ /dev/null @@ -1,223 +0,0 @@ -package datashare - -import ( - "fmt" - "log" - "time" - - "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - dataLakeParse "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datalake/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/helper" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" - azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmDataShareDataSetDataLakeGen1() *schema.Resource { - return &schema.Resource{ - Create: resourceArmDataShareDataSetDataLakeGen1Create, - Read: resourceArmDataShareDataSetDataLakeGen1Read, - Delete: resourceArmDataShareDataSetDataLakeGen1Delete, - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.DataShareDataSetID(id) - return err - }), - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validate.DatashareDataSetName(), - }, - - "data_share_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validate.DataShareID, - }, - - "data_lake_store_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validate.DatalakeStoreID, - }, - - "folder_path": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - - "file_name": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - - "display_name": { - Type: schema.TypeString, - Computed: true, - }, - }, - } -} -func resourceArmDataShareDataSetDataLakeGen1Create(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).DataShare.DataSetClient - ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := d.Get("name").(string) - shareId, err := parse.DataShareID(d.Get("data_share_id").(string)) - if err != nil { - return err - } - - existing, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for present of existing DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) - } - } - existingId := helper.GetAzurermDataShareDataSetId(existing.Value) - if existingId != nil && *existingId != "" { - return tf.ImportAsExistsError("azurerm_data_share_dataset_data_lake_gen1", *existingId) - } - - dataLakeStoreId, err := dataLakeParse.DataLakeStoreID(d.Get("data_lake_store_id").(string)) - if err != nil { - return err - } - - var dataSet datashare.BasicDataSet - - if fileName, ok := d.GetOk("file_name"); ok { - dataSet = datashare.ADLSGen1FileDataSet{ - Kind: datashare.KindAdlsGen1File, - ADLSGen1FileProperties: &datashare.ADLSGen1FileProperties{ - AccountName: utils.String(dataLakeStoreId.Name), - ResourceGroup: utils.String(dataLakeStoreId.ResourceGroup), - SubscriptionID: utils.String(dataLakeStoreId.Subscription), - FolderPath: utils.String(d.Get("folder_path").(string)), - FileName: utils.String(fileName.(string)), - }, - } - } else { - dataSet = datashare.ADLSGen1FolderDataSet{ - Kind: 
datashare.KindAdlsGen1Folder, - ADLSGen1FolderProperties: &datashare.ADLSGen1FolderProperties{ - AccountName: utils.String(dataLakeStoreId.Name), - ResourceGroup: utils.String(dataLakeStoreId.ResourceGroup), - SubscriptionID: utils.String(dataLakeStoreId.Subscription), - FolderPath: utils.String(d.Get("folder_path").(string)), - }, - } - } - - if _, err := client.Create(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name, dataSet); err != nil { - return fmt.Errorf("creating/updating DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) - } - - resp, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) - if err != nil { - return fmt.Errorf("retrieving DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) - } - - respId := helper.GetAzurermDataShareDataSetId(resp.Value) - if respId == nil || *respId == "" { - return fmt.Errorf("empty or nil ID returned for DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q)", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) - } - - d.SetId(*respId) - return resourceArmDataShareDataSetDataLakeGen1Read(d, meta) -} - -func resourceArmDataShareDataSetDataLakeGen1Read(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).DataShare.DataSetClient - shareClient := meta.(*clients.Client).DataShare.SharesClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.DataShareDataSetID(d.Id()) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[INFO] DataShare %q does not exist - removing from state", d.Id()) - d.SetId("") - return nil - } - return fmt.Errorf("retrieving DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) - } - d.Set("name", id.Name) - shareResp, err := shareClient.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName) - if err != nil { - return fmt.Errorf("retrieving DataShare %q (Resource Group %q / accountName %q): %+v", id.ShareName, id.ResourceGroup, id.AccountName, err) - } - if shareResp.ID == nil || *shareResp.ID == "" { - return fmt.Errorf("reading ID of DataShare %q (Resource Group %q / accountName %q): ID is empt", id.ShareName, id.ResourceGroup, id.AccountName) - } - d.Set("data_share_id", shareResp.ID) - - switch resp := resp.Value.(type) { - case datashare.ADLSGen1FileDataSet: - if props := resp.ADLSGen1FileProperties; props != nil { - if props.SubscriptionID != nil && props.ResourceGroup != nil && props.AccountName != nil { - d.Set("data_lake_store_id", fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DataLakeStore/accounts/%s", *props.SubscriptionID, *props.ResourceGroup, *props.AccountName)) - } - d.Set("folder_path", props.FolderPath) - d.Set("file_name", props.FileName) - d.Set("display_name", props.DataSetID) - } - - case datashare.ADLSGen1FolderDataSet: - if props := resp.ADLSGen1FolderProperties; props != nil { - if props.SubscriptionID != nil && props.ResourceGroup != nil && props.AccountName != nil { - d.Set("data_lake_store_id", 
fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DataLakeStore/accounts/%s", *props.SubscriptionID, *props.ResourceGroup, *props.AccountName)) - } - d.Set("folder_path", props.FolderPath) - d.Set("display_name", props.DataSetID) - } - - default: - return fmt.Errorf("data share dataset %q (Resource Group %q / accountName %q / shareName %q) is not a datalake store gen1 dataset", id.Name, id.ResourceGroup, id.AccountName, id.ShareName) - } - - return nil -} - -func resourceArmDataShareDataSetDataLakeGen1Delete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).DataShare.DataSetClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.DataShareDataSetID(d.Id()) - if err != nil { - return err - } - - if _, err := client.Delete(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name); err != nil { - return fmt.Errorf("deleting DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) - } - return nil -} diff --git a/azurerm/internal/services/datashare/resource_arm_data_share_dataset_data_lake_gen2.go b/azurerm/internal/services/datashare/resource_arm_data_share_dataset_data_lake_gen2.go deleted file mode 100644 index 7b2656959116..000000000000 --- a/azurerm/internal/services/datashare/resource_arm_data_share_dataset_data_lake_gen2.go +++ /dev/null @@ -1,247 +0,0 @@ -package datashare - -import ( - "fmt" - "log" - "time" - - "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/helper" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" - storageParsers "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/storage/parsers" - azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmDataShareDataSetDataLakeGen2() *schema.Resource { - return &schema.Resource{ - Create: resourceArmDataShareDataSetDataLakeGen2Create, - Read: resourceArmDataShareDataSetDataLakeGen2Read, - Delete: resourceArmDataShareDataSetDataLakeGen2Delete, - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.DataShareDataSetID(id) - return err - }), - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validate.DatashareDataSetName(), - }, - - "share_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validate.DataShareID, - }, - - "storage_account_id": { - Type: 
schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validate.StorageAccountID, - }, - - "file_system_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - - "file_path": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - ConflictsWith: []string{"folder_path"}, - }, - - "folder_path": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - ConflictsWith: []string{"file_path"}, - }, - - "display_name": { - Type: schema.TypeString, - Computed: true, - }, - }, - } -} -func resourceArmDataShareDataSetDataLakeGen2Create(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).DataShare.DataSetClient - ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := d.Get("name").(string) - shareId, err := parse.DataShareID(d.Get("share_id").(string)) - if err != nil { - return err - } - - existing, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for present of existing DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) - } - } - existingId := helper.GetAzurermDataShareDataSetId(existing.Value) - if existingId != nil && *existingId != "" { - return tf.ImportAsExistsError("azurerm_data_share_dataset_data_lake_gen2", *existingId) - } - - strId, err := storageParsers.ParseAccountID(d.Get("storage_account_id").(string)) - if err != nil { - return err - } - - var dataSet datashare.BasicDataSet - - if filePath, ok := d.GetOk("file_path"); ok { - dataSet = datashare.ADLSGen2FileDataSet{ - Kind: datashare.KindAdlsGen2File, - ADLSGen2FileProperties: &datashare.ADLSGen2FileProperties{ - StorageAccountName: utils.String(strId.Name), - ResourceGroup: utils.String(strId.ResourceGroup), - SubscriptionID: utils.String(strId.SubscriptionId), - FileSystem: utils.String(d.Get("file_system_name").(string)), - FilePath: utils.String(filePath.(string)), - }, - } - } else if folderPath, ok := d.GetOk("folder_path"); ok { - dataSet = datashare.ADLSGen2FolderDataSet{ - Kind: datashare.KindAdlsGen2Folder, - ADLSGen2FolderProperties: &datashare.ADLSGen2FolderProperties{ - StorageAccountName: utils.String(strId.Name), - ResourceGroup: utils.String(strId.ResourceGroup), - SubscriptionID: utils.String(strId.SubscriptionId), - FileSystem: utils.String(d.Get("file_system_name").(string)), - FolderPath: utils.String(folderPath.(string)), - }, - } - } else { - dataSet = datashare.ADLSGen2FileSystemDataSet{ - Kind: datashare.KindAdlsGen2FileSystem, - ADLSGen2FileSystemProperties: &datashare.ADLSGen2FileSystemProperties{ - StorageAccountName: utils.String(strId.Name), - ResourceGroup: utils.String(strId.ResourceGroup), - SubscriptionID: utils.String(strId.SubscriptionId), - FileSystem: utils.String(d.Get("file_system_name").(string)), - }, - } - } - - if _, err := client.Create(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name, dataSet); err != nil { - return fmt.Errorf("creating DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) - } - - resp, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, 
shareId.Name, name) - if err != nil { - return fmt.Errorf("retrieving DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) - } - - respId := helper.GetAzurermDataShareDataSetId(resp.Value) - if respId == nil || *respId == "" { - return fmt.Errorf("empty or nil ID returned for DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q)", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) - } - - d.SetId(*respId) - return resourceArmDataShareDataSetDataLakeGen2Read(d, meta) -} - -func resourceArmDataShareDataSetDataLakeGen2Read(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).DataShare.DataSetClient - shareClient := meta.(*clients.Client).DataShare.SharesClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.DataShareDataSetID(d.Id()) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[INFO] DataShare %q does not exist - removing from state", d.Id()) - d.SetId("") - return nil - } - return fmt.Errorf("retrieving DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) - } - d.Set("name", id.Name) - shareResp, err := shareClient.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName) - if err != nil { - return fmt.Errorf("retrieving DataShare %q (Resource Group %q / accountName %q): %+v", id.ShareName, id.ResourceGroup, id.AccountName, err) - } - if shareResp.ID == nil || *shareResp.ID == "" { - return fmt.Errorf("empty or nil ID returned for DataShare %q (Resource Group %q / accountName %q)", id.ShareName, id.ResourceGroup, id.AccountName) - } - d.Set("share_id", shareResp.ID) - - switch resp := resp.Value.(type) { - case datashare.ADLSGen2FileDataSet: - if props := resp.ADLSGen2FileProperties; props != nil { - d.Set("storage_account_id", fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Storage/storageAccounts/%s", *props.SubscriptionID, *props.ResourceGroup, *props.StorageAccountName)) - d.Set("file_system_name", props.FileSystem) - d.Set("file_path", props.FilePath) - d.Set("display_name", props.DataSetID) - } - - case datashare.ADLSGen2FolderDataSet: - if props := resp.ADLSGen2FolderProperties; props != nil { - d.Set("storage_account_id", fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Storage/storageAccounts/%s", *props.SubscriptionID, *props.ResourceGroup, *props.StorageAccountName)) - d.Set("file_system_name", props.FileSystem) - d.Set("folder_path", props.FolderPath) - d.Set("display_name", props.DataSetID) - } - - case datashare.ADLSGen2FileSystemDataSet: - if props := resp.ADLSGen2FileSystemProperties; props != nil { - d.Set("storage_account_id", fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Storage/storageAccounts/%s", *props.SubscriptionID, *props.ResourceGroup, *props.StorageAccountName)) - d.Set("file_system_name", props.FileSystem) - d.Set("display_name", props.DataSetID) - } - - default: - return fmt.Errorf("data share dataset %q (Resource Group %q / accountName %q / shareName %q) is not a datalake store gen2 dataset", id.Name, id.ResourceGroup, id.AccountName, id.ShareName) - } - - return nil -} - -func resourceArmDataShareDataSetDataLakeGen2Delete(d 
*schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).DataShare.DataSetClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.DataShareDataSetID(d.Id()) - if err != nil { - return err - } - - if _, err := client.Delete(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name); err != nil { - return fmt.Errorf("deleting DataShare DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) - } - return nil -} diff --git a/azurerm/internal/services/datashare/resource_arm_data_share_dataset_kusto_cluster.go b/azurerm/internal/services/datashare/resource_arm_data_share_dataset_kusto_cluster.go deleted file mode 100644 index 9d0d5b58271b..000000000000 --- a/azurerm/internal/services/datashare/resource_arm_data_share_dataset_kusto_cluster.go +++ /dev/null @@ -1,178 +0,0 @@ -package datashare - -import ( - "fmt" - "log" - "time" - - "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/helper" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" - azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmDataShareDataSetKustoCluster() *schema.Resource { - return &schema.Resource{ - Create: resourceArmDataShareDataSetKustoClusterCreate, - Read: resourceArmDataShareDataSetKustoClusterRead, - Delete: resourceArmDataShareDataSetKustoClusterDelete, - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.DataShareDataSetID(id) - return err - }), - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validate.DatashareDataSetName(), - }, - - "share_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validate.DataShareID, - }, - - "kusto_cluster_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateResourceID, - }, - - "display_name": { - Type: schema.TypeString, - Computed: true, - }, - - "kusto_cluster_location": { - Type: schema.TypeString, - Computed: true, - }, - }, - } -} -func resourceArmDataShareDataSetKustoClusterCreate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).DataShare.DataSetClient - ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := d.Get("name").(string) - shareId, err := parse.DataShareID(d.Get("share_id").(string)) - if err != nil { - return err 
- } - - existingModel, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) - if err != nil { - if !utils.ResponseWasNotFound(existingModel.Response) { - return fmt.Errorf("checking for presence of existing DataShare Kusto Cluster DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) - } - } - existingId := helper.GetAzurermDataShareDataSetId(existingModel.Value) - if existingId != nil && *existingId != "" { - return tf.ImportAsExistsError("azurerm_data_share_dataset_kusto_cluster", *existingId) - } - - dataSet := datashare.KustoClusterDataSet{ - Kind: datashare.KindKustoCluster, - KustoClusterDataSetProperties: &datashare.KustoClusterDataSetProperties{ - KustoClusterResourceID: utils.String(d.Get("kusto_cluster_id").(string)), - }, - } - - if _, err := client.Create(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name, dataSet); err != nil { - return fmt.Errorf("creating DataShare Kusto Cluster DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) - } - - respModel, err := client.Get(ctx, shareId.ResourceGroup, shareId.AccountName, shareId.Name, name) - if err != nil { - return fmt.Errorf("retrieving DataShare Kusto Cluster DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name, err) - } - - respId := helper.GetAzurermDataShareDataSetId(respModel.Value) - if respId == nil || *respId == "" { - return fmt.Errorf("empty or nil ID returned for DataShare Kusto Cluster DataSet %q (Resource Group %q / accountName %q / shareName %q)", name, shareId.ResourceGroup, shareId.AccountName, shareId.Name) - } - - d.SetId(*respId) - return resourceArmDataShareDataSetKustoClusterRead(d, meta) -} - -func resourceArmDataShareDataSetKustoClusterRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).DataShare.DataSetClient - shareClient := meta.(*clients.Client).DataShare.SharesClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.DataShareDataSetID(d.Id()) - if err != nil { - return err - } - - respModel, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name) - if err != nil { - if utils.ResponseWasNotFound(respModel.Response) { - log.Printf("[INFO] DataShare %q does not exist - removing from state", d.Id()) - d.SetId("") - return nil - } - return fmt.Errorf("retrieving DataShare Kusto Cluster DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) - } - - d.Set("name", id.Name) - shareResp, err := shareClient.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName) - if err != nil { - return fmt.Errorf("retrieving DataShare %q (Resource Group %q / accountName %q): %+v", id.ShareName, id.ResourceGroup, id.AccountName, err) - } - if shareResp.ID == nil || *shareResp.ID == "" { - return fmt.Errorf("empty or nil ID returned for DataShare %q (Resource Group %q / accountName %q)", id.ShareName, id.ResourceGroup, id.AccountName) - } - - d.Set("share_id", shareResp.ID) - - resp, ok := respModel.Value.AsKustoClusterDataSet() - if !ok { - return fmt.Errorf("dataShare dataset %q (Resource Group %q / accountName %q / shareName %q) is not kusto cluster dataset", id.Name, id.ResourceGroup, id.AccountName, id.ShareName) - } - if props 
:= resp.KustoClusterDataSetProperties; props != nil { - d.Set("kusto_cluster_id", props.KustoClusterResourceID) - d.Set("display_name", props.DataSetID) - d.Set("kusto_cluster_location", props.Location) - } - - return nil -} - -func resourceArmDataShareDataSetKustoClusterDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).DataShare.DataSetClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.DataShareDataSetID(d.Id()) - if err != nil { - return err - } - - if _, err := client.Delete(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name); err != nil { - return fmt.Errorf("deleting DataShare Kusto Cluster DataSet %q (Resource Group %q / accountName %q / shareName %q): %+v", id.Name, id.ResourceGroup, id.AccountName, id.ShareName, err) - } - return nil -} diff --git a/azurerm/internal/services/datashare/resourceids.go b/azurerm/internal/services/datashare/resourceids.go new file mode 100644 index 000000000000..b2f4275f4b08 --- /dev/null +++ b/azurerm/internal/services/datashare/resourceids.go @@ -0,0 +1,5 @@ +package datashare + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Account -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=DataSet -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/shares/share1/dataSets/dataSet1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Share -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/shares/share1 diff --git a/azurerm/internal/services/datashare/tests/testdata/application_gateway_test.cer b/azurerm/internal/services/datashare/testdata/application_gateway_test.cer similarity index 100% rename from azurerm/internal/services/datashare/tests/testdata/application_gateway_test.cer rename to azurerm/internal/services/datashare/testdata/application_gateway_test.cer diff --git a/azurerm/internal/services/datashare/tests/data_share_account_data_source_test.go b/azurerm/internal/services/datashare/tests/data_share_account_data_source_test.go deleted file mode 100644 index ca83c7d0eb6e..000000000000 --- a/azurerm/internal/services/datashare/tests/data_share_account_data_source_test.go +++ /dev/null @@ -1,43 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMDataShareAccount_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_data_share_account", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataShareAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceDataShareAccount_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareAccountExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.env", "Test"), - resource.TestCheckResourceAttr(data.ResourceName, "identity.0.type", "SystemAssigned"), - 
resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.principal_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.tenant_id"), - ), - }, - }, - }) -} - -func testAccDataSourceDataShareAccount_basic(data acceptance.TestData) string { - config := testAccAzureRMDataShareAccount_complete(data) - return fmt.Sprintf(` -%s - -data "azurerm_data_share_account" "test" { - name = azurerm_data_share_account.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, config) -} diff --git a/azurerm/internal/services/datashare/tests/data_share_account_resource_test.go b/azurerm/internal/services/datashare/tests/data_share_account_resource_test.go deleted file mode 100644 index 292b6229aa9e..000000000000 --- a/azurerm/internal/services/datashare/tests/data_share_account_resource_test.go +++ /dev/null @@ -1,250 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMDataShareAccount_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_share_account", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataShareAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataShareAccount_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareAccountExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.principal_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.tenant_id"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDataShareAccount_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_share_account", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataShareAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataShareAccount_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareAccountExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMDataShareAccount_requiresImport), - }, - }) -} - -func TestAccAzureRMDataShareAccount_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_share_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataShareAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataShareAccount_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareAccountExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.principal_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.tenant_id"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDataShareAccount_update(t *testing.T) { - data := 
acceptance.BuildTestData(t, "azurerm_data_share_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataShareAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataShareAccount_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareAccountExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.principal_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.tenant_id"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMDataShareAccount_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareAccountExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.principal_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.tenant_id"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMDataShareAccount_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareAccountExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.principal_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.tenant_id"), - ), - }, - { - Config: testAccAzureRMDataShareAccount_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareAccountExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.principal_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.tenant_id"), - ), - }, - data.ImportStep(), - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMDataShareAccountExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).DataShare.AccountClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("dataShare Account not found: %s", resourceName) - } - id, err := parse.DataShareAccountID(rs.Primary.ID) - if err != nil { - return err - } - if resp, err := client.Get(ctx, id.ResourceGroup, id.Name); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("bad: data_share account %q does not exist", id.Name) - } - return fmt.Errorf("bad: Get on DataShareAccountClient: %+v", err) - } - return nil - } -} - -func testCheckAzureRMDataShareAccountDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).DataShare.AccountClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_data_share_account" { - continue - } - id, err := parse.DataShareAccountID(rs.Primary.ID) - if err != nil { - return err - } - if resp, err := client.Get(ctx, id.ResourceGroup, id.Name); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("bad: Get on data_share.accountClient: %+v", err) - } - } - return nil - } - return nil -} - -func testAccAzureRMDataShareAccount_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctest-datashare-%d" - location = "%s" -} -`, data.RandomInteger, data.Locations.Primary) -} - -func testAccAzureRMDataShareAccount_basic(data 
acceptance.TestData) string { - template := testAccAzureRMDataShareAccount_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_data_share_account" "test" { - name = "acctest-DSA-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - identity { - type = "SystemAssigned" - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMDataShareAccount_requiresImport(data acceptance.TestData) string { - config := testAccAzureRMDataShareAccount_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_data_share_account" "import" { - name = azurerm_data_share_account.test.name - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - identity { - type = "SystemAssigned" - } -} -`, config) -} - -func testAccAzureRMDataShareAccount_complete(data acceptance.TestData) string { - template := testAccAzureRMDataShareAccount_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_data_share_account" "test" { - name = "acctest-DSA-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - identity { - type = "SystemAssigned" - } - - tags = { - env = "Test" - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMDataShareAccount_update(data acceptance.TestData) string { - template := testAccAzureRMDataShareAccount_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_data_share_account" "test" { - name = "acctest-DSA-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - identity { - type = "SystemAssigned" - } - - tags = { - env = "Stage" - } -} -`, template, data.RandomInteger) -} diff --git a/azurerm/internal/services/datashare/tests/data_source_data_share_dataset_blob_storage_test.go b/azurerm/internal/services/datashare/tests/data_source_data_share_dataset_blob_storage_test.go deleted file mode 100644 index f986effc9ea2..000000000000 --- a/azurerm/internal/services/datashare/tests/data_source_data_share_dataset_blob_storage_test.go +++ /dev/null @@ -1,45 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMDataShareDatasetBlobStorage_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_data_share_dataset_blob_storage", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_blob_storage"), - Steps: []resource.TestStep{ - { - Config: testAccDataSourceDataShareDatasetBlobStorage_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareDataSetExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "container_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "storage_account.0.name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "storage_account.0.resource_group_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "storage_account.0.subscription_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "file_path"), - resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), - ), - }, - }, - }) -} - -func testAccDataSourceDataShareDatasetBlobStorage_basic(data 
acceptance.TestData) string { - config := testAccAzureRMDataShareDataSetBlobStorageFile_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_data_share_dataset_blob_storage" "test" { - name = azurerm_data_share_dataset_blob_storage.test.name - data_share_id = azurerm_data_share_dataset_blob_storage.test.data_share_id -} -`, config) -} diff --git a/azurerm/internal/services/datashare/tests/data_source_data_share_dataset_data_lake_gen1_test.go b/azurerm/internal/services/datashare/tests/data_source_data_share_dataset_data_lake_gen1_test.go deleted file mode 100644 index ff2489c2c0e3..000000000000 --- a/azurerm/internal/services/datashare/tests/data_source_data_share_dataset_data_lake_gen1_test.go +++ /dev/null @@ -1,42 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMDataShareDatasetDataLakeGen1_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_data_share_dataset_data_lake_gen1", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_data_lake_gen1"), - Steps: []resource.TestStep{ - { - Config: testAccDataSourceDataShareDatasetDataLakeGen1_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareDataSetExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "data_lake_store_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "file_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), - ), - }, - }, - }) -} - -func testAccDataSourceDataShareDatasetDataLakeGen1_basic(data acceptance.TestData) string { - config := testAccAzureRMDataShareDataSetDataLakeGen1File_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_data_share_dataset_data_lake_gen1" "test" { - name = azurerm_data_share_dataset_data_lake_gen1.test.name - data_share_id = azurerm_data_share_dataset_data_lake_gen1.test.data_share_id -} -`, config) -} diff --git a/azurerm/internal/services/datashare/tests/data_source_data_share_dataset_data_lake_gen2_test.go b/azurerm/internal/services/datashare/tests/data_source_data_share_dataset_data_lake_gen2_test.go deleted file mode 100644 index e40570bf2cd3..000000000000 --- a/azurerm/internal/services/datashare/tests/data_source_data_share_dataset_data_lake_gen2_test.go +++ /dev/null @@ -1,43 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMDataShareDatasetDataLakeGen2_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_data_share_dataset_data_lake_gen2", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_data_lake_gen2"), - Steps: []resource.TestStep{ - { - Config: testAccDataSourceDataShareDatasetDataLakeGen2_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareDataSetExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "storage_account_id"), - 
resource.TestCheckResourceAttrSet(data.ResourceName, "file_system_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "file_path"), - resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), - ), - }, - }, - }) -} - -func testAccDataSourceDataShareDatasetDataLakeGen2_basic(data acceptance.TestData) string { - config := testAccAzureRMDataShareDataSetDataLakeGen2File_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_data_share_dataset_data_lake_gen2" "test" { - name = azurerm_data_share_dataset_data_lake_gen2.test.name - share_id = azurerm_data_share_dataset_data_lake_gen2.test.share_id -} -`, config) -} diff --git a/azurerm/internal/services/datashare/tests/data_source_data_share_dataset_kusto_cluster_test.go b/azurerm/internal/services/datashare/tests/data_source_data_share_dataset_kusto_cluster_test.go deleted file mode 100644 index c31d6bc417fa..000000000000 --- a/azurerm/internal/services/datashare/tests/data_source_data_share_dataset_kusto_cluster_test.go +++ /dev/null @@ -1,42 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMDataShareKustoClusterDataset_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_data_share_dataset_kusto_cluster", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_kusto_cluster"), - Steps: []resource.TestStep{ - { - Config: testAccDataSourceDataShareKustoClusterDataset_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareDataSetExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "kusto_cluster_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "kusto_cluster_location"), - ), - }, - }, - }) -} - -func testAccDataSourceDataShareKustoClusterDataset_basic(data acceptance.TestData) string { - config := testAccAzureRMDataShareKustoClusterDataSet_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_data_share_dataset_kusto_cluster" "test" { - name = azurerm_data_share_dataset_kusto_cluster.test.name - share_id = azurerm_data_share_dataset_kusto_cluster.test.share_id -} -`, config) -} diff --git a/azurerm/internal/services/datashare/tests/data_source_data_share_test.go b/azurerm/internal/services/datashare/tests/data_source_data_share_test.go deleted file mode 100644 index 5444560e6cc9..000000000000 --- a/azurerm/internal/services/datashare/tests/data_source_data_share_test.go +++ /dev/null @@ -1,75 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - "time" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMDataShare_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_data_share", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataShareDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceDataShare_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareExists(data.ResourceName), - 
resource.TestCheckResourceAttrSet(data.ResourceName, "account_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "kind"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMDataShare_snapshotSchedule(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_data_share", "test") - startTime := time.Now().Add(time.Hour * 7).Format(time.RFC3339) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataShareDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMDataShare_snapshotSchedule(data, startTime), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "snapshot_schedule.0.name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "snapshot_schedule.0.recurrence"), - resource.TestCheckResourceAttrSet(data.ResourceName, "snapshot_schedule.0.start_time"), - ), - }, - }, - }) -} - -func testAccDataSourceDataShare_basic(data acceptance.TestData) string { - config := testAccAzureRMDataShare_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_data_share" "test" { - name = azurerm_data_share.test.name - account_id = azurerm_data_share_account.test.id -} -`, config) -} - -func testAccDataSourceAzureRMDataShare_snapshotSchedule(data acceptance.TestData, startTime string) string { - config := testAccAzureRMDataShare_snapshotSchedule(data, startTime) - return fmt.Sprintf(` -%s - -data "azurerm_data_share" "test" { - name = azurerm_data_share.test.name - account_id = azurerm_data_share_account.test.id -} -`, config) -} diff --git a/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_blob_storage_test.go b/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_blob_storage_test.go deleted file mode 100644 index 5a6a31c50de4..000000000000 --- a/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_blob_storage_test.go +++ /dev/null @@ -1,278 +0,0 @@ -package tests - -import ( - "fmt" - "os" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMDataShareDataSetBlobStorageFile_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_blob_storage", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_blob_storage"), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataShareDataSetBlobStorageFile_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareDataSetExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDataShareDataSetBlobStorageFolder_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_blob_storage", "test") - - resource.Test(t, resource.TestCase{ - 
PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_blob_storage"), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataShareDataSetBlobStorageFolder_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareDataSetExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDataShareDataSetBlobStorageContainer_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_blob_storage", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_blob_storage"), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataShareDataSetBlobStorageContainer_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareDataSetExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDataShareDataSetBlobStorage_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_blob_storage", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_blob_storage"), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataShareDataSetBlobStorageFile_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareDataSetExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMDataShareDataSetBlobStorage_requiresImport), - }, - }) -} - -func testCheckAzureRMDataShareDataSetExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).DataShare.DataSetClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("DataShare DataSet not found: %s", resourceName) - } - id, err := parse.DataShareDataSetID(rs.Primary.ID) - if err != nil { - return err - } - if resp, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("bad: data share data set %q does not exist", id.Name) - } - return fmt.Errorf("bad: Get on DataShare DataSet Client: %+v", err) - } - return nil - } -} - -// nolint -func testCheckAzureRMDataShareDataSetDestroy(resourceTypeName string) func(s *terraform.State) error { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).DataShare.DataSetClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != resourceTypeName { - continue - } - id, err := parse.DataShareDataSetID(rs.Primary.ID) - if err != nil { - return err - } - if resp, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.ShareName, id.Name); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("bad: get on data share data set client: %+v", err) - } - } - return nil - } - 
return nil - } -} - -func testAccAzureRMDataShareDataSetBlobStorage_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -provider "azuread" { -} - -resource "azurerm_resource_group" "test" { - name = "acctest-datashare-%[1]d" - location = "%[2]s" -} - -resource "azurerm_data_share_account" "test" { - name = "acctest-DSA-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - identity { - type = "SystemAssigned" - } -} - -resource "azurerm_data_share" "test" { - name = "acctest_DS_%[1]d" - account_id = azurerm_data_share_account.test.id - kind = "CopyBased" -} - -resource "azurerm_storage_account" "test" { - name = "acctest%[3]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "RAGRS" -} - -resource "azurerm_storage_container" "test" { - name = "acctest-sc-%[1]d" - storage_account_name = azurerm_storage_account.test.name - container_access_type = "container" -} - -data "azuread_service_principal" "test" { - display_name = azurerm_data_share_account.test.name -} - -resource "azurerm_role_assignment" "test" { - scope = azurerm_storage_account.test.id - role_definition_name = "Storage Blob Data Reader" - principal_id = data.azuread_service_principal.test.object_id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomIntOfLength(12)) -} - -func testAccAzureRMDataShareDataSetBlobStorageFile_basic(data acceptance.TestData) string { - config := testAccAzureRMDataShareDataSetBlobStorage_template(data) - return fmt.Sprintf(` -%[1]s - -resource "azurerm_data_share_dataset_blob_storage" "test" { - name = "acctest-DSDSBS-file-%[2]d" - data_share_id = azurerm_data_share.test.id - container_name = azurerm_storage_container.test.name - storage_account { - name = azurerm_storage_account.test.name - resource_group_name = azurerm_storage_account.test.resource_group_name - subscription_id = "%[3]s" - } - file_path = "myfile.txt" - depends_on = [ - azurerm_role_assignment.test, - ] -} -`, config, data.RandomInteger, os.Getenv("ARM_SUBSCRIPTION_ID")) -} - -func testAccAzureRMDataShareDataSetBlobStorageFolder_basic(data acceptance.TestData) string { - config := testAccAzureRMDataShareDataSetBlobStorage_template(data) - return fmt.Sprintf(` -%[1]s - -resource "azurerm_data_share_dataset_blob_storage" "test" { - name = "acctest-DSDSBS-folder-%[2]d" - data_share_id = azurerm_data_share.test.id - container_name = azurerm_storage_container.test.name - storage_account { - name = azurerm_storage_account.test.name - resource_group_name = azurerm_storage_account.test.resource_group_name - subscription_id = "%[3]s" - } - folder_path = "test" - depends_on = [ - azurerm_role_assignment.test, - ] -} -`, config, data.RandomInteger, os.Getenv("ARM_SUBSCRIPTION_ID")) -} - -func testAccAzureRMDataShareDataSetBlobStorageContainer_basic(data acceptance.TestData) string { - config := testAccAzureRMDataShareDataSetBlobStorage_template(data) - return fmt.Sprintf(` -%[1]s - -resource "azurerm_data_share_dataset_blob_storage" "test" { - name = "acctest-DSDSBS-folder-%[2]d" - data_share_id = azurerm_data_share.test.id - container_name = azurerm_storage_container.test.name - storage_account { - name = azurerm_storage_account.test.name - resource_group_name = azurerm_storage_account.test.resource_group_name - subscription_id = "%[3]s" - } - depends_on = [ - azurerm_role_assignment.test, - ] -} -`, 
config, data.RandomInteger, os.Getenv("ARM_SUBSCRIPTION_ID")) -} - -func testAccAzureRMDataShareDataSetBlobStorage_requiresImport(data acceptance.TestData) string { - config := testAccAzureRMDataShareDataSetBlobStorageFile_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_data_share_dataset_blob_storage" "import" { - name = azurerm_data_share_dataset_blob_storage.test.name - data_share_id = azurerm_data_share.test.id - container_name = azurerm_data_share_dataset_blob_storage.test.container_name - storage_account { - name = azurerm_data_share_dataset_blob_storage.test.storage_account.0.name - resource_group_name = azurerm_data_share_dataset_blob_storage.test.storage_account.0.resource_group_name - subscription_id = azurerm_data_share_dataset_blob_storage.test.storage_account.0.subscription_id - } -} -`, config) -} diff --git a/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_data_lake_gen1_test.go b/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_data_lake_gen1_test.go deleted file mode 100644 index 6ef6e981f4f4..000000000000 --- a/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_data_lake_gen1_test.go +++ /dev/null @@ -1,174 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccAzureRMDataShareDataSetDataLakeGen1File_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_data_lake_gen1", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_data_lake_gen1"), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataShareDataSetDataLakeGen1File_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareDataSetExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDataShareDataSetDataLakeGen1Folder_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_data_lake_gen1", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_data_lake_gen1"), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataShareDataSetDataLakeGen1Folder_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareDataSetExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDataShareDataSetDataLakeGen1_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_data_lake_gen1", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_data_lake_gen1"), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataShareDataSetDataLakeGen1File_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareDataSetExists(data.ResourceName), - ), - }, - 
data.RequiresImportErrorStep(testAccAzureRMDataShareDataSetDataLakeGen1_requiresImport), - }, - }) -} - -func testAccAzureRMDataShareDataSetDataLakeGen1_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -provider "azuread" { -} - -resource "azurerm_resource_group" "test" { - name = "acctest-datashare-%[1]d" - location = "%[2]s" -} - -resource "azurerm_data_share_account" "test" { - name = "acctest-DSA-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - identity { - type = "SystemAssigned" - } - - tags = { - env = "Test" - } -} - -resource "azurerm_data_share" "test" { - name = "acctest_DS_%[1]d" - account_id = azurerm_data_share_account.test.id - kind = "CopyBased" -} - -resource "azurerm_data_lake_store" "test" { - name = "acctestdls%[3]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - firewall_state = "Disabled" -} - -resource "azurerm_data_lake_store_file" "test" { - account_name = azurerm_data_lake_store.test.name - local_file_path = "./testdata/application_gateway_test.cer" - remote_file_path = "/test/application_gateway_test.cer" -} - -data "azuread_service_principal" "test" { - display_name = azurerm_data_share_account.test.name -} - -resource "azurerm_role_assignment" "test" { - scope = azurerm_data_lake_store.test.id - role_definition_name = "Owner" - principal_id = data.azuread_service_principal.test.object_id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomIntOfLength(12)) -} - -func testAccAzureRMDataShareDataSetDataLakeGen1File_basic(data acceptance.TestData) string { - config := testAccAzureRMDataShareDataSetDataLakeGen1_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_data_share_dataset_data_lake_gen1" "test" { - name = "acctest-DSDL1-%d" - data_share_id = azurerm_data_share.test.id - data_lake_store_id = azurerm_data_lake_store.test.id - file_name = "application_gateway_test.cer" - folder_path = "test" - depends_on = [ - azurerm_role_assignment.test, - ] -} -`, config, data.RandomInteger) -} - -func testAccAzureRMDataShareDataSetDataLakeGen1Folder_basic(data acceptance.TestData) string { - config := testAccAzureRMDataShareDataSetDataLakeGen1_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_data_share_dataset_data_lake_gen1" "test" { - name = "acctest-DSDL1-%d" - data_share_id = azurerm_data_share.test.id - data_lake_store_id = azurerm_data_lake_store.test.id - folder_path = "test" - depends_on = [ - azurerm_role_assignment.test, - ] -} -`, config, data.RandomInteger) -} - -func testAccAzureRMDataShareDataSetDataLakeGen1_requiresImport(data acceptance.TestData) string { - config := testAccAzureRMDataShareDataSetDataLakeGen1File_basic(data) - return fmt.Sprintf(` -%s -resource "azurerm_data_share_dataset_data_lake_gen1" "import" { - name = azurerm_data_share_dataset_data_lake_gen1.test.name - data_share_id = azurerm_data_share.test.id - data_lake_store_id = azurerm_data_share_dataset_data_lake_gen1.test.data_lake_store_id - folder_path = azurerm_data_share_dataset_data_lake_gen1.test.folder_path -} -`, config) -} diff --git a/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_data_lake_gen2_test.go b/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_data_lake_gen2_test.go deleted file mode 100644 index 13b9f8632ec1..000000000000 --- 
a/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_data_lake_gen2_test.go +++ /dev/null @@ -1,211 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccAzureRMDataShareDataSetDataLakeGen2File_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_data_lake_gen2", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_data_lake_gen2"), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataShareDataSetDataLakeGen2File_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareDataSetExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDataShareDataSetDataLakeGen2Folder_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_data_lake_gen2", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_data_lake_gen2"), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataShareDataSetDataLakeGen2Folder_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareDataSetExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDataShareDataSetDataLakeGen2FileSystem_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_data_lake_gen2", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_data_lake_gen2"), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataShareDataSetDataLakeGen2FileSystem_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareDataSetExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDataShareDataLakeGen2DataSet_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_data_lake_gen2", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_data_lake_gen2"), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataShareDataSetDataLakeGen2File_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareDataSetExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMDataShareDataLakeGen2DataSet_requiresImport), - }, - }) -} - -func testAccAzureRMDataShareDataLakeGen2DataSet_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -provider "azuread" { -} - -resource "azurerm_resource_group" "test" { - name = "acctest-datashare-%[1]d" - location = 
"%[2]s" -} - -resource "azurerm_data_share_account" "test" { - name = "acctest-dsa-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - identity { - type = "SystemAssigned" - } -} - -resource "azurerm_data_share" "test" { - name = "acctest_ds_%[1]d" - account_id = azurerm_data_share_account.test.id - kind = "CopyBased" -} - -resource "azurerm_storage_account" "test" { - name = "accteststr%[3]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_kind = "BlobStorage" - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_storage_data_lake_gen2_filesystem" "test" { - name = "acctest-%[1]d" - storage_account_id = azurerm_storage_account.test.id -} - -data "azuread_service_principal" "test" { - display_name = azurerm_data_share_account.test.name -} - -resource "azurerm_role_assignment" "test" { - scope = azurerm_storage_account.test.id - role_definition_name = "Storage Blob Data Reader" - principal_id = data.azuread_service_principal.test.object_id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomIntOfLength(12)) -} - -func testAccAzureRMDataShareDataSetDataLakeGen2File_basic(data acceptance.TestData) string { - config := testAccAzureRMDataShareDataLakeGen2DataSet_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_data_share_dataset_data_lake_gen2" "test" { - name = "acctest-dlds-%d" - share_id = azurerm_data_share.test.id - storage_account_id = azurerm_storage_account.test.id - file_system_name = azurerm_storage_data_lake_gen2_filesystem.test.name - file_path = "myfile.txt" - depends_on = [ - azurerm_role_assignment.test, - ] -} -`, config, data.RandomInteger) -} - -func testAccAzureRMDataShareDataSetDataLakeGen2Folder_basic(data acceptance.TestData) string { - config := testAccAzureRMDataShareDataLakeGen2DataSet_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_data_share_dataset_data_lake_gen2" "test" { - name = "acctest-dlds-%d" - share_id = azurerm_data_share.test.id - storage_account_id = azurerm_storage_account.test.id - file_system_name = azurerm_storage_data_lake_gen2_filesystem.test.name - folder_path = "test" - depends_on = [ - azurerm_role_assignment.test, - ] -} -`, config, data.RandomInteger) -} - -func testAccAzureRMDataShareDataSetDataLakeGen2FileSystem_basic(data acceptance.TestData) string { - config := testAccAzureRMDataShareDataLakeGen2DataSet_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_data_share_dataset_data_lake_gen2" "test" { - name = "acctest-dlds-%d" - share_id = azurerm_data_share.test.id - storage_account_id = azurerm_storage_account.test.id - file_system_name = azurerm_storage_data_lake_gen2_filesystem.test.name - depends_on = [ - azurerm_role_assignment.test, - ] -} -`, config, data.RandomInteger) -} - -func testAccAzureRMDataShareDataLakeGen2DataSet_requiresImport(data acceptance.TestData) string { - config := testAccAzureRMDataShareDataSetDataLakeGen2File_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_data_share_dataset_data_lake_gen2" "import" { - name = azurerm_data_share_dataset_data_lake_gen2.test.name - share_id = azurerm_data_share.test.id - storage_account_id = azurerm_data_share_dataset_data_lake_gen2.test.storage_account_id - file_system_name = azurerm_data_share_dataset_data_lake_gen2.test.file_system_name - file_path = azurerm_data_share_dataset_data_lake_gen2.test.file_path -} -`, config) -} diff --git 
a/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_kusto_cluster_test.go b/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_kusto_cluster_test.go deleted file mode 100644 index 4234eae5052e..000000000000 --- a/azurerm/internal/services/datashare/tests/resource_arm_data_share_dataset_kusto_cluster_test.go +++ /dev/null @@ -1,124 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccAzureRMDataShareKustoClusterDataSet_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_kusto_cluster", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_kusto_cluster"), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataShareKustoClusterDataSet_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareDataSetExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "display_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "kusto_cluster_location"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDataShareKustoClusterDataSet_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_share_dataset_kusto_cluster", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataShareDataSetDestroy("azurerm_data_share_dataset_kusto_cluster"), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataShareKustoClusterDataSet_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareDataSetExists(data.ResourceName), - ), - }, - data.ImportStep(), - data.RequiresImportErrorStep(testAccAzureRMDataShareKustoClusterDataSet_requiresImport), - }, - }) -} - -func testAccAzureRMDataShareKustoClusterDataSet_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctest-datashare-%[1]d" - location = "%[2]s" -} - -resource "azurerm_data_share_account" "test" { - name = "acctest-DSA-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - identity { - type = "SystemAssigned" - } -} - -resource "azurerm_data_share" "test" { - name = "acctest_DS_%[1]d" - account_id = azurerm_data_share_account.test.id - kind = "InPlace" -} - -resource "azurerm_kusto_cluster" "test" { - name = "acctestkc%[3]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku { - name = "Dev(No SLA)_Standard_D11_v2" - capacity = 1 - } -} - -resource "azurerm_role_assignment" "test" { - scope = azurerm_kusto_cluster.test.id - role_definition_name = "Contributor" - principal_id = azurerm_data_share_account.test.identity.0.principal_id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomIntOfLength(12)) -} - -func testAccAzureRMDataShareKustoClusterDataSet_basic(data acceptance.TestData) string { - config := testAccAzureRMDataShareKustoClusterDataSet_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_data_share_dataset_kusto_cluster" "test" { - name = 
"acctest-DSKC-%d" - share_id = azurerm_data_share.test.id - kusto_cluster_id = azurerm_kusto_cluster.test.id - depends_on = [ - azurerm_role_assignment.test, - ] -} -`, config, data.RandomInteger) -} - -func testAccAzureRMDataShareKustoClusterDataSet_requiresImport(data acceptance.TestData) string { - config := testAccAzureRMDataShareKustoClusterDataSet_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_data_share_dataset_kusto_cluster" "import" { - name = azurerm_data_share_dataset_kusto_cluster.test.name - share_id = azurerm_data_share.test.id - kusto_cluster_id = azurerm_kusto_cluster.test.id -} -`, config) -} diff --git a/azurerm/internal/services/datashare/tests/resource_arm_data_share_test.go b/azurerm/internal/services/datashare/tests/resource_arm_data_share_test.go deleted file mode 100644 index 6829c4fc8c48..000000000000 --- a/azurerm/internal/services/datashare/tests/resource_arm_data_share_test.go +++ /dev/null @@ -1,308 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - "time" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMDataShare_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_share", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataShareDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataShare_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDataShare_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_share", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataShareDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataShare_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMDataShare_requiresImport), - }, - }) -} - -func TestAccAzureRMDataShare_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_share", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataShareDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataShare_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDataShare_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_share", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataShareDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataShare_basic(data), - Check: 
resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMDataShare_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMDataShare_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDataShare_snapshotSchedule(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_share", "test") - startTime := time.Now().Add(time.Hour * 7).Format(time.RFC3339) - startTime2 := time.Now().Add(time.Hour * 8).Format(time.RFC3339) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataShareDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataShare_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMDataShare_snapshotSchedule(data, startTime), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMDataShare_snapshotScheduleUpdated(data, startTime2), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMDataShare_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataShareExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMDataShareExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).DataShare.SharesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("dataShare not found: %s", resourceName) - } - id, err := parse.DataShareID(rs.Primary.ID) - if err != nil { - return err - } - if resp, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.Name); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("bad: data_share share %q does not exist", id.Name) - } - return fmt.Errorf("bad: Get on DataShareClient: %+v", err) - } - return nil - } -} - -func testCheckAzureRMDataShareDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).DataShare.SharesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_data_share" { - continue - } - id, err := parse.DataShareID(rs.Primary.ID) - if err != nil { - return err - } - if resp, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.Name); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("bad: Get on data_share.shareClient: %+v", err) - } - } - return nil - } - return nil -} -func testAccAzureRMDataShare_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctest-datashare-%d" - location = "%s" -} - -resource "azurerm_data_share_account" "test" { - name = "acctest-dsa-%d" - location = azurerm_resource_group.test.location - 
resource_group_name = azurerm_resource_group.test.name - identity { - type = "SystemAssigned" - } - - tags = { - env = "Test" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMDataShare_basic(data acceptance.TestData) string { - template := testAccAzureRMDataShare_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_data_share" "test" { - name = "acctest_ds_%d" - account_id = azurerm_data_share_account.test.id - kind = "CopyBased" -} -`, template, data.RandomInteger) -} - -func testAccAzureRMDataShare_requiresImport(data acceptance.TestData) string { - config := testAccAzureRMDataShare_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_data_share" "import" { - name = azurerm_data_share.test.name - account_id = azurerm_data_share_account.test.id - kind = azurerm_data_share.test.kind -} -`, config) -} - -func testAccAzureRMDataShare_complete(data acceptance.TestData) string { - template := testAccAzureRMDataShare_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_data_share" "test" { - name = "acctest_ds_%d" - account_id = azurerm_data_share_account.test.id - kind = "CopyBased" - description = "share desc" - terms = "share terms" -} -`, template, data.RandomInteger) -} - -func testAccAzureRMDataShare_update(data acceptance.TestData) string { - template := testAccAzureRMDataShare_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_data_share" "test" { - name = "acctest_ds_%d" - account_id = azurerm_data_share_account.test.id - kind = "CopyBased" - description = "share desc 2" - terms = "share terms 2" -} -`, template, data.RandomInteger) -} - -func testAccAzureRMDataShare_snapshotSchedule(data acceptance.TestData, startTime string) string { - template := testAccAzureRMDataShare_template(data) - return fmt.Sprintf(` -%[1]s - -resource "azurerm_data_share" "test" { - name = "acctest_ds_%[2]d" - account_id = azurerm_data_share_account.test.id - kind = "CopyBased" - - snapshot_schedule { - name = "acctest-ss-%[2]d" - recurrence = "Day" - start_time = "%[3]s" - } -} -`, template, data.RandomInteger, startTime) -} - -func testAccAzureRMDataShare_snapshotScheduleUpdated(data acceptance.TestData, startTime string) string { - template := testAccAzureRMDataShare_template(data) - return fmt.Sprintf(` -%[1]s - -resource "azurerm_data_share" "test" { - name = "acctest_ds_%[2]d" - account_id = azurerm_data_share_account.test.id - kind = "CopyBased" - - snapshot_schedule { - name = "acctest-ss2-%[2]d" - recurrence = "Hour" - start_time = "%[3]s" - } -} -`, template, data.RandomInteger, startTime) -} diff --git a/azurerm/internal/services/datashare/validate/account_id.go b/azurerm/internal/services/datashare/validate/account_id.go new file mode 100644 index 000000000000..70d8f29cf5f7 --- /dev/null +++ b/azurerm/internal/services/datashare/validate/account_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" +) + +func AccountID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.AccountID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/datashare/validate/account_id_test.go 
b/azurerm/internal/services/datashare/validate/account_id_test.go new file mode 100644 index 000000000000..f02bf1dd7da2 --- /dev/null +++ b/azurerm/internal/services/datashare/validate/account_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestAccountID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DATASHARE/ACCOUNTS/ACCOUNT1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := AccountID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/datashare/validate/account_name.go b/azurerm/internal/services/datashare/validate/account_name.go new file mode 100644 index 000000000000..8c94a44dfe33 --- /dev/null +++ b/azurerm/internal/services/datashare/validate/account_name.go @@ -0,0 +1,14 @@ +package validate + +import ( + "regexp" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" +) + +func AccountName() schema.SchemaValidateFunc { + return validation.StringMatch( + regexp.MustCompile(`^[^<>%&:\\?/#*$^();,.\|+={}\[\]!~@]{3,90}$`), `Data share account name should have length of 3 - 90, and cannot contain <>%&:\?/#*$^();,.|+={}[]!~@.`, + ) +} diff --git a/azurerm/internal/services/datashare/validate/account_name_test.go b/azurerm/internal/services/datashare/validate/account_name_test.go new file mode 100644 index 000000000000..a03a01a2ec40 --- /dev/null +++ b/azurerm/internal/services/datashare/validate/account_name_test.go @@ -0,0 +1,57 @@ +package validate + +import "testing" + +func TestAccountName(t *testing.T) { + tests := []struct { + name string + input string + valid bool + }{ + { + name: "Invalid Character 1", + input: "DC\\", + valid: false, + }, + { + name: "Invalid Character 2", + input: "[abc]", + valid: false, + }, + { + name: "Valid Account Name", + input: "acc-test", + valid: true, + }, + { + name: "Invalid Character 3", + input: "test&", + valid: false, + }, + { + name: "Too Few Character", + input: "ab", + valid: false, + }, + { + name: "Valid Account Name 2", + input: "aa-BB_88", + valid: true, + }, + { + name: "Valid Account Name 3", + input: "aac-", + valid: true, + }, + 
} + validationFunction := AccountName() + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + _, err := validationFunction(tt.input, "") + valid := err == nil + if valid != tt.valid { + t.Errorf("Expected valid status %t but got %t for input %s", tt.valid, valid, tt.input) + } + }) + } +} diff --git a/azurerm/internal/services/datashare/validate/data_set_id.go b/azurerm/internal/services/datashare/validate/data_set_id.go new file mode 100644 index 000000000000..b4d14ce40b84 --- /dev/null +++ b/azurerm/internal/services/datashare/validate/data_set_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" +) + +func DataSetID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.DataSetID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/datashare/validate/data_set_id_test.go b/azurerm/internal/services/datashare/validate/data_set_id_test.go new file mode 100644 index 000000000000..e752aa940385 --- /dev/null +++ b/azurerm/internal/services/datashare/validate/data_set_id_test.go @@ -0,0 +1,100 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestDataSetID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing AccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/", + Valid: false, + }, + + { + // missing value for AccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/", + Valid: false, + }, + + { + // missing ShareName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/", + Valid: false, + }, + + { + // missing value for ShareName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/shares/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/shares/share1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/shares/share1/dataSets/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/shares/share1/dataSets/dataSet1", + Valid: true, + }, + + { + // upper-cased + Input: 
"/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DATASHARE/ACCOUNTS/ACCOUNT1/SHARES/SHARE1/DATASETS/DATASET1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := DataSetID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/datashare/validate/data_set_name.go b/azurerm/internal/services/datashare/validate/data_set_name.go new file mode 100644 index 000000000000..d60990ff04ba --- /dev/null +++ b/azurerm/internal/services/datashare/validate/data_set_name.go @@ -0,0 +1,14 @@ +package validate + +import ( + "regexp" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" +) + +func DataSetName() schema.SchemaValidateFunc { + return validation.StringMatch( + regexp.MustCompile(`^[\w-]{2,90}$`), `Dataset name can only contain number, letters, - and _, and must be between 2 and 90 characters long.`, + ) +} diff --git a/azurerm/internal/services/datashare/validate/data_share.go b/azurerm/internal/services/datashare/validate/data_share.go deleted file mode 100644 index c40c8e51593e..000000000000 --- a/azurerm/internal/services/datashare/validate/data_share.go +++ /dev/null @@ -1,92 +0,0 @@ -package validate - -import ( - "fmt" - "regexp" - - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - dataLakeParse "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datalake/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" - StorageParse "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/storage/parsers" -) - -func DataShareAccountName() schema.SchemaValidateFunc { - return validation.StringMatch( - regexp.MustCompile(`^[^<>%&:\\?/#*$^();,.\|+={}\[\]!~@]{3,90}$`), `Data share account name should have length of 3 - 90, and cannot contain <>%&:\?/#*$^();,.|+={}[]!~@.`, - ) -} - -func DatashareName() schema.SchemaValidateFunc { - return validation.StringMatch( - regexp.MustCompile(`^\w{2,90}$`), `DataShare name can only contain alphanumeric characters and _, and must be between 2 and 90 characters long.`, - ) -} - -func DatashareAccountID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return warnings, errors - } - - if _, err := parse.DataShareAccountID(v); err != nil { - errors = append(errors, fmt.Errorf("can not parse %q as a Datashare account id: %v", k, err)) - } - - return warnings, errors -} - -func DataShareSyncName() schema.SchemaValidateFunc { - return validation.StringMatch( - regexp.MustCompile(`^[^&%#/]{1,90}$`), `Data share snapshot schedule name should have length of 1 - 90, and cannot contain &%#/`, - ) -} - -func DataShareID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return warnings, errors - } - - if _, err := parse.DataShareID(v); err != nil { - errors = append(errors, fmt.Errorf("can not parse %q as a data share id: %v", k, err)) - } - - return warnings, errors -} - -func DatashareDataSetName() schema.SchemaValidateFunc { - return 
validation.StringMatch( - regexp.MustCompile(`^[\w-]{2,90}$`), `Dataset name can only contain number, letters, - and _, and must be between 2 and 90 characters long.`, - ) -} - -func DatalakeStoreID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return warnings, errors - } - - if _, err := dataLakeParse.DataLakeStoreID(v); err != nil { - errors = append(errors, fmt.Errorf("can not parse %q as a Data Lake Store id: %v", k, err)) - } - - return warnings, errors -} - -func StorageAccountID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return warnings, errors - } - - if _, err := StorageParse.ParseAccountID(v); err != nil { - errors = append(errors, fmt.Errorf("can not parse %q as a Storage Account id: %v", k, err)) - } - - return warnings, errors -} diff --git a/azurerm/internal/services/datashare/validate/data_share_test.go b/azurerm/internal/services/datashare/validate/data_share_test.go deleted file mode 100644 index 9542d7bd9401..000000000000 --- a/azurerm/internal/services/datashare/validate/data_share_test.go +++ /dev/null @@ -1,135 +0,0 @@ -package validate - -import "testing" - -func TestDataShareAccountName(t *testing.T) { - tests := []struct { - name string - input string - valid bool - }{ - { - name: "Invalid Character 1", - input: "DC\\", - valid: false, - }, - { - name: "Invalid Character 2", - input: "[abc]", - valid: false, - }, - { - name: "Valid Account Name", - input: "acc-test", - valid: true, - }, - { - name: "Invalid Character 3", - input: "test&", - valid: false, - }, - { - name: "Too Few Character", - input: "ab", - valid: false, - }, - { - name: "Valid Account Name 2", - input: "aa-BB_88", - valid: true, - }, - { - name: "Valid Account Name 3", - input: "aac-", - valid: true, - }, - } - var validationFunction = DataShareAccountName() - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - _, err := validationFunction(tt.input, "") - valid := err == nil - if valid != tt.valid { - t.Errorf("Expected valid status %t but got %t for input %s", tt.valid, valid, tt.input) - } - }) - } -} - -func TestDatashareName(t *testing.T) { - tests := []struct { - name string - input string - valid bool - }{ - { - name: "invalid character", - input: "9()", - valid: false, - }, - { - name: "less character", - input: "a", - valid: false, - }, - { - name: "invalid character2", - input: "adgeFG-98", - valid: false, - }, - { - name: "valid", - input: "dfakF88u7_", - valid: true, - }, - } - var validationFunction = DatashareName() - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - _, err := validationFunction(tt.input, "") - valid := err == nil - if valid != tt.valid { - t.Errorf("expected valid status %t but got %t for input %s", tt.valid, valid, tt.input) - } - }) - } -} - -func TestDatashareSyncName(t *testing.T) { - tests := []struct { - name string - input string - valid bool - }{ - { - name: "valid characters", - input: "*() _-$@!", - valid: true, - }, - { - name: "Empty", - input: "", - valid: false, - }, - { - name: "invalid characters", - input: "&^*", - valid: false, - }, - { - name: "invalid characters", - input: "dfwe%", - valid: false, - }, - } - var validationFunction = DataShareSyncName() - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - _, err := validationFunction(tt.input, 
"") - valid := err == nil - if valid != tt.valid { - t.Errorf("expected valid status %t but got %t for input %s", tt.valid, valid, tt.input) - } - }) - } -} diff --git a/azurerm/internal/services/datashare/validate/share_id.go b/azurerm/internal/services/datashare/validate/share_id.go new file mode 100644 index 000000000000..b0b355d78722 --- /dev/null +++ b/azurerm/internal/services/datashare/validate/share_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" +) + +func ShareID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ShareID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/datashare/validate/share_id_test.go b/azurerm/internal/services/datashare/validate/share_id_test.go new file mode 100644 index 000000000000..5e777f0a6be5 --- /dev/null +++ b/azurerm/internal/services/datashare/validate/share_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestShareID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing AccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/", + Valid: false, + }, + + { + // missing value for AccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/shares/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1/shares/share1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DATASHARE/ACCOUNTS/ACCOUNT1/SHARES/SHARE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ShareID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/datashare/validate/share_name.go b/azurerm/internal/services/datashare/validate/share_name.go new file mode 100644 index 000000000000..6c7a200aece1 --- /dev/null +++ 
b/azurerm/internal/services/datashare/validate/share_name.go @@ -0,0 +1,14 @@ +package validate + +import ( + "regexp" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" +) + +func ShareName() schema.SchemaValidateFunc { + return validation.StringMatch( + regexp.MustCompile(`^\w{2,90}$`), `DataShare name can only contain alphanumeric characters and _, and must be between 2 and 90 characters long.`, + ) +} diff --git a/azurerm/internal/services/datashare/validate/share_name_test.go b/azurerm/internal/services/datashare/validate/share_name_test.go new file mode 100644 index 000000000000..31384525c7a0 --- /dev/null +++ b/azurerm/internal/services/datashare/validate/share_name_test.go @@ -0,0 +1,42 @@ +package validate + +import "testing" + +func TestShareName(t *testing.T) { + tests := []struct { + name string + input string + valid bool + }{ + { + name: "invalid character", + input: "9()", + valid: false, + }, + { + name: "less character", + input: "a", + valid: false, + }, + { + name: "invalid character2", + input: "adgeFG-98", + valid: false, + }, + { + name: "valid", + input: "dfakF88u7_", + valid: true, + }, + } + validationFunction := ShareName() + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + _, err := validationFunction(tt.input, "") + valid := err == nil + if valid != tt.valid { + t.Errorf("expected valid status %t but got %t for input %s", tt.valid, valid, tt.input) + } + }) + } +} diff --git a/azurerm/internal/services/datashare/validate/snapshot_schedule_name.go b/azurerm/internal/services/datashare/validate/snapshot_schedule_name.go new file mode 100644 index 000000000000..551759a36f23 --- /dev/null +++ b/azurerm/internal/services/datashare/validate/snapshot_schedule_name.go @@ -0,0 +1,14 @@ +package validate + +import ( + "regexp" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" +) + +func SnapshotScheduleName() schema.SchemaValidateFunc { + return validation.StringMatch( + regexp.MustCompile(`^[^&%#/]{1,90}$`), `Data share snapshot schedule name should have length of 1 - 90, and cannot contain &%#/`, + ) +} diff --git a/azurerm/internal/services/datashare/validate/snapshot_schedule_name_test.go b/azurerm/internal/services/datashare/validate/snapshot_schedule_name_test.go new file mode 100644 index 000000000000..7fa0ef6d6127 --- /dev/null +++ b/azurerm/internal/services/datashare/validate/snapshot_schedule_name_test.go @@ -0,0 +1,42 @@ +package validate + +import "testing" + +func TestSnapshotScheduleName(t *testing.T) { + tests := []struct { + name string + input string + valid bool + }{ + { + name: "valid characters", + input: "*() _-$@!", + valid: true, + }, + { + name: "Empty", + input: "", + valid: false, + }, + { + name: "invalid characters", + input: "&^*", + valid: false, + }, + { + name: "invalid characters", + input: "dfwe%", + valid: false, + }, + } + validationFunction := SnapshotScheduleName() + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + _, err := validationFunction(tt.input, "") + valid := err == nil + if valid != tt.valid { + t.Errorf("expected valid status %t but got %t for input %s", tt.valid, valid, tt.input) + } + }) + } +} diff --git a/azurerm/internal/services/desktopvirtualization/migration/application_group_v0.go b/azurerm/internal/services/desktopvirtualization/migration/application_group_v0.go new file mode 100644 index 000000000000..d3515d769187 --- /dev/null 
+++ b/azurerm/internal/services/desktopvirtualization/migration/application_group_v0.go @@ -0,0 +1,71 @@ +package migration + +import ( + "log" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/desktopvirtualization/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" +) + +func ApplicationGroupUpgradeV0Schema() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + + "location": azure.SchemaLocation(), + + "resource_group_name": azure.SchemaResourceGroupName(), + + "type": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + + "host_pool_id": { + Type: schema.TypeString, + Required: true, + }, + + "friendly_name": { + Type: schema.TypeString, + Optional: true, + }, + + "description": { + Type: schema.TypeString, + Optional: true, + }, + + "tags": tags.Schema(), + }, + } +} + +func ApplicationGroupUpgradeV0ToV1(rawState map[string]interface{}, _ interface{}) (map[string]interface{}, error) { + oldId := rawState["id"].(string) + id, err := parse.ApplicationGroupIDInsensitively(oldId) + if err != nil { + return nil, err + } + newId := id.ID() + log.Printf("[DEBUG] Updating ID from %q to %q", oldId, newId) + rawState["id"] = newId + + oldHostPoolId := rawState["host_pool_id"].(string) + hostPoolId, err := parse.HostPoolIDInsensitively(oldHostPoolId) + if err != nil { + return nil, err + } + newHostPoolId := hostPoolId.ID() + log.Printf("[DEBUG] Updating Host Pool ID from %q to %q", oldHostPoolId, newHostPoolId) + rawState["host_pool_id"] = newHostPoolId + + return rawState, nil +} diff --git a/azurerm/internal/services/desktopvirtualization/migration/host_pool_v0.go b/azurerm/internal/services/desktopvirtualization/migration/host_pool_v0.go new file mode 100644 index 000000000000..6fbb0a62caff --- /dev/null +++ b/azurerm/internal/services/desktopvirtualization/migration/host_pool_v0.go @@ -0,0 +1,115 @@ +package migration + +import ( + "log" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/desktopvirtualization/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" +) + +func HostPoolUpgradeV0Schema() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + + "location": azure.SchemaLocation(), + + "resource_group_name": azure.SchemaResourceGroupName(), + + "type": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + + "load_balancer_type": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + + "friendly_name": { + Type: schema.TypeString, + Optional: true, + }, + + "description": { + Type: schema.TypeString, + Optional: true, + }, + + "validate_environment": { + Type: schema.TypeBool, + Optional: true, + Default: false, + }, + + "personal_desktop_assignment_type": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + }, + + "maximum_sessions_allowed": { + Type: schema.TypeInt, + Optional: true, + Default: 999999, + }, + + "preferred_app_group_type": { + Type: 
schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Preferred App Group type to display", + }, + + "registration_info": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "expiration_date": { + Type: schema.TypeString, + Required: true, + }, + + "reset_token": { + Type: schema.TypeBool, + Computed: true, + }, + + "token": { + Type: schema.TypeString, + Sensitive: true, + Computed: true, + }, + }, + }, + }, + + "tags": tags.Schema(), + }, + } +} + +func HostPoolUpgradeV0ToV1(rawState map[string]interface{}, _ interface{}) (map[string]interface{}, error) { + oldId := rawState["id"].(string) + + id, err := parse.HostPoolIDInsensitively(oldId) + if err != nil { + return nil, err + } + newId := id.ID() + + log.Printf("[DEBUG] Updating ID from %q to %q", oldId, newId) + rawState["id"] = newId + + return rawState, nil +} diff --git a/azurerm/internal/services/desktopvirtualization/migration/workspace_application_group_association_v0.go b/azurerm/internal/services/desktopvirtualization/migration/workspace_application_group_association_v0.go new file mode 100644 index 000000000000..48fefc392f69 --- /dev/null +++ b/azurerm/internal/services/desktopvirtualization/migration/workspace_application_group_association_v0.go @@ -0,0 +1,51 @@ +package migration + +import ( + "log" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/desktopvirtualization/parse" +) + +func WorkspaceApplicationGroupAssociationUpgradeV0Schema() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + "workspace_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + + "application_group_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + }, + } +} + +func WorkspaceApplicationGroupAssociationUpgradeV0ToV1(rawState map[string]interface{}, _ interface{}) (map[string]interface{}, error) { + oldId := rawState["id"].(string) + + id, err := parse.WorkspaceApplicationGroupAssociationIDInsensitively(oldId) + if err != nil { + return nil, err + } + newId := id.ID() + + log.Printf("[DEBUG] Updating ID from %q to %q", oldId, newId) + rawState["id"] = newId + + oldApplicationGroupId := rawState["application_group_id"].(string) + newApplicationGroupId := id.ApplicationGroup.ID() + log.Printf("[DEBUG] Updating Application Group ID from %q to %q", oldApplicationGroupId, newApplicationGroupId) + rawState["application_group_id"] = newApplicationGroupId + + oldWorkspaceId := rawState["workspace_id"].(string) + newWorkspaceId := id.Workspace.ID() + log.Printf("[DEBUG] Updating Workspace ID from %q to %q", oldWorkspaceId, newWorkspaceId) + rawState["workspace_id"] = newWorkspaceId + + return rawState, nil +} diff --git a/azurerm/internal/services/desktopvirtualization/migration/workspace_v0.go b/azurerm/internal/services/desktopvirtualization/migration/workspace_v0.go new file mode 100644 index 000000000000..92affad35b86 --- /dev/null +++ b/azurerm/internal/services/desktopvirtualization/migration/workspace_v0.go @@ -0,0 +1,53 @@ +package migration + +import ( + "log" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/desktopvirtualization/parse" + 
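The state upgraders in this package all follow the same shape: decode the old ID with the case-insensitive parser, then write back the canonically-cased ID produced by ID(). A rough sketch of the effect, written as a hypothetical test inside the migration package (the test name and sample IDs are invented; ApplicationGroupUpgradeV0ToV1 itself is the function defined above):

package migration

import "testing"

// Illustrative only: a legacy (V0) state entry with lower-cased segments is
// rewritten so that both IDs use the provider's canonical casing.
func TestApplicationGroupUpgradeV0ToV1_sketch(t *testing.T) {
	rawState := map[string]interface{}{
		"id":           "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DesktopVirtualization/applicationgroups/agroup1",
		"host_pool_id": "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DesktopVirtualization/hostpools/pool1",
	}

	newState, err := ApplicationGroupUpgradeV0ToV1(rawState, nil)
	if err != nil {
		t.Fatal(err)
	}

	wantId := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DesktopVirtualization/applicationGroups/agroup1"
	wantHostPoolId := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DesktopVirtualization/hostPools/pool1"
	if newState["id"] != wantId {
		t.Fatalf("expected id %q but got %q", wantId, newState["id"])
	}
	if newState["host_pool_id"] != wantHostPoolId {
		t.Fatalf("expected host_pool_id %q but got %q", wantHostPoolId, newState["host_pool_id"])
	}
}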
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" +) + +func WorkspaceUpgradeV0Schema() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + + "location": azure.SchemaLocation(), + + "resource_group_name": azure.SchemaResourceGroupName(), + + "friendly_name": { + Type: schema.TypeString, + Optional: true, + }, + + "description": { + Type: schema.TypeString, + Optional: true, + }, + + "tags": tags.Schema(), + }, + } +} + +func WorkspaceUpgradeV0ToV1(rawState map[string]interface{}, _ interface{}) (map[string]interface{}, error) { + oldId := rawState["id"].(string) + + id, err := parse.WorkspaceID(oldId) + if err != nil { + return nil, err + } + newId := id.ID() + + log.Printf("[DEBUG] Updating ID from %q to %q", oldId, newId) + rawState["id"] = newId + + return rawState, nil +} diff --git a/azurerm/internal/services/desktopvirtualization/parse/application_group.go b/azurerm/internal/services/desktopvirtualization/parse/application_group.go new file mode 100644 index 000000000000..36121457a848 --- /dev/null +++ b/azurerm/internal/services/desktopvirtualization/parse/application_group.go @@ -0,0 +1,113 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ApplicationGroupId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewApplicationGroupID(subscriptionId, resourceGroup, name string) ApplicationGroupId { + return ApplicationGroupId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id ApplicationGroupId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Application Group", segmentsStr) +} + +func (id ApplicationGroupId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DesktopVirtualization/applicationGroups/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// ApplicationGroupID parses a ApplicationGroup ID into an ApplicationGroupId struct +func ApplicationGroupID(input string) (*ApplicationGroupId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ApplicationGroupId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("applicationGroups"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} + +// ApplicationGroupIDInsensitively parses an ApplicationGroup ID into an ApplicationGroupId struct, insensitively +// This should only be used to parse an ID for rewriting, the ApplicationGroupID +// method should be used instead for validation etc. +// +// Whilst this may seem strange, this enables Terraform have consistent casing +// which works around issues in Core, whilst handling broken API responses. 
+func ApplicationGroupIDInsensitively(input string) (*ApplicationGroupId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ApplicationGroupId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + // find the correct casing for the 'applicationGroups' segment + applicationGroupsKey := "applicationGroups" + for key := range id.Path { + if strings.EqualFold(key, applicationGroupsKey) { + applicationGroupsKey = key + break + } + } + if resourceId.Name, err = id.PopSegment(applicationGroupsKey); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/desktopvirtualization/parse/application_group_test.go b/azurerm/internal/services/desktopvirtualization/parse/application_group_test.go new file mode 100644 index 000000000000..f74cc7011743 --- /dev/null +++ b/azurerm/internal/services/desktopvirtualization/parse/application_group_test.go @@ -0,0 +1,229 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ApplicationGroupId{} + +func TestApplicationGroupIDFormatter(t *testing.T) { + actual := NewApplicationGroupID("12345678-1234-9876-4563-123456789012", "resGroup1", "applicationGroup1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/applicationGroups/applicationGroup1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestApplicationGroupID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ApplicationGroupId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/applicationGroups/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/applicationGroups/applicationGroup1", + Expected: &ApplicationGroupId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "applicationGroup1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DESKTOPVIRTUALIZATION/APPLICATIONGROUPS/APPLICATIONGROUP1", + Error: 
true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ApplicationGroupID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} + +func TestApplicationGroupIDInsensitively(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ApplicationGroupId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/applicationGroups/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/applicationGroups/applicationGroup1", + Expected: &ApplicationGroupId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "applicationGroup1", + }, + }, + + { + // lower-cased segment names + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/applicationgroups/applicationGroup1", + Expected: &ApplicationGroupId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "applicationGroup1", + }, + }, + + { + // upper-cased segment names + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/APPLICATIONGROUPS/applicationGroup1", + Expected: &ApplicationGroupId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "applicationGroup1", + }, + }, + + { + // mixed-cased segment names + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/ApPlIcAtIoNgRoUpS/applicationGroup1", + Expected: &ApplicationGroupId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "applicationGroup1", + }, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ApplicationGroupIDInsensitively(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != 
v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/desktopvirtualization/parse/host_pool.go b/azurerm/internal/services/desktopvirtualization/parse/host_pool.go new file mode 100644 index 000000000000..f5f0d8053c1b --- /dev/null +++ b/azurerm/internal/services/desktopvirtualization/parse/host_pool.go @@ -0,0 +1,113 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type HostPoolId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewHostPoolID(subscriptionId, resourceGroup, name string) HostPoolId { + return HostPoolId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id HostPoolId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Host Pool", segmentsStr) +} + +func (id HostPoolId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DesktopVirtualization/hostPools/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// HostPoolID parses a HostPool ID into an HostPoolId struct +func HostPoolID(input string) (*HostPoolId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := HostPoolId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("hostPools"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} + +// HostPoolIDInsensitively parses an HostPool ID into an HostPoolId struct, insensitively +// This should only be used to parse an ID for rewriting, the HostPoolID +// method should be used instead for validation etc. +// +// Whilst this may seem strange, this enables Terraform have consistent casing +// which works around issues in Core, whilst handling broken API responses. 
+func HostPoolIDInsensitively(input string) (*HostPoolId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := HostPoolId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + // find the correct casing for the 'hostPools' segment + hostPoolsKey := "hostPools" + for key := range id.Path { + if strings.EqualFold(key, hostPoolsKey) { + hostPoolsKey = key + break + } + } + if resourceId.Name, err = id.PopSegment(hostPoolsKey); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/desktopvirtualization/parse/host_pool_test.go b/azurerm/internal/services/desktopvirtualization/parse/host_pool_test.go new file mode 100644 index 000000000000..011182a71ef0 --- /dev/null +++ b/azurerm/internal/services/desktopvirtualization/parse/host_pool_test.go @@ -0,0 +1,229 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = HostPoolId{} + +func TestHostPoolIDFormatter(t *testing.T) { + actual := NewHostPoolID("12345678-1234-9876-4563-123456789012", "resGroup1", "pool1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/hostPools/pool1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestHostPoolID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *HostPoolId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/hostPools/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/hostPools/pool1", + Expected: &HostPoolId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "pool1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DESKTOPVIRTUALIZATION/HOSTPOOLS/POOL1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := HostPoolID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + 
if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} + +func TestHostPoolIDInsensitively(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *HostPoolId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/hostPools/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/hostPools/pool1", + Expected: &HostPoolId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "pool1", + }, + }, + + { + // lower-cased segment names + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/hostpools/pool1", + Expected: &HostPoolId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "pool1", + }, + }, + + { + // upper-cased segment names + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/HOSTPOOLS/pool1", + Expected: &HostPoolId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "pool1", + }, + }, + + { + // mixed-cased segment names + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/HoStPoOlS/pool1", + Expected: &HostPoolId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "pool1", + }, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := HostPoolIDInsensitively(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git 
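The generated parse files pair a formatter (NewXxxID(...).ID()) with a strict parser and an "Insensitively" parser, and the generated tests above pin that behaviour down exhaustively. As a condensed illustration, a hypothetical test in the parse package (the test name is invented; the functions are the ones added above) might read:

package parse

import (
	"strings"
	"testing"
)

// Illustrative only: the formatter and strict parser round-trip, the strict
// parser rejects non-canonical segment casing, and the insensitive parser
// normalises it back to the canonical form.
func TestHostPoolIDRoundTrip_sketch(t *testing.T) {
	id := NewHostPoolID("12345678-1234-9876-4563-123456789012", "resGroup1", "pool1").ID()

	if _, err := HostPoolID(id); err != nil {
		t.Fatalf("strict parse of canonical ID failed: %+v", err)
	}

	lowerCased := strings.Replace(id, "hostPools", "hostpools", 1)
	if _, err := HostPoolID(lowerCased); err == nil {
		t.Fatal("strict parse unexpectedly accepted a non-canonical segment")
	}

	parsed, err := HostPoolIDInsensitively(lowerCased)
	if err != nil {
		t.Fatalf("insensitive parse failed: %+v", err)
	}
	if parsed.ID() != id {
		t.Fatalf("expected %q but got %q", id, parsed.ID())
	}
}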
a/azurerm/internal/services/desktopvirtualization/parse/virtual_desktop_application_group.go b/azurerm/internal/services/desktopvirtualization/parse/virtual_desktop_application_group.go deleted file mode 100644 index 0751d87a423c..000000000000 --- a/azurerm/internal/services/desktopvirtualization/parse/virtual_desktop_application_group.go +++ /dev/null @@ -1,35 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -// VirtualDesktopApplicationGroupid - The id for the virtual desktop host pool -type VirtualDesktopApplicationGroupid struct { - ResourceGroup string - Name string -} - -// VirtualDesktopApplicationGroupID - Parses and validates the virtual desktop host pool -func VirtualDesktopApplicationGroupID(input string) (*VirtualDesktopApplicationGroupid, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Virtual Desktop Application Group ID %q: %+v", input, err) - } - - ApplicationGroup := VirtualDesktopApplicationGroupid{ - ResourceGroup: id.ResourceGroup, - } - - if ApplicationGroup.Name, err = id.PopSegment("applicationgroups"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &ApplicationGroup, nil -} diff --git a/azurerm/internal/services/desktopvirtualization/parse/virtual_desktop_host_pool_resource.go b/azurerm/internal/services/desktopvirtualization/parse/virtual_desktop_host_pool_resource.go deleted file mode 100644 index 032deb178948..000000000000 --- a/azurerm/internal/services/desktopvirtualization/parse/virtual_desktop_host_pool_resource.go +++ /dev/null @@ -1,35 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -// VirtualDesktopHostPoolid - The id for the virtual desktop host pool -type VirtualDesktopHostPoolid struct { - ResourceGroup string - Name string -} - -// VirtualDesktopHostPoolID - Parses and validates the virtual desktop host pool -func VirtualDesktopHostPoolID(input string) (*VirtualDesktopHostPoolid, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Virtual Desktop Host Pool ID %q: %+v", input, err) - } - - hostPool := VirtualDesktopHostPoolid{ - ResourceGroup: id.ResourceGroup, - } - - if hostPool.Name, err = id.PopSegment("hostpools"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &hostPool, nil -} diff --git a/azurerm/internal/services/desktopvirtualization/parse/virtual_desktop_workspace_resource.go b/azurerm/internal/services/desktopvirtualization/parse/virtual_desktop_workspace_resource.go deleted file mode 100644 index 6983c41532d2..000000000000 --- a/azurerm/internal/services/desktopvirtualization/parse/virtual_desktop_workspace_resource.go +++ /dev/null @@ -1,35 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -// DesktopVirtualizationWorkspaceId - The id for the virtual desktop workspace -type DesktopVirtualizationWorkspaceId struct { - ResourceGroup string - Name string -} - -// DesktopVirtualizationWorkspaceID - Parses and validates the virtual desktop workspace -func DesktopVirtualizationWorkspaceID(input string) (*DesktopVirtualizationWorkspaceId, error) { - id, err := 
azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Virtual Desktop Workspace ID %q: %+v", input, err) - } - - workspace := DesktopVirtualizationWorkspaceId{ - ResourceGroup: id.ResourceGroup, - } - - if workspace.Name, err = id.PopSegment("workspaces"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &workspace, nil -} diff --git a/azurerm/internal/services/desktopvirtualization/parse/workspace.go b/azurerm/internal/services/desktopvirtualization/parse/workspace.go new file mode 100644 index 000000000000..80993ba4281c --- /dev/null +++ b/azurerm/internal/services/desktopvirtualization/parse/workspace.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type WorkspaceId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewWorkspaceID(subscriptionId, resourceGroup, name string) WorkspaceId { + return WorkspaceId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id WorkspaceId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Workspace", segmentsStr) +} + +func (id WorkspaceId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DesktopVirtualization/workspaces/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// WorkspaceID parses a Workspace ID into an WorkspaceId struct +func WorkspaceID(input string) (*WorkspaceId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := WorkspaceId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("workspaces"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/desktopvirtualization/parse/workspace_application_group_association.go b/azurerm/internal/services/desktopvirtualization/parse/workspace_application_group_association.go new file mode 100644 index 000000000000..b344254cec42 --- /dev/null +++ b/azurerm/internal/services/desktopvirtualization/parse/workspace_application_group_association.go @@ -0,0 +1,72 @@ +package parse + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = WorkspaceApplicationGroupAssociationId{} + +type WorkspaceApplicationGroupAssociationId struct { + Workspace WorkspaceId + ApplicationGroup ApplicationGroupId +} + +func (id WorkspaceApplicationGroupAssociationId) ID() string { + workspaceId := id.Workspace.ID() + applicationGroupId := id.ApplicationGroup.ID() + return fmt.Sprintf("%s|%s", workspaceId, applicationGroupId) +} + +func NewWorkspaceApplicationGroupAssociationId(workspace WorkspaceId, 
applicationGroup ApplicationGroupId) WorkspaceApplicationGroupAssociationId { + return WorkspaceApplicationGroupAssociationId{ + Workspace: workspace, + ApplicationGroup: applicationGroup, + } +} + +func WorkspaceApplicationGroupAssociationID(input string) (*WorkspaceApplicationGroupAssociationId, error) { + segments := strings.Split(input, "|") + if len(segments) != 2 { + return nil, fmt.Errorf("expected an ID in the format {workspaceID}|{applicationGroupID} but got %q", input) + } + + workspaceId, err := WorkspaceID(segments[0]) + if err != nil { + return nil, fmt.Errorf("parsing Workspace ID for Workspace/Application Group Association %q: %+v", segments[0], err) + } + + applicationGroupId, err := ApplicationGroupID(segments[1]) + if err != nil { + return nil, fmt.Errorf("parsing Application Group ID for Workspace/Application Group Association %q: %+v", segments[1], err) + } + + return &WorkspaceApplicationGroupAssociationId{ + Workspace: *workspaceId, + ApplicationGroup: *applicationGroupId, + }, nil +} + +func WorkspaceApplicationGroupAssociationIDInsensitively(input string) (*WorkspaceApplicationGroupAssociationId, error) { + segments := strings.Split(input, "|") + if len(segments) != 2 { + return nil, fmt.Errorf("expected an ID in the format {workspaceID}|{applicationGroupID} but got %q", input) + } + + workspaceId, err := WorkspaceID(segments[0]) + if err != nil { + return nil, fmt.Errorf("parsing Workspace ID for Workspace/Application Group Association %q: %+v", segments[0], err) + } + + applicationGroupId, err := ApplicationGroupIDInsensitively(segments[1]) + if err != nil { + return nil, fmt.Errorf("parsing Application Group ID for Workspace/Application Group Association %q: %+v", segments[1], err) + } + + return &WorkspaceApplicationGroupAssociationId{ + Workspace: *workspaceId, + ApplicationGroup: *applicationGroupId, + }, nil +} diff --git a/azurerm/internal/services/desktopvirtualization/parse/workspace_test.go b/azurerm/internal/services/desktopvirtualization/parse/workspace_test.go new file mode 100644 index 000000000000..9b6b40c0b567 --- /dev/null +++ b/azurerm/internal/services/desktopvirtualization/parse/workspace_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = WorkspaceId{} + +func TestWorkspaceIDFormatter(t *testing.T) { + actual := NewWorkspaceID("12345678-1234-9876-4563-123456789012", "resGroup1", "workspace1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/workspaces/workspace1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestWorkspaceID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *WorkspaceId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/workspaces/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/workspaces/workspace1", + Expected: &WorkspaceId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "workspace1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DESKTOPVIRTUALIZATION/WORKSPACES/WORKSPACE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := WorkspaceID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/desktopvirtualization/registration.go b/azurerm/internal/services/desktopvirtualization/registration.go index 2212dcadaf68..ed625ab35c71 100644 --- a/azurerm/internal/services/desktopvirtualization/registration.go +++ b/azurerm/internal/services/desktopvirtualization/registration.go @@ -27,8 +27,8 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource { func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ "azurerm_virtual_desktop_workspace": resourceArmDesktopVirtualizationWorkspace(), - "azurerm_virtual_desktop_host_pool": resourceArmVirtualDesktopHostPool(), - "azurerm_virtual_desktop_application_group": resourceArmVirtualDesktopApplicationGroup(), - "azurerm_virtual_desktop_workspace_application_group_association": resourceArmVirtualDesktopWorkspaceApplicationGroupAssociation(), + "azurerm_virtual_desktop_host_pool": resourceVirtualDesktopHostPool(), + "azurerm_virtual_desktop_application_group": resourceVirtualDesktopApplicationGroup(), + "azurerm_virtual_desktop_workspace_application_group_association": resourceVirtualDesktopWorkspaceApplicationGroupAssociation(), } } diff --git a/azurerm/internal/services/desktopvirtualization/resourcesid.go b/azurerm/internal/services/desktopvirtualization/resourcesid.go new file mode 100644 index 000000000000..6b20621db9a9 --- /dev/null +++ b/azurerm/internal/services/desktopvirtualization/resourcesid.go @@ -0,0 +1,5 @@ +package desktopvirtualization + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=ApplicationGroup -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/applicationGroups/applicationGroup1 -rewrite=true +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=HostPool 
-id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/hostPools/pool1 -rewrite=true +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Workspace -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/workspaces/workspace1 diff --git a/azurerm/internal/services/desktopvirtualization/validate/application_group_id.go b/azurerm/internal/services/desktopvirtualization/validate/application_group_id.go new file mode 100644 index 000000000000..3a143c7dfaa3 --- /dev/null +++ b/azurerm/internal/services/desktopvirtualization/validate/application_group_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/desktopvirtualization/parse" +) + +func ApplicationGroupID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ApplicationGroupID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/desktopvirtualization/validate/application_group_id_test.go b/azurerm/internal/services/desktopvirtualization/validate/application_group_id_test.go new file mode 100644 index 000000000000..6213384eefd7 --- /dev/null +++ b/azurerm/internal/services/desktopvirtualization/validate/application_group_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestApplicationGroupID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/applicationGroups/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/applicationGroups/applicationGroup1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DESKTOPVIRTUALIZATION/APPLICATIONGROUPS/APPLICATIONGROUP1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ApplicationGroupID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/desktopvirtualization/validate/host_pool_id.go 
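The association resource's composite ID, added earlier in this diff as WorkspaceApplicationGroupAssociationId, is simply the two generated IDs joined with "|", and it splits back into its two halves on parse. A hypothetical sketch in the parse package (the test name is invented; the constructors and parser are the ones introduced above):

package parse

import "testing"

// Illustrative only: build the "{workspaceID}|{applicationGroupID}" composite
// ID from its parts and parse it back into its components.
func TestWorkspaceApplicationGroupAssociationIDRoundTrip_sketch(t *testing.T) {
	workspaceId := NewWorkspaceID("12345678-1234-9876-4563-123456789012", "resGroup1", "workspace1")
	applicationGroupId := NewApplicationGroupID("12345678-1234-9876-4563-123456789012", "resGroup1", "applicationGroup1")

	associationId := NewWorkspaceApplicationGroupAssociationId(workspaceId, applicationGroupId).ID()

	parsed, err := WorkspaceApplicationGroupAssociationID(associationId)
	if err != nil {
		t.Fatalf("parsing composite ID: %+v", err)
	}
	if parsed.Workspace.Name != "workspace1" || parsed.ApplicationGroup.Name != "applicationGroup1" {
		t.Fatalf("unexpected components: %+v", parsed)
	}
}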
b/azurerm/internal/services/desktopvirtualization/validate/host_pool_id.go new file mode 100644 index 000000000000..f299eadf7c94 --- /dev/null +++ b/azurerm/internal/services/desktopvirtualization/validate/host_pool_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/desktopvirtualization/parse" +) + +func HostPoolID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.HostPoolID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/desktopvirtualization/validate/host_pool_id_test.go b/azurerm/internal/services/desktopvirtualization/validate/host_pool_id_test.go new file mode 100644 index 000000000000..ba003dcd74b1 --- /dev/null +++ b/azurerm/internal/services/desktopvirtualization/validate/host_pool_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestHostPoolID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/hostPools/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/hostPools/pool1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DESKTOPVIRTUALIZATION/HOSTPOOLS/POOL1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := HostPoolID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/desktopvirtualization/validate/workspace_id.go b/azurerm/internal/services/desktopvirtualization/validate/workspace_id.go new file mode 100644 index 000000000000..e82931a9a01b --- /dev/null +++ b/azurerm/internal/services/desktopvirtualization/validate/workspace_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/desktopvirtualization/parse" +) + +func WorkspaceID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = 
append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.WorkspaceID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/desktopvirtualization/validate/workspace_id_test.go b/azurerm/internal/services/desktopvirtualization/validate/workspace_id_test.go new file mode 100644 index 000000000000..466e9eeb2973 --- /dev/null +++ b/azurerm/internal/services/desktopvirtualization/validate/workspace_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestWorkspaceID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/workspaces/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DesktopVirtualization/workspaces/workspace1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DESKTOPVIRTUALIZATION/WORKSPACES/WORKSPACE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := WorkspaceID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/desktopvirtualization/virtual_desktop_application_group_resource.go b/azurerm/internal/services/desktopvirtualization/virtual_desktop_application_group_resource.go index d865bdeef41e..1a4297b065ff 100644 --- a/azurerm/internal/services/desktopvirtualization/virtual_desktop_application_group_resource.go +++ b/azurerm/internal/services/desktopvirtualization/virtual_desktop_application_group_resource.go @@ -3,6 +3,7 @@ package desktopvirtualization import ( "fmt" "log" + "regexp" "time" "github.com/Azure/azure-sdk-for-go/services/preview/desktopvirtualization/mgmt/2019-12-10-preview/desktopvirtualization" @@ -10,21 +11,25 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/desktopvirtualization/migration" 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/desktopvirtualization/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/desktopvirtualization/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmVirtualDesktopApplicationGroup() *schema.Resource { +var applicationGroupType = "azurerm_virtual_desktop_application_group" + +func resourceVirtualDesktopApplicationGroup() *schema.Resource { return &schema.Resource{ - Create: resourceArmVirtualDesktopApplicationGroupCreateUpdate, - Read: resourceArmVirtualDesktopApplicationGroupRead, - Update: resourceArmVirtualDesktopApplicationGroupCreateUpdate, - Delete: resourceArmVirtualDesktopApplicationGroupDelete, + Create: resourceVirtualDesktopApplicationGroupCreateUpdate, + Read: resourceVirtualDesktopApplicationGroupRead, + Update: resourceVirtualDesktopApplicationGroupCreateUpdate, + Delete: resourceVirtualDesktopApplicationGroupDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(60 * time.Minute), @@ -34,16 +39,31 @@ func resourceArmVirtualDesktopApplicationGroup() *schema.Resource { }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.VirtualDesktopApplicationGroupID(id) + _, err := parse.ApplicationGroupID(id) return err }), + SchemaVersion: 1, + StateUpgraders: []schema.StateUpgrader{ + { + Type: migration.ApplicationGroupUpgradeV0Schema().CoreConfigSchema().ImpliedType(), + Upgrade: migration.ApplicationGroupUpgradeV0ToV1, + Version: 0, + }, + }, + Schema: map[string]*schema.Schema{ "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validate.DevSpaceName(), + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.All( + validation.StringIsNotEmpty, + validation.StringMatch( + regexp.MustCompile("^[-a-zA-Z0-9]{1,260}$"), + "Virtual desktop application group name must be 1 - 260 characters long, contain only letters, numbers and hyphens.", + ), + ), }, "location": azure.SchemaLocation(), @@ -61,8 +81,9 @@ func resourceArmVirtualDesktopApplicationGroup() *schema.Resource { }, "host_pool_id": { - Type: schema.TypeString, - Required: true, + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.HostPoolID, }, "friendly_name": { @@ -82,26 +103,32 @@ func resourceArmVirtualDesktopApplicationGroup() *schema.Resource { } } -func resourceArmVirtualDesktopApplicationGroupCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceVirtualDesktopApplicationGroupCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DesktopVirtualization.ApplicationGroupsClient - ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) - defer cancel() + subscriptionId := meta.(*clients.Client).Account.SubscriptionId - log.Printf("[INFO] preparing arguments for Virtual Desktop Host Pool creation") + log.Printf("[INFO] preparing arguments for Virtual Desktop Application Group creation") name := d.Get("name").(string) resourceGroup := d.Get("resource_group_name").(string) + locks.ByName(name, applicationGroupType) + defer locks.UnlockByName(name, applicationGroupType) + + 
ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + resourceId := parse.NewApplicationGroupID(subscriptionId, resourceGroup, name).ID() if d.IsNewResource() { existing, err := client.Get(ctx, resourceGroup, name) if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Checking for presence of existing Virtual Desktop Host Pool %q (Resource Group %q): %s", name, resourceGroup, err) + return fmt.Errorf("checking for presence of existing Virtual Desktop Application Group %q (Resource Group %q): %s", name, resourceGroup, err) } } - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_virtual_desktop_application_group", *existing.ID) + if existing.ApplicationGroupProperties != nil { + return tf.ImportAsExistsError("azurerm_virtual_desktop_application_group", resourceId) } } @@ -120,29 +147,19 @@ func resourceArmVirtualDesktopApplicationGroupCreateUpdate(d *schema.ResourceDat } if _, err := client.CreateOrUpdate(ctx, resourceGroup, name, context); err != nil { - return fmt.Errorf("Creating Virtual Desktop Host Pool %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - result, err := client.Get(ctx, resourceGroup, name) - if err != nil { - return fmt.Errorf("Retrieving Virtual Desktop Host Pool %q (Resource Group %q): %+v", name, resourceGroup, err) + return fmt.Errorf("creating Virtual Desktop Application Group %q (Resource Group %q): %+v", name, resourceGroup, err) } - if result.ID == nil { - return fmt.Errorf("Cannot read Virtual Desktop Host Pool %q (Resource Group %q) ID", name, resourceGroup) - } - - d.SetId(*result.ID) - - return resourceArmVirtualDesktopApplicationGroupRead(d, meta) + d.SetId(resourceId) + return resourceVirtualDesktopApplicationGroupRead(d, meta) } -func resourceArmVirtualDesktopApplicationGroupRead(d *schema.ResourceData, meta interface{}) error { +func resourceVirtualDesktopApplicationGroupRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DesktopVirtualization.ApplicationGroupsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.VirtualDesktopApplicationGroupID(d.Id()) + id, err := parse.ApplicationGroupID(d.Id()) if err != nil { return err } @@ -150,12 +167,12 @@ func resourceArmVirtualDesktopApplicationGroupRead(d *schema.ResourceData, meta resp, err := client.Get(ctx, id.ResourceGroup, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[DEBUG] Virtual Desktop Host Pool %q was not found in Resource Group %q - removing from state!", id.Name, id.ResourceGroup) + log.Printf("[DEBUG] Virtual Desktop Application Group %q was not found in Resource Group %q - removing from state!", id.Name, id.ResourceGroup) d.SetId("") return nil } - return fmt.Errorf("Making Read request on Virtual Desktop Host Pool %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("retrieving Virtual Desktop Application Group %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } d.Set("name", id.Name) @@ -166,36 +183,41 @@ func resourceArmVirtualDesktopApplicationGroupRead(d *schema.ResourceData, meta } if props := resp.ApplicationGroupProperties; props != nil { + d.Set("friendly_name", props.FriendlyName) + d.Set("description", props.Description) d.Set("type", string(props.ApplicationGroupType)) - if fn := props.FriendlyName; fn != nil { - d.Set("friendly_name", fn) - } - - if desc := props.Description; desc != 
nil { - d.Set("description", desc) - } + hostPoolIdStr := "" + if props.HostPoolArmPath != nil { + // TODO: raise an API bug + hostPoolId, err := parse.HostPoolIDInsensitively(*props.HostPoolArmPath) + if err != nil { + return fmt.Errorf("parsing Host Pool ID %q: %+v", *props.HostPoolArmPath, err) + } - if hstpl := props.HostPoolArmPath; hstpl != nil { - d.Set("host_pool_id", hstpl) + hostPoolIdStr = hostPoolId.ID() } + d.Set("host_pool_id", hostPoolIdStr) } return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmVirtualDesktopApplicationGroupDelete(d *schema.ResourceData, meta interface{}) error { +func resourceVirtualDesktopApplicationGroupDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DesktopVirtualization.ApplicationGroupsClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - id, err := parse.VirtualDesktopApplicationGroupID(d.Id()) + id, err := parse.ApplicationGroupID(d.Id()) if err != nil { return err } + locks.ByName(id.Name, applicationGroupType) + defer locks.UnlockByName(id.Name, applicationGroupType) + + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() if _, err = client.Delete(ctx, id.ResourceGroup, id.Name); err != nil { - return fmt.Errorf("Deleting Virtual Desktop Host Pool %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("deleting Virtual Desktop Application Group %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } return nil diff --git a/azurerm/internal/services/desktopvirtualization/virtual_desktop_application_group_resource_test.go b/azurerm/internal/services/desktopvirtualization/virtual_desktop_application_group_resource_test.go index 8862217be7b9..fbff23b021ab 100644 --- a/azurerm/internal/services/desktopvirtualization/virtual_desktop_application_group_resource_test.go +++ b/azurerm/internal/services/desktopvirtualization/virtual_desktop_application_group_resource_test.go @@ -1,170 +1,114 @@ package desktopvirtualization_test import ( + "context" "fmt" - "log" - "net/http" "testing" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/terraform" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/desktopvirtualization/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func TestAccAzureRMVirtualDesktopApplicationGroup_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_virtual_desktop_application_group", "test") +type VirtualDesktopApplicationGroupResource struct { +} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDesktopVirtualizationApplicationGroupDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMVirtualDesktopApplicationGroup_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDesktopVirtualizationApplicationGroupExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, +func TestAccVirtualDesktopApplicationGroup_basic(t *testing.T) { + data := acceptance.BuildTestData(t, 
"azurerm_virtual_desktop_application_group", "test") + r := VirtualDesktopApplicationGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), }, }) } -func TestAccAzureRMVirtualDesktopApplicationGroup_complete(t *testing.T) { +func TestAccVirtualDesktopApplicationGroup_complete(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_virtual_desktop_application_group", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDesktopVirtualizationApplicationGroupDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMVirtualDesktopApplicationGroup_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDesktopVirtualizationApplicationGroupExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, + r := VirtualDesktopApplicationGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + ), }, }) } -func TestAccAzureRMVirtualDesktopApplicationGroup_update(t *testing.T) { +func TestAccVirtualDesktopApplicationGroup_update(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_virtual_desktop_application_group", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDesktopVirtualizationApplicationGroupDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMVirtualDesktopApplicationGroup_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDesktopVirtualizationApplicationGroupExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - { - Config: testAccAzureRMVirtualDesktopApplicationGroup_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDesktopVirtualizationApplicationGroupExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - { - Config: testAccAzureRMVirtualDesktopApplicationGroup_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDesktopVirtualizationApplicationGroupExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, + r := VirtualDesktopApplicationGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), + }, + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + ), + }, + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), }, }) } -func TestAccAzureRMVirtualDesktopApplicationGroup_requiresImport(t *testing.T) { +func TestAccVirtualDesktopApplicationGroup_requiresImport(t *testing.T) { data := acceptance.BuildTestData(t, 
"azurerm_virtual_desktop_application_group", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDesktopVirtualizationApplicationGroupDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMVirtualDesktopApplicationGroup_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDesktopVirtualizationApplicationGroupExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMVirtualDesktopApplicationGroup_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_virtual_desktop_application_group"), - }, + r := VirtualDesktopApplicationGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_virtual_desktop_application_group"), }, }) } -func testCheckAzureRMDesktopVirtualizationApplicationGroupExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).DesktopVirtualization.ApplicationGroupsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.VirtualDesktopApplicationGroupID(rs.Primary.ID) - if err != nil { - return err - } - - result, err := client.Get(ctx, id.ResourceGroup, id.Name) - if err == nil { - return nil - } - - if result.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Virtual Desktop Host Pool %q (Resource Group: %q) does not exist", id.Name, id.ResourceGroup) - } - - return fmt.Errorf("Bad: Get virtualDesktopApplicationGroupClient: %+v", err) +func (VirtualDesktopApplicationGroupResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ApplicationGroupID(state.ID) + if err != nil { + return nil, err } -} - -func testCheckAzureRMDesktopVirtualizationApplicationGroupDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).DesktopVirtualization.ApplicationGroupsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_virtual_desktop_application_group" { - continue - } - - log.Printf("[WARN] azurerm_virtual_desktop_application_group still exists in state file.") - - id, err := parse.VirtualDesktopApplicationGroupID(rs.Primary.ID) - if err != nil { - return err - } - - result, err := client.Get(ctx, id.ResourceGroup, id.Name) - if err == nil { - return fmt.Errorf("Virtual Desktop Host Pool still exists:\n%#v", result) - } - - if result.StatusCode != http.StatusNotFound { - return err - } + resp, err := clients.DesktopVirtualization.ApplicationGroupsClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Virtual Desktop Application Group %q (Resource Group: %q) does not exist", id.Name, id.ResourceGroup) } - return nil + return utils.Bool(resp.ApplicationGroupProperties != nil), nil } -func testAccAzureRMVirtualDesktopApplicationGroup_basic(data acceptance.TestData) string { +func 
(VirtualDesktopApplicationGroupResource) basic(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -190,11 +134,10 @@ resource "azurerm_virtual_desktop_application_group" "test" { type = "Desktop" host_pool_id = azurerm_virtual_desktop_host_pool.test.id } - `, data.RandomInteger, data.Locations.Secondary, data.RandomIntOfLength(8)) } -func testAccAzureRMVirtualDesktopApplicationGroup_complete(data acceptance.TestData) string { +func (VirtualDesktopApplicationGroupResource) complete(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -231,8 +174,7 @@ resource "azurerm_virtual_desktop_application_group" "test" { `, data.RandomInteger, data.Locations.Secondary, data.RandomIntOfLength(8)) } -func testAccAzureRMVirtualDesktopApplicationGroup_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMVirtualDesktopApplicationGroup_basic(data) +func (r VirtualDesktopApplicationGroupResource) requiresImport(data acceptance.TestData) string { return fmt.Sprintf(` %s @@ -243,5 +185,5 @@ resource "azurerm_virtual_desktop_application_group" "import" { type = azurerm_virtual_desktop_application_group.test.type host_pool_id = azurerm_virtual_desktop_application_group.test.host_pool_id } -`, template) +`, r.basic(data)) } diff --git a/azurerm/internal/services/desktopvirtualization/virtual_desktop_host_pool_resource.go b/azurerm/internal/services/desktopvirtualization/virtual_desktop_host_pool_resource.go index 15f8ae80391f..84efbad1b5c4 100644 --- a/azurerm/internal/services/desktopvirtualization/virtual_desktop_host_pool_resource.go +++ b/azurerm/internal/services/desktopvirtualization/virtual_desktop_host_pool_resource.go @@ -11,8 +11,8 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/desktopvirtualization/migration" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/desktopvirtualization/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" @@ -20,12 +20,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmVirtualDesktopHostPool() *schema.Resource { +func resourceVirtualDesktopHostPool() *schema.Resource { return &schema.Resource{ - Create: resourceArmVirtualDesktopHostPoolCreateUpdate, - Read: resourceArmVirtualDesktopHostPoolRead, - Update: resourceArmVirtualDesktopHostPoolCreateUpdate, - Delete: resourceArmVirtualDesktopHostPoolDelete, + Create: resourceVirtualDesktopHostPoolCreateUpdate, + Read: resourceVirtualDesktopHostPoolRead, + Update: resourceVirtualDesktopHostPoolCreateUpdate, + Delete: resourceVirtualDesktopHostPoolDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(60 * time.Minute), @@ -35,16 +35,25 @@ func resourceArmVirtualDesktopHostPool() *schema.Resource { }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.VirtualDesktopHostPoolID(id) + _, err := parse.HostPoolID(id) 
return err }), + SchemaVersion: 1, + StateUpgraders: []schema.StateUpgrader{ + { + Type: migration.HostPoolUpgradeV0Schema().CoreConfigSchema().ImpliedType(), + Upgrade: migration.HostPoolUpgradeV0ToV1, + Version: 0, + }, + }, + Schema: map[string]*schema.Schema{ "name": { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.DevSpaceName(), + ValidateFunc: validation.StringIsNotEmpty, }, "location": azure.SchemaLocation(), @@ -151,26 +160,28 @@ func resourceArmVirtualDesktopHostPool() *schema.Resource { } } -func resourceArmVirtualDesktopHostPoolCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceVirtualDesktopHostPoolCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DesktopVirtualization.HostPoolsClient - ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - log.Printf("[INFO] preparing arguments for Virtual Desktop Host Pool creation") + log.Printf("[INFO] preparing arguments for Virtual Desktop Host Pool create/update") name := d.Get("name").(string) resourceGroup := d.Get("resource_group_name").(string) + resourceId := parse.NewHostPoolID(subscriptionId, resourceGroup, name).ID() if d.IsNewResource() { existing, err := client.Get(ctx, resourceGroup, name) if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Checking for presence of existing Virtual Desktop Host Pool %q (Resource Group %q): %s", name, resourceGroup, err) + return fmt.Errorf("checking for presence of existing Virtual Desktop Host Pool %q (Resource Group %q): %s", name, resourceGroup, err) } } - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_virtual_desktop_host_pool", *existing.ID) + if existing.HostPoolProperties != nil { + return tf.ImportAsExistsError("azurerm_virtual_desktop_host_pool", resourceId) } } @@ -197,26 +208,17 @@ func resourceArmVirtualDesktopHostPoolCreateUpdate(d *schema.ResourceData, meta return fmt.Errorf("Creating Virtual Desktop Host Pool %q (Resource Group %q): %+v", name, resourceGroup, err) } - result, err := client.Get(ctx, resourceGroup, name) - if err != nil { - return fmt.Errorf("Retrieving Virtual Desktop Host Pool %q (Resource Group %q): %+v", name, resourceGroup, err) - } + d.SetId(resourceId) - if result.ID == nil { - return fmt.Errorf("Reading Virtual Desktop Host Pool %q (Resource Group %q) ID", name, resourceGroup) - } - - d.SetId(*result.ID) - - return resourceArmVirtualDesktopHostPoolRead(d, meta) + return resourceVirtualDesktopHostPoolRead(d, meta) } -func resourceArmVirtualDesktopHostPoolRead(d *schema.ResourceData, meta interface{}) error { +func resourceVirtualDesktopHostPoolRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DesktopVirtualization.HostPoolsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.VirtualDesktopHostPoolID(d.Id()) + id, err := parse.HostPoolID(d.Id()) if err != nil { return err } @@ -239,48 +241,40 @@ func resourceArmVirtualDesktopHostPoolRead(d *schema.ResourceData, meta interfac } if props := resp.HostPoolProperties; props != nil { - d.Set("type", string(props.HostPoolType)) - - if fn := props.FriendlyName; fn != nil { - d.Set("friendly_name", fn) - } - - if desc := props.Description; desc != nil { - 
d.Set("description", desc) - } - - if ve := props.ValidationEnvironment; ve != nil { - d.Set("validate_environment", ve) - } - - if mxsl := props.MaxSessionLimit; mxsl != nil { - d.Set("maximum_sessions_allowed", mxsl) + maxSessionLimit := 0 + if props.MaxSessionLimit != nil { + maxSessionLimit = int(*props.MaxSessionLimit) } + d.Set("description", props.Description) + d.Set("friendly_name", props.FriendlyName) + d.Set("maximum_sessions_allowed", maxSessionLimit) d.Set("load_balancer_type", string(props.LoadBalancerType)) d.Set("personal_desktop_assignment_type", string(props.PersonalDesktopAssignmentType)) d.Set("preferred_app_group_type", string(props.PreferredAppGroupType)) + d.Set("type", string(props.HostPoolType)) + d.Set("validate_environment", props.ValidationEnvironment) if err := d.Set("registration_info", flattenVirtualDesktopHostPoolRegistrationInfo(props.RegistrationInfo)); err != nil { - return fmt.Errorf("Setting `registration_info`: %+v", err) + return fmt.Errorf("setting `registration_info`: %+v", err) } } return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmVirtualDesktopHostPoolDelete(d *schema.ResourceData, meta interface{}) error { +func resourceVirtualDesktopHostPoolDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DesktopVirtualization.HostPoolsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.VirtualDesktopHostPoolID(d.Id()) + id, err := parse.HostPoolID(d.Id()) if err != nil { return err } if _, err = client.Delete(ctx, id.ResourceGroup, id.Name, utils.Bool(true)); err != nil { - return fmt.Errorf("Deleting Virtual Desktop Host Pool %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("deleting Virtual Desktop Host Pool %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } return nil @@ -297,7 +291,7 @@ func expandVirtualDesktopHostPoolRegistrationInfo(d *schema.ResourceData) *deskt if len(oldInterfaces) != 0 && len(newInterfaces) == 0 { deleteConfig := desktopvirtualization.RegistrationInfo{ - RegistrationTokenOperation: (desktopvirtualization.Delete), + RegistrationTokenOperation: desktopvirtualization.Delete, } return &deleteConfig } @@ -308,7 +302,7 @@ func expandVirtualDesktopHostPoolRegistrationInfo(d *schema.ResourceData) *deskt ExpirationTime: &date.Time{ Time: expdt, }, - RegistrationTokenOperation: (desktopvirtualization.Update), + RegistrationTokenOperation: desktopvirtualization.Update, } return &configuration diff --git a/azurerm/internal/services/desktopvirtualization/virtual_desktop_host_pool_resource_test.go b/azurerm/internal/services/desktopvirtualization/virtual_desktop_host_pool_resource_test.go index 5917c0aad485..c67c3aa7384d 100644 --- a/azurerm/internal/services/desktopvirtualization/virtual_desktop_host_pool_resource_test.go +++ b/azurerm/internal/services/desktopvirtualization/virtual_desktop_host_pool_resource_test.go @@ -1,172 +1,114 @@ package desktopvirtualization_test import ( + "context" "fmt" - "log" - "net/http" "testing" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/terraform" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/desktopvirtualization/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func TestAccAzureRMVirtualDesktopHostPool_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_virtual_desktop_host_pool", "test") +type VirtualDesktopHostPoolResource struct { +} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDesktopVirtualizationHostPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMVirtualDesktopHostPool_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDesktopVirtualizationHostPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, +func TestAccVirtualDesktopHostPool_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_virtual_desktop_host_pool", "test") + r := VirtualDesktopHostPoolResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), }, }) } -func TestAccAzureRMVirtualDesktopHostPool_complete(t *testing.T) { +func TestAccVirtualDesktopHostPool_complete(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_virtual_desktop_host_pool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDesktopVirtualizationHostPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMVirtualDesktopHostPool_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDesktopVirtualizationHostPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, + r := VirtualDesktopHostPoolResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + ), }, }) } -func TestAccAzureRMVirtualDesktopHostPool_update(t *testing.T) { +func TestAccVirtualDesktopHostPool_update(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_virtual_desktop_host_pool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDesktopVirtualizationHostPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMVirtualDesktopHostPool_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDesktopVirtualizationHostPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - { - Config: testAccAzureRMVirtualDesktopHostPool_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDesktopVirtualizationHostPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - { - Config: testAccAzureRMVirtualDesktopHostPool_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDesktopVirtualizationHostPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, + r := VirtualDesktopHostPoolResource{} 
+ + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), + }, + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + ), + }, + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), }, }) } -func TestAccAzureRMVirtualDesktopHostPool_requiresImport(t *testing.T) { +func TestAccVirtualDesktopHostPool_requiresImport(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_virtual_desktop_host_pool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDesktopVirtualizationHostPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMVirtualDesktopHostPool_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDesktopVirtualizationHostPoolExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMVirtualDesktopHostPool_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_virtual_desktop_host_pool"), - }, + r := VirtualDesktopHostPoolResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_virtual_desktop_host_pool"), }, }) } -func testCheckAzureRMDesktopVirtualizationHostPoolExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).DesktopVirtualization.HostPoolsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.VirtualDesktopHostPoolID(rs.Primary.ID) - if err != nil { - return err - } - - result, err := client.Get(ctx, id.ResourceGroup, id.Name) - - if err == nil { - return nil - } - - if result.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Virtual Desktop Host Pool %q (Resource Group: %q) does not exist", id.Name, id.ResourceGroup) - } - - return fmt.Errorf("Bad: Get virtualDesktopHostPoolClient: %+v", err) +func (VirtualDesktopHostPoolResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.HostPoolID(state.ID) + if err != nil { + return nil, err } -} - -func testCheckAzureRMDesktopVirtualizationHostPoolDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).DesktopVirtualization.HostPoolsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_virtual_desktop_host_pool" { - continue - } - log.Printf("[WARN] azurerm_virtual_desktop_host_pool still exists in state file.") - - id, err := parse.VirtualDesktopHostPoolID(rs.Primary.ID) - if err != nil { - return err - } - - result, err := client.Get(ctx, 
id.ResourceGroup, id.Name) - - if err == nil { - return fmt.Errorf("Virtual Desktop Host Pool still exists:\n%#v", result) - } - - if result.StatusCode != http.StatusNotFound { - return err - } + resp, err := clients.DesktopVirtualization.HostPoolsClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Virtual Desktop Host Pool %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } - return nil + return utils.Bool(resp.HostPoolProperties != nil), nil } -func testAccAzureRMVirtualDesktopHostPool_basic(data acceptance.TestData) string { +func (VirtualDesktopHostPoolResource) basic(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -185,11 +127,10 @@ resource "azurerm_virtual_desktop_host_pool" "test" { validate_environment = true load_balancer_type = "BreadthFirst" } - `, data.RandomInteger, data.Locations.Secondary, data.RandomString) } -func testAccAzureRMVirtualDesktopHostPool_complete(data acceptance.TestData) string { +func (VirtualDesktopHostPoolResource) complete(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -230,8 +171,7 @@ resource "azurerm_virtual_desktop_host_pool" "test" { `, data.RandomInteger, data.Locations.Secondary, data.RandomString) } -func testAccAzureRMVirtualDesktopHostPool_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMVirtualDesktopHostPool_basic(data) +func (r VirtualDesktopHostPoolResource) requiresImport(data acceptance.TestData) string { return fmt.Sprintf(` %s @@ -240,9 +180,8 @@ resource "azurerm_virtual_desktop_host_pool" "import" { location = azurerm_virtual_desktop_host_pool.test.location resource_group_name = azurerm_virtual_desktop_host_pool.test.resource_group_name validate_environment = azurerm_virtual_desktop_host_pool.test.validate_environment - description = azurerm_virtual_desktop_host_pool.test.description type = azurerm_virtual_desktop_host_pool.test.type load_balancer_type = azurerm_virtual_desktop_host_pool.test.load_balancer_type } -`, template) +`, r.basic(data)) } diff --git a/azurerm/internal/services/desktopvirtualization/virtual_desktop_workspace_application_group_association_resource.go b/azurerm/internal/services/desktopvirtualization/virtual_desktop_workspace_application_group_association_resource.go index 0398dcc526c9..d28a19123731 100644 --- a/azurerm/internal/services/desktopvirtualization/virtual_desktop_workspace_application_group_association_resource.go +++ b/azurerm/internal/services/desktopvirtualization/virtual_desktop_workspace_application_group_association_resource.go @@ -6,22 +6,24 @@ import ( "strings" "time" + "github.com/Azure/azure-sdk-for-go/services/preview/desktopvirtualization/mgmt/2019-12-10-preview/desktopvirtualization" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/desktopvirtualization/migration" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/desktopvirtualization/parse" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/desktopvirtualization/validate" azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmVirtualDesktopWorkspaceApplicationGroupAssociation() *schema.Resource { +func resourceVirtualDesktopWorkspaceApplicationGroupAssociation() *schema.Resource { return &schema.Resource{ - Create: resourceArmVirtualDesktopWorkspaceApplicationGroupAssociationCreate, - Read: resourceArmVirtualDesktopWorkspaceApplicationGroupAssociationRead, - Delete: resourceArmVirtualDesktopWorkspaceApplicationGroupAssociationDelete, + Create: resourceVirtualDesktopWorkspaceApplicationGroupAssociationCreate, + Read: resourceVirtualDesktopWorkspaceApplicationGroupAssociationRead, + Delete: resourceVirtualDesktopWorkspaceApplicationGroupAssociationDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(60 * time.Minute), @@ -31,180 +33,180 @@ func resourceArmVirtualDesktopWorkspaceApplicationGroupAssociation() *schema.Res }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.DesktopVirtualizationWorkspaceID(id) + _, err := parse.WorkspaceApplicationGroupAssociationID(id) return err }), + SchemaVersion: 1, + StateUpgraders: []schema.StateUpgrader{ + { + Type: migration.WorkspaceApplicationGroupAssociationUpgradeV0Schema().CoreConfigSchema().ImpliedType(), + Upgrade: migration.WorkspaceApplicationGroupAssociationUpgradeV0ToV1, + Version: 0, + }, + }, + Schema: map[string]*schema.Schema{ "workspace_id": { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: azure.ValidateResourceID, + ValidateFunc: validate.WorkspaceID, }, "application_group_id": { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: azure.ValidateResourceID, + ValidateFunc: validate.ApplicationGroupID, }, }, } } -func resourceArmVirtualDesktopWorkspaceApplicationGroupAssociationCreate(d *schema.ResourceData, meta interface{}) error { +func resourceVirtualDesktopWorkspaceApplicationGroupAssociationCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DesktopVirtualization.WorkspacesClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() log.Printf("[INFO] preparing arguments for Virtual Desktop Workspace <-> Application Group Association creation.") - - workspaceID := d.Get("workspace_id").(string) - applicationGroupReferenceID := d.Get("application_group_id").(string) - - wsID, err := azure.ParseAzureResourceID(workspaceID) + workspaceId, err := parse.WorkspaceID(d.Get("workspace_id").(string)) if err != nil { return err } - - agID, err := azure.ParseAzureResourceID(applicationGroupReferenceID) + applicationGroupId, err := parse.ApplicationGroupID(d.Get("application_group_id").(string)) if err != nil { return err } + associationId := parse.NewWorkspaceApplicationGroupAssociationId(*workspaceId, *applicationGroupId).ID() - workspaceName := wsID.Path["workspaces"] - resourceGroup := wsID.ResourceGroup - agName := agID.Path["applicationgroups"] - - resourceID := fmt.Sprintf("%s|%s", workspaceID, applicationGroupReferenceID) - - locks.ByName(workspaceName, "Microsoft.DesktopVirtualization/workspaces") - defer locks.UnlockByName(workspaceName, "Microsoft.DesktopVirtualization/workspaces") + 
locks.ByName(workspaceId.Name, workspaceResourceType) + defer locks.UnlockByName(workspaceId.Name, workspaceResourceType) - locks.ByName(agName, "Microsoft.DesktopVirtualization/applicationgroups") - defer locks.UnlockByName(agName, "Microsoft.DesktopVirtualization/applicationgroups") + locks.ByName(applicationGroupId.Name, applicationGroupType) + defer locks.UnlockByName(applicationGroupId.Name, applicationGroupType) - read, err := client.Get(ctx, resourceGroup, workspaceName) + workspace, err := client.Get(ctx, workspaceId.ResourceGroup, workspaceId.Name) if err != nil { - if utils.ResponseWasNotFound(read.Response) { - return fmt.Errorf("Retrieving Virtual Desktop Workspace %q (Resource Group %q) was not found", workspaceName, resourceGroup) + if utils.ResponseWasNotFound(workspace.Response) { + return fmt.Errorf("Virtual Desktop Workspace %q (Resource Group %q) was not found", workspaceId.Name, workspaceId.ResourceGroup) } - return fmt.Errorf("Retrieving Virtual Desktop Workspace for Association %q (Resource Group %q): %+v", workspaceName, resourceGroup, err) + return fmt.Errorf("retrieving Virtual Desktop Workspace for Association %q (Resource Group %q): %+v", workspaceId.Name, workspaceId.ResourceGroup, err) } - refs := read.ApplicationGroupReferences + applicationGroupAssociations := []string{} + if props := workspace.WorkspaceProperties; props != nil && props.ApplicationGroupReferences != nil { + applicationGroupAssociations = *props.ApplicationGroupReferences + } - output := make([]string, 0) - output = append(output, *refs...) - if utils.SliceContainsValue(output, applicationGroupReferenceID) { - return tf.ImportAsExistsError("azurerm_virtual_desktop_workspace_application_group_association", resourceID) + applicationGroupIdStr := applicationGroupId.ID() + if associationExists(workspace.WorkspaceProperties, applicationGroupIdStr) { + return tf.ImportAsExistsError("azurerm_virtual_desktop_workspace_application_group_association", associationId) } - output = append(output, applicationGroupReferenceID) + applicationGroupAssociations = append(applicationGroupAssociations, applicationGroupIdStr) - read.ApplicationGroupReferences = &output + workspace.WorkspaceProperties.ApplicationGroupReferences = &applicationGroupAssociations - if _, err = client.CreateOrUpdate(ctx, resourceGroup, workspaceName, read); err != nil { - return fmt.Errorf("Updating Virtual Desktop Workspace Association for Application Group %q (Resource Group %q): %+v", workspaceName, resourceGroup, err) + if _, err = client.CreateOrUpdate(ctx, workspaceId.ResourceGroup, workspaceId.Name, workspace); err != nil { + return fmt.Errorf("creating association between Virtual Desktop Workspace %q (Resource Group %q) and Application Group %q (Resource Group %q): %+v", workspaceId.Name, workspaceId.ResourceGroup, applicationGroupId.Name, applicationGroupId.ResourceGroup, err) } - d.SetId(resourceID) - - return resourceArmVirtualDesktopWorkspaceApplicationGroupAssociationRead(d, meta) + d.SetId(associationId) + return resourceVirtualDesktopWorkspaceApplicationGroupAssociationRead(d, meta) } -func resourceArmVirtualDesktopWorkspaceApplicationGroupAssociationRead(d *schema.ResourceData, meta interface{}) error { +func resourceVirtualDesktopWorkspaceApplicationGroupAssociationRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DesktopVirtualization.WorkspacesClient - ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + ctx, cancel := 
timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - splitID := strings.Split(d.Id(), "|") - if len(splitID) != 2 { - return fmt.Errorf("Expected ID to be in the format {workspaceID}/{networkSecurityGroup} but got %q", d.Id()) - } - - wsID, err := parse.DesktopVirtualizationWorkspaceID(splitID[0]) + id, err := parse.WorkspaceApplicationGroupAssociationID(d.Id()) if err != nil { return err } - read, err := client.Get(ctx, wsID.ResourceGroup, wsID.Name) + workspace, err := client.Get(ctx, id.Workspace.ResourceGroup, id.Workspace.Name) if err != nil { - if utils.ResponseWasNotFound(read.Response) { - log.Printf("[DEBUG] Virtual Desktop Workspace %q was not found in Resource Group %q - removing from state!", wsID.Name, wsID.ResourceGroup) + if utils.ResponseWasNotFound(workspace.Response) { + log.Printf("[DEBUG] Virtual Desktop Workspace %q was not found in Resource Group %q - removing from state!", id.Workspace.Name, id.Workspace.ResourceGroup) d.SetId("") return nil } - return fmt.Errorf("Making Read request on Virtual Desktop Desktop Workspace %q (Resource Group %q): %+v", wsID.Name, wsID.ResourceGroup, err) + return fmt.Errorf("retrieving Virtual Desktop Desktop Workspace %q (Resource Group %q): %+v", id.Workspace.Name, id.Workspace.ResourceGroup, err) } - output := make([]string, 0) - output = append(output, *read.ApplicationGroupReferences...) - - if !utils.SliceContainsValue(output, splitID[1]) { - log.Printf("[DEBUG] Association between Virtual Desktop Workspace %q (Resource Group %q) and Virtual Desktop Application Group %q was not found - removing from state!", wsID.Name, wsID.ResourceGroup, splitID[1]) + applicationGroupId := id.ApplicationGroup.ID() + exists := associationExists(workspace.WorkspaceProperties, applicationGroupId) + if !exists { + log.Printf("[DEBUG] Association between Virtual Desktop Workspace %q (Resource Group %q) and Application Group %q (Resource Group %q) was not found - removing from state!", id.Workspace.Name, id.Workspace.ResourceGroup, id.ApplicationGroup.Name, id.ApplicationGroup.ResourceGroup) d.SetId("") return nil } - d.Set("application_group_id", splitID[1]) + d.Set("workspace_id", id.Workspace.ID()) + d.Set("application_group_id", applicationGroupId) return nil } -func resourceArmVirtualDesktopWorkspaceApplicationGroupAssociationDelete(d *schema.ResourceData, meta interface{}) error { +func resourceVirtualDesktopWorkspaceApplicationGroupAssociationDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DesktopVirtualization.WorkspacesClient - ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - splitID := strings.Split(d.Id(), "|") - if len(splitID) != 2 { - return fmt.Errorf("Expected ID to be in the format {workspaceID}/{networkSecurityGroup} but got %q", d.Id()) - } - - applicationGroupReferenceID := d.Get("application_group_id").(string) - - wsID, err := parse.DesktopVirtualizationWorkspaceID(splitID[0]) - if err != nil { - return err - } - - agID, err := parse.VirtualDesktopApplicationGroupID(splitID[1]) + id, err := parse.WorkspaceApplicationGroupAssociationID(d.Id()) if err != nil { return err } - locks.ByName(wsID.Name, "Microsoft.DesktopVirtualization/workspaces") - defer locks.UnlockByName(wsID.Name, "Microsoft.DesktopVirtualization/workspaces") + locks.ByName(id.Workspace.Name, workspaceResourceType) + defer locks.UnlockByName(id.Workspace.Name, workspaceResourceType) - 
locks.ByName(agID.Name, "Microsoft.DesktopVirtualization/applicationgroups") - defer locks.UnlockByName(agID.Name, "Microsoft.DesktopVirtualization/applicationgroups") + locks.ByName(id.ApplicationGroup.Name, applicationGroupType) + defer locks.UnlockByName(id.ApplicationGroup.Name, applicationGroupType) - read, err := client.Get(ctx, wsID.ResourceGroup, wsID.Name) + workspace, err := client.Get(ctx, id.Workspace.ResourceGroup, id.Workspace.Name) if err != nil { - if utils.ResponseWasNotFound(read.Response) { - return fmt.Errorf("Virtual Desktop Workspace %q (Resource Group %q) was not found", wsID.Name, wsID.ResourceGroup) + if utils.ResponseWasNotFound(workspace.Response) { + return fmt.Errorf("Virtual Desktop Workspace %q (Resource Group %q) was not found", id.Workspace.Name, id.Workspace.ResourceGroup) } - return fmt.Errorf("Retrieving Virtual Desktop Workspace %q (Resource Group %q): %+v", wsID.Name, wsID.ResourceGroup, err) + return fmt.Errorf("retrieving Virtual Desktop Workspace %q (Resource Group %q): %+v", id.Workspace.Name, id.Workspace.ResourceGroup, err) } - refs := read.ApplicationGroupReferences - if refs == nil { - return fmt.Errorf("ApplicationGroupReferences was nil for Virtual Desktop Workspace %q (Resource Group %q)", wsID.Name, wsID.ResourceGroup) - } + applicationGroupReferences := []string{} + applicationGroupId := id.ApplicationGroup.ID() + if workspace.WorkspaceProperties != nil && workspace.WorkspaceProperties.ApplicationGroupReferences != nil { + for _, referenceId := range *workspace.WorkspaceProperties.ApplicationGroupReferences { + if strings.EqualFold(referenceId, applicationGroupId) { + continue + } - output := make([]string, 0) - output = append(output, *refs...) - output = utils.RemoveFromStringArray(output, applicationGroupReferenceID) + applicationGroupReferences = append(applicationGroupReferences, referenceId) + } + } - read.ApplicationGroupReferences = &output + workspace.WorkspaceProperties.ApplicationGroupReferences = &applicationGroupReferences - if _, err = client.CreateOrUpdate(ctx, wsID.ResourceGroup, wsID.Name, read); err != nil { - return fmt.Errorf("Updating Virtual Desktop Workspace Association for Application Group %q (Resource Group %q): %+v", wsID.Name, wsID.ResourceGroup, err) + if _, err = client.CreateOrUpdate(ctx, id.Workspace.ResourceGroup, id.Workspace.Name, workspace); err != nil { + return fmt.Errorf("removing association between Virtual Desktop Workspace %q (Resource Group %q) and Application Group %q (Resource Group %q): %+v", id.Workspace.Name, id.Workspace.ResourceGroup, id.ApplicationGroup.Name, id.ApplicationGroup.ResourceGroup, err) } return nil } + +func associationExists(props *desktopvirtualization.WorkspaceProperties, applicationGroupId string) bool { + if props == nil || props.ApplicationGroupReferences == nil { + return false + } + + for _, id := range *props.ApplicationGroupReferences { + if strings.EqualFold(id, applicationGroupId) { + return true + } + } + + return false +} diff --git a/azurerm/internal/services/desktopvirtualization/virtual_desktop_workspace_application_group_association_resource_test.go b/azurerm/internal/services/desktopvirtualization/virtual_desktop_workspace_application_group_association_resource_test.go index f8e59bca0427..2636255c0df0 100644 --- a/azurerm/internal/services/desktopvirtualization/virtual_desktop_workspace_application_group_association_resource_test.go +++ b/azurerm/internal/services/desktopvirtualization/virtual_desktop_workspace_application_group_association_resource_test.go 
@@ -1,8 +1,8 @@ package desktopvirtualization_test import ( + "context" "fmt" - "log" "net/http" "strings" "testing" @@ -10,173 +10,113 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/terraform" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/desktopvirtualization/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func TestAccAzureRMVirtualDesktopWorkspaceApplicationGroupAssociation_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_virtual_desktop_workspace_application_group_association", "test") +type VirtualDesktopWorkspaceApplicationGroupAssociationResource struct { +} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testAccAzureRMVirtualDesktopWorkspaceApplicationGroupAssociationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMVirtualDesktopWorkspaceApplicationGroupAssociation_basic(data), - Check: resource.ComposeTestCheckFunc( - testAccAzureRMVirtualDesktopWorkspaceApplicationGroupAssociationExists(data.ResourceName), - ), - }, +func TestAccVirtualDesktopWorkspaceApplicationGroupAssociation_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_virtual_desktop_workspace_application_group_association", "test") + r := VirtualDesktopWorkspaceApplicationGroupAssociationResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), }, }) } -func TestAccAzureRMVirtualDesktopWorkspaceApplicationGroupAssociation_complete(t *testing.T) { +func TestAccVirtualDesktopWorkspaceApplicationGroupAssociation_complete(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_virtual_desktop_workspace_application_group_association", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testAccAzureRMVirtualDesktopWorkspaceApplicationGroupAssociationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMVirtualDesktopWorkspaceApplicationGroupAssociation_complete(data), - Check: resource.ComposeTestCheckFunc( - testAccAzureRMVirtualDesktopWorkspaceApplicationGroupAssociationExists(data.ResourceName), - ), - }, + r := VirtualDesktopWorkspaceApplicationGroupAssociationResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), }, }) } -func TestAccAzureRMVirtualDesktopWorkspaceApplicationGroupAssociation_update(t *testing.T) { +func TestAccVirtualDesktopWorkspaceApplicationGroupAssociation_update(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_virtual_desktop_workspace_application_group_association", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testAccAzureRMVirtualDesktopWorkspaceApplicationGroupAssociationDestroy, - Steps: []resource.TestStep{ - { - Config: 
testAccAzureRMVirtualDesktopWorkspaceApplicationGroupAssociation_basic(data), - Check: resource.ComposeTestCheckFunc( - testAccAzureRMVirtualDesktopWorkspaceApplicationGroupAssociationExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMVirtualDesktopWorkspaceApplicationGroupAssociation_complete(data), - Check: resource.ComposeTestCheckFunc( - testAccAzureRMVirtualDesktopWorkspaceApplicationGroupAssociationExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMVirtualDesktopWorkspaceApplicationGroupAssociation_basic(data), - Check: resource.ComposeTestCheckFunc( - testAccAzureRMVirtualDesktopWorkspaceApplicationGroupAssociationExists(data.ResourceName), - ), - }, + r := VirtualDesktopWorkspaceApplicationGroupAssociationResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), }, }) } -func TestAccAzureRMVirtualDesktopWorkspaceApplicationGroupAssociation_requiresImport(t *testing.T) { +func TestAccVirtualDesktopWorkspaceApplicationGroupAssociation_requiresImport(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_virtual_desktop_workspace_application_group_association", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testAccAzureRMVirtualDesktopWorkspaceApplicationGroupAssociationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMVirtualDesktopWorkspaceApplicationGroupAssociation_basic(data), - Check: resource.ComposeTestCheckFunc( - testAccAzureRMVirtualDesktopWorkspaceApplicationGroupAssociationExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMVirtualDesktopWorkspaceApplicationGroupAssociation_requiresImport), + r := VirtualDesktopWorkspaceApplicationGroupAssociationResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), }, + data.RequiresImportErrorStep(r.requiresImport), }) } -func testAccAzureRMVirtualDesktopWorkspaceApplicationGroupAssociationExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).DesktopVirtualization.WorkspacesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - toBeSplitID := rs.Primary.ID - splitID := strings.Split(toBeSplitID, "|") - if len(splitID) != 2 { - return fmt.Errorf("Expected ID to be in the format {workspaceID}/{applicationGroup} but got %q", toBeSplitID) - } - - id, err := parse.DesktopVirtualizationWorkspaceID(splitID[0]) - if err != nil { - return err - } - - result, err := client.Get(ctx, id.ResourceGroup, id.Name) - if err == nil { - return nil - } - - if result.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Virtual Desktop Workspace Application Group Association %q (Resource Group: %q) does not exist", id.Name, 
id.ResourceGroup) - } - - if result.ApplicationGroupReferences == nil { - return fmt.Errorf("No Virtual Desktop Workspace <==> Application Group Associations exists for Workspace %q (Resource Group: %q)", id.Name, id.ResourceGroup) - } - - output := make([]string, 0) - output = append(output, *result.ApplicationGroupReferences...) - - if !utils.SliceContainsValue(output, splitID[1]) { - return fmt.Errorf("No Virtual Desktop Workspace <==> Application Group Association exists for Workspace %q and Application Group %q (Resource Group: %q)", id.Name, splitID[1], id.ResourceGroup) - } - - return nil +func (t VirtualDesktopWorkspaceApplicationGroupAssociationResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.WorkspaceApplicationGroupAssociationID(state.ID) + if err != nil { + return nil, err } -} - -func testAccAzureRMVirtualDesktopWorkspaceApplicationGroupAssociationDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).DesktopVirtualization.ApplicationGroupsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_virtual_desktop_application_group" { - continue - } - - log.Printf("[WARN] azurerm_virtual_desktop_application_group still exists in state file.") - - id, err := parse.VirtualDesktopApplicationGroupID(rs.Primary.ID) - if err != nil { - return err - } + resp, err := clients.DesktopVirtualization.WorkspacesClient.Get(ctx, id.Workspace.ResourceGroup, id.Workspace.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Virtual Desktop Workspace %q (Resource Group %q): %+v", id.Workspace.Name, id.Workspace.ResourceGroup, err) + } - result, err := client.Get(ctx, id.ResourceGroup, id.Name) + if resp.StatusCode == http.StatusNotFound { + return utils.Bool(false), nil + } - if err == nil { - return fmt.Errorf("Virtual Desktop Host Pool still exists:\n%#v", result) - } - - if result.StatusCode != http.StatusNotFound { - return err + if resp.WorkspaceProperties == nil || resp.ApplicationGroupReferences == nil { + return utils.Bool(false), nil } + + for _, app := range *resp.ApplicationGroupReferences { + if strings.EqualFold(app, id.ApplicationGroup.ID()) { + return utils.Bool(true), nil } } - return nil + return utils.Bool(false), nil } -func testAccAzureRMVirtualDesktopWorkspaceApplicationGroupAssociation_basic(data acceptance.TestData) string { +func (VirtualDesktopWorkspaceApplicationGroupAssociationResource) basic(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -220,7 +160,7 @@ resource "azurerm_virtual_desktop_workspace_application_group_association" "test `, data.RandomInteger, data.Locations.Secondary, data.RandomIntOfLength(8), data.RandomIntOfLength(8), data.RandomIntOfLength(8)) } -func testAccAzureRMVirtualDesktopWorkspaceApplicationGroupAssociation_complete(data acceptance.TestData) string { +func (VirtualDesktopWorkspaceApplicationGroupAssociationResource) complete(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -288,8 +228,7 @@ resource "azurerm_virtual_desktop_workspace_application_group_association" "pers `, data.RandomInteger, data.Locations.Secondary, data.RandomIntOfLength(8), data.RandomIntOfLength(8), data.RandomIntOfLength(8), data.RandomIntOfLength(8), data.RandomIntOfLength(8)) } -func 
testAccAzureRMVirtualDesktopWorkspaceApplicationGroupAssociation_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMVirtualDesktopWorkspaceApplicationGroupAssociation_basic(data) +func (r VirtualDesktopWorkspaceApplicationGroupAssociationResource) requiresImport(data acceptance.TestData) string { return fmt.Sprintf(` %s @@ -297,5 +236,5 @@ resource "azurerm_virtual_desktop_workspace_application_group_association" "impo workspace_id = azurerm_virtual_desktop_workspace_application_group_association.test.workspace_id application_group_id = azurerm_virtual_desktop_workspace_application_group_association.test.application_group_id } -`, template) +`, r.basic(data)) } diff --git a/azurerm/internal/services/desktopvirtualization/virtual_desktop_workspace_resource.go b/azurerm/internal/services/desktopvirtualization/virtual_desktop_workspace_resource.go index 21efb536e4f2..07ce30251592 100644 --- a/azurerm/internal/services/desktopvirtualization/virtual_desktop_workspace_resource.go +++ b/azurerm/internal/services/desktopvirtualization/virtual_desktop_workspace_resource.go @@ -10,8 +10,9 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/desktopvirtualization/migration" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/desktopvirtualization/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" @@ -19,6 +20,8 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) +var workspaceResourceType = "azurerm_virtual_desktop_workspace" + func resourceArmDesktopVirtualizationWorkspace() *schema.Resource { return &schema.Resource{ Create: resourceArmDesktopVirtualizationWorkspaceCreateUpdate, @@ -34,16 +37,25 @@ func resourceArmDesktopVirtualizationWorkspace() *schema.Resource { }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.DesktopVirtualizationWorkspaceID(id) + _, err := parse.WorkspaceID(id) return err }), + SchemaVersion: 1, + StateUpgraders: []schema.StateUpgrader{ + { + Type: migration.WorkspaceUpgradeV0Schema().CoreConfigSchema().ImpliedType(), + Upgrade: migration.WorkspaceUpgradeV0ToV1, + Version: 0, + }, + }, + Schema: map[string]*schema.Schema{ "name": { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.DevSpaceName(), + ValidateFunc: validation.StringIsNotEmpty, // TODO: determine more accurate requirements in time }, "location": azure.SchemaLocation(), @@ -69,24 +81,26 @@ func resourceArmDesktopVirtualizationWorkspace() *schema.Resource { func resourceArmDesktopVirtualizationWorkspaceCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DesktopVirtualization.WorkspacesClient - ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := 
timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - log.Printf("[INFO] preparing arguments for Virtual Desktop Workspace creation") + log.Printf("[INFO] preparing arguments for Virtual Desktop Workspace create/update") name := d.Get("name").(string) resourceGroup := d.Get("resource_group_name").(string) + resourceId := parse.NewWorkspaceID(subscriptionId, resourceGroup, name).ID() if d.IsNewResource() { existing, err := client.Get(ctx, resourceGroup, name) if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing Virtual Desktop Workspace %q (Resource Group %q): %s", name, resourceGroup, err) + return fmt.Errorf("checking for presence of existing Virtual Desktop Workspace %q (Resource Group %q): %s", name, resourceGroup, err) } } - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_virtual_desktop_workspace", *existing.ID) + if existing.WorkspaceProperties != nil { + return tf.ImportAsExistsError("azurerm_virtual_desktop_workspace", resourceId) } } @@ -103,19 +117,10 @@ func resourceArmDesktopVirtualizationWorkspaceCreateUpdate(d *schema.ResourceDat } if _, err := client.CreateOrUpdate(ctx, resourceGroup, name, context); err != nil { - return fmt.Errorf("Error creating Desktop Virtualization Workspace %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - result, err := client.Get(ctx, resourceGroup, name) - if err != nil { - return fmt.Errorf("Error retrieving Desktop Virtualization Workspace %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - if result.ID == nil { - return fmt.Errorf("Cannot read Desktop Virtualization Workspace %q (Resource Group %q) ID", name, resourceGroup) + return fmt.Errorf("creating Virtual Desktop Workspace %q (Resource Group %q): %+v", name, resourceGroup, err) } - d.SetId(*result.ID) + d.SetId(resourceId) return resourceArmDesktopVirtualizationWorkspaceRead(d, meta) } @@ -125,7 +130,7 @@ func resourceArmDesktopVirtualizationWorkspaceRead(d *schema.ResourceData, meta ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DesktopVirtualizationWorkspaceID(d.Id()) + id, err := parse.WorkspaceID(d.Id()) if err != nil { return err } @@ -133,12 +138,12 @@ func resourceArmDesktopVirtualizationWorkspaceRead(d *schema.ResourceData, meta resp, err := client.Get(ctx, id.ResourceGroup, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[DEBUG] Desktop Virtualization Workspace %q was not found in Resource Group %q - removing from state!", id.Name, id.ResourceGroup) + log.Printf("[DEBUG] Virtual Desktop Workspace %q was not found in Resource Group %q - removing from state!", id.Name, id.ResourceGroup) d.SetId("") return nil } - return fmt.Errorf("Error making Read request on Desktop Virtualization Workspace %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("retrieving Virtual Desktop Workspace %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } d.Set("name", id.Name) @@ -149,13 +154,8 @@ func resourceArmDesktopVirtualizationWorkspaceRead(d *schema.ResourceData, meta } if props := resp.WorkspaceProperties; props != nil { - if desc := props.Description; desc != nil { - d.Set("description", desc) - } - - if fn := props.FriendlyName; fn != nil { - d.Set("friendly_name", fn) - } + d.Set("description", props.Description) + d.Set("friendly_name", props.FriendlyName) } return tags.FlattenAndSet(d, 
resp.Tags) @@ -163,16 +163,19 @@ func resourceArmDesktopVirtualizationWorkspaceRead(d *schema.ResourceData, meta func resourceArmDesktopVirtualizationWorkspaceDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DesktopVirtualization.WorkspacesClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - id, err := parse.DesktopVirtualizationWorkspaceID(d.Id()) + id, err := parse.WorkspaceID(d.Id()) if err != nil { return err } + locks.ByName(id.Name, workspaceResourceType) + defer locks.UnlockByName(id.Name, workspaceResourceType) + + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() if _, err = client.Delete(ctx, id.ResourceGroup, id.Name); err != nil { - return fmt.Errorf("Error deleting Desktop Virtualization Workspace %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("deleting Desktop Virtualization Workspace %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } return nil diff --git a/azurerm/internal/services/desktopvirtualization/virtual_desktop_workspace_resource_test.go b/azurerm/internal/services/desktopvirtualization/virtual_desktop_workspace_resource_test.go index d06b5f734a0d..1b8dd2be1c6e 100644 --- a/azurerm/internal/services/desktopvirtualization/virtual_desktop_workspace_resource_test.go +++ b/azurerm/internal/services/desktopvirtualization/virtual_desktop_workspace_resource_test.go @@ -1,172 +1,114 @@ package desktopvirtualization_test import ( + "context" "fmt" - "log" - "net/http" "testing" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/terraform" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/desktopvirtualization/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) +type AzureRMDesktopVirtualizationWorkspaceResource struct { +} + func TestAccAzureRMDesktopVirtualizationWorkspace_basic(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_virtual_desktop_workspace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDesktopVirtualizationWorkspaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDesktopVirtualizationWorkspace_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDesktopVirtualizationWorkspaceExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, + r := AzureRMDesktopVirtualizationWorkspaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), }, }) } func TestAccAzureRMDesktopVirtualizationWorkspace_complete(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_virtual_desktop_workspace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDesktopVirtualizationWorkspaceDestroy, 
- Steps: []resource.TestStep{ - { - Config: testAccAzureRMDesktopVirtualizationWorkspace_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDesktopVirtualizationWorkspaceExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, + r := AzureRMDesktopVirtualizationWorkspaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), }, }) } func TestAccAzureRMDesktopVirtualizationWorkspace_update(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_virtual_desktop_workspace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDesktopVirtualizationWorkspaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDesktopVirtualizationWorkspace_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDesktopVirtualizationWorkspaceExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - { - Config: testAccAzureRMDesktopVirtualizationWorkspace_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDesktopVirtualizationWorkspaceExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - { - Config: testAccAzureRMDesktopVirtualizationWorkspace_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDesktopVirtualizationWorkspaceExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, + r := AzureRMDesktopVirtualizationWorkspaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), + }, + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), + }, + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), }, }) } func TestAccAzureRMDesktopVirtualizationWorkspace_requiresImport(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_virtual_desktop_workspace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDesktopVirtualizationWorkspaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDesktopVirtualizationWorkspace_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDesktopVirtualizationWorkspaceExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMDesktopVirtualizationWorkspace_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_virtual_desktop_workspace"), - }, + r := AzureRMDesktopVirtualizationWorkspaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: 
acceptance.RequiresImportError("azurerm_virtual_desktop_workspace"), }, }) } -func testCheckAzureRMDesktopVirtualizationWorkspaceExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).DesktopVirtualization.WorkspacesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.DesktopVirtualizationWorkspaceID(rs.Primary.ID) - if err != nil { - return err - } - - result, err := client.Get(ctx, id.ResourceGroup, id.Name) - - if err == nil { - return nil - } - - if result.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Virtual Desktop Workspace %q (Resource Group: %q) does not exist", id.Name, id.ResourceGroup) - } - - return fmt.Errorf("Bad: Get virtualDesktopWorspaceClient: %+v", err) +func (t AzureRMDesktopVirtualizationWorkspaceResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.WorkspaceID(state.ID) + if err != nil { + return nil, err } -} - -func testCheckAzureRMDesktopVirtualizationWorkspaceDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).DesktopVirtualization.WorkspacesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_virtual_desktop_workspace" { - continue - } - - log.Printf("[WARN] azurerm_virtual_desktop_workspace still exists in state file.") - - id, err := parse.DesktopVirtualizationWorkspaceID(rs.Primary.ID) - if err != nil { - return err - } - - result, err := client.Get(ctx, id.ResourceGroup, id.Name) - - if err == nil { - return fmt.Errorf("Virtual Desktop Workspace still exists:\n%#v", result) - } - if result.StatusCode != http.StatusNotFound { - return err - } + resp, err := clients.DesktopVirtualization.WorkspacesClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Virtual Desktop Workspace %q (Resource Group: %q): %v", id.Name, id.ResourceGroup, err) } - return nil + return utils.Bool(resp.WorkspaceProperties != nil), nil } -func testAccAzureRMDesktopVirtualizationWorkspace_basic(data acceptance.TestData) string { +func (AzureRMDesktopVirtualizationWorkspaceResource) basic(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -186,7 +128,7 @@ resource "azurerm_virtual_desktop_workspace" "test" { `, data.RandomInteger, data.Locations.Secondary, data.RandomInteger) } -func testAccAzureRMDesktopVirtualizationWorkspace_complete(data acceptance.TestData) string { +func (AzureRMDesktopVirtualizationWorkspaceResource) complete(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -208,8 +150,7 @@ resource "azurerm_virtual_desktop_workspace" "test" { `, data.RandomInteger, data.Locations.Secondary, data.RandomIntOfLength(8), data.RandomInteger) } -func testAccAzureRMDesktopVirtualizationWorkspace_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMDesktopVirtualizationWorkspace_basic(data) +func (r AzureRMDesktopVirtualizationWorkspaceResource) requiresImport(data acceptance.TestData) string { return fmt.Sprintf(` %s @@ -218,5 +159,5 @@ resource 
"azurerm_virtual_desktop_workspace" "import" { location = azurerm_virtual_desktop_workspace.test.location resource_group_name = azurerm_virtual_desktop_workspace.test.resource_group_name } -`, template) +`, r.basic(data)) } diff --git a/azurerm/internal/services/devspace/devspace_controller_resource.go b/azurerm/internal/services/devspace/devspace_controller_resource.go index ee43a59990e0..fc16822f32fc 100644 --- a/azurerm/internal/services/devspace/devspace_controller_resource.go +++ b/azurerm/internal/services/devspace/devspace_controller_resource.go @@ -19,12 +19,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmDevSpaceController() *schema.Resource { +func resourceDevSpaceController() *schema.Resource { return &schema.Resource{ - Create: resourceArmDevSpaceControllerCreate, - Read: resourceArmDevSpaceControllerRead, - Update: resourceArmDevSpaceControllerUpdate, - Delete: resourceArmDevSpaceControllerDelete, + Create: resourceDevSpaceControllerCreate, + Read: resourceDevSpaceControllerRead, + Update: resourceDevSpaceControllerUpdate, + Delete: resourceDevSpaceControllerDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -34,10 +34,14 @@ func resourceArmDevSpaceController() *schema.Resource { }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.DevSpaceControllerID(id) + _, err := parse.ControllerID(id) return err }), + DeprecationMessage: `DevSpace Controllers are deprecated and will be retired on 31 October 2023 - at this time the Azure API does not allow new Controllers to be provisioned, but existing DevSpace Controllers can continue to be used. + +Since these are deprecated and can no longer be provisioned, version 3.0 of the Azure Provider will remove support for DevSpace Controllers.`, + Schema: map[string]*schema.Schema{ "name": { Type: schema.TypeString, @@ -89,7 +93,7 @@ func resourceArmDevSpaceController() *schema.Resource { } } -func resourceArmDevSpaceControllerCreate(d *schema.ResourceData, meta interface{}) error { +func resourceDevSpaceControllerCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DevSpace.ControllersClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -149,17 +153,17 @@ func resourceArmDevSpaceControllerCreate(d *schema.ResourceData, meta interface{ } d.SetId(*result.ID) - return resourceArmDevSpaceControllerRead(d, meta) + return resourceDevSpaceControllerRead(d, meta) } -func resourceArmDevSpaceControllerUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceDevSpaceControllerUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DevSpace.ControllersClient ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) defer cancel() log.Printf("[INFO] preparing arguments for DevSpace Controller updating") - id, err := parse.DevSpaceControllerID(d.Id()) + id, err := parse.ControllerID(d.Id()) if err != nil { return err } @@ -177,15 +181,15 @@ func resourceArmDevSpaceControllerUpdate(d *schema.ResourceData, meta interface{ } d.SetId(*result.ID) - return resourceArmDevSpaceControllerRead(d, meta) + return resourceDevSpaceControllerRead(d, meta) } -func resourceArmDevSpaceControllerRead(d *schema.ResourceData, meta interface{}) error { +func resourceDevSpaceControllerRead(d *schema.ResourceData, meta interface{}) error { client := 
meta.(*clients.Client).DevSpace.ControllersClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DevSpaceControllerID(d.Id()) + id, err := parse.ControllerID(d.Id()) if err != nil { return err } @@ -220,12 +224,12 @@ func resourceArmDevSpaceControllerRead(d *schema.ResourceData, meta interface{}) return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmDevSpaceControllerDelete(d *schema.ResourceData, meta interface{}) error { +func resourceDevSpaceControllerDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DevSpace.ControllersClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DevSpaceControllerID(d.Id()) + id, err := parse.ControllerID(d.Id()) if err != nil { return err } diff --git a/azurerm/internal/services/devspace/devspace_controller_resource_test.go b/azurerm/internal/services/devspace/devspace_controller_resource_test.go new file mode 100644 index 000000000000..3513bd2629d7 --- /dev/null +++ b/azurerm/internal/services/devspace/devspace_controller_resource_test.go @@ -0,0 +1,142 @@ +package devspace_test + +import ( + "context" + "fmt" + "os" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/devspace/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +// temporarily works around these being flagged as unused, since the tests are skipped +var _ interface{} = DevSpaceControllerResource{} +var _ interface{} = DevSpaceControllerResource{}.basic(acceptance.TestData{}, "", "") +var _ interface{} = DevSpaceControllerResource{}.requiresImport(acceptance.TestData{}, "", "") + +type DevSpaceControllerResource struct { +} + +func TestAccDevSpaceController_basic(t *testing.T) { + t.Skip("A breaking API change means new DevSpace Controllers cannot be provisioned, so skipping.") + + data := acceptance.BuildTestData(t, "azurerm_devspace_controller", "test") + r := DevSpaceControllerResource{} + clientId := os.Getenv("ARM_CLIENT_ID") + clientSecret := os.Getenv("ARM_CLIENT_SECRET") + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, clientId, clientSecret), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), + }, + }) +} + +func TestAccDevSpaceController_requiresImport(t *testing.T) { + t.Skip("A breaking API change means new DevSpace Controllers cannot be provisioned, so skipping.") + + data := acceptance.BuildTestData(t, "azurerm_devspace_controller", "test") + r := DevSpaceControllerResource{} + clientId := os.Getenv("ARM_CLIENT_ID") + clientSecret := os.Getenv("ARM_CLIENT_SECRET") + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, clientId, clientSecret), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data, clientId, clientSecret), + ExpectError: acceptance.RequiresImportError("azurerm_devspace_controller"), + }, + }) +} + +func (t 
DevSpaceControllerResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ControllerID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.DevSpace.ControllersClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving DevSpace Controller %q (Resource Group: %q): %v", id.Name, id.ResourceGroup, err) + } + + return utils.Bool(resp.ControllerProperties != nil), nil +} + +func (DevSpaceControllerResource) basic(data acceptance.TestData, clientId string, clientSecret string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-devspace-%d" + location = "%s" +} + +resource "azurerm_kubernetes_cluster" "test" { + name = "acctestaks%d" + location = "${azurerm_resource_group.test.location}" + resource_group_name = "${azurerm_resource_group.test.name}" + dns_prefix = "acctestaks1" + + linux_profile { + admin_username = "acctestuser1" + + ssh_key { + key_data = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCqaZoyiz1qbdOQ8xEf6uEu1cCwYowo5FHtsBhqLoDnnp7KUTEBN+L2NxRIfQ781rxV6Iq5jSav6b2Q8z5KiseOlvKA/RF2wqU0UPYqQviQhLmW6THTpmrv/YkUCuzxDpsH7DUDhZcwySLKVVe0Qm3+5N2Ta6UYH3lsDf9R9wTP2K/+vAnflKebuypNlmocIvakFWoZda18FOmsOoIVXQ8HWFNCuw9ZCunMSN62QGamCe3dL5cXlkgHYv7ekJE15IA9aOJcM7e90oeTqo+7HTcWfdu0qQqPWY5ujyMw/llas8tsXY85LFqRnr3gJ02bAscjc477+X+j/gkpFoN1QEmt terraform@demo.tld" + } + } + + default_node_pool { + name = "default" + node_count = 1 + vm_size = "Standard_DS2_v2" + } + + service_principal { + client_id = "%s" + client_secret = "%s" + } +} + +resource "azurerm_devspace_controller" "test" { + name = "acctestdsc%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + target_container_host_resource_id = azurerm_kubernetes_cluster.test.id + target_container_host_credentials_base64 = base64encode(azurerm_kubernetes_cluster.test.kube_config_raw) + sku_name = "S1" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, clientId, clientSecret, data.RandomInteger) +} + +func (r DevSpaceControllerResource) requiresImport(data acceptance.TestData, clientId string, clientSecret string) string { + return fmt.Sprintf(` +%s + +resource "azurerm_devspace_controller" "import" { + name = azurerm_devspace_controller.test.name + location = azurerm_devspace_controller.test.location + resource_group_name = azurerm_devspace_controller.test.resource_group_name + target_container_host_resource_id = azurerm_devspace_controller.test.target_container_host_resource_id + target_container_host_credentials_base64 = base64encode(azurerm_kubernetes_cluster.test.kube_config_raw) + sku_name = azurerm_devspace_controller.test.sku_name +} +`, r.basic(data, clientId, clientSecret)) +} diff --git a/azurerm/internal/services/devspace/parse/controller.go b/azurerm/internal/services/devspace/parse/controller.go new file mode 100644 index 000000000000..b8dd4105e954 --- /dev/null +++ b/azurerm/internal/services/devspace/parse/controller.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ControllerId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewControllerID(subscriptionId, resourceGroup, name string) ControllerId { + return 
ControllerId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id ControllerId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Controller", segmentsStr) +} + +func (id ControllerId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DevSpaces/controllers/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// ControllerID parses a Controller ID into an ControllerId struct +func ControllerID(input string) (*ControllerId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ControllerId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("controllers"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/devspace/parse/controller_test.go b/azurerm/internal/services/devspace/parse/controller_test.go new file mode 100644 index 000000000000..90a9ad4a4504 --- /dev/null +++ b/azurerm/internal/services/devspace/parse/controller_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ControllerId{} + +func TestControllerIDFormatter(t *testing.T) { + actual := NewControllerID("12345678-1234-9876-4563-123456789012", "group1", "controller1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DevSpaces/controllers/controller1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestControllerID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ControllerId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DevSpaces/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DevSpaces/controllers/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DevSpaces/controllers/controller1", + Expected: &ControllerId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "group1", + Name: "controller1", + }, + }, + + { + // 
upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.DEVSPACES/CONTROLLERS/CONTROLLER1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ControllerID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/devspace/parse/devspace_controller.go b/azurerm/internal/services/devspace/parse/devspace_controller.go deleted file mode 100644 index 24987d487181..000000000000 --- a/azurerm/internal/services/devspace/parse/devspace_controller.go +++ /dev/null @@ -1,33 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type DevSpaceControllerId struct { - ResourceGroup string - Name string -} - -func DevSpaceControllerID(input string) (*DevSpaceControllerId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse DevSpace Controller ID %q: %+v", input, err) - } - - controller := DevSpaceControllerId{ - ResourceGroup: id.ResourceGroup, - } - - if controller.Name, err = id.PopSegment("controllers"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &controller, nil -} diff --git a/azurerm/internal/services/devspace/parse/devspace_controller_test.go b/azurerm/internal/services/devspace/parse/devspace_controller_test.go deleted file mode 100644 index ffa14cbf739a..000000000000 --- a/azurerm/internal/services/devspace/parse/devspace_controller_test.go +++ /dev/null @@ -1,73 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestDevSpaceControllerId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *DevSpaceControllerId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Controllers Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DevSpaces/controllers/", - Expected: nil, - }, - { - Name: "DevSpace Controller ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DevSpaces/controllers/Controller1", - Expected: &DevSpaceControllerId{ - Name: "Controller1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DevSpaces/Controllers/Controller1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := DevSpaceControllerID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/devspace/registration.go b/azurerm/internal/services/devspace/registration.go index 195ae43e4308..884282caec8f 100644 --- a/azurerm/internal/services/devspace/registration.go +++ b/azurerm/internal/services/devspace/registration.go @@ -2,6 +2,7 @@ package devspace import ( "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/features" ) // TODO: this can be moved into Container @@ -27,7 +28,12 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource { // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { + if features.ThreePointOh() { + return map[string]*schema.Resource{} + } + + // TODO: remove this entire package in 3.0 return map[string]*schema.Resource{ - "azurerm_devspace_controller": resourceArmDevSpaceController(), + "azurerm_devspace_controller": resourceDevSpaceController(), } } diff --git a/azurerm/internal/services/devspace/resourceids.go b/azurerm/internal/services/devspace/resourceids.go new file mode 100644 index 000000000000..52721f7fe736 --- /dev/null +++ b/azurerm/internal/services/devspace/resourceids.go @@ -0,0 +1,3 @@ +package devspace + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Controller -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DevSpaces/controllers/controller1 diff --git a/azurerm/internal/services/devspace/tests/devspace_controller_resource_test.go b/azurerm/internal/services/devspace/tests/devspace_controller_resource_test.go deleted file mode 100644 index 417f4becaaa1..000000000000 --- a/azurerm/internal/services/devspace/tests/devspace_controller_resource_test.go +++ /dev/null @@ -1,184 +0,0 @@ -package tests - -import ( - "fmt" - "log" - "net/http" - "os" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/devspace/parse" -) - -func TestAccAzureRMDevSpaceController_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_devspace_controller", "test") - clientId := os.Getenv("ARM_CLIENT_ID") - clientSecret := os.Getenv("ARM_CLIENT_SECRET") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDevSpaceControllerDestroy, - Steps: []resource.TestStep{ - { - Config: 
testAccAzureRMDevSpaceController_basic(data, clientId, clientSecret), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDevSpaceControllerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - }, - }) -} - -func TestAccAzureRMDevSpaceController_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_devspace_controller", "test") - clientId := os.Getenv("ARM_CLIENT_ID") - clientSecret := os.Getenv("ARM_CLIENT_SECRET") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDevSpaceControllerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDevSpaceController_basic(data, clientId, clientSecret), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDevSpaceControllerExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMDevSpaceController_requiresImport(data, clientId, clientSecret), - ExpectError: acceptance.RequiresImportError("azurerm_devspace_controller"), - }, - }, - }) -} - -func testCheckAzureRMDevSpaceControllerExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).DevSpace.ControllersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.DevSpaceControllerID(rs.Primary.ID) - if err != nil { - return err - } - - result, err := client.Get(ctx, id.ResourceGroup, id.Name) - - if err == nil { - return nil - } - - if result.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: DevSpace Controller %q (Resource Group: %q) does not exist", id.Name, id.ResourceGroup) - } - - return fmt.Errorf("Bad: Get devSpaceControllerClient: %+v", err) - } -} - -func testCheckAzureRMDevSpaceControllerDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).DevSpace.ControllersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_devspace_controller" { - continue - } - - log.Printf("[WARN] azurerm_devspace_controller still exists in state file.") - - id, err := parse.DevSpaceControllerID(rs.Primary.ID) - if err != nil { - return err - } - - result, err := client.Get(ctx, id.ResourceGroup, id.Name) - - if err == nil { - return fmt.Errorf("DevSpace Controller still exists:\n%#v", result) - } - - if result.StatusCode != http.StatusNotFound { - return err - } - } - - return nil -} - -func testAccAzureRMDevSpaceController_basic(data acceptance.TestData, clientId string, clientSecret string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_kubernetes_cluster" "test" { - name = "acctestaks%d" - location = "${azurerm_resource_group.test.location}" - resource_group_name = "${azurerm_resource_group.test.name}" - dns_prefix = "acctestaks1" - - linux_profile { - admin_username = "acctestuser1" - - ssh_key { - key_data = "ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAABAQCqaZoyiz1qbdOQ8xEf6uEu1cCwYowo5FHtsBhqLoDnnp7KUTEBN+L2NxRIfQ781rxV6Iq5jSav6b2Q8z5KiseOlvKA/RF2wqU0UPYqQviQhLmW6THTpmrv/YkUCuzxDpsH7DUDhZcwySLKVVe0Qm3+5N2Ta6UYH3lsDf9R9wTP2K/+vAnflKebuypNlmocIvakFWoZda18FOmsOoIVXQ8HWFNCuw9ZCunMSN62QGamCe3dL5cXlkgHYv7ekJE15IA9aOJcM7e90oeTqo+7HTcWfdu0qQqPWY5ujyMw/llas8tsXY85LFqRnr3gJ02bAscjc477+X+j/gkpFoN1QEmt terraform@demo.tld" - } - } - - default_node_pool { - name = "default" - node_count = 1 - vm_size = "Standard_DS2_v2" - } - - service_principal { - client_id = "%s" - client_secret = "%s" - } -} - -resource "azurerm_devspace_controller" "test" { - name = "acctestdsc%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - target_container_host_resource_id = azurerm_kubernetes_cluster.test.id - target_container_host_credentials_base64 = base64encode(azurerm_kubernetes_cluster.test.kube_config_raw) - sku_name = "S1" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, clientId, clientSecret, data.RandomInteger) -} - -func testAccAzureRMDevSpaceController_requiresImport(data acceptance.TestData, clientId string, clientSecret string) string { - template := testAccAzureRMDevSpaceController_basic(data, clientId, clientSecret) - return fmt.Sprintf(` -%s - -resource "azurerm_devspace_controller" "import" { - name = azurerm_devspace_controller.test.name - location = azurerm_devspace_controller.test.location - resource_group_name = azurerm_devspace_controller.test.resource_group_name - target_container_host_resource_id = azurerm_devspace_controller.test.target_container_host_resource_id - target_container_host_credentials_base64 = base64encode(azurerm_kubernetes_cluster.test.kube_config_raw) - sku_name = azurerm_devspace_controller.test.sku_name -} -`, template) -} diff --git a/azurerm/internal/services/devspace/validate/controller_id.go b/azurerm/internal/services/devspace/validate/controller_id.go new file mode 100644 index 000000000000..32fe97a2ab00 --- /dev/null +++ b/azurerm/internal/services/devspace/validate/controller_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/devspace/parse" +) + +func ControllerID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ControllerID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/devspace/validate/controller_id_test.go b/azurerm/internal/services/devspace/validate/controller_id_test.go new file mode 100644 index 000000000000..28320768ed67 --- /dev/null +++ b/azurerm/internal/services/devspace/validate/controller_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestControllerID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for 
ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DevSpaces/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DevSpaces/controllers/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DevSpaces/controllers/controller1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.DEVSPACES/CONTROLLERS/CONTROLLER1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ControllerID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/devtestlabs/dev_test_global_vm_shutdown_schedule_resource.go b/azurerm/internal/services/devtestlabs/dev_test_global_vm_shutdown_schedule_resource.go index e237a5168629..2e4546c844a5 100644 --- a/azurerm/internal/services/devtestlabs/dev_test_global_vm_shutdown_schedule_resource.go +++ b/azurerm/internal/services/devtestlabs/dev_test_global_vm_shutdown_schedule_resource.go @@ -12,22 +12,23 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + computeParse "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" + computeValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/devtestlabs/parse" - devtestValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/devtestlabs/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmDevTestLabGlobalVMShutdownSchedule() *schema.Resource { +func resourceDevTestGlobalVMShutdownSchedule() *schema.Resource { return &schema.Resource{ - Create: resourceArmDevTestLabGlobalVMShutdownScheduleCreateUpdate, - Read: resourceArmDevTestLabGlobalVMShutdownScheduleRead, - Update: resourceArmDevTestLabGlobalVMShutdownScheduleCreateUpdate, - Delete: resourceArmDevTestLabGlobalVMShutdownScheduleDelete, + Create: resourceDevTestGlobalVMShutdownScheduleCreateUpdate, + Read: resourceDevTestGlobalVMShutdownScheduleRead, + Update: resourceDevTestGlobalVMShutdownScheduleCreateUpdate, + Delete: resourceDevTestGlobalVMShutdownScheduleDelete, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.GlobalScheduleID(id) + _, err := parse.ScheduleID(id) return err }), @@ -45,7 +46,7 @@ func resourceArmDevTestLabGlobalVMShutdownSchedule() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: 
devtestValidate.GlobalScheduleVirtualMachineID, + ValidateFunc: computeValidate.VirtualMachineID, }, "enabled": { @@ -98,13 +99,13 @@ func resourceArmDevTestLabGlobalVMShutdownSchedule() *schema.Resource { } } -func resourceArmDevTestLabGlobalVMShutdownScheduleCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceDevTestGlobalVMShutdownScheduleCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DevTestLabs.GlobalLabSchedulesClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() vmID := d.Get("virtual_machine_id").(string) - id, err := parse.GlobalScheduleVirtualMachineID(vmID) + id, err := computeParse.VirtualMachineID(vmID) if err != nil { return err } @@ -149,12 +150,12 @@ func resourceArmDevTestLabGlobalVMShutdownScheduleCreateUpdate(d *schema.Resourc } if v, ok := d.GetOk("daily_recurrence_time"); ok { - dailyRecurrence := expandArmDevTestLabGlobalVMShutdownScheduleRecurrenceDaily(v) + dailyRecurrence := expandDevTestGlobalVMShutdownScheduleRecurrenceDaily(v) schedule.DailyRecurrence = dailyRecurrence } if _, ok := d.GetOk("notification_settings"); ok { - notificationSettings := expandArmDevTestLabGlobalVMShutdownScheduleNotificationSettings(d) + notificationSettings := expandDevTestGlobalVMShutdownScheduleNotificationSettings(d) schedule.NotificationSettings = notificationSettings } @@ -173,21 +174,20 @@ func resourceArmDevTestLabGlobalVMShutdownScheduleCreateUpdate(d *schema.Resourc d.SetId(*read.ID) - return resourceArmDevTestLabGlobalVMShutdownScheduleRead(d, meta) + return resourceDevTestGlobalVMShutdownScheduleRead(d, meta) } -func resourceArmDevTestLabGlobalVMShutdownScheduleRead(d *schema.ResourceData, meta interface{}) error { +func resourceDevTestGlobalVMShutdownScheduleRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DevTestLabs.GlobalLabSchedulesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.GlobalScheduleID(d.Id()) + id, err := parse.ScheduleID(d.Id()) if err != nil { return err } resp, err := client.Get(ctx, id.ResourceGroup, id.Name, "") - if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") @@ -205,11 +205,11 @@ func resourceArmDevTestLabGlobalVMShutdownScheduleRead(d *schema.ResourceData, m d.Set("timezone", props.TimeZoneID) d.Set("enabled", props.Status == dtl.EnableStatusEnabled) - if err := d.Set("daily_recurrence_time", flattenArmDevTestLabGlobalVMShutdownScheduleRecurrenceDaily(props.DailyRecurrence)); err != nil { + if err := d.Set("daily_recurrence_time", flattenDevTestGlobalVMShutdownScheduleRecurrenceDaily(props.DailyRecurrence)); err != nil { return fmt.Errorf("Error setting `dailyRecurrence`: %#v", err) } - if err := d.Set("notification_settings", flattenArmDevTestLabGlobalVMShutdownScheduleNotificationSettings(props.NotificationSettings)); err != nil { + if err := d.Set("notification_settings", flattenDevTestGlobalVMShutdownScheduleNotificationSettings(props.NotificationSettings)); err != nil { return fmt.Errorf("Error setting `notificationSettings`: %#v", err) } } @@ -217,12 +217,12 @@ func resourceArmDevTestLabGlobalVMShutdownScheduleRead(d *schema.ResourceData, m return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmDevTestLabGlobalVMShutdownScheduleDelete(d *schema.ResourceData, meta interface{}) error { +func resourceDevTestGlobalVMShutdownScheduleDelete(d *schema.ResourceData, meta interface{}) error { client := 
meta.(*clients.Client).DevTestLabs.GlobalLabSchedulesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.GlobalScheduleID(d.Id()) + id, err := parse.ScheduleID(d.Id()) if err != nil { return err } @@ -234,14 +234,14 @@ func resourceArmDevTestLabGlobalVMShutdownScheduleDelete(d *schema.ResourceData, return nil } -func expandArmDevTestLabGlobalVMShutdownScheduleRecurrenceDaily(dailyTime interface{}) *dtl.DayDetails { +func expandDevTestGlobalVMShutdownScheduleRecurrenceDaily(dailyTime interface{}) *dtl.DayDetails { time := dailyTime.(string) return &dtl.DayDetails{ Time: &time, } } -func flattenArmDevTestLabGlobalVMShutdownScheduleRecurrenceDaily(dailyRecurrence *dtl.DayDetails) interface{} { +func flattenDevTestGlobalVMShutdownScheduleRecurrenceDaily(dailyRecurrence *dtl.DayDetails) interface{} { if dailyRecurrence == nil { return nil } @@ -254,7 +254,7 @@ func flattenArmDevTestLabGlobalVMShutdownScheduleRecurrenceDaily(dailyRecurrence return result } -func expandArmDevTestLabGlobalVMShutdownScheduleNotificationSettings(d *schema.ResourceData) *dtl.NotificationSettings { +func expandDevTestGlobalVMShutdownScheduleNotificationSettings(d *schema.ResourceData) *dtl.NotificationSettings { notificationSettingsConfigs := d.Get("notification_settings").([]interface{}) notificationSettingsConfig := notificationSettingsConfigs[0].(map[string]interface{}) webhookUrl := notificationSettingsConfig["webhook_url"].(string) @@ -274,7 +274,7 @@ func expandArmDevTestLabGlobalVMShutdownScheduleNotificationSettings(d *schema.R } } -func flattenArmDevTestLabGlobalVMShutdownScheduleNotificationSettings(notificationSettings *dtl.NotificationSettings) []interface{} { +func flattenDevTestGlobalVMShutdownScheduleNotificationSettings(notificationSettings *dtl.NotificationSettings) []interface{} { if notificationSettings == nil { return []interface{}{} } diff --git a/azurerm/internal/services/devtestlabs/dev_test_global_vm_shutdown_schedule_resource_test.go b/azurerm/internal/services/devtestlabs/dev_test_global_vm_shutdown_schedule_resource_test.go index 50c623609568..34ce39f17d7f 100644 --- a/azurerm/internal/services/devtestlabs/dev_test_global_vm_shutdown_schedule_resource_test.go +++ b/azurerm/internal/services/devtestlabs/dev_test_global_vm_shutdown_schedule_resource_test.go @@ -1,181 +1,124 @@ package devtestlabs_test import ( + "context" "fmt" "testing" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/terraform" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/devtestlabs/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func TestAccDevTestLabGlobalVMShutdownSchedule_autoShutdownBasic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dev_test_global_vm_shutdown_schedule", "test") +type DevTestGlobalVMShutdownScheduleResource struct { +} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckDevTestLabGlobalVMShutdownScheduleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDevTestLabGlobalVMShutdownSchedule_autoShutdownBasic(data), - 
Check: resource.ComposeTestCheckFunc( - testCheckDevTestLabGlobalVMShutdownScheduleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "timezone", "Pacific Standard Time"), - resource.TestCheckResourceAttr(data.ResourceName, "notification_settings.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "notification_settings.0.enabled", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "notification_settings.0.time_in_minutes", "30"), - resource.TestCheckResourceAttr(data.ResourceName, "notification_settings.0.webhook_url", ""), - resource.TestCheckResourceAttr(data.ResourceName, "daily_recurrence_time", "0100"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.environment", "Production"), - ), - }, - data.ImportStep(), +func TestAccDevTestGlobalVMShutdownSchedule_autoShutdownBasic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dev_test_global_vm_shutdown_schedule", "test") + r := DevTestGlobalVMShutdownScheduleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.autoShutdownBasic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("enabled").HasValue("true"), + check.That(data.ResourceName).Key("timezone").HasValue("Pacific Standard Time"), + check.That(data.ResourceName).Key("notification_settings.#").HasValue("1"), + check.That(data.ResourceName).Key("notification_settings.0.enabled").HasValue("false"), + check.That(data.ResourceName).Key("notification_settings.0.time_in_minutes").HasValue("30"), + check.That(data.ResourceName).Key("notification_settings.0.webhook_url").HasValue(""), + check.That(data.ResourceName).Key("daily_recurrence_time").HasValue("0100"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.environment").HasValue("Production"), + ), }, + data.ImportStep(), }) } -func TestAccDevTestLabGlobalVMShutdownSchedule_autoShutdownComplete(t *testing.T) { +func TestAccDevTestGlobalVMShutdownSchedule_autoShutdownComplete(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_dev_test_global_vm_shutdown_schedule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckDevTestLabGlobalVMShutdownScheduleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDevTestLabGlobalVMShutdownSchedule_autoShutdownComplete(data), - Check: resource.ComposeTestCheckFunc( - testCheckDevTestLabGlobalVMShutdownScheduleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "enabled", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "timezone", "Central Standard Time"), - resource.TestCheckResourceAttr(data.ResourceName, "notification_settings.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "notification_settings.0.enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "notification_settings.0.time_in_minutes", "15"), - resource.TestCheckResourceAttr(data.ResourceName, "notification_settings.0.webhook_url", "https://www.bing.com/2/4"), - resource.TestCheckResourceAttr(data.ResourceName, "daily_recurrence_time", "1100"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, 
"tags.Environment", "Production"), - ), - }, - data.ImportStep(), + r := DevTestGlobalVMShutdownScheduleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.autoShutdownComplete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("enabled").HasValue("false"), + check.That(data.ResourceName).Key("timezone").HasValue("Central Standard Time"), + check.That(data.ResourceName).Key("notification_settings.#").HasValue("1"), + check.That(data.ResourceName).Key("notification_settings.0.enabled").HasValue("true"), + check.That(data.ResourceName).Key("notification_settings.0.time_in_minutes").HasValue("15"), + check.That(data.ResourceName).Key("notification_settings.0.webhook_url").HasValue("https://www.bing.com/2/4"), + check.That(data.ResourceName).Key("daily_recurrence_time").HasValue("1100"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.Environment").HasValue("Production"), + ), }, + data.ImportStep(), }) } -func TestAccDevTestLabGlobalVMShutdownSchedule_autoShutdownUpdate(t *testing.T) { +func TestAccDevTestGlobalVMShutdownSchedule_autoShutdownUpdate(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_dev_test_global_vm_shutdown_schedule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckDevTestLabGlobalVMShutdownScheduleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDevTestLabGlobalVMShutdownSchedule_autoShutdownBasic(data), - Check: resource.ComposeTestCheckFunc( - testCheckDevTestLabGlobalVMShutdownScheduleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "timezone", "Pacific Standard Time"), - resource.TestCheckResourceAttr(data.ResourceName, "notification_settings.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "notification_settings.0.enabled", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "notification_settings.0.time_in_minutes", "30"), - resource.TestCheckResourceAttr(data.ResourceName, "notification_settings.0.webhook_url", ""), - resource.TestCheckResourceAttr(data.ResourceName, "daily_recurrence_time", "0100"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.environment", "Production"), - ), - }, - data.ImportStep(), - { - Config: testAccDevTestLabGlobalVMShutdownSchedule_autoShutdownComplete(data), - Check: resource.ComposeTestCheckFunc( - testCheckDevTestLabGlobalVMShutdownScheduleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "enabled", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "timezone", "Central Standard Time"), - resource.TestCheckResourceAttr(data.ResourceName, "notification_settings.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "notification_settings.0.enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "notification_settings.0.time_in_minutes", "15"), - resource.TestCheckResourceAttr(data.ResourceName, "notification_settings.0.webhook_url", "https://www.bing.com/2/4"), - resource.TestCheckResourceAttr(data.ResourceName, "daily_recurrence_time", "1100"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, 
"tags.Environment", "Production"), - ), - }, + r := DevTestGlobalVMShutdownScheduleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.autoShutdownBasic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("enabled").HasValue("true"), + check.That(data.ResourceName).Key("timezone").HasValue("Pacific Standard Time"), + check.That(data.ResourceName).Key("notification_settings.#").HasValue("1"), + check.That(data.ResourceName).Key("notification_settings.0.enabled").HasValue("false"), + check.That(data.ResourceName).Key("notification_settings.0.time_in_minutes").HasValue("30"), + check.That(data.ResourceName).Key("notification_settings.0.webhook_url").HasValue(""), + check.That(data.ResourceName).Key("daily_recurrence_time").HasValue("0100"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.environment").HasValue("Production"), + ), + }, + data.ImportStep(), + { + Config: r.autoShutdownComplete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("enabled").HasValue("false"), + check.That(data.ResourceName).Key("timezone").HasValue("Central Standard Time"), + check.That(data.ResourceName).Key("notification_settings.#").HasValue("1"), + check.That(data.ResourceName).Key("notification_settings.0.enabled").HasValue("true"), + check.That(data.ResourceName).Key("notification_settings.0.time_in_minutes").HasValue("15"), + check.That(data.ResourceName).Key("notification_settings.0.webhook_url").HasValue("https://www.bing.com/2/4"), + check.That(data.ResourceName).Key("daily_recurrence_time").HasValue("1100"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.Environment").HasValue("Production"), + ), }, }) } -func testCheckDevTestLabGlobalVMShutdownScheduleExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - targetResourceID := rs.Primary.Attributes["virtual_machine_id"] - exists, err := testCheckDevTestLabGlobalVMShutdownScheduleExistsInternal(targetResourceID) - - if err != nil { - return fmt.Errorf("Error checking if item has been created: %s", err) - } - if !exists { - return fmt.Errorf("Bad: Dev Test Lab Global Schedule %q does not exist", targetResourceID) - } - - return nil - } -} - -func testCheckDevTestLabGlobalVMShutdownScheduleDestroy(s *terraform.State) error { - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_dev_test_global_vm_shutdown_schedule" { - continue - } - - targetResourceID := rs.Primary.Attributes["virtual_machine_id"] - exists, err := testCheckDevTestLabGlobalVMShutdownScheduleExistsInternal(targetResourceID) - - if err != nil { - return fmt.Errorf("Error checking if item has been destroyed: %s", err) - } - if exists { - return fmt.Errorf("Bad: Dev Test Lab Global Schedule %q still exists", targetResourceID) - } - } - - return nil -} - -func testCheckDevTestLabGlobalVMShutdownScheduleExistsInternal(vmID string) (bool, error) { - client := acceptance.AzureProvider.Meta().(*clients.Client).DevTestLabs.GlobalLabSchedulesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - vm, err := parse.GlobalScheduleVirtualMachineID(vmID) +func (DevTestGlobalVMShutdownScheduleResource) Exists(ctx 
context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ScheduleID(state.ID) if err != nil { - return false, fmt.Errorf("Bad: Failed to parse ID (id: %s): %+v", vmID, err) + return nil, err } - vmName := vm.Name - name := "shutdown-computevm-" + vmName // Auto-shutdown schedule must use this naming format for Compute VMs - resourceGroup := vm.ResourceGroup - - resp, err := client.Get(ctx, resourceGroup, name, "") + resp, err := clients.DevTestLabs.GlobalLabSchedulesClient.Get(ctx, id.ResourceGroup, id.Name, "") if err != nil { - if resp.Response.IsHTTPStatus(404) { - return false, nil - } - return false, fmt.Errorf("Bad: Get on devTestLabsGlobalSchedules client (id: %s): %+v", vmID, err) + return nil, fmt.Errorf("retrieving Dev Test Lab Global Schedule %q (resource group: %q): %+v", id.Name, id.ResourceGroup, err) } - return true, nil + return utils.Bool(resp.ScheduleProperties != nil), nil } -func testAccDevTestLabGlobalVMShutdownSchedule_template(data acceptance.TestData) string { +func (DevTestGlobalVMShutdownScheduleResource) template(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -239,8 +182,7 @@ resource "azurerm_linux_virtual_machine" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) } -func testAccDevTestLabGlobalVMShutdownSchedule_autoShutdownBasic(data acceptance.TestData) string { - template := testAccDevTestLabGlobalVMShutdownSchedule_template(data) +func (r DevTestGlobalVMShutdownScheduleResource) autoShutdownBasic(data acceptance.TestData) string { return fmt.Sprintf(` %s @@ -258,11 +200,10 @@ resource "azurerm_dev_test_global_vm_shutdown_schedule" "test" { environment = "Production" } } -`, template) +`, r.template(data)) } -func testAccDevTestLabGlobalVMShutdownSchedule_autoShutdownComplete(data acceptance.TestData) string { - template := testAccDevTestLabGlobalVMShutdownSchedule_template(data) +func (r DevTestGlobalVMShutdownScheduleResource) autoShutdownComplete(data acceptance.TestData) string { return fmt.Sprintf(` %s @@ -285,5 +226,5 @@ resource "azurerm_dev_test_global_vm_shutdown_schedule" "test" { } } -`, template) +`, r.template(data)) } diff --git a/azurerm/internal/services/devtestlabs/dev_test_lab_data_source.go b/azurerm/internal/services/devtestlabs/dev_test_lab_data_source.go index 8a5d63b93cc8..362a05f43e77 100644 --- a/azurerm/internal/services/devtestlabs/dev_test_lab_data_source.go +++ b/azurerm/internal/services/devtestlabs/dev_test_lab_data_source.go @@ -13,9 +13,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmDevTestLab() *schema.Resource { +func dataSourceDevTestLab() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmDevTestLabRead, + Read: dataSourceDevTestLabRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -72,7 +72,7 @@ func dataSourceArmDevTestLab() *schema.Resource { } } -func dataSourceArmDevTestLabRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceDevTestLabRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DevTestLabs.LabsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/devtestlabs/dev_test_lab_data_source_test.go b/azurerm/internal/services/devtestlabs/dev_test_lab_data_source_test.go 
index 8e56635af02d..889d43149354 100644 --- a/azurerm/internal/services/devtestlabs/dev_test_lab_data_source_test.go +++ b/azurerm/internal/services/devtestlabs/dev_test_lab_data_source_test.go @@ -6,46 +6,44 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" ) -func TestAccDataSourceAzureRMDevTestLab_basic(t *testing.T) { +type AzureRMDevTestLabDataSource struct { +} + +func TestAccAzureRMDevTestLabDataSource_basic(t *testing.T) { data := acceptance.BuildTestData(t, "data.azurerm_dev_test_lab", "test") + r := AzureRMDevTestLabDataSource{} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceDevTestLab_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "storage_type", "Premium"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("storage_type").HasValue("Premium"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), }, }) } -func TestAccDataSourceAzureRMDevTestLab_complete(t *testing.T) { +func TestAccAzureRMDevTestLabDataSource_complete(t *testing.T) { data := acceptance.BuildTestData(t, "data.azurerm_dev_test_lab", "test") + r := AzureRMDevTestLabDataSource{} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceDevTestLab_complete(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "storage_type", "Standard"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.Hello", "World"), - ), - }, + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("storage_type").HasValue("Standard"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.Hello").HasValue("World"), + ), }, }) } -func testAccDataSourceDevTestLab_basic(data acceptance.TestData) string { +func (AzureRMDevTestLabDataSource) basic(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -69,7 +67,7 @@ data "azurerm_dev_test_lab" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger) } -func testAccDataSourceDevTestLab_complete(data acceptance.TestData) string { +func (AzureRMDevTestLabDataSource) complete(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} diff --git a/azurerm/internal/services/devtestlabs/dev_test_lab_resource.go b/azurerm/internal/services/devtestlabs/dev_test_lab_resource.go index 1afe323ead6b..676f990bfcbe 100644 --- a/azurerm/internal/services/devtestlabs/dev_test_lab_resource.go +++ b/azurerm/internal/services/devtestlabs/dev_test_lab_resource.go @@ -17,12 +17,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmDevTestLab() *schema.Resource { +func resourceDevTestLab() 
*schema.Resource { return &schema.Resource{ - Create: resourceArmDevTestLabCreateUpdate, - Read: resourceArmDevTestLabRead, - Update: resourceArmDevTestLabCreateUpdate, - Delete: resourceArmDevTestLabDelete, + Create: resourceDevTestLabCreateUpdate, + Read: resourceDevTestLabRead, + Update: resourceDevTestLabCreateUpdate, + Delete: resourceDevTestLabDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -93,7 +93,7 @@ func resourceArmDevTestLab() *schema.Resource { } } -func resourceArmDevTestLabCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceDevTestLabCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DevTestLabs.LabsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -148,10 +148,10 @@ func resourceArmDevTestLabCreateUpdate(d *schema.ResourceData, meta interface{}) d.SetId(*read.ID) - return resourceArmDevTestLabRead(d, meta) + return resourceDevTestLabRead(d, meta) } -func resourceArmDevTestLabRead(d *schema.ResourceData, meta interface{}) error { +func resourceDevTestLabRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DevTestLabs.LabsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -195,7 +195,7 @@ func resourceArmDevTestLabRead(d *schema.ResourceData, meta interface{}) error { return tags.FlattenAndSet(d, read.Tags) } -func resourceArmDevTestLabDelete(d *schema.ResourceData, meta interface{}) error { +func resourceDevTestLabDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DevTestLabs.LabsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/devtestlabs/dev_test_lab_resource_test.go b/azurerm/internal/services/devtestlabs/dev_test_lab_resource_test.go index 2435bf6068fb..f370a7677ed0 100644 --- a/azurerm/internal/services/devtestlabs/dev_test_lab_resource_test.go +++ b/azurerm/internal/services/devtestlabs/dev_test_lab_resource_test.go @@ -1,140 +1,91 @@ package devtestlabs_test import ( + "context" "fmt" - "net/http" "testing" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) +type DevTestLabResource struct { +} + func TestAccDevTestLab_basic(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_dev_test_lab", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckDevTestLabDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDevTestLab_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckDevTestLabExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "storage_type", "Premium"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - data.ImportStep(), + r := DevTestLabResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: 
r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("storage_type").HasValue("Premium"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), }, + data.ImportStep(), }) } func TestAccDevTestLab_requiresImport(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_dev_test_lab", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckDevTestLabDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDevTestLab_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckDevTestLabExists(data.ResourceName), - ), - }, - { - Config: testAccDevTestLab_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_dev_test_lab"), - }, + r := DevTestLabResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_dev_test_lab"), }, }) } func TestAccDevTestLab_complete(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_dev_test_lab", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckDevTestLabDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDevTestLab_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckDevTestLabExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "storage_type", "Standard"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.Hello", "World"), - ), - }, - data.ImportStep(), + r := DevTestLabResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("storage_type").HasValue("Standard"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.Hello").HasValue("World"), + ), }, + data.ImportStep(), }) } -func testCheckDevTestLabExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).DevTestLabs.LabsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - labName := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for DevTest Lab: %s", labName) - } - - resp, err := conn.Get(ctx, resourceGroup, labName, "") - if err != nil { - return fmt.Errorf("Bad: Get devTestLabsClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: DevTest Lab %q (Resource Group: %q) does not exist", labName, resourceGroup) - } - - return nil +func (DevTestLabResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := 
azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err } -} - -func testCheckDevTestLabDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).DevTestLabs.LabsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_dev_test_lab" { - continue - } - - labName := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, labName, "") - - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return nil - } - - return err - } + name := id.Path["labs"] - return fmt.Errorf("DevTest Lab still exists:\n%#v", resp) + resp, err := clients.DevTestLabs.LabsClient.Get(ctx, id.ResourceGroup, name, "") + if err != nil { + return nil, fmt.Errorf("retrieving Dev Test Lab %q (resource group: %q): %+v", name, id.ResourceGroup, err) } - return nil + return utils.Bool(resp.LabProperties != nil), nil } -func testAccDevTestLab_basic(data acceptance.TestData) string { +func (DevTestLabResource) basic(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -153,8 +104,7 @@ resource "azurerm_dev_test_lab" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger) } -func testAccDevTestLab_requiresImport(data acceptance.TestData) string { - template := testAccDevTestLab_basic(data) +func (r DevTestLabResource) requiresImport(data acceptance.TestData) string { return fmt.Sprintf(` %s @@ -163,10 +113,10 @@ resource "azurerm_dev_test_lab" "import" { location = azurerm_dev_test_lab.test.location resource_group_name = azurerm_dev_test_lab.test.resource_group_name } -`, template) +`, r.basic(data)) } -func testAccDevTestLab_complete(data acceptance.TestData) string { +func (DevTestLabResource) complete(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} diff --git a/azurerm/internal/services/devtestlabs/dev_test_lab_schedule_resource.go b/azurerm/internal/services/devtestlabs/dev_test_lab_schedule_resource.go index caa329751f4e..83c0cbeba6fe 100644 --- a/azurerm/internal/services/devtestlabs/dev_test_lab_schedule_resource.go +++ b/azurerm/internal/services/devtestlabs/dev_test_lab_schedule_resource.go @@ -17,12 +17,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmDevTestLabSchedules() *schema.Resource { +func resourceDevTestLabSchedules() *schema.Resource { return &schema.Resource{ - Create: resourceArmDevTestLabSchedulesCreateUpdate, - Read: resourceArmDevTestLabSchedulesRead, - Update: resourceArmDevTestLabSchedulesCreateUpdate, - Delete: resourceArmDevTestLabSchedulesDelete, + Create: resourceDevTestLabSchedulesCreateUpdate, + Read: resourceDevTestLabSchedulesRead, + Update: resourceDevTestLabSchedulesCreateUpdate, + Delete: resourceDevTestLabSchedulesDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -177,7 +177,7 @@ func resourceArmDevTestLabSchedules() *schema.Resource { } } -func resourceArmDevTestLabSchedulesCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceDevTestLabSchedulesCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DevTestLabs.LabSchedulesClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -225,24 +225,24 @@ func resourceArmDevTestLabSchedulesCreateUpdate(d 
*schema.ResourceData, meta int } if v, ok := d.GetOk("weekly_recurrence"); ok { - weekRecurrence := expandArmDevTestLabScheduleRecurrenceWeekly(v) + weekRecurrence := expandDevTestScheduleRecurrenceWeekly(v) schedule.WeeklyRecurrence = weekRecurrence } if v, ok := d.GetOk("daily_recurrence"); ok { - dailyRecurrence := expandArmDevTestLabScheduleRecurrenceDaily(v) + dailyRecurrence := expandDevTestScheduleRecurrenceDaily(v) schedule.DailyRecurrence = dailyRecurrence } if v, ok := d.GetOk("hourly_recurrence"); ok { - hourlyRecurrence := expandArmDevTestLabScheduleRecurrenceHourly(v) + hourlyRecurrence := expandDevTestScheduleRecurrenceHourly(v) schedule.HourlyRecurrence = hourlyRecurrence } if _, ok := d.GetOk("notification_settings"); ok { - notificationSettings := expandArmDevTestLabScheduleNotificationSettings(d) + notificationSettings := expandDevTestScheduleNotificationSettings(d) schedule.NotificationSettings = notificationSettings } @@ -261,10 +261,10 @@ func resourceArmDevTestLabSchedulesCreateUpdate(d *schema.ResourceData, meta int d.SetId(*read.ID) - return resourceArmDevTestLabSchedulesRead(d, meta) + return resourceDevTestLabSchedulesRead(d, meta) } -func resourceArmDevTestLabSchedulesRead(d *schema.ResourceData, meta interface{}) error { +func resourceDevTestLabSchedulesRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DevTestLabs.LabSchedulesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -278,7 +278,6 @@ func resourceArmDevTestLabSchedulesRead(d *schema.ResourceData, meta interface{} name := id.Path["schedules"] resp, err := client.Get(ctx, resGroup, devTestLabName, name, "") - if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") @@ -320,7 +319,7 @@ func resourceArmDevTestLabSchedulesRead(d *schema.ResourceData, meta interface{} return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmDevTestLabSchedulesDelete(d *schema.ResourceData, meta interface{}) error { +func resourceDevTestLabSchedulesDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Compute.VMExtensionClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() @@ -341,7 +340,7 @@ func resourceArmDevTestLabSchedulesDelete(d *schema.ResourceData, meta interface return future.WaitForCompletionRef(ctx, client.Client) } -func expandArmDevTestLabScheduleRecurrenceDaily(recurrence interface{}) *dtl.DayDetails { +func expandDevTestScheduleRecurrenceDaily(recurrence interface{}) *dtl.DayDetails { dailyRecurrenceConfigs := recurrence.([]interface{}) dailyRecurrenceConfig := dailyRecurrenceConfigs[0].(map[string]interface{}) dailyTime := dailyRecurrenceConfig["time"].(string) @@ -365,7 +364,7 @@ func flattenAzureRmDevTestLabScheduleRecurrenceDaily(dailyRecurrence *dtl.DayDet return []interface{}{result} } -func expandArmDevTestLabScheduleRecurrenceWeekly(recurrence interface{}) *dtl.WeekDetails { +func expandDevTestScheduleRecurrenceWeekly(recurrence interface{}) *dtl.WeekDetails { weeklyRecurrenceConfigs := recurrence.([]interface{}) weeklyRecurrenceConfig := weeklyRecurrenceConfigs[0].(map[string]interface{}) weeklyTime := weeklyRecurrenceConfig["time"].(string) @@ -401,7 +400,7 @@ func flattenAzureRmDevTestLabScheduleRecurrenceWeekly(weeklyRecurrence *dtl.Week return []interface{}{result} } -func expandArmDevTestLabScheduleRecurrenceHourly(recurrence interface{}) *dtl.HourDetails { +func expandDevTestScheduleRecurrenceHourly(recurrence 
interface{}) *dtl.HourDetails { hourlyRecurrenceConfigs := recurrence.([]interface{}) hourlyRecurrenceConfig := hourlyRecurrenceConfigs[0].(map[string]interface{}) hourlyMinute := int32(hourlyRecurrenceConfig["minute"].(int)) @@ -425,7 +424,7 @@ func flattenAzureRmDevTestLabScheduleRecurrenceHourly(hourlyRecurrence *dtl.Hour return []interface{}{result} } -func expandArmDevTestLabScheduleNotificationSettings(d *schema.ResourceData) *dtl.NotificationSettings { +func expandDevTestScheduleNotificationSettings(d *schema.ResourceData) *dtl.NotificationSettings { notificationSettingsConfigs := d.Get("notification_settings").([]interface{}) notificationSettingsConfig := notificationSettingsConfigs[0].(map[string]interface{}) webhookUrl := notificationSettingsConfig["webhook_url"].(string) diff --git a/azurerm/internal/services/devtestlabs/dev_test_lab_schedule_resource_test.go b/azurerm/internal/services/devtestlabs/dev_test_lab_schedule_resource_test.go index ca1553ea83e1..a74cbc9f2125 100644 --- a/azurerm/internal/services/devtestlabs/dev_test_lab_schedule_resource_test.go +++ b/azurerm/internal/services/devtestlabs/dev_test_lab_schedule_resource_test.go @@ -1,163 +1,119 @@ package devtestlabs_test import ( + "context" "fmt" - "net/http" "testing" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) +type DevTestLabScheduleResource struct { +} + func TestAccDevTestLabSchedule_autoShutdownBasic(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_dev_test_schedule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckDevTestLabScheduleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDevTestLabSchedule_autoShutdownBasic(data), - Check: resource.ComposeTestCheckFunc( - testCheckDevTestLabScheduleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "status", "Disabled"), - resource.TestCheckResourceAttr(data.ResourceName, "notification_settings.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "notification_settings.0.status", "Disabled"), - resource.TestCheckResourceAttr(data.ResourceName, "daily_recurrence.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "daily_recurrence.0.time", "0100"), - ), - }, - data.ImportStep(), - { - Config: testAccDevTestLabSchedule_autoShutdownBasicUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckDevTestLabScheduleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "status", "Enabled"), - resource.TestCheckResourceAttr(data.ResourceName, "notification_settings.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "notification_settings.0.status", "Enabled"), - resource.TestCheckResourceAttr(data.ResourceName, "notification_settings.0.time_in_minutes", "30"), - resource.TestCheckResourceAttr(data.ResourceName, "notification_settings.0.webhook_url", "https://www.bing.com/2/4"), - resource.TestCheckResourceAttr(data.ResourceName, "daily_recurrence.#", 
"1"), - resource.TestCheckResourceAttr(data.ResourceName, "daily_recurrence.0.time", "0900"), - ), - }, + r := DevTestLabScheduleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.autoShutdownBasic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("status").HasValue("Disabled"), + check.That(data.ResourceName).Key("notification_settings.#").HasValue("1"), + check.That(data.ResourceName).Key("notification_settings.0.status").HasValue("Disabled"), + check.That(data.ResourceName).Key("daily_recurrence.#").HasValue("1"), + check.That(data.ResourceName).Key("daily_recurrence.0.time").HasValue("0100"), + ), + }, + data.ImportStep(), + { + Config: r.autoShutdownBasicUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("status").HasValue("Enabled"), + check.That(data.ResourceName).Key("notification_settings.#").HasValue("1"), + check.That(data.ResourceName).Key("notification_settings.0.status").HasValue("Enabled"), + check.That(data.ResourceName).Key("notification_settings.0.time_in_minutes").HasValue("30"), + check.That(data.ResourceName).Key("notification_settings.0.webhook_url").HasValue("https://www.bing.com/2/4"), + check.That(data.ResourceName).Key("daily_recurrence.#").HasValue("1"), + check.That(data.ResourceName).Key("daily_recurrence.0.time").HasValue("0900"), + ), }, }) } func TestAccDevTestLabSchedule_autoStartupBasic(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_dev_test_schedule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckDevTestLabScheduleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDevTestLabSchedule_autoStartupBasic(data), - Check: resource.ComposeTestCheckFunc( - testCheckDevTestLabScheduleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "status", "Disabled"), - resource.TestCheckResourceAttr(data.ResourceName, "weekly_recurrence.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "weekly_recurrence.0.time", "1100"), - resource.TestCheckResourceAttr(data.ResourceName, "weekly_recurrence.0.week_days.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "weekly_recurrence.0.week_days.1", "Tuesday"), - ), - }, - data.ImportStep("task_type"), - { - Config: testAccDevTestLabSchedule_autoStartupBasicUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckDevTestLabScheduleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "status", "Enabled"), - resource.TestCheckResourceAttr(data.ResourceName, "weekly_recurrence.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "weekly_recurrence.0.time", "1000"), - resource.TestCheckResourceAttr(data.ResourceName, "weekly_recurrence.0.week_days.#", "3"), - resource.TestCheckResourceAttr(data.ResourceName, "weekly_recurrence.0.week_days.1", "Thursday"), - ), - }, + r := DevTestLabScheduleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.autoStartupBasic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("status").HasValue("Disabled"), + check.That(data.ResourceName).Key("weekly_recurrence.#").HasValue("1"), + check.That(data.ResourceName).Key("weekly_recurrence.0.time").HasValue("1100"), + 
check.That(data.ResourceName).Key("weekly_recurrence.0.week_days.#").HasValue("2"), + check.That(data.ResourceName).Key("weekly_recurrence.0.week_days.1").HasValue("Tuesday"), + ), + }, + data.ImportStep("task_type"), + { + Config: r.autoStartupBasicUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("status").HasValue("Enabled"), + check.That(data.ResourceName).Key("weekly_recurrence.#").HasValue("1"), + check.That(data.ResourceName).Key("weekly_recurrence.0.time").HasValue("1000"), + check.That(data.ResourceName).Key("weekly_recurrence.0.week_days.#").HasValue("3"), + check.That(data.ResourceName).Key("weekly_recurrence.0.week_days.1").HasValue("Thursday"), + ), }, }) } func TestAccDevTestLabSchedule_concurrent(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_dev_test_schedule", "test") + r := DevTestLabScheduleResource{} secondResourceName := "azurerm_dev_test_schedule.test2" - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckDevTestLabScheduleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDevTestLabSchedule_concurrent(data), - Check: resource.ComposeTestCheckFunc( - testCheckDevTestLabScheduleExists(data.ResourceName), - testCheckDevTestLabScheduleExists(secondResourceName), - ), - }, + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.concurrent(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(secondResourceName).ExistsInAzure(r), + ), }, }) } -func testCheckDevTestLabScheduleExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).DevTestLabs.LabSchedulesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - devTestLabName := rs.Primary.Attributes["lab_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, devTestLabName, name, "") - if err != nil { - return fmt.Errorf("Bad: Get on devTestLabSchedulesClient: %s", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Dev Test Lab Schedule %q (resource group: %q) does not exist", name, resourceGroup) - } - - return nil +func (DevTestLabScheduleResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err } -} - -func testCheckDevTestLabScheduleDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).DevTestLabs.LabSchedulesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_dev_test_schedule" { - continue - } - - name := rs.Primary.Attributes["name"] - devTestLabName := rs.Primary.Attributes["azurerm_dev_test_lab"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, devTestLabName, name, "") - - if err != nil { - return nil - } + devTestLabName := id.Path["labs"] + name := id.Path["schedules"] - if resp.StatusCode != http.StatusNotFound { - 
return fmt.Errorf("Dev Test Lab Schedule still exists:\n%#v", resp.ScheduleProperties) - } + resp, err := clients.DevTestLabs.LabSchedulesClient.Get(ctx, id.ResourceGroup, devTestLabName, name, "") + if err != nil { + return nil, fmt.Errorf("retrieving Dev Test Lab Schedule %q (resource group: %q): %+v", name, id.ResourceGroup, err) } - return nil + return utils.Bool(resp.ScheduleProperties != nil), nil } -func testAccDevTestLabSchedule_autoShutdownBasic(data acceptance.TestData) string { +func (DevTestLabScheduleResource) autoShutdownBasic(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -194,7 +150,7 @@ resource "azurerm_dev_test_schedule" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger) } -func testAccDevTestLabSchedule_autoShutdownBasicUpdate(data acceptance.TestData) string { +func (DevTestLabScheduleResource) autoShutdownBasicUpdate(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -234,7 +190,7 @@ resource "azurerm_dev_test_schedule" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger) } -func testAccDevTestLabSchedule_autoStartupBasic(data acceptance.TestData) string { +func (DevTestLabScheduleResource) autoStartupBasic(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -274,7 +230,7 @@ resource "azurerm_dev_test_schedule" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger) } -func testAccDevTestLabSchedule_autoStartupBasicUpdate(data acceptance.TestData) string { +func (DevTestLabScheduleResource) autoStartupBasicUpdate(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -316,7 +272,7 @@ resource "azurerm_dev_test_schedule" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger) } -func testAccDevTestLabSchedule_concurrent(data acceptance.TestData) string { +func (DevTestLabScheduleResource) concurrent(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} diff --git a/azurerm/internal/services/devtestlabs/dev_test_linux_virtual_machine_resource_test.go b/azurerm/internal/services/devtestlabs/dev_test_linux_virtual_machine_resource_test.go index 1bbaef6c895f..eb08501af35c 100644 --- a/azurerm/internal/services/devtestlabs/dev_test_linux_virtual_machine_resource_test.go +++ b/azurerm/internal/services/devtestlabs/dev_test_linux_virtual_machine_resource_test.go @@ -1,209 +1,154 @@ package devtestlabs_test import ( + "context" "fmt" - "net/http" "testing" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) +type DevTestLinuxVirtualMachineResource struct { +} + func TestAccDevTestLinuxVirtualMachine_basic(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_dev_test_linux_virtual_machine", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckDevTestLinuxVirtualMachineDestroy, - Steps: 
[]resource.TestStep{ - { - Config: testAccDevTestLinuxVirtualMachine_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckDevTestLinuxVirtualMachineExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "gallery_image_reference.0.publisher", "Canonical"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - data.ImportStep( - // not returned from the API - "lab_subnet_name", - "lab_virtual_network_id", - "password", + r := DevTestLinuxVirtualMachineResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("gallery_image_reference.0.publisher").HasValue("Canonical"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), ), }, + data.ImportStep( + // not returned from the API + "lab_subnet_name", + "lab_virtual_network_id", + "password", + ), }) } func TestAccDevTestLinuxVirtualMachine_requiresImport(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_dev_test_linux_virtual_machine", "test") + r := DevTestLinuxVirtualMachineResource{} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckDevTestLinuxVirtualMachineDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDevTestLinuxVirtualMachine_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckDevTestLinuxVirtualMachineExists(data.ResourceName), - ), - }, - { - Config: testAccDevTestLinuxVirtualMachine_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_dev_test_lab_linux_virtual_machine"), - }, + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_dev_test_lab_linux_virtual_machine"), }, }) } func TestAccDevTestLinuxVirtualMachine_basicSSH(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_dev_test_linux_virtual_machine", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckDevTestLinuxVirtualMachineDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDevTestLinuxVirtualMachine_basicSSH(data), - Check: resource.ComposeTestCheckFunc( - testCheckDevTestLinuxVirtualMachineExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "gallery_image_reference.0.publisher", "Canonical"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - data.ImportStep( - // not returned from the API - "lab_subnet_name", - "lab_virtual_network_id", - "password", - "ssh_key", + r := DevTestLinuxVirtualMachineResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicSSH(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("gallery_image_reference.0.publisher").HasValue("Canonical"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), ), }, + data.ImportStep( + // not returned from the API + "lab_subnet_name", + "lab_virtual_network_id", + "password", + "ssh_key", + ), }) } func TestAccDevTestLinuxVirtualMachine_inboundNatRules(t *testing.T) { data := acceptance.BuildTestData(t, 
"azurerm_dev_test_linux_virtual_machine", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckDevTestLinuxVirtualMachineDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDevTestLinuxVirtualMachine_inboundNatRules(data), - Check: resource.ComposeTestCheckFunc( - testCheckDevTestLinuxVirtualMachineExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "disallow_public_ip_address", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "gallery_image_reference.0.publisher", "Canonical"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.Acceptance", "Test"), - ), - }, - data.ImportStep( - // not returned from the API - "inbound_nat_rule", - "lab_subnet_name", - "lab_virtual_network_id", - "password", + r := DevTestLinuxVirtualMachineResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.inboundNatRules(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("disallow_public_ip_address").HasValue("true"), + check.That(data.ResourceName).Key("gallery_image_reference.0.publisher").HasValue("Canonical"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.Acceptance").HasValue("Test"), ), }, + data.ImportStep( + // not returned from the API + "inbound_nat_rule", + "lab_subnet_name", + "lab_virtual_network_id", + "password", + ), }) } func TestAccDevTestLinuxVirtualMachine_updateStorage(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_dev_test_linux_virtual_machine", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckDevTestLinuxVirtualMachineDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDevTestLinuxVirtualMachine_storage(data, "Standard"), - Check: resource.ComposeTestCheckFunc( - testCheckDevTestLinuxVirtualMachineExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "gallery_image_reference.0.publisher", "Canonical"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_type", "Standard"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - { - Config: testAccDevTestLinuxVirtualMachine_storage(data, "Premium"), - Check: resource.ComposeTestCheckFunc( - testCheckDevTestLinuxVirtualMachineExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "gallery_image_reference.0.publisher", "Canonical"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_type", "Premium"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, + r := DevTestLinuxVirtualMachineResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.storage(data, "Standard"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("gallery_image_reference.0.publisher").HasValue("Canonical"), + check.That(data.ResourceName).Key("storage_type").HasValue("Standard"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), + }, + { + Config: r.storage(data, "Premium"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + 
check.That(data.ResourceName).Key("gallery_image_reference.0.publisher").HasValue("Canonical"), + check.That(data.ResourceName).Key("storage_type").HasValue("Premium"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), }, }) } -func testCheckDevTestLinuxVirtualMachineExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).DevTestLabs.VirtualMachinesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - virtualMachineName := rs.Primary.Attributes["name"] - labName := rs.Primary.Attributes["lab_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, labName, virtualMachineName, "") - if err != nil { - return fmt.Errorf("Bad: Get devTestVirtualMachinesClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: DevTest Linux Virtual Machine %q (Lab %q / Resource Group: %q) does not exist", virtualMachineName, labName, resourceGroup) - } - - return nil +func (DevTestLinuxVirtualMachineResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err } -} - -func testCheckDevTestLinuxVirtualMachineDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).DevTestLabs.VirtualMachinesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_dev_test_linux_virtual_machine" { - continue - } - - virtualMachineName := rs.Primary.Attributes["name"] - labName := rs.Primary.Attributes["lab_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, labName, virtualMachineName, "") - - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return nil - } - - return err - } + labName := id.Path["labs"] + name := id.Path["virtualmachines"] - return fmt.Errorf("DevTest Linux Virtual Machine still exists:\n%#v", resp) + resp, err := clients.DevTestLabs.VirtualMachinesClient.Get(ctx, id.ResourceGroup, labName, name, "") + if err != nil { + return nil, fmt.Errorf("retrieving DevTest Linux Virtual Machine %q (Lab %q / Resource Group: %q): %v", name, labName, id.ResourceGroup, err) } - return nil + return utils.Bool(resp.LabVirtualMachineProperties != nil), nil } -func testAccDevTestLinuxVirtualMachine_basic(data acceptance.TestData) string { - template := testAccDevTestLinuxVirtualMachine_template(data) +func (DevTestLinuxVirtualMachineResource) basic(data acceptance.TestData) string { + template := DevTestLinuxVirtualMachineResource{}.template(data) return fmt.Sprintf(` %s @@ -229,8 +174,8 @@ resource "azurerm_dev_test_linux_virtual_machine" "test" { `, template, data.RandomInteger) } -func testAccDevTestLinuxVirtualMachine_requiresImport(data acceptance.TestData) string { - template := testAccDevTestLinuxVirtualMachine_basic(data) +func (DevTestLinuxVirtualMachineResource) requiresImport(data acceptance.TestData) string { + template := DevTestLinuxVirtualMachineResource{}.basic(data) return fmt.Sprintf(` %s @@ -256,8 +201,8 @@ resource 
"azurerm_dev_test_linux_virtual_machine" "import" { `, template) } -func testAccDevTestLinuxVirtualMachine_basicSSH(data acceptance.TestData) string { - template := testAccDevTestLinuxVirtualMachine_template(data) +func (DevTestLinuxVirtualMachineResource) basicSSH(data acceptance.TestData) string { + template := DevTestLinuxVirtualMachineResource{}.template(data) return fmt.Sprintf(` %s @@ -283,8 +228,8 @@ resource "azurerm_dev_test_linux_virtual_machine" "test" { `, template, data.RandomInteger) } -func testAccDevTestLinuxVirtualMachine_inboundNatRules(data acceptance.TestData) string { - template := testAccDevTestLinuxVirtualMachine_template(data) +func (DevTestLinuxVirtualMachineResource) inboundNatRules(data acceptance.TestData) string { + template := DevTestLinuxVirtualMachineResource{}.template(data) return fmt.Sprintf(` %s @@ -325,8 +270,8 @@ resource "azurerm_dev_test_linux_virtual_machine" "test" { `, template, data.RandomInteger) } -func testAccDevTestLinuxVirtualMachine_storage(data acceptance.TestData, storageType string) string { - template := testAccDevTestLinuxVirtualMachine_template(data) +func (DevTestLinuxVirtualMachineResource) storage(data acceptance.TestData, storageType string) string { + template := DevTestLinuxVirtualMachineResource{}.template(data) return fmt.Sprintf(` %s @@ -352,7 +297,7 @@ resource "azurerm_dev_test_linux_virtual_machine" "test" { `, template, data.RandomInteger, storageType) } -func testAccDevTestLinuxVirtualMachine_template(data acceptance.TestData) string { +func (DevTestLinuxVirtualMachineResource) template(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} diff --git a/azurerm/internal/services/devtestlabs/dev_test_policy_resource.go b/azurerm/internal/services/devtestlabs/dev_test_policy_resource.go index 04fbfd5b65f4..a79c34378f1b 100644 --- a/azurerm/internal/services/devtestlabs/dev_test_policy_resource.go +++ b/azurerm/internal/services/devtestlabs/dev_test_policy_resource.go @@ -214,7 +214,7 @@ func resourceArmDevTestPolicyDelete(d *schema.ResourceData, meta interface{}) er policySetName := id.Path["policysets"] name := id.Path["policies"] - read, err := client.Get(ctx, resourceGroup, policySetName, labName, name, "") + read, err := client.Get(ctx, resourceGroup, labName, policySetName, name, "") if err != nil { if utils.ResponseWasNotFound(read.Response) { // deleted outside of TF diff --git a/azurerm/internal/services/devtestlabs/dev_test_policy_resource_test.go b/azurerm/internal/services/devtestlabs/dev_test_policy_resource_test.go index 6a73ce2460f6..dc614c005514 100644 --- a/azurerm/internal/services/devtestlabs/dev_test_policy_resource_test.go +++ b/azurerm/internal/services/devtestlabs/dev_test_policy_resource_test.go @@ -1,140 +1,92 @@ package devtestlabs_test import ( + "context" "fmt" - "net/http" "testing" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) +type DevTestPolicyResource struct { +} + func TestAccDevTestPolicy_basic(t *testing.T) { data := acceptance.BuildTestData(t, 
"azurerm_dev_test_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckDevTestPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDevTestPolicy_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckDevTestPolicyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - data.ImportStep(), + r := DevTestPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), }, + data.ImportStep(), }) } func TestAccDevTestPolicy_requiresImport(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_dev_test_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckDevTestPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDevTestPolicy_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckDevTestPolicyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - { - Config: testAccDevTestPolicy_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_dev_test_policy"), - }, + r := DevTestPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_dev_test_policy"), }, }) } func TestAccDevTestPolicy_complete(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_dev_test_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckDevTestPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDevTestPolicy_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckDevTestPolicyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.Acceptance", "Test"), - ), - }, - data.ImportStep(), + r := DevTestPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.Acceptance").HasValue("Test"), + ), }, + data.ImportStep(), }) } -func testCheckDevTestPolicyExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).DevTestLabs.PoliciesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - policyName := rs.Primary.Attributes["name"] - policySetName := rs.Primary.Attributes["policy_set_name"] - labName := rs.Primary.Attributes["lab_name"] - resourceGroup := 
rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, labName, policySetName, policyName, "") - if err != nil { - return fmt.Errorf("Bad: Get devTestPoliciesClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: DevTest Policy %q (Policy Set %q / Lab %q / Resource Group: %q) does not exist", policyName, policySetName, labName, resourceGroup) - } - - return nil +func (DevTestPolicyResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err } -} - -func testCheckDevTestPolicyDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).DevTestLabs.PoliciesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_dev_test_policy" { - continue - } - - policyName := rs.Primary.Attributes["name"] - policySetName := rs.Primary.Attributes["policy_set_name"] - labName := rs.Primary.Attributes["lab_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, labName, policySetName, policyName, "") - - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return nil - } - - return err - } + labName := id.Path["labs"] + policySetName := id.Path["policysets"] + name := id.Path["policies"] - return fmt.Errorf("DevTest Policy still exists:\n%#v", resp) + resp, err := clients.DevTestLabs.PoliciesClient.Get(ctx, id.ResourceGroup, labName, policySetName, name, "") + if err != nil { + return nil, fmt.Errorf("retrieving DevTest Policy %q (Policy Set %q / Lab %q / Resource Group: %q): %v", name, policySetName, labName, id.ResourceGroup, err) } - return nil + return utils.Bool(resp.PolicyProperties != nil), nil } -func testAccDevTestPolicy_basic(data acceptance.TestData) string { +func (DevTestPolicyResource) basic(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -162,8 +114,7 @@ resource "azurerm_dev_test_policy" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger) } -func testAccDevTestPolicy_requiresImport(data acceptance.TestData) string { - template := testAccDevTestPolicy_basic(data) +func (r DevTestPolicyResource) requiresImport(data acceptance.TestData) string { return fmt.Sprintf(` %s @@ -175,10 +126,10 @@ resource "azurerm_dev_test_policy" "import" { threshold = "999" evaluator_type = "MaxValuePolicy" } -`, template) +`, r.basic(data)) } -func testAccDevTestPolicy_complete(data acceptance.TestData) string { +func (DevTestPolicyResource) complete(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} diff --git a/azurerm/internal/services/devtestlabs/dev_test_virtual_network_data_source_test.go b/azurerm/internal/services/devtestlabs/dev_test_virtual_network_data_source_test.go index 3aeb82efa6b3..777490a43b71 100644 --- a/azurerm/internal/services/devtestlabs/dev_test_virtual_network_data_source_test.go +++ b/azurerm/internal/services/devtestlabs/dev_test_virtual_network_data_source_test.go @@ -7,10 +7,15 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" ) -func 
TestAccDataSourceArmDevTestVirtualNetwork_basic(t *testing.T) { +type ArmDevTestVirtualNetworkDataSource struct { +} + +func TestAccArmDevTestVirtualNetworkDataSource_basic(t *testing.T) { data := acceptance.BuildTestData(t, "data.azurerm_dev_test_virtual_network", "test") + r := ArmDevTestVirtualNetworkDataSource{} name := fmt.Sprintf("acctestdtvn%d", data.RandomInteger) labName := fmt.Sprintf("acctestdtl%d", data.RandomInteger) @@ -18,31 +23,27 @@ func TestAccDataSourceArmDevTestVirtualNetwork_basic(t *testing.T) { subnetName := name + "Subnet" subnetResourceID := fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/virtualNetworks/%s/subnets/%s", os.Getenv("ARM_SUBSCRIPTION_ID"), resGroup, name, subnetName) - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceArmDevTestVirtualNetwork_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "name", name), - resource.TestCheckResourceAttr(data.ResourceName, "lab_name", labName), - resource.TestCheckResourceAttr(data.ResourceName, "resource_group_name", resGroup), - resource.TestCheckResourceAttr(data.ResourceName, "allowed_subnets.0.allow_public_ip", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "allowed_subnets.0.lab_subnet_name", subnetName), - resource.TestCheckResourceAttr(data.ResourceName, "allowed_subnets.0.resource_id", subnetResourceID), - resource.TestCheckResourceAttr(data.ResourceName, "subnet_overrides.0.lab_subnet_name", subnetName), - resource.TestCheckResourceAttr(data.ResourceName, "subnet_overrides.0.resource_id", subnetResourceID), - resource.TestCheckResourceAttr(data.ResourceName, "subnet_overrides.0.use_in_vm_creation_permission", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "subnet_overrides.0.use_public_ip_address_permission", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "subnet_overrides.0.virtual_network_pool_name", ""), - ), - }, + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("name").HasValue(name), + check.That(data.ResourceName).Key("lab_name").HasValue(labName), + check.That(data.ResourceName).Key("resource_group_name").HasValue(resGroup), + check.That(data.ResourceName).Key("allowed_subnets.0.allow_public_ip").HasValue("Allow"), + check.That(data.ResourceName).Key("allowed_subnets.0.lab_subnet_name").HasValue(subnetName), + check.That(data.ResourceName).Key("allowed_subnets.0.resource_id").HasValue(subnetResourceID), + check.That(data.ResourceName).Key("subnet_overrides.0.lab_subnet_name").HasValue(subnetName), + check.That(data.ResourceName).Key("subnet_overrides.0.resource_id").HasValue(subnetResourceID), + check.That(data.ResourceName).Key("subnet_overrides.0.use_in_vm_creation_permission").HasValue("Allow"), + check.That(data.ResourceName).Key("subnet_overrides.0.use_public_ip_address_permission").HasValue("Allow"), + check.That(data.ResourceName).Key("subnet_overrides.0.virtual_network_pool_name").HasValue(""), + ), }, }) } -func testAccDataSourceArmDevTestVirtualNetwork_basic(data acceptance.TestData) string { +func (ArmDevTestVirtualNetworkDataSource) basic(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} diff --git a/azurerm/internal/services/devtestlabs/dev_test_virtual_network_resource_test.go 
b/azurerm/internal/services/devtestlabs/dev_test_virtual_network_resource_test.go index 70671ebaefc7..5928065434c6 100644 --- a/azurerm/internal/services/devtestlabs/dev_test_virtual_network_resource_test.go +++ b/azurerm/internal/services/devtestlabs/dev_test_virtual_network_resource_test.go @@ -1,17 +1,23 @@ package devtestlabs_test import ( + "context" "fmt" - "net/http" "testing" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/devtestlabs" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) +type DevTestVirtualNetworkResource struct { +} + func TestValidateDevTestVirtualNetworkName(t *testing.T) { validNames := []string{ "valid-name", @@ -42,128 +48,75 @@ func TestValidateDevTestVirtualNetworkName(t *testing.T) { func TestAccDevTestVirtualNetwork_basic(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_dev_test_virtual_network", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckDevTestVirtualNetworkDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDevTestVirtualNetwork_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckDevTestVirtualNetworkExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - data.ImportStep(), + r := DevTestVirtualNetworkResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), }, + data.ImportStep(), }) } func TestAccDevTestVirtualNetwork_requiresImport(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_dev_test_virtual_network", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckDevTestVirtualNetworkDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDevTestVirtualNetwork_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckDevTestVirtualNetworkExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - { - Config: testAccDevTestVirtualNetwork_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_dev_test_virtual_network"), - }, + r := DevTestVirtualNetworkResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_dev_test_virtual_network"), }, }) } func TestAccDevTestVirtualNetwork_subnet(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_dev_test_virtual_network", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { 
acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckDevTestVirtualNetworkDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDevTestVirtualNetwork_subnets(data), - Check: resource.ComposeTestCheckFunc( - testCheckDevTestVirtualNetworkExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "subnet.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "subnet.0.use_public_ip_address", "Deny"), - resource.TestCheckResourceAttr(data.ResourceName, "subnet.0.use_in_virtual_machine_creation", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - data.ImportStep(), + r := DevTestVirtualNetworkResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.subnets(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("subnet.#").HasValue("1"), + check.That(data.ResourceName).Key("subnet.0.use_public_ip_address").HasValue("Deny"), + check.That(data.ResourceName).Key("subnet.0.use_in_virtual_machine_creation").HasValue("Allow"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), }, + data.ImportStep(), }) } -func testCheckDevTestVirtualNetworkExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).DevTestLabs.VirtualNetworksClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - virtualNetworkName := rs.Primary.Attributes["name"] - labName := rs.Primary.Attributes["lab_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, labName, virtualNetworkName, "") - if err != nil { - return fmt.Errorf("Bad: Get devTestVirtualNetworksClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: DevTest Virtual Network %q (Lab %q / Resource Group: %q) does not exist", virtualNetworkName, labName, resourceGroup) - } - - return nil +func (DevTestVirtualNetworkResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err } -} - -func testCheckDevTestVirtualNetworkDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).DevTestLabs.VirtualNetworksClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_dev_test_virtual_network" { - continue - } - - virtualNetworkName := rs.Primary.Attributes["name"] - labName := rs.Primary.Attributes["lab_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, labName, virtualNetworkName, "") - - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return nil - } - - return err - } + labName := id.Path["labs"] + name := id.Path["virtualnetworks"] - return fmt.Errorf("DevTest Virtual Network still exists:\n%#v", resp) + resp, err := clients.DevTestLabs.VirtualNetworksClient.Get(ctx, id.ResourceGroup, labName, name, "") + if err != nil { + return nil, fmt.Errorf("retrieving DevTest Virtual Network %q (Lab %q / Resource Group: 
%q): %v", name, labName, id.ResourceGroup, err) } - return nil + return utils.Bool(resp.VirtualNetworkProperties != nil), nil } -func testAccDevTestVirtualNetwork_basic(data acceptance.TestData) string { +func (DevTestVirtualNetworkResource) basic(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} @@ -188,8 +141,7 @@ resource "azurerm_dev_test_virtual_network" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) } -func testAccDevTestVirtualNetwork_requiresImport(data acceptance.TestData) string { - template := testAccDevTestVirtualNetwork_basic(data) +func (r DevTestVirtualNetworkResource) requiresImport(data acceptance.TestData) string { return fmt.Sprintf(` %s @@ -198,10 +150,10 @@ resource "azurerm_dev_test_virtual_network" "import" { lab_name = azurerm_dev_test_virtual_network.test.lab_name resource_group_name = azurerm_dev_test_virtual_network.test.resource_group_name } -`, template) +`, r.basic(data)) } -func testAccDevTestVirtualNetwork_subnets(data acceptance.TestData) string { +func (DevTestVirtualNetworkResource) subnets(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} diff --git a/azurerm/internal/services/devtestlabs/dev_test_windows_virtual_machine_resource_test.go b/azurerm/internal/services/devtestlabs/dev_test_windows_virtual_machine_resource_test.go index 8156f3fc6b8f..02b853f54166 100644 --- a/azurerm/internal/services/devtestlabs/dev_test_windows_virtual_machine_resource_test.go +++ b/azurerm/internal/services/devtestlabs/dev_test_windows_virtual_machine_resource_test.go @@ -1,182 +1,130 @@ package devtestlabs_test import ( + "context" "fmt" - "net/http" "testing" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) +type DevTestVirtualMachineResource struct { +} + func TestAccDevTestVirtualMachine_basic(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_dev_test_windows_virtual_machine", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckDevTestWindowsVirtualMachineDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDevTestWindowsVirtualMachine_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckDevTestWindowsVirtualMachineExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "gallery_image_reference.0.publisher", "MicrosoftWindowsServer"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - data.ImportStep( - // not returned from the API - "lab_subnet_name", - "lab_virtual_network_id", - "password", + r := DevTestVirtualMachineResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("gallery_image_reference.0.publisher").HasValue("MicrosoftWindowsServer"), + 
check.That(data.ResourceName).Key("tags.%").HasValue("0"), ), }, + data.ImportStep( + // not returned from the API + "lab_subnet_name", + "lab_virtual_network_id", + "password", + ), }) } func TestAccDevTestVirtualMachine_requiresImport(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_dev_test_windows_virtual_machine", "test") + r := DevTestVirtualMachineResource{} - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckDevTestWindowsVirtualMachineDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDevTestWindowsVirtualMachine_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckDevTestWindowsVirtualMachineExists(data.ResourceName), - ), - }, - { - Config: testAccDevTestWindowsVirtualMachine_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_dev_test_windows_virtual_machine"), - }, + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_dev_test_windows_virtual_machine"), }, }) } func TestAccDevTestWindowsVirtualMachine_inboundNatRules(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_dev_test_windows_virtual_machine", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckDevTestWindowsVirtualMachineDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDevTestWindowsVirtualMachine_inboundNatRules(data), - Check: resource.ComposeTestCheckFunc( - testCheckDevTestWindowsVirtualMachineExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "disallow_public_ip_address", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "gallery_image_reference.0.publisher", "MicrosoftWindowsServer"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.Acceptance", "Test"), - ), - }, - data.ImportStep( - // not returned from the API - "inbound_nat_rule", - "lab_subnet_name", - "lab_virtual_network_id", - "password", + r := DevTestVirtualMachineResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.inboundNatRules(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("disallow_public_ip_address").HasValue("true"), + check.That(data.ResourceName).Key("gallery_image_reference.0.publisher").HasValue("MicrosoftWindowsServer"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.Acceptance").HasValue("Test"), ), }, + data.ImportStep( + // not returned from the API + "inbound_nat_rule", + "lab_subnet_name", + "lab_virtual_network_id", + "password", + ), }) } func TestAccDevTestWindowsVirtualMachine_updateStorage(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_dev_test_windows_virtual_machine", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckDevTestWindowsVirtualMachineDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDevTestWindowsVirtualMachine_storage(data, "Standard"), - Check: resource.ComposeTestCheckFunc( - 
testCheckDevTestWindowsVirtualMachineExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "gallery_image_reference.0.publisher", "MicrosoftWindowsServer"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_type", "Standard"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - { - Config: testAccDevTestWindowsVirtualMachine_storage(data, "Premium"), - Check: resource.ComposeTestCheckFunc( - testCheckDevTestWindowsVirtualMachineExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "gallery_image_reference.0.publisher", "MicrosoftWindowsServer"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_type", "Premium"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, + r := DevTestVirtualMachineResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.storage(data, "Standard"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("gallery_image_reference.0.publisher").HasValue("MicrosoftWindowsServer"), + check.That(data.ResourceName).Key("storage_type").HasValue("Standard"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), + }, + { + Config: r.storage(data, "Premium"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("gallery_image_reference.0.publisher").HasValue("MicrosoftWindowsServer"), + check.That(data.ResourceName).Key("storage_type").HasValue("Premium"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), }, }) } -func testCheckDevTestWindowsVirtualMachineExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).DevTestLabs.VirtualMachinesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - virtualMachineName := rs.Primary.Attributes["name"] - labName := rs.Primary.Attributes["lab_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, labName, virtualMachineName, "") - if err != nil { - return fmt.Errorf("Bad: Get devTestVirtualMachinesClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: DevTest Windows Virtual Machine %q (Lab %q / Resource Group: %q) does not exist", virtualMachineName, labName, resourceGroup) - } - - return nil +func (DevTestVirtualMachineResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err } -} - -func testCheckDevTestWindowsVirtualMachineDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).DevTestLabs.VirtualMachinesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_dev_test_windows_virtual_machine" { - continue - } - - virtualMachineName := rs.Primary.Attributes["name"] - labName := rs.Primary.Attributes["lab_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, labName, 
virtualMachineName, "") - - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return nil - } - - return err - } + labName := id.Path["labs"] + name := id.Path["virtualmachines"] - return fmt.Errorf("DevTest Windows Virtual Machine still exists:\n%#v", resp) + resp, err := clients.DevTestLabs.VirtualMachinesClient.Get(ctx, id.ResourceGroup, labName, name, "") + if err != nil { + return nil, fmt.Errorf("retrieving DevTest Windows Virtual Machine %q (Lab %q / Resource Group: %q): %v", name, labName, id.ResourceGroup, err) } - return nil + return utils.Bool(resp.LabVirtualMachineProperties != nil), nil } -func testAccDevTestWindowsVirtualMachine_basic(data acceptance.TestData) string { - template := testAccDevTestWindowsVirtualMachine_template(data) +func (r DevTestVirtualMachineResource) basic(data acceptance.TestData) string { return fmt.Sprintf(` %s @@ -199,11 +147,10 @@ resource "azurerm_dev_test_windows_virtual_machine" "test" { version = "latest" } } -`, template, data.RandomInteger%1000000) +`, r.template(data), data.RandomInteger%1000000) } -func testAccDevTestWindowsVirtualMachine_requiresImport(data acceptance.TestData) string { - template := testAccDevTestWindowsVirtualMachine_basic(data) +func (r DevTestVirtualMachineResource) requiresImport(data acceptance.TestData) string { return fmt.Sprintf(` %s @@ -226,11 +173,10 @@ resource "azurerm_dev_test_windows_virtual_machine" "import" { version = "latest" } } -`, template) +`, r.template(data)) } -func testAccDevTestWindowsVirtualMachine_inboundNatRules(data acceptance.TestData) string { - template := testAccDevTestWindowsVirtualMachine_template(data) +func (r DevTestVirtualMachineResource) inboundNatRules(data acceptance.TestData) string { return fmt.Sprintf(` %s @@ -268,11 +214,10 @@ resource "azurerm_dev_test_windows_virtual_machine" "test" { "Acceptance" = "Test" } } -`, template, data.RandomInteger%1000000) +`, r.template(data), data.RandomInteger%1000000) } -func testAccDevTestWindowsVirtualMachine_storage(data acceptance.TestData, storageType string) string { - template := testAccDevTestWindowsVirtualMachine_template(data) +func (r DevTestVirtualMachineResource) storage(data acceptance.TestData, storageType string) string { return fmt.Sprintf(` %s @@ -295,10 +240,10 @@ resource "azurerm_dev_test_windows_virtual_machine" "test" { version = "latest" } } -`, template, data.RandomInteger%1000000, storageType) +`, r.template(data), data.RandomInteger%1000000, storageType) } -func testAccDevTestWindowsVirtualMachine_template(data acceptance.TestData) string { +func (DevTestVirtualMachineResource) template(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features {} diff --git a/azurerm/internal/services/devtestlabs/parse/global_schedule.go b/azurerm/internal/services/devtestlabs/parse/global_schedule.go deleted file mode 100644 index 82c85fb2008e..000000000000 --- a/azurerm/internal/services/devtestlabs/parse/global_schedule.go +++ /dev/null @@ -1,33 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type GlobalScheduleId struct { - ResourceGroup string - Name string -} - -func GlobalScheduleID(input string) (*GlobalScheduleId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Global Schedule ID %q: %+v", input, err) - } - - service := GlobalScheduleId{ - ResourceGroup: id.ResourceGroup, - } - - if service.Name, err = 
id.PopSegment("schedules"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &service, nil -} diff --git a/azurerm/internal/services/devtestlabs/parse/global_schedule_test.go b/azurerm/internal/services/devtestlabs/parse/global_schedule_test.go deleted file mode 100644 index f892b91ae5d8..000000000000 --- a/azurerm/internal/services/devtestlabs/parse/global_schedule_test.go +++ /dev/null @@ -1,61 +0,0 @@ -package parse - -import "testing" - -func TestGlobalSchedule(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *GlobalScheduleId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No DevTest Lab segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myGroup1/", - Expected: nil, - }, - { - Name: "No schedule name", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myGroup1/providers/Microsoft.DevTestLab/schedules/", - Expected: nil, - }, - { - Name: "Case incorrect in path element", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myGroup1/providers/Microsoft.DevTestLab/Schedules/schedule1", - Expected: nil, - }, - { - Name: "Valid", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myGroup1/providers/Microsoft.DevTestLab/schedules/schedule1", - Expected: &GlobalScheduleId{ - ResourceGroup: "myGroup1", - Name: "schedule1", - }, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := GlobalScheduleID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/devtestlabs/parse/global_schedule_virtual_machine.go b/azurerm/internal/services/devtestlabs/parse/global_schedule_virtual_machine.go deleted file mode 100644 index b77a4b2930dc..000000000000 --- a/azurerm/internal/services/devtestlabs/parse/global_schedule_virtual_machine.go +++ /dev/null @@ -1,33 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type GlobalScheduleVirtualMachineId struct { - ResourceGroup string - Name string -} - -func GlobalScheduleVirtualMachineID(input string) (*GlobalScheduleVirtualMachineId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Virtual Machine ID %q: %+v", input, err) - } - - service := GlobalScheduleVirtualMachineId{ - ResourceGroup: id.ResourceGroup, - } - - if service.Name, err = id.PopSegment("virtualMachines"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &service, nil -} diff --git a/azurerm/internal/services/devtestlabs/parse/global_schedule_virtual_machine_test.go b/azurerm/internal/services/devtestlabs/parse/global_schedule_virtual_machine_test.go deleted file mode 100644 index 6d0ef73f4c09..000000000000 --- a/azurerm/internal/services/devtestlabs/parse/global_schedule_virtual_machine_test.go +++ /dev/null @@ -1,61 +0,0 @@ -package parse - -import "testing" - -func 
TestGlobalScheduleVirtualMachine(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *GlobalScheduleVirtualMachineId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No virtual machine segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myGroup1/", - Expected: nil, - }, - { - Name: "No virtual machine name", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myGroup1/providers/microsoft.compute/virtualMachines/", - Expected: nil, - }, - { - Name: "Case incorrect in path element", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myGroup1/providers/microsoft.compute/VirtualMachines/machine1", - Expected: nil, - }, - { - Name: "Valid", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myGroup1/providers/microsoft.compute/virtualMachines/machine1", - Expected: &GlobalScheduleVirtualMachineId{ - ResourceGroup: "myGroup1", - Name: "machine1", - }, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := GlobalScheduleVirtualMachineID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/devtestlabs/parse/schedule.go b/azurerm/internal/services/devtestlabs/parse/schedule.go new file mode 100644 index 000000000000..f9a6694705a1 --- /dev/null +++ b/azurerm/internal/services/devtestlabs/parse/schedule.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ScheduleId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewScheduleID(subscriptionId, resourceGroup, name string) ScheduleId { + return ScheduleId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id ScheduleId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Schedule", segmentsStr) +} + +func (id ScheduleId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DevTestLab/schedules/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// ScheduleID parses a Schedule ID into an ScheduleId struct +func ScheduleID(input string) (*ScheduleId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ScheduleId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("schedules"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err 
+ } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/devtestlabs/parse/schedule_test.go b/azurerm/internal/services/devtestlabs/parse/schedule_test.go new file mode 100644 index 000000000000..696dbed9066d --- /dev/null +++ b/azurerm/internal/services/devtestlabs/parse/schedule_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ScheduleId{} + +func TestScheduleIDFormatter(t *testing.T) { + actual := NewScheduleID("12345678-1234-9876-4563-123456789012", "group1", "schedule1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DevTestLab/schedules/schedule1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestScheduleID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ScheduleId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DevTestLab/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DevTestLab/schedules/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DevTestLab/schedules/schedule1", + Expected: &ScheduleId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "group1", + Name: "schedule1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.DEVTESTLAB/SCHEDULES/SCHEDULE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ScheduleID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/devtestlabs/registration.go b/azurerm/internal/services/devtestlabs/registration.go index b6a1865944eb..3c27d8af1ecc 100644 --- a/azurerm/internal/services/devtestlabs/registration.go +++ b/azurerm/internal/services/devtestlabs/registration.go @@ -21,18 +21,20 @@ func (r Registration) WebsiteCategories() []string { // SupportedDataSources returns the supported Data Sources supported by 
this Service func (r Registration) SupportedDataSources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_dev_test_lab": dataSourceArmDevTestLab(), - "azurerm_dev_test_virtual_network": dataSourceArmDevTestVirtualNetwork()} + "azurerm_dev_test_lab": dataSourceDevTestLab(), + "azurerm_dev_test_virtual_network": dataSourceArmDevTestVirtualNetwork(), + } } // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_dev_test_global_vm_shutdown_schedule": resourceArmDevTestLabGlobalVMShutdownSchedule(), - "azurerm_dev_test_lab": resourceArmDevTestLab(), - "azurerm_dev_test_schedule": resourceArmDevTestLabSchedules(), + "azurerm_dev_test_global_vm_shutdown_schedule": resourceDevTestGlobalVMShutdownSchedule(), + "azurerm_dev_test_lab": resourceDevTestLab(), + "azurerm_dev_test_schedule": resourceDevTestLabSchedules(), "azurerm_dev_test_linux_virtual_machine": resourceArmDevTestLinuxVirtualMachine(), "azurerm_dev_test_policy": resourceArmDevTestPolicy(), "azurerm_dev_test_virtual_network": resourceArmDevTestVirtualNetwork(), - "azurerm_dev_test_windows_virtual_machine": resourceArmDevTestWindowsVirtualMachine()} + "azurerm_dev_test_windows_virtual_machine": resourceArmDevTestWindowsVirtualMachine(), + } } diff --git a/azurerm/internal/services/devtestlabs/resourceids.go b/azurerm/internal/services/devtestlabs/resourceids.go new file mode 100644 index 000000000000..351dcc8f6f27 --- /dev/null +++ b/azurerm/internal/services/devtestlabs/resourceids.go @@ -0,0 +1,3 @@ +package devtestlabs + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Schedule -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DevTestLab/schedules/schedule1 diff --git a/azurerm/internal/services/devtestlabs/validate/global_schedule.go b/azurerm/internal/services/devtestlabs/validate/global_schedule.go deleted file mode 100644 index 06299e03ebde..000000000000 --- a/azurerm/internal/services/devtestlabs/validate/global_schedule.go +++ /dev/null @@ -1,22 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/devtestlabs/parse" -) - -func GlobalScheduleID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.GlobalScheduleID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} diff --git a/azurerm/internal/services/devtestlabs/validate/global_schedule_test.go b/azurerm/internal/services/devtestlabs/validate/global_schedule_test.go deleted file mode 100644 index 17dde18cd8c0..000000000000 --- a/azurerm/internal/services/devtestlabs/validate/global_schedule_test.go +++ /dev/null @@ -1,40 +0,0 @@ -package validate - -import "testing" - -func TestGlobalScheduleID(t *testing.T) { - cases := []struct { - ID string - Valid bool - }{ - { - ID: "", - Valid: false, - }, - { - ID: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/", - Valid: false, - }, - { - ID: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.DevTestLab/schedules/", - Valid: false, - }, - { - ID: 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myGroup1/providers/Microsoft.DevTestLab/Schedules/schedule1", - Valid: false, - }, - { - ID: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myGroup1/providers/Microsoft.DevTestLab/schedules/schedule1", - Valid: true, - }, - } - for _, tc := range cases { - t.Logf("[DEBUG] Testing Value %s", tc.ID) - _, errors := GlobalScheduleID(tc.ID, "test") - valid := len(errors) == 0 - - if tc.Valid != valid { - t.Fatalf("Expected %t but got %t", tc.Valid, valid) - } - } -} diff --git a/azurerm/internal/services/devtestlabs/validate/global_schedule_virtual_machine.go b/azurerm/internal/services/devtestlabs/validate/global_schedule_virtual_machine.go deleted file mode 100644 index a4bc05537244..000000000000 --- a/azurerm/internal/services/devtestlabs/validate/global_schedule_virtual_machine.go +++ /dev/null @@ -1,22 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/devtestlabs/parse" -) - -func GlobalScheduleVirtualMachineID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.GlobalScheduleVirtualMachineID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} diff --git a/azurerm/internal/services/devtestlabs/validate/global_schedule_virtual_machine_test.go b/azurerm/internal/services/devtestlabs/validate/global_schedule_virtual_machine_test.go deleted file mode 100644 index 347317fad2de..000000000000 --- a/azurerm/internal/services/devtestlabs/validate/global_schedule_virtual_machine_test.go +++ /dev/null @@ -1,40 +0,0 @@ -package validate - -import "testing" - -func TestGlobalScheduleVirtualMachineID(t *testing.T) { - cases := []struct { - ID string - Valid bool - }{ - { - ID: "", - Valid: false, - }, - { - ID: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/", - Valid: false, - }, - { - ID: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/microsoft.compute/virtualMachines/", - Valid: false, - }, - { - ID: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myGroup1/providers/microsoft.compute/VirtualMachines/machine1", - Valid: false, - }, - { - ID: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myGroup1/providers/microsoft.compute/virtualMachines/machine1", - Valid: true, - }, - } - for _, tc := range cases { - t.Logf("[DEBUG] Testing Value %s", tc.ID) - _, errors := GlobalScheduleVirtualMachineID(tc.ID, "test") - valid := len(errors) == 0 - - if tc.Valid != valid { - t.Fatalf("Expected %t but got %t", tc.Valid, valid) - } - } -} diff --git a/azurerm/internal/services/devtestlabs/validate/schedule_id.go b/azurerm/internal/services/devtestlabs/validate/schedule_id.go new file mode 100644 index 000000000000..4f8a25e1bdfc --- /dev/null +++ b/azurerm/internal/services/devtestlabs/validate/schedule_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/devtestlabs/parse" +) + +func ScheduleID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + 
errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ScheduleID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/devtestlabs/validate/schedule_id_test.go b/azurerm/internal/services/devtestlabs/validate/schedule_id_test.go new file mode 100644 index 000000000000..177ed8a83c2e --- /dev/null +++ b/azurerm/internal/services/devtestlabs/validate/schedule_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestScheduleID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DevTestLab/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DevTestLab/schedules/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DevTestLab/schedules/schedule1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.DEVTESTLAB/SCHEDULES/SCHEDULE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ScheduleID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/digitaltwins/client/client.go b/azurerm/internal/services/digitaltwins/client/client.go new file mode 100644 index 000000000000..041d8e512655 --- /dev/null +++ b/azurerm/internal/services/digitaltwins/client/client.go @@ -0,0 +1,24 @@ +package client + +import ( + "github.com/Azure/azure-sdk-for-go/services/digitaltwins/mgmt/2020-10-31/digitaltwins" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/common" +) + +type Client struct { + EndpointClient *digitaltwins.EndpointClient + InstanceClient *digitaltwins.Client +} + +func NewClient(o *common.ClientOptions) *Client { + endpointClient := digitaltwins.NewEndpointClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + o.ConfigureClient(&endpointClient.Client, o.ResourceManagerAuthorizer) + + InstanceClient := digitaltwins.NewClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + o.ConfigureClient(&InstanceClient.Client, o.ResourceManagerAuthorizer) + + return &Client{ + EndpointClient: &endpointClient, + InstanceClient: &InstanceClient, + } +} diff --git a/azurerm/internal/services/digitaltwins/digital_twins_endpoint_eventgrid_resource.go b/azurerm/internal/services/digitaltwins/digital_twins_endpoint_eventgrid_resource.go new file mode 100644 index 000000000000..e1fa60122d03 --- /dev/null +++ 
b/azurerm/internal/services/digitaltwins/digital_twins_endpoint_eventgrid_resource.go @@ -0,0 +1,184 @@ +package digitaltwins + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/digitaltwins/mgmt/2020-10-31/digitaltwins" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/digitaltwins/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/digitaltwins/validate" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceDigitalTwinsEndpointEventGrid() *schema.Resource { + return &schema.Resource{ + Create: resourceDigitalTwinsEndpointEventGridCreateUpdate, + Read: resourceDigitalTwinsEndpointEventGridRead, + Update: resourceDigitalTwinsEndpointEventGridCreateUpdate, + Delete: resourceDigitalTwinsEndpointEventGridDelete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.DigitalTwinsEndpointID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DigitalTwinsInstanceName, + }, + + "digital_twins_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DigitalTwinsInstanceID, + }, + + "eventgrid_topic_endpoint": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.IsURLWithHTTPS, + }, + + "eventgrid_topic_primary_access_key": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "eventgrid_topic_secondary_access_key": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "dead_letter_storage_secret": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + } +} +func resourceDigitalTwinsEndpointEventGridCreateUpdate(d *schema.ResourceData, meta interface{}) error { + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + client := meta.(*clients.Client).DigitalTwins.EndpointClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + digitalTwinsId, err := parse.DigitalTwinsInstanceID(d.Get("digital_twins_id").(string)) + if err != nil { + return err + } + + id := parse.NewDigitalTwinsEndpointID(subscriptionId, digitalTwinsId.ResourceGroup, digitalTwinsId.Name, name).ID() + + if d.IsNewResource() { + existing, err := client.Get(ctx, digitalTwinsId.ResourceGroup, digitalTwinsId.Name, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for presence of an existing Digital Twins Endpoint %q (Resource Group %q / Instance %q): %+v", name,
digitalTwinsId.ResourceGroup, digitalTwinsId.Name, err) + } + } + if !utils.ResponseWasNotFound(existing.Response) { + return tf.ImportAsExistsError("azurerm_digital_twins_endpoint_eventgrid", id) + } + } + + properties := digitaltwins.EndpointResource{ + Properties: &digitaltwins.EventGrid{ + EndpointType: digitaltwins.EndpointTypeEventGrid, + TopicEndpoint: utils.String(d.Get("eventgrid_topic_endpoint").(string)), + AccessKey1: utils.String(d.Get("eventgrid_topic_primary_access_key").(string)), + AccessKey2: utils.String(d.Get("eventgrid_topic_secondary_access_key").(string)), + DeadLetterSecret: utils.String(d.Get("dead_letter_storage_secret").(string)), + }, + } + + future, err := client.CreateOrUpdate(ctx, digitalTwinsId.ResourceGroup, digitalTwinsId.Name, name, properties) + if err != nil { + return fmt.Errorf("creating/updating Digital Twins EventGrid Endpoint %q (Resource Group %q / Instance %q): %+v", name, digitalTwinsId.ResourceGroup, digitalTwinsId.Name, err) + } + + if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for creation/update of the Digital Twins EventGrid Endpoint %q (Resource Group %q / Instance %q): %+v", name, digitalTwinsId.ResourceGroup, digitalTwinsId.Name, err) + } + + if _, err := client.Get(ctx, digitalTwinsId.ResourceGroup, digitalTwinsId.Name, name); err != nil { + return fmt.Errorf("retrieving Digital Twins EventGrid Endpoint %q (Resource Group %q / Instance %q): %+v", name, digitalTwinsId.ResourceGroup, digitalTwinsId.Name, err) + } + + d.SetId(id) + + return resourceDigitalTwinsEndpointEventGridRead(d, meta) +} + +func resourceDigitalTwinsEndpointEventGridRead(d *schema.ResourceData, meta interface{}) error { + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + client := meta.(*clients.Client).DigitalTwins.EndpointClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DigitalTwinsEndpointID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.DigitalTwinsInstanceName, id.EndpointName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[INFO] Digital Twins EventGrid Endpoint %q does not exist - removing from state", d.Id()) + d.SetId("") + return nil + } + return fmt.Errorf("retrieving Digital Twins EventGrid Endpoint %q (Resource Group %q / Instance %q): %+v", id.EndpointName, id.ResourceGroup, id.DigitalTwinsInstanceName, err) + } + d.Set("name", id.EndpointName) + d.Set("digital_twins_id", parse.NewDigitalTwinsInstanceID(subscriptionId, id.ResourceGroup, id.DigitalTwinsInstanceName).ID()) + if resp.Properties != nil { + if _, ok := resp.Properties.AsEventGrid(); !ok { + return fmt.Errorf("Digital Twins Endpoint %q (Resource Group %q / Instance %q) is not of type EventGrid", id.EndpointName, id.ResourceGroup, id.DigitalTwinsInstanceName) + } + } + + return nil +} + +func resourceDigitalTwinsEndpointEventGridDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DigitalTwins.EndpointClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DigitalTwinsEndpointID(d.Id()) + if err != nil { + return err + } + + future, err := client.Delete(ctx, id.ResourceGroup, id.DigitalTwinsInstanceName, id.EndpointName) + if err != nil { + return fmt.Errorf("deleting Digital Twins EventGrid Endpoint %q (Resource Group %q / Instance %q): %+v", id.EndpointName,
id.ResourceGroup, id.DigitalTwinsInstanceName, err) + } + + if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for deletion of the Digital Twins EventGrid Endpoint %q (Resource Group %q / Instance %q): %+v", id.EndpointName, id.ResourceGroup, id.DigitalTwinsInstanceName, err) + } + return nil +} diff --git a/azurerm/internal/services/digitaltwins/digital_twins_endpoint_eventgrid_resource_test.go b/azurerm/internal/services/digitaltwins/digital_twins_endpoint_eventgrid_resource_test.go new file mode 100644 index 000000000000..35eeb8be10e1 --- /dev/null +++ b/azurerm/internal/services/digitaltwins/digital_twins_endpoint_eventgrid_resource_test.go @@ -0,0 +1,262 @@ +package digitaltwins_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/digitaltwins/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DigitalTwinsEndpointEventGridResource struct{} + +func TestAccDigitalTwinsEndpointEventGrid_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_digital_twins_endpoint_eventgrid", "test") + r := DigitalTwinsEndpointEventGridResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("eventgrid_topic_endpoint", "eventgrid_topic_primary_access_key", "eventgrid_topic_secondary_access_key"), + }) +} + +func TestAccDigitalTwinsEndpointEventGrid_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_digital_twins_endpoint_eventgrid", "test") + r := DigitalTwinsEndpointEventGridResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccDigitalTwinsEndpointEventGrid_updateEventGrid(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_digital_twins_endpoint_eventgrid", "test") + r := DigitalTwinsEndpointEventGridResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("eventgrid_topic_endpoint", "eventgrid_topic_primary_access_key", "eventgrid_topic_secondary_access_key"), + { + Config: r.updateEventGrid(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("eventgrid_topic_endpoint", "eventgrid_topic_primary_access_key", "eventgrid_topic_secondary_access_key"), + { + Config: r.updateEventGridRestore(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("eventgrid_topic_endpoint", "eventgrid_topic_primary_access_key", "eventgrid_topic_secondary_access_key"), + }) +} + +func TestAccDigitalTwinsEndpointEventGrid_updateDeadLetter(t *testing.T) { + data := 
acceptance.BuildTestData(t, "azurerm_digital_twins_endpoint_eventgrid", "test") + r := DigitalTwinsEndpointEventGridResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("eventgrid_topic_endpoint", "eventgrid_topic_primary_access_key", "eventgrid_topic_secondary_access_key"), + { + Config: r.updateDeadLetter(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("eventgrid_topic_endpoint", "eventgrid_topic_primary_access_key", "eventgrid_topic_secondary_access_key", "dead_letter_storage_secret"), + { + Config: r.updateDeadLetterRestore(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("eventgrid_topic_endpoint", "eventgrid_topic_primary_access_key", "eventgrid_topic_secondary_access_key", "dead_letter_storage_secret"), + }) +} + +func (r DigitalTwinsEndpointEventGridResource) Exists(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.DigitalTwinsEndpointID(state.ID) + if err != nil { + return nil, err + } + resp, err := client.DigitalTwins.EndpointClient.Get(ctx, id.ResourceGroup, id.DigitalTwinsInstanceName, id.EndpointName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return utils.Bool(false), nil + } + return nil, fmt.Errorf("retrieving Digital Twins EventGrid Endpoint %q (Resource Group %q / Digital Twins Instance Name %q): %+v", id.EndpointName, id.ResourceGroup, id.DigitalTwinsInstanceName, err) + } + + return utils.Bool(true), nil +} + +func (r DigitalTwinsEndpointEventGridResource) template(data acceptance.TestData) string { + iR := DigitalTwinsInstanceResource{} + digitalTwinsInstance := iR.basic(data) + return fmt.Sprintf(` +%[1]s + +resource "azurerm_eventgrid_topic" "test" { + name = "acctesteg-%[2]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} +`, digitalTwinsInstance, data.RandomInteger) +} + +func (r DigitalTwinsEndpointEventGridResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_digital_twins_endpoint_eventgrid" "test" { + name = "acctest-EG-%d" + digital_twins_id = azurerm_digital_twins_instance.test.id + eventgrid_topic_endpoint = azurerm_eventgrid_topic.test.endpoint + eventgrid_topic_primary_access_key = azurerm_eventgrid_topic.test.primary_access_key + eventgrid_topic_secondary_access_key = azurerm_eventgrid_topic.test.secondary_access_key +} +`, r.template(data), data.RandomInteger) +} + +func (r DigitalTwinsEndpointEventGridResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_digital_twins_endpoint_eventgrid" "import" { + name = azurerm_digital_twins_endpoint_eventgrid.test.name + digital_twins_id = azurerm_digital_twins_endpoint_eventgrid.test.digital_twins_id + eventgrid_topic_endpoint = azurerm_digital_twins_endpoint_eventgrid.test.eventgrid_topic_endpoint + eventgrid_topic_primary_access_key = azurerm_digital_twins_endpoint_eventgrid.test.eventgrid_topic_primary_access_key + eventgrid_topic_secondary_access_key = azurerm_digital_twins_endpoint_eventgrid.test.eventgrid_topic_secondary_access_key +} +`, r.basic(data)) +} + +func (r DigitalTwinsEndpointEventGridResource) updateEventGrid(data acceptance.TestData) string { + 
return fmt.Sprintf(` +%[1]s + +resource "azurerm_eventgrid_topic" "test_alt" { + name = "acctesteg-alt-%[2]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_digital_twins_endpoint_eventgrid" "test" { + name = "acctest-EG-%[2]d" + digital_twins_id = azurerm_digital_twins_instance.test.id + eventgrid_topic_endpoint = azurerm_eventgrid_topic.test_alt.endpoint + eventgrid_topic_primary_access_key = azurerm_eventgrid_topic.test_alt.primary_access_key + eventgrid_topic_secondary_access_key = azurerm_eventgrid_topic.test_alt.secondary_access_key +} +`, r.template(data), data.RandomInteger) +} + +func (r DigitalTwinsEndpointEventGridResource) updateEventGridRestore(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_eventgrid_topic" "test_alt" { + name = "acctesteg-alt-%[2]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_digital_twins_endpoint_eventgrid" "test" { + name = "acctest-EG-%[2]d" + digital_twins_id = azurerm_digital_twins_instance.test.id + eventgrid_topic_endpoint = azurerm_eventgrid_topic.test.endpoint + eventgrid_topic_primary_access_key = azurerm_eventgrid_topic.test.primary_access_key + eventgrid_topic_secondary_access_key = azurerm_eventgrid_topic.test.secondary_access_key +} +`, r.template(data), data.RandomInteger) +} + +func (r DigitalTwinsEndpointEventGridResource) updateDeadLetter(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_storage_account" "test" { + name = "acctestacc%[2]s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_container" "test" { + name = "vhds" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource "azurerm_digital_twins_endpoint_eventgrid" "test" { + name = "acctest-EG-%[3]d" + digital_twins_id = azurerm_digital_twins_instance.test.id + eventgrid_topic_endpoint = azurerm_eventgrid_topic.test.endpoint + eventgrid_topic_primary_access_key = azurerm_eventgrid_topic.test.primary_access_key + eventgrid_topic_secondary_access_key = azurerm_eventgrid_topic.test.secondary_access_key + dead_letter_storage_secret = "${azurerm_storage_container.test.id}?${azurerm_storage_account.test.primary_access_key}" + +} +`, r.template(data), data.RandomString, data.RandomInteger) +} + +func (r DigitalTwinsEndpointEventGridResource) updateDeadLetterRestore(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_storage_account" "test" { + name = "acctestacc%[2]s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_container" "test" { + name = "vhds" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource "azurerm_digital_twins_endpoint_eventgrid" "test" { + name = "acctest-EG-%[3]d" + digital_twins_id = azurerm_digital_twins_instance.test.id + eventgrid_topic_endpoint = azurerm_eventgrid_topic.test.endpoint + eventgrid_topic_primary_access_key = azurerm_eventgrid_topic.test.primary_access_key + eventgrid_topic_secondary_access_key = azurerm_eventgrid_topic.test.secondary_access_key + +} +`, r.template(data), 
data.RandomString, data.RandomInteger) +} diff --git a/azurerm/internal/services/digitaltwins/digital_twins_endpoint_eventhub_resource.go b/azurerm/internal/services/digitaltwins/digital_twins_endpoint_eventhub_resource.go new file mode 100644 index 000000000000..e56b47cd15c1 --- /dev/null +++ b/azurerm/internal/services/digitaltwins/digital_twins_endpoint_eventhub_resource.go @@ -0,0 +1,179 @@ +package digitaltwins + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/digitaltwins/mgmt/2020-10-31/digitaltwins" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/digitaltwins/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/digitaltwins/validate" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceDigitalTwinsEndpointEventHub() *schema.Resource { + return &schema.Resource{ + Create: resourceDigitalTwinsEndpointEventHubCreateUpdate, + Read: resourceDigitalTwinsEndpointEventHubRead, + Update: resourceDigitalTwinsEndpointEventHubCreateUpdate, + Delete: resourceDigitalTwinsEndpointEventHubDelete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.DigitalTwinsEndpointID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DigitalTwinsInstanceName, + }, + + "digital_twins_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DigitalTwinsInstanceID, + }, + + "eventhub_primary_connection_string": { + Type: schema.TypeString, + Required: true, + Sensitive: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "eventhub_secondary_connection_string": { + Type: schema.TypeString, + Required: true, + Sensitive: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "dead_letter_storage_secret": { + Type: schema.TypeString, + Optional: true, + Sensitive: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + } +} +func resourceDigitalTwinsEndpointEventHubCreateUpdate(d *schema.ResourceData, meta interface{}) error { + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + client := meta.(*clients.Client).DigitalTwins.EndpointClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + digitalTwinsId, err := parse.DigitalTwinsInstanceID(d.Get("digital_twins_id").(string)) + if err != nil { + return err + } + + id := parse.NewDigitalTwinsEndpointID(subscriptionId, digitalTwinsId.ResourceGroup, digitalTwinsId.Name, name).ID() + + if d.IsNewResource() { + existing, err := client.Get(ctx, digitalTwinsId.ResourceGroup, digitalTwinsId.Name, name) + 
if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for presence of existing Digital Twins Endpoint %q (Resource Group %q / Instance %q): %+v", name, digitalTwinsId.ResourceGroup, digitalTwinsId.Name, err) + } + } + if !utils.ResponseWasNotFound(existing.Response) { + return tf.ImportAsExistsError("azurerm_digital_twins_endpoint_eventhub", id) + } + } + + properties := digitaltwins.EndpointResource{ + Properties: &digitaltwins.EventHub{ + EndpointType: digitaltwins.EndpointTypeEventHub, + ConnectionStringPrimaryKey: utils.String(d.Get("eventhub_primary_connection_string").(string)), + ConnectionStringSecondaryKey: utils.String(d.Get("eventhub_secondary_connection_string").(string)), + DeadLetterSecret: utils.String(d.Get("dead_letter_storage_secret").(string)), + }, + } + + future, err := client.CreateOrUpdate(ctx, digitalTwinsId.ResourceGroup, digitalTwinsId.Name, name, properties) + if err != nil { + return fmt.Errorf("creating/updating Digital Twins Endpoint EventHub %q (Resource Group %q / Instance %q): %+v", name, digitalTwinsId.ResourceGroup, digitalTwinsId.Name, err) + } + + if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for creation/update of the Digital Twins Endpoint EventHub %q (Resource Group %q / Instance %q): %+v", name, digitalTwinsId.ResourceGroup, digitalTwinsId.Name, err) + } + + if _, err := client.Get(ctx, digitalTwinsId.ResourceGroup, digitalTwinsId.Name, name); err != nil { + return fmt.Errorf("retrieving Digital Twins Endpoint EventHub %q (Resource Group %q / Instance %q): %+v", name, digitalTwinsId.ResourceGroup, digitalTwinsId.Name, err) + } + + d.SetId(id) + + return resourceDigitalTwinsEndpointEventHubRead(d, meta) +} + +func resourceDigitalTwinsEndpointEventHubRead(d *schema.ResourceData, meta interface{}) error { + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + client := meta.(*clients.Client).DigitalTwins.EndpointClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DigitalTwinsEndpointID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.DigitalTwinsInstanceName, id.EndpointName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[INFO] Digital Twins Event Hub Endpoint %q does not exist - removing from state", d.Id()) + d.SetId("") + return nil + } + return fmt.Errorf("retrieving Digital Twins Endpoint EventHub %q (Resource Group %q / Instance %q): %+v", id.EndpointName, id.ResourceGroup, id.DigitalTwinsInstanceName, err) + } + d.Set("name", id.EndpointName) + d.Set("digital_twins_id", parse.NewDigitalTwinsInstanceID(subscriptionId, id.ResourceGroup, id.DigitalTwinsInstanceName).ID()) + if resp.Properties != nil { + if _, ok := resp.Properties.AsEventHub(); !ok { + return fmt.Errorf("expected Digital Twins Endpoint %q (Resource Group %q / Instance %q) to be of type EventHub", id.EndpointName, id.ResourceGroup, id.DigitalTwinsInstanceName) + } + } + return nil +} + +func resourceDigitalTwinsEndpointEventHubDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DigitalTwins.EndpointClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DigitalTwinsEndpointID(d.Id()) + if err != nil { + return err + } + + future, err := client.Delete(ctx, id.ResourceGroup, id.DigitalTwinsInstanceName, id.EndpointName) + if 
err != nil { + return fmt.Errorf("deleting Digital Twins Endpoint EventHub %q (Resource Group %q / Instance %q): %+v", id.EndpointName, id.ResourceGroup, id.DigitalTwinsInstanceName, err) + } + + if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for deletion of the Digital Twins Endpoint EventHub %q (Resource Group %q / Instance %q): %+v", id.EndpointName, id.ResourceGroup, id.DigitalTwinsInstanceName, err) + } + return nil +} diff --git a/azurerm/internal/services/digitaltwins/digital_twins_endpoint_eventhub_resource_test.go b/azurerm/internal/services/digitaltwins/digital_twins_endpoint_eventhub_resource_test.go new file mode 100644 index 000000000000..b228abd39f5c --- /dev/null +++ b/azurerm/internal/services/digitaltwins/digital_twins_endpoint_eventhub_resource_test.go @@ -0,0 +1,304 @@ +package digitaltwins_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/digitaltwins/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DigitalTwinsEndpointEventHubResource struct{} + +func TestAccDigitalTwinsEndpointEventHub_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_digital_twins_endpoint_eventhub", "test") + r := DigitalTwinsEndpointEventHubResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("eventhub_primary_connection_string", "eventhub_secondary_connection_string"), + }) +} + +func TestAccDigitalTwinsEndpointEventHub_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_digital_twins_endpoint_eventhub", "test") + r := DigitalTwinsEndpointEventHubResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccDigitalTwinsEndpointEventHub_updateEventHub(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_digital_twins_endpoint_eventhub", "test") + r := DigitalTwinsEndpointEventHubResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("eventhub_primary_connection_string", "eventhub_secondary_connection_string"), + { + Config: r.updateEventHub(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("eventhub_primary_connection_string", "eventhub_secondary_connection_string"), + { + Config: r.updateEventHubRestore(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("eventhub_primary_connection_string", "eventhub_secondary_connection_string"), + }) +} + +func TestAccDigitalTwinsEndpointEventHub_updateDeadLetter(t *testing.T) { + data := 
acceptance.BuildTestData(t, "azurerm_digital_twins_endpoint_eventhub", "test") + r := DigitalTwinsEndpointEventHubResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("eventhub_primary_connection_string", "eventhub_secondary_connection_string"), + { + Config: r.updateDeadLetter(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("eventhub_primary_connection_string", "eventhub_secondary_connection_string", "dead_letter_storage_secret"), + { + Config: r.updateDeadLetterRestore(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("eventhub_primary_connection_string", "eventhub_secondary_connection_string", "dead_letter_storage_secret"), + }) +} + +func (r DigitalTwinsEndpointEventHubResource) Exists(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.DigitalTwinsEndpointID(state.ID) + if err != nil { + return nil, err + } + resp, err := client.DigitalTwins.EndpointClient.Get(ctx, id.ResourceGroup, id.DigitalTwinsInstanceName, id.EndpointName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return utils.Bool(false), nil + } + return nil, fmt.Errorf("retrieving Digital Twins Event Hub Endpoint %q (Resource Group %q / Digital Twins Instance Name %q): %+v", id.EndpointName, id.ResourceGroup, id.DigitalTwinsInstanceName, err) + } + + return utils.Bool(true), nil +} + +func (r DigitalTwinsEndpointEventHubResource) template(data acceptance.TestData) string { + iR := DigitalTwinsInstanceResource{} + digitalTwinsInstance := iR.basic(data) + return fmt.Sprintf(` +%[1]s + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%[2]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku = "Standard" +} + +resource "azurerm_eventhub" "test" { + name = "acctesteventhub-%[2]d" + namespace_name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + + partition_count = 2 + message_retention = 1 +} + +resource "azurerm_eventhub_authorization_rule" "test" { + name = "acctest-r%[2]d" + namespace_name = azurerm_eventhub_namespace.test.name + eventhub_name = azurerm_eventhub.test.name + resource_group_name = azurerm_resource_group.test.name + + listen = false + send = true + manage = false +} +`, digitalTwinsInstance, data.RandomInteger) +} + +func (r DigitalTwinsEndpointEventHubResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_digital_twins_endpoint_eventhub" "test" { + name = "acctest-EH-%d" + digital_twins_id = azurerm_digital_twins_instance.test.id + eventhub_primary_connection_string = azurerm_eventhub_authorization_rule.test.primary_connection_string + eventhub_secondary_connection_string = azurerm_eventhub_authorization_rule.test.secondary_connection_string +} +`, r.template(data), data.RandomInteger) +} + +func (r DigitalTwinsEndpointEventHubResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_digital_twins_endpoint_eventhub" "import" { + name = azurerm_digital_twins_endpoint_eventhub.test.name + digital_twins_id = azurerm_digital_twins_endpoint_eventhub.test.digital_twins_id + 
eventhub_primary_connection_string = azurerm_digital_twins_endpoint_eventhub.test.eventhub_primary_connection_string + eventhub_secondary_connection_string = azurerm_digital_twins_endpoint_eventhub.test.eventhub_secondary_connection_string +} +`, r.basic(data)) +} + +func (r DigitalTwinsEndpointEventHubResource) updateEventHub(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_eventhub" "test_alt" { + name = "acctesteventhub-alt-%[2]d" + namespace_name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + + partition_count = 2 + message_retention = 1 +} + +resource "azurerm_eventhub_authorization_rule" "test_alt" { + name = "acctest-r%[2]d" + namespace_name = azurerm_eventhub_namespace.test.name + eventhub_name = azurerm_eventhub.test_alt.name + resource_group_name = azurerm_resource_group.test.name + + listen = false + send = true + manage = false +} + +resource "azurerm_digital_twins_endpoint_eventhub" "test" { + name = "acctest-EH-%[2]d" + digital_twins_id = azurerm_digital_twins_instance.test.id + eventhub_primary_connection_string = azurerm_eventhub_authorization_rule.test_alt.primary_connection_string + eventhub_secondary_connection_string = azurerm_eventhub_authorization_rule.test_alt.secondary_connection_string +} +`, r.template(data), data.RandomInteger) +} + +func (r DigitalTwinsEndpointEventHubResource) updateEventHubRestore(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_eventhub" "test_alt" { + name = "acctesteventhub-alt-%[2]d" + namespace_name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + + partition_count = 2 + message_retention = 1 +} + +resource "azurerm_eventhub_authorization_rule" "test_alt" { + name = "acctest-r%[2]d" + namespace_name = azurerm_eventhub_namespace.test.name + eventhub_name = azurerm_eventhub.test_alt.name + resource_group_name = azurerm_resource_group.test.name + + listen = false + send = true + manage = false +} + +resource "azurerm_digital_twins_endpoint_eventhub" "test" { + name = "acctest-EH-%[2]d" + digital_twins_id = azurerm_digital_twins_instance.test.id + eventhub_primary_connection_string = azurerm_eventhub_authorization_rule.test.primary_connection_string + eventhub_secondary_connection_string = azurerm_eventhub_authorization_rule.test.secondary_connection_string +} +`, r.template(data), data.RandomInteger) +} + +func (r DigitalTwinsEndpointEventHubResource) updateDeadLetter(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_storage_account" "test" { + name = "acctestacc%[2]s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_container" "test" { + name = "vhds" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource "azurerm_digital_twins_endpoint_eventhub" "test" { + name = "acctest-EH-%[3]d" + digital_twins_id = azurerm_digital_twins_instance.test.id + eventhub_primary_connection_string = azurerm_eventhub_authorization_rule.test.primary_connection_string + eventhub_secondary_connection_string = azurerm_eventhub_authorization_rule.test.secondary_connection_string + dead_letter_storage_secret = "${azurerm_storage_container.test.id}?${azurerm_storage_account.test.primary_access_key}" +} +`, r.template(data), data.RandomString, data.RandomInteger) 
+} + +func (r DigitalTwinsEndpointEventHubResource) updateDeadLetterRestore(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_storage_account" "test" { + name = "acctestacc%[2]s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_container" "test" { + name = "vhds" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource "azurerm_digital_twins_endpoint_eventhub" "test" { + name = "acctest-EH-%[3]d" + digital_twins_id = azurerm_digital_twins_instance.test.id + eventhub_primary_connection_string = azurerm_eventhub_authorization_rule.test.primary_connection_string + eventhub_secondary_connection_string = azurerm_eventhub_authorization_rule.test.secondary_connection_string +} +`, r.template(data), data.RandomString, data.RandomInteger) +} diff --git a/azurerm/internal/services/digitaltwins/digital_twins_endpoint_servicebus_resource.go b/azurerm/internal/services/digitaltwins/digital_twins_endpoint_servicebus_resource.go new file mode 100644 index 000000000000..b21b4e843f23 --- /dev/null +++ b/azurerm/internal/services/digitaltwins/digital_twins_endpoint_servicebus_resource.go @@ -0,0 +1,179 @@ +package digitaltwins + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/digitaltwins/mgmt/2020-10-31/digitaltwins" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/digitaltwins/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/digitaltwins/validate" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceDigitalTwinsEndpointServiceBus() *schema.Resource { + return &schema.Resource{ + Create: resourceDigitalTwinsEndpointServiceBusCreateUpdate, + Read: resourceDigitalTwinsEndpointServiceBusRead, + Update: resourceDigitalTwinsEndpointServiceBusCreateUpdate, + Delete: resourceDigitalTwinsEndpointServiceBusDelete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.DigitalTwinsEndpointID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DigitalTwinsInstanceName, + }, + + "digital_twins_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DigitalTwinsInstanceID, + }, + + "servicebus_primary_connection_string": { + Type: schema.TypeString, + Required: true, + Sensitive: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "servicebus_secondary_connection_string": { + Type: schema.TypeString, + Required: true, + 
Sensitive: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "dead_letter_storage_secret": { + Type: schema.TypeString, + Optional: true, + Sensitive: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + } +} +func resourceDigitalTwinsEndpointServiceBusCreateUpdate(d *schema.ResourceData, meta interface{}) error { + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + client := meta.(*clients.Client).DigitalTwins.EndpointClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + digitalTwinsId, err := parse.DigitalTwinsInstanceID(d.Get("digital_twins_id").(string)) + if err != nil { + return err + } + + id := parse.NewDigitalTwinsEndpointID(subscriptionId, digitalTwinsId.ResourceGroup, digitalTwinsId.Name, name).ID() + + if d.IsNewResource() { + existing, err := client.Get(ctx, digitalTwinsId.ResourceGroup, digitalTwinsId.Name, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for presence of existing Digital Twins Endpoint %q (Resource Group %q / Instance %q): %+v", name, digitalTwinsId.ResourceGroup, digitalTwinsId.Name, err) + } + } + if !utils.ResponseWasNotFound(existing.Response) { + return tf.ImportAsExistsError("azurerm_digital_twins_endpoint_servicebus", id) + } + } + + properties := digitaltwins.EndpointResource{ + Properties: &digitaltwins.ServiceBus{ + EndpointType: digitaltwins.EndpointTypeServiceBus, + PrimaryConnectionString: utils.String(d.Get("servicebus_primary_connection_string").(string)), + SecondaryConnectionString: utils.String(d.Get("servicebus_secondary_connection_string").(string)), + DeadLetterSecret: utils.String(d.Get("dead_letter_storage_secret").(string)), + }, + } + + future, err := client.CreateOrUpdate(ctx, digitalTwinsId.ResourceGroup, digitalTwinsId.Name, name, properties) + if err != nil { + return fmt.Errorf("creating/updating Digital Twins Endpoint ServiceBus %q (Resource Group %q / Instance %q): %+v", name, digitalTwinsId.ResourceGroup, digitalTwinsId.Name, err) + } + + if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for creation/update of the Digital Twins Endpoint ServiceBus %q (Resource Group %q / Instance %q): %+v", name, digitalTwinsId.ResourceGroup, digitalTwinsId.Name, err) + } + + if _, err := client.Get(ctx, digitalTwinsId.ResourceGroup, digitalTwinsId.Name, name); err != nil { + return fmt.Errorf("retrieving Digital Twins Endpoint ServiceBus %q (Resource Group %q / Instance %q): %+v", name, digitalTwinsId.ResourceGroup, digitalTwinsId.Name, err) + } + + d.SetId(id) + + return resourceDigitalTwinsEndpointServiceBusRead(d, meta) +} + +func resourceDigitalTwinsEndpointServiceBusRead(d *schema.ResourceData, meta interface{}) error { + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + client := meta.(*clients.Client).DigitalTwins.EndpointClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DigitalTwinsEndpointID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.DigitalTwinsInstanceName, id.EndpointName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[INFO] Digital Twins ServiceBus Endpoint %q does not exist - removing from state", d.Id()) + d.SetId("") + return nil + } + return fmt.Errorf("retrieving Digital Twins Endpoint ServiceBus %q (Resource Group %q / Instance 
%q): %+v", id.EndpointName, id.ResourceGroup, id.DigitalTwinsInstanceName, err) + } + d.Set("name", id.EndpointName) + d.Set("digital_twins_id", parse.NewDigitalTwinsInstanceID(subscriptionId, id.ResourceGroup, id.DigitalTwinsInstanceName).ID()) + if resp.Properties != nil { + if _, ok := resp.Properties.AsServiceBus(); !ok { + return fmt.Errorf("expected Digital Twins Endpoint %q (Resource Group %q / Instance %q) to be of type ServiceBus", id.EndpointName, id.ResourceGroup, id.DigitalTwinsInstanceName) + } + } + return nil +} + +func resourceDigitalTwinsEndpointServiceBusDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DigitalTwins.EndpointClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DigitalTwinsEndpointID(d.Id()) + if err != nil { + return err + } + + future, err := client.Delete(ctx, id.ResourceGroup, id.DigitalTwinsInstanceName, id.EndpointName) + if err != nil { + return fmt.Errorf("deleting Digital Twins Endpoint ServiceBus %q (Resource Group %q / Instance %q): %+v", id.EndpointName, id.ResourceGroup, id.DigitalTwinsInstanceName, err) + } + + if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for deletion of the Digital Twins Endpoint ServiceBus %q (Resource Group %q / Instance %q): %+v", id.EndpointName, id.ResourceGroup, id.DigitalTwinsInstanceName, err) + } + return nil +} diff --git a/azurerm/internal/services/digitaltwins/digital_twins_endpoint_servicebus_resource_test.go b/azurerm/internal/services/digitaltwins/digital_twins_endpoint_servicebus_resource_test.go new file mode 100644 index 000000000000..057fa7dfac96 --- /dev/null +++ b/azurerm/internal/services/digitaltwins/digital_twins_endpoint_servicebus_resource_test.go @@ -0,0 +1,308 @@ +package digitaltwins_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/digitaltwins/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DigitalTwinsEndpointServiceBusResource struct{} + +func TestAccDigitalTwinsEndpointServicebus_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_digital_twins_endpoint_servicebus", "test") + r := DigitalTwinsEndpointServiceBusResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("servicebus_primary_connection_string", "servicebus_secondary_connection_string"), + }) +} + +func TestAccDigitalTwinsEndpointServicebus_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_digital_twins_endpoint_servicebus", "test") + r := DigitalTwinsEndpointServiceBusResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func 
TestAccDigitalTwinsEndpointServicebus_updateServiceBus(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_digital_twins_endpoint_servicebus", "test") + r := DigitalTwinsEndpointServiceBusResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("servicebus_primary_connection_string", "servicebus_secondary_connection_string"), + { + Config: r.updateServiceBus(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("servicebus_primary_connection_string", "servicebus_secondary_connection_string"), + { + Config: r.updateServiceBusRestore(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("servicebus_primary_connection_string", "servicebus_secondary_connection_string"), + }) +} + +func TestAccDigitalTwinsEndpointServicebus_updateDeadLetter(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_digital_twins_endpoint_servicebus", "test") + r := DigitalTwinsEndpointServiceBusResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("servicebus_primary_connection_string", "servicebus_secondary_connection_string"), + { + Config: r.updateDeadLetter(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("servicebus_primary_connection_string", "servicebus_secondary_connection_string", "dead_letter_storage_secret"), + { + Config: r.updateDeadLetterRestore(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("servicebus_primary_connection_string", "servicebus_secondary_connection_string", "dead_letter_storage_secret"), + }) +} + +func (r DigitalTwinsEndpointServiceBusResource) Exists(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.DigitalTwinsEndpointID(state.ID) + if err != nil { + return nil, err + } + resp, err := client.DigitalTwins.EndpointClient.Get(ctx, id.ResourceGroup, id.DigitalTwinsInstanceName, id.EndpointName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return utils.Bool(false), nil + } + return nil, fmt.Errorf("retrieving Digital Twins Service Bus Endpoint %q (Resource Group %q / Digital Twins Instance Name %q): %+v", id.EndpointName, id.ResourceGroup, id.DigitalTwinsInstanceName, err) + } + + return utils.Bool(true), nil +} + +func (r DigitalTwinsEndpointServiceBusResource) template(data acceptance.TestData) string { + iR := DigitalTwinsInstanceResource{} + digitalTwinsInstance := iR.basic(data) + return fmt.Sprintf(` +%[1]s + +resource "azurerm_servicebus_namespace" "test" { + name = "acctestservicebusnamespace-%[2]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" +} + +resource "azurerm_servicebus_topic" "test" { + name = "acctestservicebustopic-%[2]d" + namespace_name = azurerm_servicebus_namespace.test.name + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_servicebus_topic_authorization_rule" "test" { + name = "acctest-rule-%[2]d" + namespace_name = azurerm_servicebus_namespace.test.name + 
resource_group_name = azurerm_resource_group.test.name + topic_name = azurerm_servicebus_topic.test.name + + listen = false + send = true + manage = false +} +`, digitalTwinsInstance, data.RandomInteger) +} + +func (r DigitalTwinsEndpointServiceBusResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_digital_twins_endpoint_servicebus" "test" { + name = "acctest-EndpointSB-%d" + digital_twins_id = azurerm_digital_twins_instance.test.id + servicebus_primary_connection_string = azurerm_servicebus_topic_authorization_rule.test.primary_connection_string + servicebus_secondary_connection_string = azurerm_servicebus_topic_authorization_rule.test.secondary_connection_string +} +`, r.template(data), data.RandomInteger) +} + +func (r DigitalTwinsEndpointServiceBusResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_digital_twins_endpoint_servicebus" "import" { + name = azurerm_digital_twins_endpoint_servicebus.test.name + digital_twins_id = azurerm_digital_twins_endpoint_servicebus.test.digital_twins_id + servicebus_primary_connection_string = azurerm_digital_twins_endpoint_servicebus.test.servicebus_primary_connection_string + servicebus_secondary_connection_string = azurerm_digital_twins_endpoint_servicebus.test.servicebus_secondary_connection_string +} +`, r.basic(data)) +} + +func (r DigitalTwinsEndpointServiceBusResource) updateServiceBus(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_servicebus_namespace" "test_alt" { + name = "acctestservicebusnamespace-alt-%[2]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "basic" +} + +resource "azurerm_servicebus_topic" "test_alt" { + name = "acctestservicebustopic-alt-%[2]d" + namespace_name = azurerm_servicebus_namespace.test.name + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_servicebus_topic_authorization_rule" "test_alt" { + name = "acctest-rule-alt-%[2]d" + namespace_name = azurerm_servicebus_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + topic_name = azurerm_servicebus_topic.test.name + + listen = false + send = true + manage = false +} + +resource "azurerm_digital_twins_endpoint_servicebus" "test" { + name = "acctest-EndpointSB-%[2]d" + digital_twins_id = azurerm_digital_twins_instance.test.id + servicebus_primary_connection_string = azurerm_servicebus_topic_authorization_rule.test_alt.primary_connection_string + servicebus_secondary_connection_string = azurerm_servicebus_topic_authorization_rule.test_alt.secondary_connection_string +} +`, r.template(data), data.RandomInteger) +} + +func (r DigitalTwinsEndpointServiceBusResource) updateServiceBusRestore(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_servicebus_namespace" "test_alt" { + name = "acctestservicebusnamespace-alt-%[2]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "basic" +} + +resource "azurerm_servicebus_topic" "test_alt" { + name = "acctestservicebustopic-alt-%[2]d" + namespace_name = azurerm_servicebus_namespace.test.name + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_servicebus_topic_authorization_rule" "test_alt" { + name = "acctest-rule-alt-%[2]d" + namespace_name = azurerm_servicebus_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + 
topic_name = azurerm_servicebus_topic.test.name + + listen = false + send = true + manage = false +} + +resource "azurerm_digital_twins_endpoint_servicebus" "test" { + name = "acctest-EndpointSB-%[2]d" + digital_twins_id = azurerm_digital_twins_instance.test.id + servicebus_primary_connection_string = azurerm_servicebus_topic_authorization_rule.test.primary_connection_string + servicebus_secondary_connection_string = azurerm_servicebus_topic_authorization_rule.test.secondary_connection_string +} +`, r.template(data), data.RandomInteger) +} + +func (r DigitalTwinsEndpointServiceBusResource) updateDeadLetter(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_storage_account" "test" { + name = "acctestacc%[2]s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_container" "test" { + name = "vhds" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource "azurerm_digital_twins_endpoint_servicebus" "test" { + name = "acctest-EndpointSB-%[3]d" + digital_twins_id = azurerm_digital_twins_instance.test.id + servicebus_primary_connection_string = azurerm_servicebus_topic_authorization_rule.test.primary_connection_string + servicebus_secondary_connection_string = azurerm_servicebus_topic_authorization_rule.test.secondary_connection_string + dead_letter_storage_secret = "${azurerm_storage_container.test.id}?${azurerm_storage_account.test.primary_access_key}" +} +`, r.template(data), data.RandomString, data.RandomInteger) +} + +func (r DigitalTwinsEndpointServiceBusResource) updateDeadLetterRestore(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_storage_account" "test" { + name = "acctestacc%[2]s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_container" "test" { + name = "vhds" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource "azurerm_digital_twins_endpoint_servicebus" "test" { + name = "acctest-EndpointSB-%[3]d" + digital_twins_id = azurerm_digital_twins_instance.test.id + servicebus_primary_connection_string = azurerm_servicebus_topic_authorization_rule.test.primary_connection_string + servicebus_secondary_connection_string = azurerm_servicebus_topic_authorization_rule.test.secondary_connection_string +} +`, r.template(data), data.RandomString, data.RandomInteger) +} diff --git a/azurerm/internal/services/digitaltwins/digital_twins_instance_data_source.go b/azurerm/internal/services/digitaltwins/digital_twins_instance_data_source.go new file mode 100644 index 000000000000..7c20371bae12 --- /dev/null +++ b/azurerm/internal/services/digitaltwins/digital_twins_instance_data_source.go @@ -0,0 +1,76 @@ +package digitaltwins + +import ( + "fmt" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/digitaltwins/parse" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/digitaltwins/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func dataSourceDigitalTwinsInstance() *schema.Resource { + return &schema.Resource{ + Read: dataSourceDigitalTwinsInstanceRead, + + Timeouts: &schema.ResourceTimeout{ + Read: schema.DefaultTimeout(5 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.DigitalTwinsInstanceName, + }, + + "resource_group_name": azure.SchemaResourceGroupNameForDataSource(), + + "location": azure.SchemaLocationForDataSource(), + + "host_name": { + Type: schema.TypeString, + Computed: true, + }, + + "tags": tags.SchemaDataSource(), + }, + } +} + +func dataSourceDigitalTwinsInstanceRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DigitalTwins.InstanceClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + + id := parse.NewDigitalTwinsInstanceID(subscriptionId, resourceGroup, name).ID() + + resp, err := client.Get(ctx, resourceGroup, name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Digital Twins Instance %q (Resource Group %q) does not exist", name, resourceGroup) + } + return fmt.Errorf("retrieving Digital Twins Instance %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + d.Set("name", name) + d.Set("resource_group_name", resourceGroup) + d.Set("location", location.NormalizeNilable(resp.Location)) + if props := resp.Properties; props != nil { + d.Set("host_name", props.HostName) + } + + d.SetId(id) + + return tags.FlattenAndSet(d, resp.Tags) +} diff --git a/azurerm/internal/services/digitaltwins/digital_twins_instance_data_source_test.go b/azurerm/internal/services/digitaltwins/digital_twins_instance_data_source_test.go new file mode 100644 index 000000000000..d1bb8d2a4455 --- /dev/null +++ b/azurerm/internal/services/digitaltwins/digital_twins_instance_data_source_test.go @@ -0,0 +1,38 @@ +package digitaltwins_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type DigitalTwinsInstanceDataSource struct { +} + +func TestAccDigitalTwinsInstanceDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_digital_twins_instance", "test") + r := DigitalTwinsInstanceDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("host_name").Exists(), + ), + }, + }) +} + +func (DigitalTwinsInstanceDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_digital_twins_instance" "test" { + name = azurerm_digital_twins_instance.test.name + resource_group_name = azurerm_digital_twins_instance.test.resource_group_name +} +`, DigitalTwinsInstanceResource{}.basic(data)) +} diff --git 
a/azurerm/internal/services/digitaltwins/digital_twins_instance_resource.go b/azurerm/internal/services/digitaltwins/digital_twins_instance_resource.go new file mode 100644 index 000000000000..9aecc575add5 --- /dev/null +++ b/azurerm/internal/services/digitaltwins/digital_twins_instance_resource.go @@ -0,0 +1,180 @@ +package digitaltwins + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/digitaltwins/mgmt/2020-10-31/digitaltwins" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/digitaltwins/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/digitaltwins/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceDigitalTwinsInstance() *schema.Resource { + return &schema.Resource{ + Create: resourceDigitalTwinsInstanceCreate, + Read: resourceDigitalTwinsInstanceRead, + Update: resourceDigitalTwinsInstanceUpdate, + Delete: resourceDigitalTwinsInstanceDelete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.DigitalTwinsInstanceID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DigitalTwinsInstanceName, + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "location": azure.SchemaLocation(), + + "host_name": { + Type: schema.TypeString, + Computed: true, + }, + + "tags": tags.Schema(), + }, + } +} +func resourceDigitalTwinsInstanceCreate(d *schema.ResourceData, meta interface{}) error { + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + client := meta.(*clients.Client).DigitalTwins.InstanceClient + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + + id := parse.NewDigitalTwinsInstanceID(subscriptionId, resourceGroup, name).ID() + + existing, err := client.Get(ctx, resourceGroup, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for existing Digital Twins Instance %q (Resource Group %q): %+v", name, resourceGroup, err) + } + } + if !utils.ResponseWasNotFound(existing.Response) { + return tf.ImportAsExistsError("azurerm_digital_twins_instance", id) + } + + properties := digitaltwins.Description{ + Location: utils.String(location.Normalize(d.Get("location").(string))), + Tags: tags.Expand(d.Get("tags").(map[string]interface{})), + } + + future, err := 
client.CreateOrUpdate(ctx, resourceGroup, name, properties) + if err != nil { + return fmt.Errorf("creating Digital Twins Instance %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for creation of the Digital Twins Instance %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + _, err = client.Get(ctx, resourceGroup, name) + if err != nil { + return fmt.Errorf("retrieving Digital Twins Instance %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + d.SetId(id) + + return resourceDigitalTwinsInstanceRead(d, meta) +} + +func resourceDigitalTwinsInstanceRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DigitalTwins.InstanceClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DigitalTwinsInstanceID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[INFO] Digital Twins Instance %q does not exist - removing from state", d.Id()) + d.SetId("") + return nil + } + return fmt.Errorf("retrieving Digital Twins Instance %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + d.Set("name", id.Name) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("location", location.NormalizeNilable(resp.Location)) + if props := resp.Properties; props != nil { + d.Set("host_name", props.HostName) + } + + return tags.FlattenAndSet(d, resp.Tags) +} + +func resourceDigitalTwinsInstanceUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DigitalTwins.InstanceClient + ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DigitalTwinsInstanceID(d.Id()) + if err != nil { + return err + } + + props := digitaltwins.PatchDescription{} + + if d.HasChange("tags") { + props.Tags = tags.Expand(d.Get("tags").(map[string]interface{})) + } + + if _, err := client.Update(ctx, id.ResourceGroup, id.Name, props); err != nil { + return fmt.Errorf("updating Digital Twins Instance %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + return resourceDigitalTwinsInstanceRead(d, meta) +} + +func resourceDigitalTwinsInstanceDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DigitalTwins.InstanceClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DigitalTwinsInstanceID(d.Id()) + if err != nil { + return err + } + + future, err := client.Delete(ctx, id.ResourceGroup, id.Name) + if err != nil { + return fmt.Errorf("deleting Digital Twins Instance %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for deletion of the Digital Twins Instance %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + return nil +} diff --git a/azurerm/internal/services/digitaltwins/digital_twins_instance_resource_test.go b/azurerm/internal/services/digitaltwins/digital_twins_instance_resource_test.go new file mode 100644 index 000000000000..152be1d3a845 --- /dev/null +++ b/azurerm/internal/services/digitaltwins/digital_twins_instance_resource_test.go @@ -0,0 +1,185 @@ +package digitaltwins_test + +import ( + "context" + "fmt" + "testing" + 
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/digitaltwins/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DigitalTwinsInstanceResource struct { +} + +func TestAccDigitalTwinsInstance_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_digital_twins_instance", "test") + r := DigitalTwinsInstanceResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("host_name").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDigitalTwinsInstance_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_digital_twins_instance", "test") + r := DigitalTwinsInstanceResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("host_name").Exists(), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccDigitalTwinsInstance_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_digital_twins_instance", "test") + r := DigitalTwinsInstanceResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("host_name").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDigitalTwinsInstance_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_digital_twins_instance", "test") + r := DigitalTwinsInstanceResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("host_name").Exists(), + ), + }, + data.ImportStep(), + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("host_name").Exists(), + ), + }, + data.ImportStep(), + { + Config: r.updateTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("host_name").Exists(), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("host_name").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func (DigitalTwinsInstanceResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.DigitalTwinsInstanceID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.DigitalTwins.InstanceClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Digital Twins Instance %q (resource group: %q): %+v", id.Name, id.ResourceGroup, err) + } + + return 
utils.Bool(resp.Properties != nil), nil +} + +func (DigitalTwinsInstanceResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-dtwin-%d" + location = "%s" +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (r DigitalTwinsInstanceResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_digital_twins_instance" "test" { + name = "acctest-DT-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location +} +`, r.template(data), data.RandomInteger) +} + +func (r DigitalTwinsInstanceResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_digital_twins_instance" "import" { + name = azurerm_digital_twins_instance.test.name + resource_group_name = azurerm_digital_twins_instance.test.resource_group_name + location = azurerm_digital_twins_instance.test.location +} +`, r.basic(data)) +} + +func (r DigitalTwinsInstanceResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_digital_twins_instance" "test" { + name = "acctest-DT-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + tags = { + ENV = "Test" + } +} +`, r.template(data), data.RandomInteger) +} + +func (r DigitalTwinsInstanceResource) updateTags(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_digital_twins_instance" "test" { + name = "acctest-DT-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + tags = { + ENV = "Stage" + } +} +`, r.template(data), data.RandomInteger) +} diff --git a/azurerm/internal/services/digitaltwins/parse/digital_twins_endpoint.go b/azurerm/internal/services/digitaltwins/parse/digital_twins_endpoint.go new file mode 100644 index 000000000000..f87d1c685426 --- /dev/null +++ b/azurerm/internal/services/digitaltwins/parse/digital_twins_endpoint.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type DigitalTwinsEndpointId struct { + SubscriptionId string + ResourceGroup string + DigitalTwinsInstanceName string + EndpointName string +} + +func NewDigitalTwinsEndpointID(subscriptionId, resourceGroup, digitalTwinsInstanceName, endpointName string) DigitalTwinsEndpointId { + return DigitalTwinsEndpointId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + DigitalTwinsInstanceName: digitalTwinsInstanceName, + EndpointName: endpointName, + } +} + +func (id DigitalTwinsEndpointId) String() string { + segments := []string{ + fmt.Sprintf("Endpoint Name %q", id.EndpointName), + fmt.Sprintf("Digital Twins Instance Name %q", id.DigitalTwinsInstanceName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Digital Twins Endpoint", segmentsStr) +} + +func (id DigitalTwinsEndpointId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DigitalTwins/digitalTwinsInstances/%s/endpoints/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.DigitalTwinsInstanceName, id.EndpointName) +} + +// DigitalTwinsEndpointID 
parses a DigitalTwinsEndpoint ID into a DigitalTwinsEndpointId struct +func DigitalTwinsEndpointID(input string) (*DigitalTwinsEndpointId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := DigitalTwinsEndpointId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.DigitalTwinsInstanceName, err = id.PopSegment("digitalTwinsInstances"); err != nil { + return nil, err + } + if resourceId.EndpointName, err = id.PopSegment("endpoints"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/digitaltwins/parse/digital_twins_endpoint_test.go b/azurerm/internal/services/digitaltwins/parse/digital_twins_endpoint_test.go new file mode 100644 index 000000000000..00727446e872 --- /dev/null +++ b/azurerm/internal/services/digitaltwins/parse/digital_twins_endpoint_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = DigitalTwinsEndpointId{} + +func TestDigitalTwinsEndpointIDFormatter(t *testing.T) { + actual := NewDigitalTwinsEndpointID("12345678-1234-9876-4563-123456789012", "group1", "instance1", "endpoint1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DigitalTwins/digitalTwinsInstances/instance1/endpoints/endpoint1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestDigitalTwinsEndpointID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *DigitalTwinsEndpointId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing DigitalTwinsInstanceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DigitalTwins/", + Error: true, + }, + + { + // missing value for DigitalTwinsInstanceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DigitalTwins/digitalTwinsInstances/", + Error: true, + }, + + { + // missing EndpointName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DigitalTwins/digitalTwinsInstances/instance1/", + Error: true, + }, + + { + // missing value for EndpointName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DigitalTwins/digitalTwinsInstances/instance1/endpoints/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DigitalTwins/digitalTwinsInstances/instance1/endpoints/endpoint1", + Expected: &DigitalTwinsEndpointId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "group1", + DigitalTwinsInstanceName: "instance1", + EndpointName: "endpoint1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.DIGITALTWINS/DIGITALTWINSINSTANCES/INSTANCE1/ENDPOINTS/ENDPOINT1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := DigitalTwinsEndpointID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expected a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expected an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.DigitalTwinsInstanceName != v.Expected.DigitalTwinsInstanceName { + t.Fatalf("Expected %q but got %q for DigitalTwinsInstanceName", v.Expected.DigitalTwinsInstanceName, actual.DigitalTwinsInstanceName) + } + if actual.EndpointName != v.Expected.EndpointName { + t.Fatalf("Expected %q but got %q for EndpointName", v.Expected.EndpointName, actual.EndpointName) + } + } +} diff --git a/azurerm/internal/services/digitaltwins/parse/digital_twins_instance.go b/azurerm/internal/services/digitaltwins/parse/digital_twins_instance.go new file mode 100644 index 000000000000..175d06df4320 --- /dev/null +++ b/azurerm/internal/services/digitaltwins/parse/digital_twins_instance.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type DigitalTwinsInstanceId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewDigitalTwinsInstanceID(subscriptionId, resourceGroup, name string) DigitalTwinsInstanceId { + return DigitalTwinsInstanceId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id DigitalTwinsInstanceId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Digital Twins Instance", segmentsStr) +} + +func (id DigitalTwinsInstanceId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DigitalTwins/digitalTwinsInstances/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// DigitalTwinsInstanceID parses a DigitalTwinsInstance ID into a DigitalTwinsInstanceId struct +func DigitalTwinsInstanceID(input string) (*DigitalTwinsInstanceId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := DigitalTwinsInstanceId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("digitalTwinsInstances"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/digitaltwins/parse/digital_twins_instance_test.go b/azurerm/internal/services/digitaltwins/parse/digital_twins_instance_test.go new file mode 100644 index 000000000000..bcf0d735c173 --- /dev/null +++ b/azurerm/internal/services/digitaltwins/parse/digital_twins_instance_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = DigitalTwinsInstanceId{} + +func TestDigitalTwinsInstanceIDFormatter(t *testing.T) { + actual := NewDigitalTwinsInstanceID("12345678-1234-9876-4563-123456789012", "group1", "instance1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DigitalTwins/digitalTwinsInstances/instance1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestDigitalTwinsInstanceID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *DigitalTwinsInstanceId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DigitalTwins/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DigitalTwins/digitalTwinsInstances/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DigitalTwins/digitalTwinsInstances/instance1", + Expected: &DigitalTwinsInstanceId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "group1", + Name: "instance1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.DIGITALTWINS/DIGITALTWINSINSTANCES/INSTANCE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := DigitalTwinsInstanceID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expected a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expected an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) +
} + } +} diff --git a/azurerm/internal/services/digitaltwins/registration.go b/azurerm/internal/services/digitaltwins/registration.go new file mode 100644 index 000000000000..e0c9bf61387c --- /dev/null +++ b/azurerm/internal/services/digitaltwins/registration.go @@ -0,0 +1,36 @@ +package digitaltwins + +import ( + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" +) + +type Registration struct{} + +// Name is the name of this Service +func (r Registration) Name() string { + return "Digital Twins" +} + +// WebsiteCategories returns a list of categories which can be used for the sidebar +func (r Registration) WebsiteCategories() []string { + return []string{ + "Digital Twins", + } +} + +// SupportedDataSources returns the Data Sources supported by this Service +func (r Registration) SupportedDataSources() map[string]*schema.Resource { + return map[string]*schema.Resource{ + "azurerm_digital_twins_instance": dataSourceDigitalTwinsInstance(), + } +} + +// SupportedResources returns the Resources supported by this Service +func (r Registration) SupportedResources() map[string]*schema.Resource { + return map[string]*schema.Resource{ + "azurerm_digital_twins_instance": resourceDigitalTwinsInstance(), + "azurerm_digital_twins_endpoint_eventgrid": resourceDigitalTwinsEndpointEventGrid(), + "azurerm_digital_twins_endpoint_eventhub": resourceDigitalTwinsEndpointEventHub(), + "azurerm_digital_twins_endpoint_servicebus": resourceDigitalTwinsEndpointServiceBus(), + } +} diff --git a/azurerm/internal/services/digitaltwins/resourceids.go b/azurerm/internal/services/digitaltwins/resourceids.go new file mode 100644 index 000000000000..84e431bd1eb5 --- /dev/null +++ b/azurerm/internal/services/digitaltwins/resourceids.go @@ -0,0 +1,5 @@ +package digitaltwins + +// leaving the DigitalTwins prefix here to avoid stuttering the property name for now +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=DigitalTwinsInstance -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DigitalTwins/digitalTwinsInstances/instance1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=DigitalTwinsEndpoint -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DigitalTwins/digitalTwinsInstances/instance1/endpoints/endpoint1 diff --git a/azurerm/internal/services/digitaltwins/validate/digital_twins_endpoint_id.go b/azurerm/internal/services/digitaltwins/validate/digital_twins_endpoint_id.go new file mode 100644 index 000000000000..277b9c72b5a7 --- /dev/null +++ b/azurerm/internal/services/digitaltwins/validate/digital_twins_endpoint_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/digitaltwins/parse" +) + +func DigitalTwinsEndpointID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.DigitalTwinsEndpointID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/digitaltwins/validate/digital_twins_endpoint_id_test.go b/azurerm/internal/services/digitaltwins/validate/digital_twins_endpoint_id_test.go new file mode 100644 index 000000000000..4c6299e842b8 --- /dev/null
+++ b/azurerm/internal/services/digitaltwins/validate/digital_twins_endpoint_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestDigitalTwinsEndpointID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing DigitalTwinsInstanceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DigitalTwins/", + Valid: false, + }, + + { + // missing value for DigitalTwinsInstanceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DigitalTwins/digitalTwinsInstances/", + Valid: false, + }, + + { + // missing EndpointName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DigitalTwins/digitalTwinsInstances/instance1/", + Valid: false, + }, + + { + // missing value for EndpointName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DigitalTwins/digitalTwinsInstances/instance1/endpoints/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DigitalTwins/digitalTwinsInstances/instance1/endpoints/endpoint1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.DIGITALTWINS/DIGITALTWINSINSTANCES/INSTANCE1/ENDPOINTS/ENDPOINT1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := DigitalTwinsEndpointID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/digitaltwins/validate/digital_twins_instance_id.go b/azurerm/internal/services/digitaltwins/validate/digital_twins_instance_id.go new file mode 100644 index 000000000000..778ac177c7f6 --- /dev/null +++ b/azurerm/internal/services/digitaltwins/validate/digital_twins_instance_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/digitaltwins/parse" +) + +func DigitalTwinsInstanceID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.DigitalTwinsInstanceID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/digitaltwins/validate/digital_twins_instance_id_test.go b/azurerm/internal/services/digitaltwins/validate/digital_twins_instance_id_test.go new file mode 100644 index 000000000000..2d836b0c073f --- /dev/null +++ 
b/azurerm/internal/services/digitaltwins/validate/digital_twins_instance_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestDigitalTwinsInstanceID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DigitalTwins/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DigitalTwins/digitalTwinsInstances/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.DigitalTwins/digitalTwinsInstances/instance1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.DIGITALTWINS/DIGITALTWINSINSTANCES/INSTANCE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := DigitalTwinsInstanceID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/digitaltwins/validate/digital_twins_instance_name.go b/azurerm/internal/services/digitaltwins/validate/digital_twins_instance_name.go new file mode 100644 index 000000000000..4259cdb26830 --- /dev/null +++ b/azurerm/internal/services/digitaltwins/validate/digital_twins_instance_name.go @@ -0,0 +1,30 @@ +package validate + +import ( + "fmt" + "regexp" +) + +func DigitalTwinsInstanceName(i interface{}, k string) (warnings []string, errors []error) { + v, ok := i.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected type of %s to be string", k)) + return + } + + if len(v) < 3 { + errors = append(errors, fmt.Errorf("length should equal to or greater than %d, got %q", 3, v)) + return + } + + if len(v) > 63 { + errors = append(errors, fmt.Errorf("length should be equal to or less than %d, got %q", 63, v)) + return + } + + if !regexp.MustCompile(`^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$`).MatchString(v) { + errors = append(errors, fmt.Errorf("%q must begin with a letter or number, end with a letter or number and contain only letters, numbers, and hyphens, got %v", k, v)) + return + } + return +} diff --git a/azurerm/internal/services/digitaltwins/validate/digital_twins_instance_name_test.go b/azurerm/internal/services/digitaltwins/validate/digital_twins_instance_name_test.go new file mode 100644 index 000000000000..6196d1fad62e --- /dev/null +++ b/azurerm/internal/services/digitaltwins/validate/digital_twins_instance_name_test.go @@ -0,0 +1,69 @@ +package validate + +import ( + "testing" +) + +func TestDigitalTwinsInstanceName(t *testing.T) { + tests := []struct { + Name string + Input string + Valid bool + }{ + { + Name: "Empty", + Input: "", + Valid: false, + }, + { + 
Name: "Too short", + Input: "a", + Valid: false, + }, + { + Name: "Invalid character", + Input: "digital_twins", + Valid: false, + }, + { + Name: "Valid Name", + Input: "Digital-12-Twins", + Valid: true, + }, + { + Name: "End with `-`", + Input: "Digital-12-", + Valid: false, + }, + { + Name: "Start with `-`", + Input: "-Digital-12", + Valid: false, + }, + { + Name: "Invalid character", + Input: "digital.twins", + Valid: false, + }, + { + Name: "Too long", + Input: "digitalTwinsdigitalTwinsdigitalTwinsdigitalTwinsdigitalTwinsdigi", + Valid: false, + }, + { + Name: "Max characters", + Input: "digitalTwinsdigitalTwinsdigitalTwinsdigitalTwinsdigitalTwins123", + Valid: true, + }, + } + + for _, tt := range tests { + t.Run(tt.Name, func(t *testing.T) { + _, err := DigitalTwinsInstanceName(tt.Input, "Name") + valid := err == nil + if valid != tt.Valid { + t.Errorf("Expected valid status %t but got %t for input %s", tt.Valid, valid, tt.Input) + } + }) + } +} diff --git a/azurerm/internal/services/dns/dns_a_record_resource.go b/azurerm/internal/services/dns/dns_a_record_resource.go index f909ea87a76e..546d551cd8a3 100644 --- a/azurerm/internal/services/dns/dns_a_record_resource.go +++ b/azurerm/internal/services/dns/dns_a_record_resource.go @@ -17,12 +17,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmDnsARecord() *schema.Resource { +func resourceDnsARecord() *schema.Resource { return &schema.Resource{ - Create: resourceArmDnsARecordCreateUpdate, - Read: resourceArmDnsARecordRead, - Update: resourceArmDnsARecordCreateUpdate, - Delete: resourceArmDnsARecordDelete, + Create: resourceDnsARecordCreateUpdate, + Read: resourceDnsARecordRead, + Update: resourceDnsARecordCreateUpdate, + Delete: resourceDnsARecordDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -31,7 +31,7 @@ func resourceArmDnsARecord() *schema.Resource { Delete: schema.DefaultTimeout(30 * time.Minute), }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.DnsARecordID(id) + _, err := parse.ARecordID(id) return err }), @@ -81,7 +81,7 @@ func resourceArmDnsARecord() *schema.Resource { } } -func resourceArmDnsARecordCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceDnsARecordCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Dns.RecordSetsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -144,31 +144,31 @@ func resourceArmDnsARecordCreateUpdate(d *schema.ResourceData, meta interface{}) d.SetId(*resp.ID) - return resourceArmDnsARecordRead(d, meta) + return resourceDnsARecordRead(d, meta) } -func resourceArmDnsARecordRead(d *schema.ResourceData, meta interface{}) error { +func resourceDnsARecordRead(d *schema.ResourceData, meta interface{}) error { dnsClient := meta.(*clients.Client).Dns.RecordSetsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DnsARecordID(d.Id()) + id, err := parse.ARecordID(d.Id()) if err != nil { return err } - resp, err := dnsClient.Get(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.A) + resp, err := dnsClient.Get(ctx, id.ResourceGroup, id.DnszoneName, id.AName, dns.A) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("Error reading DNS A record %s: %+v", id.Name, err) + return fmt.Errorf("Error reading DNS A record %s: %+v", 
id.AName, err) } - d.Set("name", id.Name) + d.Set("name", id.AName) d.Set("resource_group_name", id.ResourceGroup) - d.Set("zone_name", id.ZoneName) + d.Set("zone_name", id.DnszoneName) d.Set("fqdn", resp.Fqdn) d.Set("ttl", resp.TTL) @@ -186,19 +186,19 @@ func resourceArmDnsARecordRead(d *schema.ResourceData, meta interface{}) error { return tags.FlattenAndSet(d, resp.Metadata) } -func resourceArmDnsARecordDelete(d *schema.ResourceData, meta interface{}) error { +func resourceDnsARecordDelete(d *schema.ResourceData, meta interface{}) error { dnsClient := meta.(*clients.Client).Dns.RecordSetsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DnsARecordID(d.Id()) + id, err := parse.ARecordID(d.Id()) if err != nil { return err } - resp, err := dnsClient.Delete(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.A, "") + resp, err := dnsClient.Delete(ctx, id.ResourceGroup, id.DnszoneName, id.AName, dns.A, "") if resp.StatusCode != http.StatusOK { - return fmt.Errorf("Error deleting DNS A Record %s: %+v", id.Name, err) + return fmt.Errorf("Error deleting DNS A Record %s: %+v", id.AName, err) } return nil diff --git a/azurerm/internal/services/dns/dns_a_record_resource_test.go b/azurerm/internal/services/dns/dns_a_record_resource_test.go new file mode 100644 index 000000000000..dcc3c061b7da --- /dev/null +++ b/azurerm/internal/services/dns/dns_a_record_resource_test.go @@ -0,0 +1,442 @@ +package dns_test + +import ( + "context" + "fmt" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/dns/mgmt/2018-05-01/dns" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type TestAccDnsARecordResource struct { +} + +func TestAccDnsARecord_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_a_record", "test") + r := TestAccDnsARecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("fqdn").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDnsARecord_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_a_record", "test") + r := TestAccDnsARecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_dns_a_record"), + }, + }) +} + +func TestAccDnsARecord_updateRecords(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_a_record", "test") + r := TestAccDnsARecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("records.#").HasValue("2"), + ), + }, + { + Config: r.updateRecords(data), + Check: 
resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("records.#").HasValue("3"), + ), + }, + }) +} + +func TestAccDnsARecord_withTags(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_a_record", "test") + r := TestAccDnsARecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("2"), + ), + }, + { + Config: r.withTagsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMDnsARecord_withAlias(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_a_record", "test") + r := TestAccDnsARecordResource{} + targetResourceName := "azurerm_public_ip.test" + targetResourceName2 := "azurerm_public_ip.test2" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withAlias(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + resource.TestCheckResourceAttrPair(data.ResourceName, "target_resource_id", targetResourceName, "id"), + ), + }, + { + Config: r.withAliasUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + resource.TestCheckResourceAttrPair(data.ResourceName, "target_resource_id", targetResourceName2, "id"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMDnsARecord_RecordsToAlias(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_a_record", "test") + r := TestAccDnsARecordResource{} + targetResourceName := "azurerm_public_ip.test" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.AliasToRecordsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("records.#").HasValue("2"), + ), + }, + { + Config: r.AliasToRecords(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + resource.TestCheckResourceAttrPair(data.ResourceName, "target_resource_id", targetResourceName, "id"), + check.That(data.ResourceName).Key("records.#").HasValue("0"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMDnsARecord_AliasToRecords(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_a_record", "test") + r := TestAccDnsARecordResource{} + targetResourceName := "azurerm_public_ip.test" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.AliasToRecords(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + resource.TestCheckResourceAttrPair(data.ResourceName, "target_resource_id", targetResourceName, "id"), + ), + }, + { + Config: r.AliasToRecordsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("records.#").HasValue("2"), + check.That(data.ResourceName).Key("target_resource_id").HasValue(""), + ), + }, + data.ImportStep(), + }) +} + +func (TestAccDnsARecordResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ARecordID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Dns.RecordSetsClient.Get(ctx, id.ResourceGroup, id.DnszoneName, 
id.AName, dns.A) + if err != nil { + return nil, fmt.Errorf("retrieving DNS A record %s (resource group: %s): %v", id.AName, id.ResourceGroup, err) + } + + return utils.Bool(resp.RecordSetProperties != nil), nil +} + +func (TestAccDnsARecordResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_a_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + records = ["1.2.3.4", "1.2.4.5"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r TestAccDnsARecordResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_dns_a_record" "import" { + name = azurerm_dns_a_record.test.name + resource_group_name = azurerm_dns_a_record.test.resource_group_name + zone_name = azurerm_dns_a_record.test.zone_name + ttl = 300 + records = ["1.2.3.4", "1.2.4.5"] +} +`, r.basic(data)) +} + +func (TestAccDnsARecordResource) updateRecords(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_a_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + records = ["1.2.3.4", "1.2.4.5", "1.2.3.7"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (TestAccDnsARecordResource) withTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_a_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + records = ["1.2.3.4", "1.2.4.5"] + + tags = { + environment = "Production" + cost_center = "MSFT" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (TestAccDnsARecordResource) withTagsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_a_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + records = ["1.2.3.4", "1.2.4.5"] + + tags = { + environment = "staging" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (TestAccDnsARecordResource) withAlias(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} 
+ +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_public_ip" "test" { + name = "mypublicip%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Dynamic" + ip_version = "IPv4" +} + +resource "azurerm_dns_a_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + target_resource_id = azurerm_public_ip.test.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (TestAccDnsARecordResource) withAliasUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_public_ip" "test2" { + name = "mypublicip%d2" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Dynamic" + ip_version = "IPv4" +} + +resource "azurerm_dns_a_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + target_resource_id = azurerm_public_ip.test2.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (TestAccDnsARecordResource) AliasToRecords(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_public_ip" "test" { + name = "mypublicip%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Dynamic" + ip_version = "IPv4" +} + +resource "azurerm_dns_a_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + target_resource_id = azurerm_public_ip.test.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (TestAccDnsARecordResource) AliasToRecordsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_a_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + records = ["1.2.3.4", "1.2.4.5"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/dns/dns_aaaa_record_resource.go b/azurerm/internal/services/dns/dns_aaaa_record_resource.go index 04c7edb1a4e7..54a805c9bd6c 100644 --- 
a/azurerm/internal/services/dns/dns_aaaa_record_resource.go +++ b/azurerm/internal/services/dns/dns_aaaa_record_resource.go @@ -19,12 +19,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmDnsAAAARecord() *schema.Resource { +func resourceDnsAAAARecord() *schema.Resource { return &schema.Resource{ - Create: resourceArmDnsAaaaRecordCreateUpdate, - Read: resourceArmDnsAaaaRecordRead, - Update: resourceArmDnsAaaaRecordCreateUpdate, - Delete: resourceArmDnsAaaaRecordDelete, + Create: resourceDnsAaaaRecordCreateUpdate, + Read: resourceDnsAaaaRecordRead, + Update: resourceDnsAaaaRecordCreateUpdate, + Delete: resourceDnsAaaaRecordDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -34,7 +34,7 @@ func resourceArmDnsAAAARecord() *schema.Resource { }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.DnsAaaaRecordID(id) + _, err := parse.AaaaRecordID(id) return err }), @@ -85,7 +85,7 @@ func resourceArmDnsAAAARecord() *schema.Resource { } } -func resourceArmDnsAaaaRecordCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceDnsAaaaRecordCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Dns.RecordSetsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -148,31 +148,31 @@ func resourceArmDnsAaaaRecordCreateUpdate(d *schema.ResourceData, meta interface d.SetId(*resp.ID) - return resourceArmDnsAaaaRecordRead(d, meta) + return resourceDnsAaaaRecordRead(d, meta) } -func resourceArmDnsAaaaRecordRead(d *schema.ResourceData, meta interface{}) error { +func resourceDnsAaaaRecordRead(d *schema.ResourceData, meta interface{}) error { dnsClient := meta.(*clients.Client).Dns.RecordSetsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DnsAaaaRecordID(d.Id()) + id, err := parse.AaaaRecordID(d.Id()) if err != nil { return err } - resp, err := dnsClient.Get(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.AAAA) + resp, err := dnsClient.Get(ctx, id.ResourceGroup, id.DnszoneName, id.AAAAName, dns.AAAA) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("Error reading DNS AAAA record %s: %v", id.Name, err) + return fmt.Errorf("Error reading DNS AAAA record %s: %v", id.AAAAName, err) } - d.Set("name", id.Name) + d.Set("name", id.AAAAName) d.Set("resource_group_name", id.ResourceGroup) - d.Set("zone_name", id.ZoneName) + d.Set("zone_name", id.DnszoneName) d.Set("fqdn", resp.Fqdn) d.Set("ttl", resp.TTL) @@ -190,19 +190,19 @@ func resourceArmDnsAaaaRecordRead(d *schema.ResourceData, meta interface{}) erro return tags.FlattenAndSet(d, resp.Metadata) } -func resourceArmDnsAaaaRecordDelete(d *schema.ResourceData, meta interface{}) error { +func resourceDnsAaaaRecordDelete(d *schema.ResourceData, meta interface{}) error { dnsClient := meta.(*clients.Client).Dns.RecordSetsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DnsAaaaRecordID(d.Id()) + id, err := parse.AaaaRecordID(d.Id()) if err != nil { return err } - resp, err := dnsClient.Delete(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.AAAA, "") + resp, err := dnsClient.Delete(ctx, id.ResourceGroup, id.DnszoneName, id.AAAAName, dns.AAAA, "") if resp.StatusCode != http.StatusOK { - return fmt.Errorf("Error deleting DNS AAAA Record %s: 
%+v", id.Name, err) + return fmt.Errorf("Error deleting DNS AAAA Record %s: %+v", id.AAAAName, err) } return nil diff --git a/azurerm/internal/services/dns/dns_aaaa_record_resource_test.go b/azurerm/internal/services/dns/dns_aaaa_record_resource_test.go new file mode 100644 index 000000000000..a8caf317c6c8 --- /dev/null +++ b/azurerm/internal/services/dns/dns_aaaa_record_resource_test.go @@ -0,0 +1,492 @@ +package dns_test + +import ( + "context" + "fmt" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/dns/mgmt/2018-05-01/dns" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DnsAAAARecordResource struct { +} + +func TestAccDnsAAAARecord_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_aaaa_record", "test") + r := DnsAAAARecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("fqdn").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDnsAAAARecord_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_aaaa_record", "test") + r := DnsAAAARecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_dns_aaaa_record"), + }, + }) +} + +func TestAccDnsAAAARecord_updateRecords(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_aaaa_record", "test") + r := DnsAAAARecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("records.#").HasValue("2"), + ), + }, + { + Config: r.updateRecords(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("records.#").HasValue("3"), + ), + }, + }) +} + +func TestAccDnsAAAARecord_withTags(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_aaaa_record", "test") + r := DnsAAAARecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("2"), + ), + }, + { + Config: r.withTagsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMDnsAAAARecord_withAlias(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_aaaa_record", "test") + r := DnsAAAARecordResource{} + targetResourceName := "azurerm_public_ip.test" + targetResourceName2 := "azurerm_public_ip.test2" + + 
data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withAlias(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + resource.TestCheckResourceAttrPair(data.ResourceName, "target_resource_id", targetResourceName, "id"), + ), + }, + { + Config: r.withAliasUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + resource.TestCheckResourceAttrPair(data.ResourceName, "target_resource_id", targetResourceName2, "id"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMDnsAAAARecord_RecordsToAlias(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_aaaa_record", "test") + r := DnsAAAARecordResource{} + targetResourceName := "azurerm_public_ip.test" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.AliasToRecordsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("records.#").HasValue("2"), + ), + }, + { + Config: r.AliasToRecords(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + resource.TestCheckResourceAttrPair(data.ResourceName, "target_resource_id", targetResourceName, "id"), + resource.TestCheckNoResourceAttr(data.ResourceName, "records"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMDnsAaaaRecord_AliasToRecords(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_aaaa_record", "test") + r := DnsAAAARecordResource{} + targetResourceName := "azurerm_public_ip.test" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.AliasToRecords(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + resource.TestCheckResourceAttrPair(data.ResourceName, "target_resource_id", targetResourceName, "id"), + ), + }, + { + Config: r.AliasToRecordsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("records.#").HasValue("2"), + check.That(data.ResourceName).Key("target_resource_id").HasValue(""), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMDnsAAAARecord_uncompressed(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_aaaa_record", "test") + r := DnsAAAARecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.uncompressed(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("fqdn").Exists(), + ), + }, + { + Config: r.uncompressed(data), // just use the same for updating + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("records.#").HasValue("2"), + ), + }, + data.ImportStep(), + }) +} + +func (DnsAAAARecordResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.AaaaRecordID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Dns.RecordSetsClient.Get(ctx, id.ResourceGroup, id.DnszoneName, id.AAAAName, dns.AAAA) + if err != nil { + return nil, fmt.Errorf("retrieving DNS AAAA record %s (resource group: %s): %v", id.AAAAName, id.ResourceGroup, err) + } + + return utils.Bool(resp.RecordSetProperties != nil), nil +} + +func (DnsAAAARecordResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource 
"azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_aaaa_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + records = ["2607:f8b0:4009:1803::1005", "2607:f8b0:4009:1803::1006"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (DnsAAAARecordResource) requiresImport(data acceptance.TestData) string { + template := DnsAAAARecordResource{}.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_dns_aaaa_record" "import" { + name = azurerm_dns_aaaa_record.test.name + resource_group_name = azurerm_dns_aaaa_record.test.resource_group_name + zone_name = azurerm_dns_aaaa_record.test.zone_name + ttl = 300 + records = ["2607:f8b0:4009:1803::1005", "2607:f8b0:4009:1803::1006"] +} +`, template) +} + +func (DnsAAAARecordResource) updateRecords(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_aaaa_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + records = ["2607:f8b0:4009:1803::1005", "2607:f8b0:4009:1803::1006", "::1"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (DnsAAAARecordResource) withTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_aaaa_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + records = ["2607:f8b0:4009:1803::1005", "2607:f8b0:4009:1803::1006"] + + tags = { + environment = "Production" + cost_center = "MSFT" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (DnsAAAARecordResource) withTagsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_aaaa_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + records = ["2607:f8b0:4009:1803::1005", "2607:f8b0:4009:1803::1006"] + + tags = { + environment = "staging" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (DnsAAAARecordResource) withAlias(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = 
"acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_public_ip" "test" { + name = "mypublicip%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Dynamic" + ip_version = "IPv6" +} + +resource "azurerm_dns_aaaa_record" "test" { + name = "myaaaarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + target_resource_id = azurerm_public_ip.test.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (DnsAAAARecordResource) withAliasUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_public_ip" "test2" { + name = "mypublicip%d2" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Dynamic" + ip_version = "IPv6" +} + +resource "azurerm_dns_aaaa_record" "test" { + name = "myaaaarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + target_resource_id = azurerm_public_ip.test2.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (DnsAAAARecordResource) AliasToRecords(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_public_ip" "test" { + name = "mypublicip%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Dynamic" + ip_version = "IPv6" +} + +resource "azurerm_dns_aaaa_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + target_resource_id = azurerm_public_ip.test.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (DnsAAAARecordResource) AliasToRecordsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_aaaa_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + records = ["3a62:353:8885:293c:a218:45cc:9ee9:4e27", "3a62:353:8885:293c:a218:45cc:9ee9:4e28"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (DnsAAAARecordResource) uncompressed(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name 
= "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_aaaa_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + records = ["2607:f8b0:4005:0800:0000:0000:0000:1003", "2201:1234:1234::1"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/dns/dns_caa_record_resource.go b/azurerm/internal/services/dns/dns_caa_record_resource.go index a5ddf687166e..d603f29625f3 100644 --- a/azurerm/internal/services/dns/dns_caa_record_resource.go +++ b/azurerm/internal/services/dns/dns_caa_record_resource.go @@ -21,12 +21,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmDnsCaaRecord() *schema.Resource { +func resourceDnsCaaRecord() *schema.Resource { return &schema.Resource{ - Create: resourceArmDnsCaaRecordCreateUpdate, - Read: resourceArmDnsCaaRecordRead, - Update: resourceArmDnsCaaRecordCreateUpdate, - Delete: resourceArmDnsCaaRecordDelete, + Create: resourceDnsCaaRecordCreateUpdate, + Read: resourceDnsCaaRecordRead, + Update: resourceDnsCaaRecordCreateUpdate, + Delete: resourceDnsCaaRecordDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -36,7 +36,7 @@ func resourceArmDnsCaaRecord() *schema.Resource { }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.DnsCaaRecordID(id) + _, err := parse.CaaRecordID(id) return err }), @@ -81,7 +81,7 @@ func resourceArmDnsCaaRecord() *schema.Resource { }, }, }, - Set: resourceArmDnsCaaRecordHash, + Set: resourceDnsCaaRecordHash, }, "ttl": { @@ -99,7 +99,7 @@ func resourceArmDnsCaaRecord() *schema.Resource { } } -func resourceArmDnsCaaRecordCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceDnsCaaRecordCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Dns.RecordSetsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -150,31 +150,31 @@ func resourceArmDnsCaaRecordCreateUpdate(d *schema.ResourceData, meta interface{ d.SetId(*resp.ID) - return resourceArmDnsCaaRecordRead(d, meta) + return resourceDnsCaaRecordRead(d, meta) } -func resourceArmDnsCaaRecordRead(d *schema.ResourceData, meta interface{}) error { +func resourceDnsCaaRecordRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Dns.RecordSetsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DnsCaaRecordID(d.Id()) + id, err := parse.CaaRecordID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.CAA) + resp, err := client.Get(ctx, id.ResourceGroup, id.DnszoneName, id.CAAName, dns.CAA) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("Error reading DNS CAA record %s: %v", id.Name, err) + return fmt.Errorf("Error reading DNS CAA record %s: %v", id.CAAName, err) } - d.Set("name", id.Name) + d.Set("name", id.CAAName) d.Set("resource_group_name", id.ResourceGroup) - d.Set("zone_name", id.ZoneName) + d.Set("zone_name", id.DnszoneName) d.Set("ttl", resp.TTL) d.Set("fqdn", resp.Fqdn) @@ -185,19 +185,19 @@ func resourceArmDnsCaaRecordRead(d *schema.ResourceData, meta interface{}) error return tags.FlattenAndSet(d, 
resp.Metadata) } -func resourceArmDnsCaaRecordDelete(d *schema.ResourceData, meta interface{}) error { +func resourceDnsCaaRecordDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Dns.RecordSetsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DnsCaaRecordID(d.Id()) + id, err := parse.CaaRecordID(d.Id()) if err != nil { return err } - resp, err := client.Delete(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.CAA, "") + resp, err := client.Delete(ctx, id.ResourceGroup, id.DnszoneName, id.CAAName, dns.CAA, "") if resp.StatusCode != http.StatusOK { - return fmt.Errorf("Error deleting DNS CAA Record %s: %+v", id.Name, err) + return fmt.Errorf("Error deleting DNS CAA Record %s: %+v", id.CAAName, err) } return nil @@ -241,7 +241,7 @@ func expandAzureRmDnsCaaRecords(d *schema.ResourceData) *[]dns.CaaRecord { return &records } -func resourceArmDnsCaaRecordHash(v interface{}) int { +func resourceDnsCaaRecordHash(v interface{}) int { var buf bytes.Buffer if m, ok := v.(map[string]interface{}); ok { diff --git a/azurerm/internal/services/dns/dns_caa_record_resource_test.go b/azurerm/internal/services/dns/dns_caa_record_resource_test.go new file mode 100644 index 000000000000..394ae29ac544 --- /dev/null +++ b/azurerm/internal/services/dns/dns_caa_record_resource_test.go @@ -0,0 +1,337 @@ +package dns_test + +import ( + "context" + "fmt" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/dns/mgmt/2018-05-01/dns" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DnsCaaRecordResource struct { +} + +func TestAccDnsCaaRecord_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_caa_record", "test") + r := DnsCaaRecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("fqdn").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDnsCaaRecord_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_caa_record", "test") + r := DnsCaaRecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_dns_caa_record"), + }, + }) +} + +func TestAccDnsCaaRecord_updateRecords(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_caa_record", "test") + r := DnsCaaRecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("record.#").HasValue("4"), + ), + }, + { + Config: r.updateRecords(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + 
check.That(data.ResourceName).Key("record.#").HasValue("5"), + ), + }, + }) +} + +func TestAccDnsCaaRecord_withTags(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_caa_record", "test") + r := DnsCaaRecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("2"), + ), + }, + { + Config: r.withTagsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func (DnsCaaRecordResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.CaaRecordID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Dns.RecordSetsClient.Get(ctx, id.ResourceGroup, id.DnszoneName, id.CAAName, dns.CAA) + if err != nil { + return nil, fmt.Errorf("retrieving DNS CAA record %s (resource group: %s): %v", id.CAAName, id.ResourceGroup, err) + } + + return utils.Bool(resp.RecordSetProperties != nil), nil +} + +func (DnsCaaRecordResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_caa_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + + record { + flags = 0 + tag = "issue" + value = "example.com" + } + + record { + flags = 0 + tag = "issue" + value = "example.net" + } + + record { + flags = 1 + tag = "issuewild" + value = ";" + } + + record { + flags = 0 + tag = "iodef" + value = "mailto:terraform@nonexist.tld" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (DnsCaaRecordResource) requiresImport(data acceptance.TestData) string { + template := DnsCaaRecordResource{}.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_dns_caa_record" "import" { + name = azurerm_dns_caa_record.test.name + resource_group_name = azurerm_dns_caa_record.test.resource_group_name + zone_name = azurerm_dns_caa_record.test.zone_name + ttl = 300 + + record { + flags = 0 + tag = "issue" + value = "example.com" + } + + record { + flags = 0 + tag = "issue" + value = "example.net" + } + + record { + flags = 1 + tag = "issuewild" + value = ";" + } + + record { + flags = 0 + tag = "iodef" + value = "mailto:terraform@nonexist.tld" + } +} +`, template) +} + +func (DnsCaaRecordResource) updateRecords(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_caa_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + + record { + flags = 0 + tag = "issue" + value = "example.com" + } + + record { + flags = 0 + tag = "issue" + value = "example.net" + } + + record { + flags = 1 + tag = 
"issuewild" + value = ";" + } + + record { + flags = 0 + tag = "iodef" + value = "mailto:terraform@nonexist.tld" + } + + record { + flags = 0 + tag = "issue" + value = "letsencrypt.org" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (DnsCaaRecordResource) withTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_caa_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + + record { + flags = 0 + tag = "issue" + value = "example.net" + } + + record { + flags = 1 + tag = "issuewild" + value = ";" + } + + tags = { + environment = "Production" + cost_center = "MSFT" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (DnsCaaRecordResource) withTagsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_caa_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + + record { + flags = 0 + tag = "issue" + value = "example.net" + } + + record { + flags = 1 + tag = "issuewild" + value = ";" + } + + tags = { + environment = "staging" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/dns/dns_cname_record_resource.go b/azurerm/internal/services/dns/dns_cname_record_resource.go index 26ae7386d368..e6c24c58ecce 100644 --- a/azurerm/internal/services/dns/dns_cname_record_resource.go +++ b/azurerm/internal/services/dns/dns_cname_record_resource.go @@ -17,12 +17,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmDnsCNameRecord() *schema.Resource { +func resourceDnsCNameRecord() *schema.Resource { return &schema.Resource{ - Create: resourceArmDnsCNameRecordCreateUpdate, - Read: resourceArmDnsCNameRecordRead, - Update: resourceArmDnsCNameRecordCreateUpdate, - Delete: resourceArmDnsCNameRecordDelete, + Create: resourceDnsCNameRecordCreateUpdate, + Read: resourceDnsCNameRecordRead, + Update: resourceDnsCNameRecordCreateUpdate, + Delete: resourceDnsCNameRecordDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -32,7 +32,7 @@ func resourceArmDnsCNameRecord() *schema.Resource { }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.DnsCNameRecordID(id) + _, err := parse.CnameRecordID(id) return err }), @@ -78,7 +78,7 @@ func resourceArmDnsCNameRecord() *schema.Resource { } } -func resourceArmDnsCNameRecordCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceDnsCNameRecordCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Dns.RecordSetsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -145,31 
+145,31 @@ func resourceArmDnsCNameRecordCreateUpdate(d *schema.ResourceData, meta interfac d.SetId(*resp.ID) - return resourceArmDnsCNameRecordRead(d, meta) + return resourceDnsCNameRecordRead(d, meta) } -func resourceArmDnsCNameRecordRead(d *schema.ResourceData, meta interface{}) error { +func resourceDnsCNameRecordRead(d *schema.ResourceData, meta interface{}) error { dnsClient := meta.(*clients.Client).Dns.RecordSetsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DnsCNameRecordID(d.Id()) + id, err := parse.CnameRecordID(d.Id()) if err != nil { return err } - resp, err := dnsClient.Get(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.CNAME) + resp, err := dnsClient.Get(ctx, id.ResourceGroup, id.DnszoneName, id.CNAMEName, dns.CNAME) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("Error retrieving CNAME Record %s (DNS Zone %q / Resource Group %q): %+v", id.Name, id.ZoneName, id.ResourceGroup, err) + return fmt.Errorf("Error retrieving CNAME Record %s (DNS Zone %q / Resource Group %q): %+v", id.CNAMEName, id.DnszoneName, id.ResourceGroup, err) } - d.Set("name", id.Name) + d.Set("name", id.CNAMEName) d.Set("resource_group_name", id.ResourceGroup) - d.Set("zone_name", id.ZoneName) + d.Set("zone_name", id.DnszoneName) d.Set("fqdn", resp.Fqdn) d.Set("ttl", resp.TTL) @@ -191,19 +191,19 @@ func resourceArmDnsCNameRecordRead(d *schema.ResourceData, meta interface{}) err return tags.FlattenAndSet(d, resp.Metadata) } -func resourceArmDnsCNameRecordDelete(d *schema.ResourceData, meta interface{}) error { +func resourceDnsCNameRecordDelete(d *schema.ResourceData, meta interface{}) error { dnsClient := meta.(*clients.Client).Dns.RecordSetsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DnsCNameRecordID(d.Id()) + id, err := parse.CnameRecordID(d.Id()) if err != nil { return err } - resp, err := dnsClient.Delete(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.CNAME, "") + resp, err := dnsClient.Delete(ctx, id.ResourceGroup, id.DnszoneName, id.CNAMEName, dns.CNAME, "") if resp.StatusCode != http.StatusOK { - return fmt.Errorf("Error deleting CNAME Record %q (DNS Zone %q / Resource Group %q): %+v", id.Name, id.ZoneName, id.ResourceGroup, err) + return fmt.Errorf("Error deleting CNAME Record %q (DNS Zone %q / Resource Group %q): %+v", id.CNAMEName, id.DnszoneName, id.ResourceGroup, err) } return nil diff --git a/azurerm/internal/services/dns/dns_cname_record_resource_test.go b/azurerm/internal/services/dns/dns_cname_record_resource_test.go new file mode 100644 index 000000000000..9764c1e3525a --- /dev/null +++ b/azurerm/internal/services/dns/dns_cname_record_resource_test.go @@ -0,0 +1,480 @@ +package dns_test + +import ( + "context" + "fmt" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/dns/mgmt/2018-05-01/dns" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DnsCNameRecordResource struct 
{
+}
+
+func TestAccDnsCNameRecord_basic(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_dns_cname_record", "test")
+	r := DnsCNameRecordResource{}
+
+	data.ResourceTest(t, r, []resource.TestStep{
+		{
+			Config: r.basic(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+				check.That(data.ResourceName).Key("fqdn").Exists(),
+			),
+		},
+		data.ImportStep(),
+	})
+}
+
+func TestAccDnsCNameRecord_requiresImport(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_dns_cname_record", "test")
+	r := DnsCNameRecordResource{}
+
+	data.ResourceTest(t, r, []resource.TestStep{
+		{
+			Config: r.basic(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		{
+			Config: r.requiresImport(data),
+			ExpectError: acceptance.RequiresImportError("azurerm_dns_cname_record"),
+		},
+	})
+}
+
+func TestAccDnsCNameRecord_subdomain(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_dns_cname_record", "test")
+	r := DnsCNameRecordResource{}
+
+	data.ResourceTest(t, r, []resource.TestStep{
+		{
+			Config: r.subdomain(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+				check.That(data.ResourceName).Key("record").HasValue("test.contoso.com"),
+			),
+		},
+		data.ImportStep(),
+	})
+}
+
+func TestAccDnsCNameRecord_updateRecords(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_dns_cname_record", "test")
+	r := DnsCNameRecordResource{}
+
+	data.ResourceTest(t, r, []resource.TestStep{
+		{
+			Config: r.basic(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		{
+			Config: r.updateRecords(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+	})
+}
+
+func TestAccDnsCNameRecord_withTags(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_dns_cname_record", "test")
+	r := DnsCNameRecordResource{}
+
+	data.ResourceTest(t, r, []resource.TestStep{
+		{
+			Config: r.withTags(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+				check.That(data.ResourceName).Key("tags.%").HasValue("2"),
+			),
+		},
+		{
+			Config: r.withTagsUpdate(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+				check.That(data.ResourceName).Key("tags.%").HasValue("1"),
+			),
+		},
+		data.ImportStep(),
+	})
+}
+
+func TestAccDnsCNameRecord_withAlias(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_dns_cname_record", "test")
+	r := DnsCNameRecordResource{}
+	targetResourceName := "azurerm_dns_cname_record.target"
+	targetResourceName2 := "azurerm_dns_cname_record.target2"
+
+	data.ResourceTest(t, r, []resource.TestStep{
+		{
+			Config: r.withAlias(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+				resource.TestCheckResourceAttrPair(data.ResourceName, "target_resource_id", targetResourceName, "id"),
+			),
+		},
+		{
+			Config: r.withAliasUpdate(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+				resource.TestCheckResourceAttrPair(data.ResourceName, "target_resource_id", targetResourceName2, "id"),
+			),
+		},
+		data.ImportStep(),
+	})
+}
+
+func TestAccDnsCNameRecord_RecordToAlias(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_dns_cname_record", "test")
+	r := DnsCNameRecordResource{}
+	targetResourceName := "azurerm_dns_cname_record.target2"
+
+	
data.ResourceTest(t, r, []resource.TestStep{
+		{
+			Config: r.AliasToRecordUpdate(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		{
+			Config: r.AliasToRecord(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+				resource.TestCheckResourceAttrPair(data.ResourceName, "target_resource_id", targetResourceName, "id"),
+				check.That(data.ResourceName).Key("record").HasValue(""),
+			),
+		},
+		data.ImportStep(),
+	})
+}
+
+func TestAccDnsCNameRecord_AliasToRecord(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_dns_cname_record", "test")
+	r := DnsCNameRecordResource{}
+	targetResourceName := "azurerm_dns_cname_record.target2"
+
+	data.ResourceTest(t, r, []resource.TestStep{
+		{
+			Config: r.AliasToRecord(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+				resource.TestCheckResourceAttrPair(data.ResourceName, "target_resource_id", targetResourceName, "id"),
+			),
+		},
+		{
+			Config: r.AliasToRecordUpdate(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+				check.That(data.ResourceName).Key("target_resource_id").HasValue(""),
+			),
+		},
+		data.ImportStep(),
+	})
+}
+
+func (DnsCNameRecordResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) {
+	id, err := parse.CnameRecordID(state.ID)
+	if err != nil {
+		return nil, err
+	}
+
+	resp, err := clients.Dns.RecordSetsClient.Get(ctx, id.ResourceGroup, id.DnszoneName, id.CNAMEName, dns.CNAME)
+	if err != nil {
+		return nil, fmt.Errorf("retrieving DNS CNAME record %s (resource group: %s): %v", id.CNAMEName, id.ResourceGroup, err)
+	}
+
+	return utils.Bool(resp.RecordSetProperties != nil), nil
+}
+
+func (DnsCNameRecordResource) basic(data acceptance.TestData) string {
+	return fmt.Sprintf(`
+provider "azurerm" {
+  features {}
+}
+
+resource "azurerm_resource_group" "test" {
+  name = "acctestRG-%d"
+  location = "%s"
+}
+
+resource "azurerm_dns_zone" "test" {
+  name = "acctestzone%d.com"
+  resource_group_name = azurerm_resource_group.test.name
+}
+
+resource "azurerm_dns_cname_record" "test" {
+  name = "myarecord%d"
+  resource_group_name = azurerm_resource_group.test.name
+  zone_name = azurerm_dns_zone.test.name
+  ttl = 300
+  record = "contoso.com"
+}
+`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger)
+}
+
+func (r DnsCNameRecordResource) requiresImport(data acceptance.TestData) string {
+	return fmt.Sprintf(`
+%s
+
+resource "azurerm_dns_cname_record" "import" {
+  name = azurerm_dns_cname_record.test.name
+  resource_group_name = azurerm_dns_cname_record.test.resource_group_name
+  zone_name = azurerm_dns_cname_record.test.zone_name
+  ttl = 300
+  record = "contoso.com"
+}
+`, r.basic(data))
+}
+
+func (DnsCNameRecordResource) subdomain(data acceptance.TestData) string {
+	return fmt.Sprintf(`
+provider "azurerm" {
+  features {}
+}
+
+resource "azurerm_resource_group" "test" {
+  name = "acctestRG-%d"
+  location = "%s"
+}
+
+resource "azurerm_dns_zone" "test" {
+  name = "acctestzone%d.com"
+  resource_group_name = azurerm_resource_group.test.name
+}
+
+resource "azurerm_dns_cname_record" "test" {
+  name = "myarecord%d"
+  resource_group_name = azurerm_resource_group.test.name
+  zone_name = azurerm_dns_zone.test.name
+  ttl = 300
+  record = "test.contoso.com"
+}
+`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger)
+}
+
+func 
(DnsCNameRecordResource) updateRecords(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_cname_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + record = "contoso.co.uk" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (DnsCNameRecordResource) withTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_cname_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + record = "contoso.com" + + tags = { + environment = "Production" + cost_center = "MSFT" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (DnsCNameRecordResource) withTagsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_cname_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + record = "contoso.com" + + tags = { + environment = "staging" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (DnsCNameRecordResource) withAlias(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_cname_record" "target" { + name = "mycnametarget%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + record = "contoso.com" +} + +resource "azurerm_dns_cname_record" "test" { + name = "mycnamerecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + target_resource_id = azurerm_dns_cname_record.target.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (DnsCNameRecordResource) withAliasUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_cname_record" "target2" { + name = "mycnametarget%d2" + resource_group_name = azurerm_resource_group.test.name + zone_name = 
azurerm_dns_zone.test.name + ttl = 300 + record = "contoso.co.uk" +} + +resource "azurerm_dns_cname_record" "test" { + name = "mycnamerecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + target_resource_id = azurerm_dns_cname_record.target2.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (DnsCNameRecordResource) AliasToRecord(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_cname_record" "target2" { + name = "mycnametarget%d2" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + record = "contoso.co.uk" +} + +resource "azurerm_dns_cname_record" "test" { + name = "mycnamerecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + target_resource_id = azurerm_dns_cname_record.target2.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (DnsCNameRecordResource) AliasToRecordUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_cname_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + record = "1.2.3.4" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/dns/dns_mx_record_resource.go b/azurerm/internal/services/dns/dns_mx_record_resource.go index 51634d52474c..cdf7d68f4fd0 100644 --- a/azurerm/internal/services/dns/dns_mx_record_resource.go +++ b/azurerm/internal/services/dns/dns_mx_record_resource.go @@ -20,12 +20,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmDnsMxRecord() *schema.Resource { +func resourceDnsMxRecord() *schema.Resource { return &schema.Resource{ - Create: resourceArmDnsMxRecordCreateUpdate, - Read: resourceArmDnsMxRecordRead, - Update: resourceArmDnsMxRecordCreateUpdate, - Delete: resourceArmDnsMxRecordDelete, + Create: resourceDnsMxRecordCreateUpdate, + Read: resourceDnsMxRecordRead, + Update: resourceDnsMxRecordCreateUpdate, + Delete: resourceDnsMxRecordDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -35,7 +35,7 @@ func resourceArmDnsMxRecord() *schema.Resource { }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.DnsMxRecordID(id) + _, err := parse.MxRecordID(id) return err }), @@ -71,7 +71,7 @@ func resourceArmDnsMxRecord() *schema.Resource { }, }, }, - Set: resourceArmDnsMxRecordHash, + Set: resourceDnsMxRecordHash, }, "ttl": { @@ -89,7 +89,7 @@ func resourceArmDnsMxRecord() *schema.Resource { } } -func resourceArmDnsMxRecordCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceDnsMxRecordCreateUpdate(d 
*schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Dns.RecordSetsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -138,31 +138,31 @@ func resourceArmDnsMxRecordCreateUpdate(d *schema.ResourceData, meta interface{} d.SetId(*resp.ID) - return resourceArmDnsMxRecordRead(d, meta) + return resourceDnsMxRecordRead(d, meta) } -func resourceArmDnsMxRecordRead(d *schema.ResourceData, meta interface{}) error { +func resourceDnsMxRecordRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Dns.RecordSetsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DnsMxRecordID(d.Id()) + id, err := parse.MxRecordID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.MX) + resp, err := client.Get(ctx, id.ResourceGroup, id.DnszoneName, id.MXName, dns.MX) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("Error reading DNS MX record %s: %v", id.Name, err) + return fmt.Errorf("Error reading DNS MX record %s: %v", id.MXName, err) } - d.Set("name", id.Name) + d.Set("name", id.MXName) d.Set("resource_group_name", id.ResourceGroup) - d.Set("zone_name", id.ZoneName) + d.Set("zone_name", id.DnszoneName) d.Set("ttl", resp.TTL) d.Set("fqdn", resp.Fqdn) @@ -173,19 +173,19 @@ func resourceArmDnsMxRecordRead(d *schema.ResourceData, meta interface{}) error return tags.FlattenAndSet(d, resp.Metadata) } -func resourceArmDnsMxRecordDelete(d *schema.ResourceData, meta interface{}) error { +func resourceDnsMxRecordDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Dns.RecordSetsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DnsMxRecordID(d.Id()) + id, err := parse.MxRecordID(d.Id()) if err != nil { return err } - resp, err := client.Delete(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.MX, "") + resp, err := client.Delete(ctx, id.ResourceGroup, id.DnszoneName, id.MXName, dns.MX, "") if resp.StatusCode != http.StatusOK { - return fmt.Errorf("Error deleting DNS MX Record %s: %+v", id.Name, err) + return fmt.Errorf("Error deleting DNS MX Record %s: %+v", id.MXName, err) } return nil @@ -234,7 +234,7 @@ func expandAzureRmDnsMxRecords(d *schema.ResourceData) *[]dns.MxRecord { return &records } -func resourceArmDnsMxRecordHash(v interface{}) int { +func resourceDnsMxRecordHash(v interface{}) int { var buf bytes.Buffer if m, ok := v.(map[string]interface{}); ok { diff --git a/azurerm/internal/services/dns/dns_mx_record_resource_test.go b/azurerm/internal/services/dns/dns_mx_record_resource_test.go new file mode 100644 index 000000000000..322f7d949378 --- /dev/null +++ b/azurerm/internal/services/dns/dns_mx_record_resource_test.go @@ -0,0 +1,339 @@ +package dns_test + +import ( + "context" + "fmt" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/dns/mgmt/2018-05-01/dns" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DnsMxRecordResource struct { +} + +func TestAccDnsMxRecord_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_mx_record", "test") + r := DnsMxRecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("fqdn").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDnsMxRecord_rootrecord(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_mx_record", "test") + r := DnsMxRecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.rootrecord(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("fqdn").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDnsMxRecord_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_mx_record", "test") + r := DnsMxRecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_dns_mx_record"), + }, + }) +} + +func TestAccDnsMxRecord_updateRecords(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_mx_record", "test") + r := DnsMxRecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("record.#").HasValue("2"), + ), + }, + { + Config: r.updateRecords(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("record.#").HasValue("3"), + ), + }, + }) +} + +func TestAccAzureRMDnsMxRecord_withTags(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_mx_record", "test") + r := DnsMxRecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("2"), + ), + }, + { + Config: r.withTagsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func (DnsMxRecordResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.MxRecordID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Dns.RecordSetsClient.Get(ctx, id.ResourceGroup, id.DnszoneName, id.MXName, dns.MX) + if err != nil { + return nil, fmt.Errorf("retrieving DNS MX record %s (resource group: %s): %v", id.MXName, id.ResourceGroup, err) + } + + return utils.Bool(resp.RecordSetProperties != nil), nil +} + +func (DnsMxRecordResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + 
resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_mx_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + + record { + preference = "10" + exchange = "mail1.contoso.com" + } + + record { + preference = "20" + exchange = "mail2.contoso.com" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (DnsMxRecordResource) rootrecord(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_mx_record" "test" { + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + + record { + preference = "10" + exchange = "mail1.contoso.com" + } + + record { + preference = "20" + exchange = "mail2.contoso.com" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r DnsMxRecordResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_dns_mx_record" "import" { + name = azurerm_dns_mx_record.test.name + resource_group_name = azurerm_dns_mx_record.test.resource_group_name + zone_name = azurerm_dns_mx_record.test.zone_name + ttl = 300 + + record { + preference = "10" + exchange = "mail1.contoso.com" + } + + record { + preference = "20" + exchange = "mail2.contoso.com" + } +} +`, r.basic(data)) +} + +func (DnsMxRecordResource) updateRecords(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_mx_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + + record { + preference = "10" + exchange = "mail1.contoso.com" + } + + record { + preference = "20" + exchange = "mail2.contoso.com" + } + + record { + preference = "50" + exchange = "mail3.contoso.com" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (DnsMxRecordResource) withTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_mx_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + + record { + preference = "10" + exchange = "mail1.contoso.com" + } + + record { + preference = "20" + exchange = "mail2.contoso.com" + } + + tags = { + environment = "Production" + cost_center = "MSFT" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (DnsMxRecordResource) withTagsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" 
"test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_mx_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + + record { + preference = "10" + exchange = "mail1.contoso.com" + } + + record { + preference = "20" + exchange = "mail2.contoso.com" + } + + tags = { + environment = "staging" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/dns/dns_ns_record_resource.go b/azurerm/internal/services/dns/dns_ns_record_resource.go index 2a76cb741b1e..7af7935770a9 100644 --- a/azurerm/internal/services/dns/dns_ns_record_resource.go +++ b/azurerm/internal/services/dns/dns_ns_record_resource.go @@ -17,12 +17,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmDnsNsRecord() *schema.Resource { +func resourceDnsNsRecord() *schema.Resource { return &schema.Resource{ - Create: resourceArmDnsNsRecordCreate, - Read: resourceArmDnsNsRecordRead, - Update: resourceArmDnsNsRecordUpdate, - Delete: resourceArmDnsNsRecordDelete, + Create: resourceDnsNsRecordCreate, + Read: resourceDnsNsRecordRead, + Update: resourceDnsNsRecordUpdate, + Delete: resourceDnsNsRecordDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -31,7 +31,7 @@ func resourceArmDnsNsRecord() *schema.Resource { Delete: schema.DefaultTimeout(30 * time.Minute), }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.DnsNsRecordID(id) + _, err := parse.NsRecordID(id) return err }), @@ -73,7 +73,7 @@ func resourceArmDnsNsRecord() *schema.Resource { } } -func resourceArmDnsNsRecordCreate(d *schema.ResourceData, meta interface{}) error { +func resourceDnsNsRecordCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Dns.RecordSetsClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -125,10 +125,10 @@ func resourceArmDnsNsRecordCreate(d *schema.ResourceData, meta interface{}) erro d.SetId(*resp.ID) - return resourceArmDnsNsRecordRead(d, meta) + return resourceDnsNsRecordRead(d, meta) } -func resourceArmDnsNsRecordUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceDnsNsRecordUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Dns.RecordSetsClient ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -172,31 +172,31 @@ func resourceArmDnsNsRecordUpdate(d *schema.ResourceData, meta interface{}) erro return fmt.Errorf("Error updating DNS NS Record %q (Zone %q / Resource Group %q): %s", name, zoneName, resGroup, err) } - return resourceArmDnsNsRecordRead(d, meta) + return resourceDnsNsRecordRead(d, meta) } -func resourceArmDnsNsRecordRead(d *schema.ResourceData, meta interface{}) error { +func resourceDnsNsRecordRead(d *schema.ResourceData, meta interface{}) error { dnsClient := meta.(*clients.Client).Dns.RecordSetsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DnsNsRecordID(d.Id()) + id, err := parse.NsRecordID(d.Id()) if err != nil { return err } - resp, err := dnsClient.Get(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.NS) + resp, err := 
dnsClient.Get(ctx, id.ResourceGroup, id.DnszoneName, id.NSName, dns.NS) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("Error reading DNS NS record %s: %+v", id.Name, err) + return fmt.Errorf("Error reading DNS NS record %s: %+v", id.NSName, err) } - d.Set("name", id.Name) + d.Set("name", id.NSName) d.Set("resource_group_name", id.ResourceGroup) - d.Set("zone_name", id.ZoneName) + d.Set("zone_name", id.DnszoneName) d.Set("ttl", resp.TTL) d.Set("fqdn", resp.Fqdn) @@ -210,19 +210,19 @@ func resourceArmDnsNsRecordRead(d *schema.ResourceData, meta interface{}) error return tags.FlattenAndSet(d, resp.Metadata) } -func resourceArmDnsNsRecordDelete(d *schema.ResourceData, meta interface{}) error { +func resourceDnsNsRecordDelete(d *schema.ResourceData, meta interface{}) error { dnsClient := meta.(*clients.Client).Dns.RecordSetsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DnsNsRecordID(d.Id()) + id, err := parse.NsRecordID(d.Id()) if err != nil { return err } - resp, err := dnsClient.Delete(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.NS, "") + resp, err := dnsClient.Delete(ctx, id.ResourceGroup, id.DnszoneName, id.NSName, dns.NS, "") if resp.StatusCode != http.StatusOK { - return fmt.Errorf("Error deleting DNS NS Record %s: %+v", id.Name, err) + return fmt.Errorf("Error deleting DNS NS Record %s: %+v", id.NSName, err) } return nil diff --git a/azurerm/internal/services/dns/dns_ns_record_resource_test.go b/azurerm/internal/services/dns/dns_ns_record_resource_test.go new file mode 100644 index 000000000000..c3ae45264139 --- /dev/null +++ b/azurerm/internal/services/dns/dns_ns_record_resource_test.go @@ -0,0 +1,245 @@ +package dns_test + +import ( + "context" + "fmt" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/dns/mgmt/2018-05-01/dns" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DnsNsRecordResource struct { +} + +func TestAccDnsNsRecord_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_ns_record", "test") + r := DnsNsRecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("fqdn").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDnsNsRecord_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_ns_record", "test") + r := DnsNsRecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_dns_ns_record"), + }, + }) +} + +func TestAccDnsNsRecord_updateRecords(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_ns_record", "test") + r := DnsNsRecordResource{} + + 
data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("records.#").HasValue("2"), + ), + }, + { + Config: r.updateRecords(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("records.#").HasValue("3"), + ), + }, + }) +} + +func TestAccDnsNsRecord_withTags(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_ns_record", "test") + r := DnsNsRecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("2"), + ), + }, + { + Config: r.withTagsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func (DnsNsRecordResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.NsRecordID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Dns.RecordSetsClient.Get(ctx, id.ResourceGroup, id.DnszoneName, id.NSName, dns.NS) + if err != nil { + return nil, fmt.Errorf("retrieving DNS NS record %s (resource group: %s): %v", id.NSName, id.ResourceGroup, err) + } + + return utils.Bool(resp.RecordSetProperties != nil), nil +} + +func (DnsNsRecordResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_ns_record" "test" { + name = "mynsrecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + + records = ["ns1.contoso.com", "ns2.contoso.com"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (DnsNsRecordResource) requiresImport(data acceptance.TestData) string { + template := DnsNsRecordResource{}.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_dns_ns_record" "import" { + name = azurerm_dns_ns_record.test.name + resource_group_name = azurerm_dns_ns_record.test.resource_group_name + zone_name = azurerm_dns_ns_record.test.zone_name + ttl = 300 + + records = ["ns1.contoso.com", "ns2.contoso.com"] +} +`, template) +} + +func (DnsNsRecordResource) updateRecords(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_ns_record" "test" { + name = "mynsrecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + + records = ["ns1.contoso.com", "ns2.contoso.com", "ns3.contoso.com"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (DnsNsRecordResource) withTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features 
{} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_ns_record" "test" { + name = "mynsrecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + + records = ["ns1.contoso.com", "ns2.contoso.com"] + + tags = { + environment = "Production" + cost_center = "MSFT" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (DnsNsRecordResource) withTagsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_ns_record" "test" { + name = "mynsrecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + + records = ["ns1.contoso.com", "ns2.contoso.com"] + + tags = { + environment = "staging" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/dns/dns_ptr_record_resource.go b/azurerm/internal/services/dns/dns_ptr_record_resource.go index 021816d964a1..2ed8106157ac 100644 --- a/azurerm/internal/services/dns/dns_ptr_record_resource.go +++ b/azurerm/internal/services/dns/dns_ptr_record_resource.go @@ -17,12 +17,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmDnsPtrRecord() *schema.Resource { +func resourceDnsPtrRecord() *schema.Resource { return &schema.Resource{ - Create: resourceArmDnsPtrRecordCreateUpdate, - Read: resourceArmDnsPtrRecordRead, - Update: resourceArmDnsPtrRecordCreateUpdate, - Delete: resourceArmDnsPtrRecordDelete, + Create: resourceDnsPtrRecordCreateUpdate, + Read: resourceDnsPtrRecordRead, + Update: resourceDnsPtrRecordCreateUpdate, + Delete: resourceDnsPtrRecordDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -32,7 +32,7 @@ func resourceArmDnsPtrRecord() *schema.Resource { }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.DnsPtrRecordID(id) + _, err := parse.PtrRecordID(id) return err }), Schema: map[string]*schema.Schema{ @@ -71,7 +71,7 @@ func resourceArmDnsPtrRecord() *schema.Resource { } } -func resourceArmDnsPtrRecordCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceDnsPtrRecordCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Dns.RecordSetsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -121,33 +121,33 @@ func resourceArmDnsPtrRecordCreateUpdate(d *schema.ResourceData, meta interface{ d.SetId(*resp.ID) - return resourceArmDnsPtrRecordRead(d, meta) + return resourceDnsPtrRecordRead(d, meta) } -func resourceArmDnsPtrRecordRead(d *schema.ResourceData, meta interface{}) error { +func resourceDnsPtrRecordRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client) dnsClient := client.Dns.RecordSetsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DnsPtrRecordID(d.Id()) + id, err := 
parse.PtrRecordID(d.Id()) if err != nil { return err } - resp, err := dnsClient.Get(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.PTR) + resp, err := dnsClient.Get(ctx, id.ResourceGroup, id.DnszoneName, id.PTRName, dns.PTR) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("Error reading DNS PTR record %s: %+v", id.Name, err) + return fmt.Errorf("Error reading DNS PTR record %s: %+v", id.PTRName, err) } - d.Set("name", id.Name) + d.Set("name", id.PTRName) d.Set("resource_group_name", id.ResourceGroup) - d.Set("zone_name", id.ZoneName) + d.Set("zone_name", id.DnszoneName) d.Set("ttl", resp.TTL) d.Set("fqdn", resp.Fqdn) @@ -157,24 +157,24 @@ func resourceArmDnsPtrRecordRead(d *schema.ResourceData, meta interface{}) error return tags.FlattenAndSet(d, resp.Metadata) } -func resourceArmDnsPtrRecordDelete(d *schema.ResourceData, meta interface{}) error { +func resourceDnsPtrRecordDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client) dnsClient := client.Dns.RecordSetsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DnsPtrRecordID(d.Id()) + id, err := parse.PtrRecordID(d.Id()) if err != nil { return err } - resp, err := dnsClient.Delete(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.PTR, "") + resp, err := dnsClient.Delete(ctx, id.ResourceGroup, id.DnszoneName, id.PTRName, dns.PTR, "") if err != nil { if resp.StatusCode == http.StatusNotFound { return nil } - return fmt.Errorf("Error deleting DNS PTR Record %s: %+v", id.Name, err) + return fmt.Errorf("Error deleting DNS PTR Record %s: %+v", id.PTRName, err) } return nil diff --git a/azurerm/internal/services/dns/dns_ptr_record_resource_test.go b/azurerm/internal/services/dns/dns_ptr_record_resource_test.go new file mode 100644 index 000000000000..709a9c9c367b --- /dev/null +++ b/azurerm/internal/services/dns/dns_ptr_record_resource_test.go @@ -0,0 +1,241 @@ +package dns_test + +import ( + "context" + "fmt" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/dns/mgmt/2018-05-01/dns" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DnsPtrRecordResource struct { +} + +func TestAccDnsPtrRecord_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_ptr_record", "test") + r := DnsPtrRecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("fqdn").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDnsPtrRecord_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_ptr_record", "test") + r := DnsPtrRecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: 
acceptance.RequiresImportError("azurerm_dns_ptr_record"), + }, + }) +} + +func TestAccDnsPtrRecord_updateRecords(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_ptr_record", "test") + r := DnsPtrRecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("records.#").HasValue("2"), + ), + }, + + { + Config: r.updateRecords(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("records.#").HasValue("3"), + ), + }, + }) +} + +func TestAccDnsPtrRecord_withTags(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_ptr_record", "test") + r := DnsPtrRecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("2"), + ), + }, + + { + Config: r.withTagsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func (DnsPtrRecordResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.PtrRecordID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Dns.RecordSetsClient.Get(ctx, id.ResourceGroup, id.DnszoneName, id.PTRName, dns.PTR) + if err != nil { + return nil, fmt.Errorf("retrieving DNS PTR record %s (resource group: %s): %v", id.PTRName, id.ResourceGroup, err) + } + + return utils.Bool(resp.RecordSetProperties != nil), nil +} + +func (DnsPtrRecordResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_ptr_record" "test" { + name = "testptrrecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + records = ["hashicorp.com", "microsoft.com"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r DnsPtrRecordResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_dns_ptr_record" "import" { + name = azurerm_dns_ptr_record.test.name + resource_group_name = azurerm_dns_ptr_record.test.resource_group_name + zone_name = azurerm_dns_ptr_record.test.zone_name + ttl = 300 + records = ["hashicorp.com", "microsoft.com"] +} +`, r.basic(data)) +} + +func (DnsPtrRecordResource) updateRecords(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_ptr_record" "test" { + name = "testptrrecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + records = ["hashicorp.com", "microsoft.com", "reddit.com"] +} +`, 
data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (DnsPtrRecordResource) withTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_ptr_record" "test" { + name = "testptrrecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + records = ["hashicorp.com", "microsoft.com"] + + tags = { + environment = "Dev" + cost_center = "Ops" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (DnsPtrRecordResource) withTagsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_ptr_record" "test" { + name = "testptrrecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + records = ["hashicorp.com", "microsoft.com"] + + tags = { + environment = "Stage" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/dns/dns_srv_record_resource.go b/azurerm/internal/services/dns/dns_srv_record_resource.go index f69a6a8805f7..c4580d9ee31f 100644 --- a/azurerm/internal/services/dns/dns_srv_record_resource.go +++ b/azurerm/internal/services/dns/dns_srv_record_resource.go @@ -19,12 +19,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmDnsSrvRecord() *schema.Resource { +func resourceDnsSrvRecord() *schema.Resource { return &schema.Resource{ - Create: resourceArmDnsSrvRecordCreateUpdate, - Read: resourceArmDnsSrvRecordRead, - Update: resourceArmDnsSrvRecordCreateUpdate, - Delete: resourceArmDnsSrvRecordDelete, + Create: resourceDnsSrvRecordCreateUpdate, + Read: resourceDnsSrvRecordRead, + Update: resourceDnsSrvRecordCreateUpdate, + Delete: resourceDnsSrvRecordDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -33,7 +33,7 @@ func resourceArmDnsSrvRecord() *schema.Resource { Delete: schema.DefaultTimeout(30 * time.Minute), }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.DnsSrvRecordID(id) + _, err := parse.SrvRecordID(id) return err }), Schema: map[string]*schema.Schema{ @@ -76,7 +76,7 @@ func resourceArmDnsSrvRecord() *schema.Resource { }, }, }, - Set: resourceArmDnsSrvRecordHash, + Set: resourceDnsSrvRecordHash, }, "ttl": { @@ -94,7 +94,7 @@ func resourceArmDnsSrvRecord() *schema.Resource { } } -func resourceArmDnsSrvRecordCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceDnsSrvRecordCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Dns.RecordSetsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -145,31 +145,31 @@ func resourceArmDnsSrvRecordCreateUpdate(d *schema.ResourceData, meta interface{ d.SetId(*resp.ID) - return resourceArmDnsSrvRecordRead(d, meta) + return 
resourceDnsSrvRecordRead(d, meta) } -func resourceArmDnsSrvRecordRead(d *schema.ResourceData, meta interface{}) error { +func resourceDnsSrvRecordRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Dns.RecordSetsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DnsSrvRecordID(d.Id()) + id, err := parse.SrvRecordID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.SRV) + resp, err := client.Get(ctx, id.ResourceGroup, id.DnszoneName, id.SRVName, dns.SRV) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("Error reading DNS SRV record %s: %v", id.Name, err) + return fmt.Errorf("Error reading DNS SRV record %s: %v", id.SRVName, err) } - d.Set("name", id.Name) + d.Set("name", id.SRVName) d.Set("resource_group_name", id.ResourceGroup) - d.Set("zone_name", id.ZoneName) + d.Set("zone_name", id.DnszoneName) d.Set("ttl", resp.TTL) d.Set("fqdn", resp.Fqdn) @@ -179,19 +179,19 @@ func resourceArmDnsSrvRecordRead(d *schema.ResourceData, meta interface{}) error return tags.FlattenAndSet(d, resp.Metadata) } -func resourceArmDnsSrvRecordDelete(d *schema.ResourceData, meta interface{}) error { +func resourceDnsSrvRecordDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Dns.RecordSetsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DnsSrvRecordID(d.Id()) + id, err := parse.SrvRecordID(d.Id()) if err != nil { return err } - resp, err := client.Delete(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.SRV, "") + resp, err := client.Delete(ctx, id.ResourceGroup, id.DnszoneName, id.SRVName, dns.SRV, "") if resp.StatusCode != http.StatusOK { - return fmt.Errorf("Error deleting DNS SRV Record %s: %+v", id.Name, err) + return fmt.Errorf("Error deleting DNS SRV Record %s: %+v", id.SRVName, err) } return nil @@ -238,7 +238,7 @@ func expandAzureRmDnsSrvRecords(d *schema.ResourceData) *[]dns.SrvRecord { return &records } -func resourceArmDnsSrvRecordHash(v interface{}) int { +func resourceDnsSrvRecordHash(v interface{}) int { var buf bytes.Buffer if m, ok := v.(map[string]interface{}); ok { diff --git a/azurerm/internal/services/dns/dns_srv_record_resource_test.go b/azurerm/internal/services/dns/dns_srv_record_resource_test.go new file mode 100644 index 000000000000..fa4afe1a43c3 --- /dev/null +++ b/azurerm/internal/services/dns/dns_srv_record_resource_test.go @@ -0,0 +1,311 @@ +package dns_test + +import ( + "context" + "fmt" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/dns/mgmt/2018-05-01/dns" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DnsSrvRecordResource struct { +} + +func TestAccDnsSrvRecord_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_srv_record", "test") + r := DnsSrvRecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + 
{ + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("fqdn").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDnsSrvRecord_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_srv_record", "test") + r := DnsSrvRecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_dns_srv_record"), + }, + }) +} + +func TestAccDnsSrvRecord_updateRecords(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_srv_record", "test") + r := DnsSrvRecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("record.#").HasValue("2"), + ), + }, + { + Config: r.updateRecords(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("record.#").HasValue("3"), + ), + }, + }) +} + +func TestAccDnsSrvRecord_withTags(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_srv_record", "test") + r := DnsSrvRecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("2"), + ), + }, + { + Config: r.withTagsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func (DnsSrvRecordResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.SrvRecordID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Dns.RecordSetsClient.Get(ctx, id.ResourceGroup, id.DnszoneName, id.SRVName, dns.SRV) + if err != nil { + return nil, fmt.Errorf("retrieving DNS SRV record %s (resource group: %s): %v", id.SRVName, id.ResourceGroup, err) + } + + return utils.Bool(resp.RecordSetProperties != nil), nil +} + +func (DnsSrvRecordResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_srv_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + + record { + priority = 1 + weight = 5 + port = 8080 + target = "target1.contoso.com" + } + + record { + priority = 2 + weight = 25 + port = 8080 + target = "target2.contoso.com" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r DnsSrvRecordResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_dns_srv_record" "import" { + name = azurerm_dns_srv_record.test.name + resource_group_name = azurerm_dns_srv_record.test.resource_group_name + zone_name = 
azurerm_dns_srv_record.test.zone_name + ttl = 300 + + record { + priority = 1 + weight = 5 + port = 8080 + target = "target1.contoso.com" + } + + record { + priority = 2 + weight = 25 + port = 8080 + target = "target2.contoso.com" + } +} +`, r.basic(data)) +} + +func (DnsSrvRecordResource) updateRecords(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_srv_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + + record { + priority = 1 + weight = 5 + port = 8080 + target = "target1.contoso.com" + } + + record { + priority = 2 + weight = 25 + port = 8080 + target = "target2.contoso.com" + } + + record { + priority = 3 + weight = 100 + port = 8080 + target = "target3.contoso.com" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (DnsSrvRecordResource) withTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_srv_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + + record { + priority = 1 + weight = 5 + port = 8080 + target = "target1.contoso.com" + } + + record { + priority = 2 + weight = 25 + port = 8080 + target = "target2.contoso.com" + } + + tags = { + environment = "Production" + cost_center = "MSFT" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (DnsSrvRecordResource) withTagsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_srv_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + + record { + priority = 1 + weight = 5 + port = 8080 + target = "target1.contoso.com" + } + + record { + priority = 2 + weight = 25 + port = 8080 + target = "target2.contoso.com" + } + + tags = { + environment = "staging" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/dns/dns_txt_record_resource.go b/azurerm/internal/services/dns/dns_txt_record_resource.go index 46537d0161cd..d84f04feb8aa 100644 --- a/azurerm/internal/services/dns/dns_txt_record_resource.go +++ b/azurerm/internal/services/dns/dns_txt_record_resource.go @@ -19,12 +19,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmDnsTxtRecord() *schema.Resource { +func resourceDnsTxtRecord() *schema.Resource { return &schema.Resource{ - Create: resourceArmDnsTxtRecordCreateUpdate, - Read: resourceArmDnsTxtRecordRead, - Update: 
resourceArmDnsTxtRecordCreateUpdate, - Delete: resourceArmDnsTxtRecordDelete, + Create: resourceDnsTxtRecordCreateUpdate, + Read: resourceDnsTxtRecordRead, + Update: resourceDnsTxtRecordCreateUpdate, + Delete: resourceDnsTxtRecordDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -33,7 +33,7 @@ func resourceArmDnsTxtRecord() *schema.Resource { Delete: schema.DefaultTimeout(30 * time.Minute), }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.DnsTxtRecordID(id) + _, err := parse.TxtRecordID(id) return err }), Schema: map[string]*schema.Schema{ @@ -79,7 +79,7 @@ func resourceArmDnsTxtRecord() *schema.Resource { } } -func resourceArmDnsTxtRecordCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceDnsTxtRecordCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Dns.RecordSetsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -130,31 +130,31 @@ func resourceArmDnsTxtRecordCreateUpdate(d *schema.ResourceData, meta interface{ d.SetId(*resp.ID) - return resourceArmDnsTxtRecordRead(d, meta) + return resourceDnsTxtRecordRead(d, meta) } -func resourceArmDnsTxtRecordRead(d *schema.ResourceData, meta interface{}) error { +func resourceDnsTxtRecordRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Dns.RecordSetsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DnsTxtRecordID(d.Id()) + id, err := parse.TxtRecordID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.TXT) + resp, err := client.Get(ctx, id.ResourceGroup, id.DnszoneName, id.TXTName, dns.TXT) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("Error reading DNS TXT record %s: %+v", id.Name, err) + return fmt.Errorf("Error reading DNS TXT record %s: %+v", id.TXTName, err) } - d.Set("name", id.Name) + d.Set("name", id.TXTName) d.Set("resource_group_name", id.ResourceGroup) - d.Set("zone_name", id.ZoneName) + d.Set("zone_name", id.DnszoneName) d.Set("ttl", resp.TTL) d.Set("fqdn", resp.Fqdn) @@ -164,19 +164,19 @@ func resourceArmDnsTxtRecordRead(d *schema.ResourceData, meta interface{}) error return tags.FlattenAndSet(d, resp.Metadata) } -func resourceArmDnsTxtRecordDelete(d *schema.ResourceData, meta interface{}) error { +func resourceDnsTxtRecordDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Dns.RecordSetsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.DnsTxtRecordID(d.Id()) + id, err := parse.TxtRecordID(d.Id()) if err != nil { return err } - resp, err := client.Delete(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.TXT, "") + resp, err := client.Delete(ctx, id.ResourceGroup, id.DnszoneName, id.TXTName, dns.TXT, "") if resp.StatusCode != http.StatusOK { - return fmt.Errorf("Error deleting DNS TXT Record %s: %+v", id.Name, err) + return fmt.Errorf("Error deleting DNS TXT Record %s: %+v", id.TXTName, err) } return nil diff --git a/azurerm/internal/services/dns/dns_txt_record_resource_test.go b/azurerm/internal/services/dns/dns_txt_record_resource_test.go new file mode 100644 index 000000000000..1dab842eb3c4 --- /dev/null +++ b/azurerm/internal/services/dns/dns_txt_record_resource_test.go @@ -0,0 +1,278 @@ +package 
dns_test + +import ( + "context" + "fmt" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/dns/mgmt/2018-05-01/dns" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DnsTxtRecordResource struct { +} + +func TestAccDnsTxtRecord_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_txt_record", "test") + r := DnsTxtRecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("fqdn").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDnsTxtRecord_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_txt_record", "test") + r := DnsTxtRecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_dns_txt_record"), + }, + }) +} + +func TestAccDnsTxtRecord_updateRecords(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_txt_record", "test") + r := DnsTxtRecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("record.#").HasValue("2"), + ), + }, + { + Config: r.updateRecords(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("record.#").HasValue("3"), + ), + }, + }) +} + +func TestAccDnsTxtRecord_withTags(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_txt_record", "test") + r := DnsTxtRecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("2"), + ), + }, + { + Config: r.withTagsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func (DnsTxtRecordResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.TxtRecordID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Dns.RecordSetsClient.Get(ctx, id.ResourceGroup, id.DnszoneName, id.TXTName, dns.TXT) + if err != nil { + return nil, fmt.Errorf("retrieving DNS TXT record %s (resource group: %s): %v", id.TXTName, id.ResourceGroup, err) + } + + return utils.Bool(resp.RecordSetProperties != nil), nil +} + +func (DnsTxtRecordResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + 
name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_txt_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + + record { + value = "Quick brown fox" + } + + record { + value = "A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r DnsTxtRecordResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_dns_txt_record" "import" { + name = azurerm_dns_txt_record.test.name + resource_group_name = azurerm_dns_txt_record.test.resource_group_name + zone_name = azurerm_dns_txt_record.test.zone_name + ttl = 300 + + record { + value = "Quick brown fox" + } + + record { + value = "A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......" 
+ } +} +`, r.basic(data)) +} + +func (DnsTxtRecordResource) updateRecords(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_txt_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + + record { + value = "Quick brown fox" + } + + record { + value = "A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......" + } + + record { + value = "A wild 3rd record appears" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (DnsTxtRecordResource) withTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_txt_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + + record { + value = "Quick brown fox" + } + + record { + value = "Another test txt string" + } + + tags = { + environment = "Production" + cost_center = "MSFT" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (DnsTxtRecordResource) withTagsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_dns_txt_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_dns_zone.test.name + ttl = 300 + + record { + value = "Quick brown fox" + } + + record { + value = "Another test txt string" + } + + tags = { + environment = "staging" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/dns/dns_zone_data_source.go b/azurerm/internal/services/dns/dns_zone_data_source.go index 96b6f3383256..a7b6ab6ad54a 100644 --- a/azurerm/internal/services/dns/dns_zone_data_source.go +++ b/azurerm/internal/services/dns/dns_zone_data_source.go @@ -14,9 +14,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmDnsZone() *schema.Resource { +func dataSourceDnsZone() 
*schema.Resource { return &schema.Resource{ - Read: dataSourceArmDnsZoneRead, + Read: dataSourceDnsZoneRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -56,7 +56,7 @@ func dataSourceArmDnsZone() *schema.Resource { } } -func dataSourceArmDnsZoneRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceDnsZoneRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Dns.ZonesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/dns/dns_zone_data_source_test.go b/azurerm/internal/services/dns/dns_zone_data_source_test.go new file mode 100644 index 000000000000..833467ae2f9a --- /dev/null +++ b/azurerm/internal/services/dns/dns_zone_data_source_test.go @@ -0,0 +1,130 @@ +package dns_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type AzureRMDNSZoneDataSource struct { +} + +func TestAccAzureRMDNSZoneDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_dns_zone", "test") + r := AzureRMDNSZoneDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), + }, + }) +} + +func TestAccAzureRMDNSZoneDataSource_tags(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_dns_zone", "test") + r := AzureRMDNSZoneDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.tags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.hello").HasValue("world"), + ), + }, + }) +} + +func TestAccAzureRMDNSZoneDataSource_withoutResourceGroupName(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_dns_zone", "test") + r := AzureRMDNSZoneDataSource{} + // resource group of DNS zone is always small case + resourceGroupName := fmt.Sprintf("acctestrg-%d", data.RandomInteger) + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.onlyName(data, resourceGroupName), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("resource_group_name").HasValue(resourceGroupName), + ), + }, + }) +} + +func (AzureRMDNSZoneDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +data "azurerm_dns_zone" "test" { + name = azurerm_dns_zone.test.name + resource_group_name = azurerm_dns_zone.test.resource_group_name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (AzureRMDNSZoneDataSource) tags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name + + tags = { + hello = "world" + } +} + +data "azurerm_dns_zone" "test" { + name = 
azurerm_dns_zone.test.name + resource_group_name = azurerm_dns_zone.test.resource_group_name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (AzureRMDNSZoneDataSource) onlyName(data acceptance.TestData, resourceGroupName string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "%s" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +data "azurerm_dns_zone" "test" { + name = azurerm_dns_zone.test.name +} +`, resourceGroupName, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/dns/dns_zone_resource.go b/azurerm/internal/services/dns/dns_zone_resource.go index 1b622a47586d..1ef11f51ad4b 100644 --- a/azurerm/internal/services/dns/dns_zone_resource.go +++ b/azurerm/internal/services/dns/dns_zone_resource.go @@ -2,27 +2,30 @@ package dns import ( "fmt" + "strings" "time" "github.com/Azure/azure-sdk-for-go/services/dns/mgmt/2018-05-01/dns" "github.com/hashicorp/go-azure-helpers/response" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmDnsZone() *schema.Resource { +func resourceDnsZone() *schema.Resource { return &schema.Resource{ - Create: resourceArmDnsZoneCreateUpdate, - Read: resourceArmDnsZoneRead, - Update: resourceArmDnsZoneCreateUpdate, - Delete: resourceArmDnsZoneDelete, + Create: resourceDnsZoneCreateUpdate, + Read: resourceDnsZoneRead, + Update: resourceDnsZoneCreateUpdate, + Delete: resourceDnsZoneDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -61,13 +64,86 @@ func resourceArmDnsZone() *schema.Resource { Set: schema.HashString, }, + "soa_record": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Computed: true, + ForceNew: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "email": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.DnsZoneSOARecordEmail, + }, + + "host_name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "expire_time": { + Type: schema.TypeInt, + Optional: true, + Default: 2419200, + ValidateFunc: validation.IntAtLeast(0), + }, + + "minimum_ttl": { + Type: schema.TypeInt, + Optional: true, + Default: 300, + ValidateFunc: validation.IntAtLeast(0), + }, + + "refresh_time": { + Type: schema.TypeInt, + Optional: true, + Default: 3600, + ValidateFunc: validation.IntAtLeast(0), + }, + + "retry_time": { + Type: schema.TypeInt, + Optional: true, + Default: 300, + ValidateFunc: validation.IntAtLeast(0), + }, + + 
"serial_number": { + Type: schema.TypeInt, + Optional: true, + Default: 1, + ValidateFunc: validation.IntAtLeast(0), + }, + + "ttl": { + Type: schema.TypeInt, + Optional: true, + Default: 3600, + ValidateFunc: validation.IntBetween(0, 2147483647), + }, + + "tags": tags.Schema(), + + "fqdn": { + Type: schema.TypeString, + Computed: true, + }, + }, + }, + }, + "tags": tags.Schema(), }, } } -func resourceArmDnsZoneCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceDnsZoneCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Dns.ZonesClient + recordSetsClient := meta.(*clients.Client).Dns.RecordSetsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -106,17 +182,37 @@ func resourceArmDnsZoneCreateUpdate(d *schema.ResourceData, meta interface{}) er return fmt.Errorf("Error retrieving DNS Zone %q (Resource Group %q): %s", name, resGroup, err) } + if v, ok := d.GetOk("soa_record"); ok { + soaRecord := v.([]interface{})[0].(map[string]interface{}) + rsParameters := dns.RecordSet{ + RecordSetProperties: &dns.RecordSetProperties{ + TTL: utils.Int64(int64(soaRecord["ttl"].(int))), + Metadata: tags.Expand(soaRecord["tags"].(map[string]interface{})), + SoaRecord: expandArmDNSZoneSOARecord(soaRecord), + }, + } + + if len(name+strings.TrimSuffix(*rsParameters.RecordSetProperties.SoaRecord.Email, ".")) > 253 { + return fmt.Errorf("`email` which is concatenated with DNS Zone `name` cannot exceed 253 characters excluding a trailing period") + } + + if _, err := recordSetsClient.CreateOrUpdate(ctx, resGroup, name, "@", dns.SOA, rsParameters, etag, ifNoneMatch); err != nil { + return fmt.Errorf("creating/updating DNS SOA Record @ (Zone %q / Resource Group %q): %s", name, resGroup, err) + } + } + if resp.ID == nil { return fmt.Errorf("Cannot read DNS Zone %q (Resource Group %q) ID", name, resGroup) } d.SetId(*resp.ID) - return resourceArmDnsZoneRead(d, meta) + return resourceDnsZoneRead(d, meta) } -func resourceArmDnsZoneRead(d *schema.ResourceData, meta interface{}) error { +func resourceDnsZoneRead(d *schema.ResourceData, meta interface{}) error { zonesClient := meta.(*clients.Client).Dns.ZonesClient + recordSetsClient := meta.(*clients.Client).Dns.RecordSetsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -148,10 +244,19 @@ func resourceArmDnsZoneRead(d *schema.ResourceData, meta interface{}) error { return err } + rsResp, err := recordSetsClient.Get(ctx, id.ResourceGroup, id.Name, "@", dns.SOA) + if err != nil { + return fmt.Errorf("reading DNS SOA record @: %v", err) + } + + if err := d.Set("soa_record", flattenArmDNSZoneSOARecord(&rsResp)); err != nil { + return fmt.Errorf("setting `soa_record`: %+v", err) + } + return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmDnsZoneDelete(d *schema.ResourceData, meta interface{}) error { +func resourceDnsZoneDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Dns.ZonesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() @@ -179,3 +284,88 @@ func resourceArmDnsZoneDelete(d *schema.ResourceData, meta interface{}) error { return nil } + +func expandArmDNSZoneSOARecord(input map[string]interface{}) *dns.SoaRecord { + return &dns.SoaRecord{ + Email: utils.String(input["email"].(string)), + Host: utils.String(input["host_name"].(string)), + ExpireTime: utils.Int64(int64(input["expire_time"].(int))), + MinimumTTL: 
utils.Int64(int64(input["minimum_ttl"].(int))), + RefreshTime: utils.Int64(int64(input["refresh_time"].(int))), + RetryTime: utils.Int64(int64(input["retry_time"].(int))), + SerialNumber: utils.Int64(int64(input["serial_number"].(int))), + } +} + +func flattenArmDNSZoneSOARecord(input *dns.RecordSet) []interface{} { + if input == nil { + return make([]interface{}, 0) + } + + ttl := 0 + if input.TTL != nil { + ttl = int(*input.TTL) + } + + metaData := make(map[string]interface{}) + if input.Metadata != nil { + metaData = tags.Flatten(input.Metadata) + } + + fqdn := "" + if input.Fqdn != nil { + fqdn = *input.Fqdn + } + + email := "" + hostName := "" + expireTime := 0 + minimumTTL := 0 + refreshTime := 0 + retryTime := 0 + serialNumber := 0 + if input.SoaRecord != nil { + if input.SoaRecord.Email != nil { + email = *input.SoaRecord.Email + } + + if input.SoaRecord.Host != nil { + hostName = *input.SoaRecord.Host + } + + if input.SoaRecord.ExpireTime != nil { + expireTime = int(*input.SoaRecord.ExpireTime) + } + + if input.SoaRecord.MinimumTTL != nil { + minimumTTL = int(*input.SoaRecord.MinimumTTL) + } + + if input.SoaRecord.RefreshTime != nil { + refreshTime = int(*input.SoaRecord.RefreshTime) + } + + if input.SoaRecord.RetryTime != nil { + retryTime = int(*input.SoaRecord.RetryTime) + } + + if input.SoaRecord.SerialNumber != nil { + serialNumber = int(*input.SoaRecord.SerialNumber) + } + } + + return []interface{}{ + map[string]interface{}{ + "email": email, + "host_name": hostName, + "expire_time": expireTime, + "minimum_ttl": minimumTTL, + "refresh_time": refreshTime, + "retry_time": retryTime, + "serial_number": serialNumber, + "ttl": ttl, + "tags": metaData, + "fqdn": fqdn, + }, + } +} diff --git a/azurerm/internal/services/dns/dns_zone_resource_test.go b/azurerm/internal/services/dns/dns_zone_resource_test.go new file mode 100644 index 000000000000..d6cd3e0f94ab --- /dev/null +++ b/azurerm/internal/services/dns/dns_zone_resource_test.go @@ -0,0 +1,248 @@ +package dns_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DnsZoneResource struct { +} + +func TestAccDnsZone_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_zone", "test") + r := DnsZoneResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDnsZone_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_zone", "test") + r := DnsZoneResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_dns_zone"), + }, + }) +} + +func TestAccDnsZone_withTags(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_zone", 
"test") + r := DnsZoneResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("2"), + ), + }, + { + Config: r.withTagsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDnsZone_withSOARecord(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dns_zone", "test") + r := DnsZoneResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withBasicSOARecord(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.withCompletedSOARecord(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.withBasicSOARecord(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (DnsZoneResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.DnsZoneID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Dns.ZonesClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving DNS zone %s (resource group: %s): %v", id.Name, id.ResourceGroup, err) + } + + return utils.Bool(resp.ZoneProperties != nil), nil +} + +func (DnsZoneResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (DnsZoneResource) requiresImport(data acceptance.TestData) string { + template := DnsZoneResource{}.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_dns_zone" "import" { + name = azurerm_dns_zone.test.name + resource_group_name = azurerm_dns_zone.test.resource_group_name +} +`, template) +} + +func (DnsZoneResource) withTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name + + tags = { + environment = "Production" + cost_center = "MSFT" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (DnsZoneResource) withTagsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name + + tags = { + environment = "staging" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (DnsZoneResource) withBasicSOARecord(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + 
name = "acctestRG-dns-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name + + soa_record { + email = "testemail.com" + host_name = "testhost.contoso.com" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (DnsZoneResource) withCompletedSOARecord(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-dns-%d" + location = "%s" +} + +resource "azurerm_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name + + soa_record { + email = "testemail.com" + host_name = "testhost.contoso.com" + expire_time = 2419200 + minimum_ttl = 200 + refresh_time = 2600 + retry_time = 200 + serial_number = 1 + ttl = 100 + + tags = { + ENv = "Test" + } + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/dns/parse/a_record.go b/azurerm/internal/services/dns/parse/a_record.go new file mode 100644 index 000000000000..4c0f1474a7a4 --- /dev/null +++ b/azurerm/internal/services/dns/parse/a_record.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ARecordId struct { + SubscriptionId string + ResourceGroup string + DnszoneName string + AName string +} + +func NewARecordID(subscriptionId, resourceGroup, dnszoneName, aName string) ARecordId { + return ARecordId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + DnszoneName: dnszoneName, + AName: aName, + } +} + +func (id ARecordId) String() string { + segments := []string{ + fmt.Sprintf("A Name %q", id.AName), + fmt.Sprintf("Dnszone Name %q", id.DnszoneName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "A Record", segmentsStr) +} + +func (id ARecordId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/dnszones/%s/A/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.DnszoneName, id.AName) +} + +// ARecordID parses a ARecord ID into an ARecordId struct +func ARecordID(input string) (*ARecordId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ARecordId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.DnszoneName, err = id.PopSegment("dnszones"); err != nil { + return nil, err + } + if resourceId.AName, err = id.PopSegment("A"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/dns/parse/a_record_test.go b/azurerm/internal/services/dns/parse/a_record_test.go new file mode 100644 index 000000000000..6a118e4e757e --- /dev/null +++ b/azurerm/internal/services/dns/parse/a_record_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual 
changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ARecordId{} + +func TestARecordIDFormatter(t *testing.T) { + actual := NewARecordID("12345678-1234-9876-4563-123456789012", "resGroup1", "zone1", "eh1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/A/eh1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestARecordID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ARecordId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", + Error: true, + }, + + { + // missing AName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/", + Error: true, + }, + + { + // missing value for AName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/A/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/A/eh1", + Expected: &ARecordId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + DnszoneName: "zone1", + AName: "eh1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/DNSZONES/ZONE1/A/EH1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ARecordID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.DnszoneName != v.Expected.DnszoneName { + t.Fatalf("Expected %q but got %q for DnszoneName", v.Expected.DnszoneName, actual.DnszoneName) + } + if actual.AName != v.Expected.AName { + t.Fatalf("Expected %q but got %q for AName", v.Expected.AName, actual.AName) + } + } +} diff --git a/azurerm/internal/services/dns/parse/aaaa_record.go b/azurerm/internal/services/dns/parse/aaaa_record.go new file mode 100644 index 000000000000..01660c41c268 --- /dev/null +++ b/azurerm/internal/services/dns/parse/aaaa_record.go @@ -0,0 +1,75 @@ +package 
parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type AaaaRecordId struct { + SubscriptionId string + ResourceGroup string + DnszoneName string + AAAAName string +} + +func NewAaaaRecordID(subscriptionId, resourceGroup, dnszoneName, aAAAName string) AaaaRecordId { + return AaaaRecordId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + DnszoneName: dnszoneName, + AAAAName: aAAAName, + } +} + +func (id AaaaRecordId) String() string { + segments := []string{ + fmt.Sprintf("A A A A Name %q", id.AAAAName), + fmt.Sprintf("Dnszone Name %q", id.DnszoneName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Aaaa Record", segmentsStr) +} + +func (id AaaaRecordId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/dnszones/%s/AAAA/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.DnszoneName, id.AAAAName) +} + +// AaaaRecordID parses a AaaaRecord ID into an AaaaRecordId struct +func AaaaRecordID(input string) (*AaaaRecordId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := AaaaRecordId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.DnszoneName, err = id.PopSegment("dnszones"); err != nil { + return nil, err + } + if resourceId.AAAAName, err = id.PopSegment("AAAA"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/dns/parse/aaaa_record_test.go b/azurerm/internal/services/dns/parse/aaaa_record_test.go new file mode 100644 index 000000000000..4fce1f729c8a --- /dev/null +++ b/azurerm/internal/services/dns/parse/aaaa_record_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = AaaaRecordId{} + +func TestAaaaRecordIDFormatter(t *testing.T) { + actual := NewAaaaRecordID("12345678-1234-9876-4563-123456789012", "resGroup1", "zone1", "eheh1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/AAAA/eheh1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestAaaaRecordID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *AaaaRecordId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + 
// missing DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", + Error: true, + }, + + { + // missing AAAAName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/", + Error: true, + }, + + { + // missing value for AAAAName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/AAAA/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/AAAA/eheh1", + Expected: &AaaaRecordId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + DnszoneName: "zone1", + AAAAName: "eheh1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/DNSZONES/ZONE1/AAAA/EHEH1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := AaaaRecordID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.DnszoneName != v.Expected.DnszoneName { + t.Fatalf("Expected %q but got %q for DnszoneName", v.Expected.DnszoneName, actual.DnszoneName) + } + if actual.AAAAName != v.Expected.AAAAName { + t.Fatalf("Expected %q but got %q for AAAAName", v.Expected.AAAAName, actual.AAAAName) + } + } +} diff --git a/azurerm/internal/services/dns/parse/caa_record.go b/azurerm/internal/services/dns/parse/caa_record.go new file mode 100644 index 000000000000..f5e321781a80 --- /dev/null +++ b/azurerm/internal/services/dns/parse/caa_record.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type CaaRecordId struct { + SubscriptionId string + ResourceGroup string + DnszoneName string + CAAName string +} + +func NewCaaRecordID(subscriptionId, resourceGroup, dnszoneName, cAAName string) CaaRecordId { + return CaaRecordId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + DnszoneName: dnszoneName, + CAAName: cAAName, + } +} + +func (id CaaRecordId) String() string { + segments := []string{ + fmt.Sprintf("C A A Name %q", id.CAAName), + fmt.Sprintf("Dnszone Name %q", id.DnszoneName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Caa Record", segmentsStr) +} + +func (id CaaRecordId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/dnszones/%s/CAA/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, 
id.DnszoneName, id.CAAName) +} + +// CaaRecordID parses a CaaRecord ID into an CaaRecordId struct +func CaaRecordID(input string) (*CaaRecordId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := CaaRecordId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.DnszoneName, err = id.PopSegment("dnszones"); err != nil { + return nil, err + } + if resourceId.CAAName, err = id.PopSegment("CAA"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/dns/parse/caa_record_test.go b/azurerm/internal/services/dns/parse/caa_record_test.go new file mode 100644 index 000000000000..5e463c2200d8 --- /dev/null +++ b/azurerm/internal/services/dns/parse/caa_record_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = CaaRecordId{} + +func TestCaaRecordIDFormatter(t *testing.T) { + actual := NewCaaRecordID("12345678-1234-9876-4563-123456789012", "resGroup1", "zone1", "caa1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/CAA/caa1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestCaaRecordID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *CaaRecordId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", + Error: true, + }, + + { + // missing CAAName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/", + Error: true, + }, + + { + // missing value for CAAName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/CAA/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/CAA/caa1", + Expected: &CaaRecordId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + DnszoneName: "zone1", + CAAName: "caa1", + }, + }, + + { + // upper-cased + Input: 
"/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/DNSZONES/ZONE1/CAA/CAA1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := CaaRecordID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.DnszoneName != v.Expected.DnszoneName { + t.Fatalf("Expected %q but got %q for DnszoneName", v.Expected.DnszoneName, actual.DnszoneName) + } + if actual.CAAName != v.Expected.CAAName { + t.Fatalf("Expected %q but got %q for CAAName", v.Expected.CAAName, actual.CAAName) + } + } +} diff --git a/azurerm/internal/services/dns/parse/cname_record.go b/azurerm/internal/services/dns/parse/cname_record.go new file mode 100644 index 000000000000..9e63180a069b --- /dev/null +++ b/azurerm/internal/services/dns/parse/cname_record.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type CnameRecordId struct { + SubscriptionId string + ResourceGroup string + DnszoneName string + CNAMEName string +} + +func NewCnameRecordID(subscriptionId, resourceGroup, dnszoneName, cNAMEName string) CnameRecordId { + return CnameRecordId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + DnszoneName: dnszoneName, + CNAMEName: cNAMEName, + } +} + +func (id CnameRecordId) String() string { + segments := []string{ + fmt.Sprintf("C N A M E Name %q", id.CNAMEName), + fmt.Sprintf("Dnszone Name %q", id.DnszoneName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Cname Record", segmentsStr) +} + +func (id CnameRecordId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/dnszones/%s/CNAME/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.DnszoneName, id.CNAMEName) +} + +// CnameRecordID parses a CnameRecord ID into an CnameRecordId struct +func CnameRecordID(input string) (*CnameRecordId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := CnameRecordId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.DnszoneName, err = id.PopSegment("dnszones"); err != nil { + return nil, err + } + if resourceId.CNAMEName, err = id.PopSegment("CNAME"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/dns/parse/cname_record_test.go b/azurerm/internal/services/dns/parse/cname_record_test.go new file mode 100644 index 
000000000000..186622c9c159 --- /dev/null +++ b/azurerm/internal/services/dns/parse/cname_record_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = CnameRecordId{} + +func TestCnameRecordIDFormatter(t *testing.T) { + actual := NewCnameRecordID("12345678-1234-9876-4563-123456789012", "resGroup1", "zone1", "name1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/CNAME/name1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestCnameRecordID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *CnameRecordId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", + Error: true, + }, + + { + // missing CNAMEName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/", + Error: true, + }, + + { + // missing value for CNAMEName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/CNAME/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/CNAME/name1", + Expected: &CnameRecordId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + DnszoneName: "zone1", + CNAMEName: "name1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/DNSZONES/ZONE1/CNAME/NAME1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := CnameRecordID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.DnszoneName != v.Expected.DnszoneName { + t.Fatalf("Expected %q but got %q for DnszoneName", v.Expected.DnszoneName, actual.DnszoneName) + } + if actual.CNAMEName != v.Expected.CNAMEName { + t.Fatalf("Expected %q but got %q for CNAMEName", v.Expected.CNAMEName, 
actual.CNAMEName) + } + } +} diff --git a/azurerm/internal/services/dns/parse/dns_a_record.go b/azurerm/internal/services/dns/parse/dns_a_record.go deleted file mode 100644 index dd85593c276f..000000000000 --- a/azurerm/internal/services/dns/parse/dns_a_record.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type DnsARecordId struct { - ResourceGroup string - ZoneName string - Name string -} - -func DnsARecordID(input string) (*DnsARecordId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse DNS A Record ID %q: %+v", input, err) - } - - record := DnsARecordId{ - ResourceGroup: id.ResourceGroup, - } - - if record.ZoneName, err = id.PopSegment("dnszones"); err != nil { - return nil, err - } - - if record.Name, err = id.PopSegment("A"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &record, nil -} diff --git a/azurerm/internal/services/dns/parse/dns_a_record_test.go b/azurerm/internal/services/dns/parse/dns_a_record_test.go deleted file mode 100644 index cd48c1c80645..000000000000 --- a/azurerm/internal/services/dns/parse/dns_a_record_test.go +++ /dev/null @@ -1,86 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestDnsARecordId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *DnsARecordId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing DNS Zones Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", - Expected: nil, - }, - { - Name: "DNS Zone ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1", - Expected: nil, - }, - { - Name: "Missing A Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/A/", - Expected: nil, - }, - { - Name: "DNS A Record ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/A/myrecord1", - Expected: &DnsARecordId{ - ResourceGroup: "resGroup1", - ZoneName: "zone1", - Name: "myrecord1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/a/myrecord1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := DnsARecordID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - if actual.ZoneName != v.Expected.ZoneName { - t.Fatalf("Expected %q but got %q for ZoneName", v.Expected.ZoneName, actual.ZoneName) - } - if actual.ResourceGroup != 
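The hand-written Dns*Record parsers removed in this change only captured ResourceGroup, ZoneName and Name; their generated replacements also capture SubscriptionId and rename the fields. A hypothetical sketch of that mapping, using the CNAME parser introduced above with placeholder values and assumed to compile inside the parse package:

package parse

import "testing"

// Hypothetical sketch: ZoneName becomes DnszoneName, Name becomes the
// record-type-specific field (CNAMEName here), and SubscriptionId is now
// populated as well.
func TestCnameRecordIDFieldMapping(t *testing.T) {
	id, err := CnameRecordID("/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/CNAME/name1")
	if err != nil {
		t.Fatal(err)
	}

	if id.SubscriptionId != "12345678-1234-9876-4563-123456789012" {
		t.Fatalf("unexpected SubscriptionId: %q", id.SubscriptionId)
	}
	if id.DnszoneName != "zone1" { // previously ZoneName
		t.Fatalf("unexpected DnszoneName: %q", id.DnszoneName)
	}
	if id.CNAMEName != "name1" { // previously Name
		t.Fatalf("unexpected CNAMEName: %q", id.CNAMEName)
	}
}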
v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/dns/parse/dns_aaaa_record.go b/azurerm/internal/services/dns/parse/dns_aaaa_record.go deleted file mode 100644 index e9b7b1bb10a3..000000000000 --- a/azurerm/internal/services/dns/parse/dns_aaaa_record.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type DnsAaaaRecordId struct { - ResourceGroup string - ZoneName string - Name string -} - -func DnsAaaaRecordID(input string) (*DnsAaaaRecordId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse DNS AAAA Record ID %q: %+v", input, err) - } - - record := DnsAaaaRecordId{ - ResourceGroup: id.ResourceGroup, - } - - if record.ZoneName, err = id.PopSegment("dnszones"); err != nil { - return nil, err - } - - if record.Name, err = id.PopSegment("AAAA"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &record, nil -} diff --git a/azurerm/internal/services/dns/parse/dns_aaaa_record_test.go b/azurerm/internal/services/dns/parse/dns_aaaa_record_test.go deleted file mode 100644 index 36dc9458b744..000000000000 --- a/azurerm/internal/services/dns/parse/dns_aaaa_record_test.go +++ /dev/null @@ -1,86 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestDnsAAAARecordId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *DnsAaaaRecordId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing DNS Zones Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", - Expected: nil, - }, - { - Name: "DNS Zone ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1", - Expected: nil, - }, - { - Name: "Missing AAAA Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/AAAA/", - Expected: nil, - }, - { - Name: "DNS AAAA Record ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/AAAA/myrecord1", - Expected: &DnsAaaaRecordId{ - ResourceGroup: "resGroup1", - ZoneName: "zone1", - Name: "myrecord1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/aaaa/myrecord1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := DnsAaaaRecordID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - 
} - if actual.ZoneName != v.Expected.ZoneName { - t.Fatalf("Expected %q but got %q for ZoneName", v.Expected.ZoneName, actual.ZoneName) - } - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/dns/parse/dns_caa_record.go b/azurerm/internal/services/dns/parse/dns_caa_record.go deleted file mode 100644 index 1ae6b4afc175..000000000000 --- a/azurerm/internal/services/dns/parse/dns_caa_record.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type DnsCaaRecordId struct { - ResourceGroup string - ZoneName string - Name string -} - -func DnsCaaRecordID(input string) (*DnsCaaRecordId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse DNS CAA Record ID %q: %+v", input, err) - } - - record := DnsCaaRecordId{ - ResourceGroup: id.ResourceGroup, - } - - if record.ZoneName, err = id.PopSegment("dnszones"); err != nil { - return nil, err - } - - if record.Name, err = id.PopSegment("CAA"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &record, nil -} diff --git a/azurerm/internal/services/dns/parse/dns_caa_record_test.go b/azurerm/internal/services/dns/parse/dns_caa_record_test.go deleted file mode 100644 index 0ee8c66689c7..000000000000 --- a/azurerm/internal/services/dns/parse/dns_caa_record_test.go +++ /dev/null @@ -1,86 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestDnsCaaRecordId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *DnsCaaRecordId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing DNS Zones Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", - Expected: nil, - }, - { - Name: "DNS Zone ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1", - Expected: nil, - }, - { - Name: "Missing CAA Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/CAA/", - Expected: nil, - }, - { - Name: "DNS CAA Record ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/CAA/myrecord1", - Expected: &DnsCaaRecordId{ - ResourceGroup: "resGroup1", - ZoneName: "zone1", - Name: "myrecord1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/caa/myrecord1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := DnsCaaRecordID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value 
but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - if actual.ZoneName != v.Expected.ZoneName { - t.Fatalf("Expected %q but got %q for ZoneName", v.Expected.ZoneName, actual.ZoneName) - } - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/dns/parse/dns_cname_record.go b/azurerm/internal/services/dns/parse/dns_cname_record.go deleted file mode 100644 index f43d8c4ff0dc..000000000000 --- a/azurerm/internal/services/dns/parse/dns_cname_record.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type DnsCNameRecordId struct { - ResourceGroup string - ZoneName string - Name string -} - -func DnsCNameRecordID(input string) (*DnsCNameRecordId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse DNS CNAME Record ID %q: %+v", input, err) - } - - record := DnsCNameRecordId{ - ResourceGroup: id.ResourceGroup, - } - - if record.ZoneName, err = id.PopSegment("dnszones"); err != nil { - return nil, err - } - - if record.Name, err = id.PopSegment("CNAME"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &record, nil -} diff --git a/azurerm/internal/services/dns/parse/dns_cname_record_test.go b/azurerm/internal/services/dns/parse/dns_cname_record_test.go deleted file mode 100644 index b0f4d55157b8..000000000000 --- a/azurerm/internal/services/dns/parse/dns_cname_record_test.go +++ /dev/null @@ -1,86 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestDnsCNameRecordId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *DnsCNameRecordId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing DNS Zones Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", - Expected: nil, - }, - { - Name: "DNS Zone ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1", - Expected: nil, - }, - { - Name: "Missing CNAME Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/CNAME/", - Expected: nil, - }, - { - Name: "DNS CNAME Record ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/CNAME/myrecord1", - Expected: &DnsCNameRecordId{ - ResourceGroup: "resGroup1", - ZoneName: "zone1", - Name: "myrecord1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/cname/myrecord1", - Expected: nil, - }, - } - - for _, v := 
range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := DnsCNameRecordID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - if actual.ZoneName != v.Expected.ZoneName { - t.Fatalf("Expected %q but got %q for ZoneName", v.Expected.ZoneName, actual.ZoneName) - } - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/dns/parse/dns_mx_record.go b/azurerm/internal/services/dns/parse/dns_mx_record.go deleted file mode 100644 index 17bb2798b541..000000000000 --- a/azurerm/internal/services/dns/parse/dns_mx_record.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type DnsMxRecordId struct { - ResourceGroup string - ZoneName string - Name string -} - -func DnsMxRecordID(input string) (*DnsMxRecordId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse DNS MX Record ID %q: %+v", input, err) - } - - record := DnsMxRecordId{ - ResourceGroup: id.ResourceGroup, - } - - if record.ZoneName, err = id.PopSegment("dnszones"); err != nil { - return nil, err - } - - if record.Name, err = id.PopSegment("MX"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &record, nil -} diff --git a/azurerm/internal/services/dns/parse/dns_mx_record_test.go b/azurerm/internal/services/dns/parse/dns_mx_record_test.go deleted file mode 100644 index 2b565b675c5d..000000000000 --- a/azurerm/internal/services/dns/parse/dns_mx_record_test.go +++ /dev/null @@ -1,86 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestDnsMxRecordId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *DnsMxRecordId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing DNS Zones Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", - Expected: nil, - }, - { - Name: "DNS Zone ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1", - Expected: nil, - }, - { - Name: "Missing MX Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/MX/", - Expected: nil, - }, - { - Name: "DNS MX Record ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/MX/myrecord1", - Expected: &DnsMxRecordId{ - ResourceGroup: "resGroup1", - ZoneName: "zone1", - Name: "myrecord1", - }, - }, - { - Name: "Wrong Casing", - Input: 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/mx/myrecord1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := DnsMxRecordID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - if actual.ZoneName != v.Expected.ZoneName { - t.Fatalf("Expected %q but got %q for ZoneName", v.Expected.ZoneName, actual.ZoneName) - } - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/dns/parse/dns_ns_record.go b/azurerm/internal/services/dns/parse/dns_ns_record.go deleted file mode 100644 index 4bef6f9ea0b7..000000000000 --- a/azurerm/internal/services/dns/parse/dns_ns_record.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type DnsNsRecordId struct { - ResourceGroup string - ZoneName string - Name string -} - -func DnsNsRecordID(input string) (*DnsNsRecordId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse DNS NS Record ID %q: %+v", input, err) - } - - record := DnsNsRecordId{ - ResourceGroup: id.ResourceGroup, - } - - if record.ZoneName, err = id.PopSegment("dnszones"); err != nil { - return nil, err - } - - if record.Name, err = id.PopSegment("NS"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &record, nil -} diff --git a/azurerm/internal/services/dns/parse/dns_ns_record_test.go b/azurerm/internal/services/dns/parse/dns_ns_record_test.go deleted file mode 100644 index 35b2ce5c37f1..000000000000 --- a/azurerm/internal/services/dns/parse/dns_ns_record_test.go +++ /dev/null @@ -1,86 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestDnsNsRecordId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *DnsNsRecordId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing DNS Zones Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", - Expected: nil, - }, - { - Name: "DNS Zone ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1", - Expected: nil, - }, - { - Name: "Missing NS Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/NS/", - Expected: nil, - }, - { - Name: "DNS NS Record ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/NS/myrecord1", - Expected: 
&DnsNsRecordId{ - ResourceGroup: "resGroup1", - ZoneName: "zone1", - Name: "myrecord1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/ns/myrecord1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := DnsNsRecordID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - if actual.ZoneName != v.Expected.ZoneName { - t.Fatalf("Expected %q but got %q for ZoneName", v.Expected.ZoneName, actual.ZoneName) - } - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/dns/parse/dns_ptr_record.go b/azurerm/internal/services/dns/parse/dns_ptr_record.go deleted file mode 100644 index 0926b71e3890..000000000000 --- a/azurerm/internal/services/dns/parse/dns_ptr_record.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type DnsPtrRecordId struct { - ResourceGroup string - ZoneName string - Name string -} - -func DnsPtrRecordID(input string) (*DnsPtrRecordId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse DNS PTR Record ID %q: %+v", input, err) - } - - record := DnsPtrRecordId{ - ResourceGroup: id.ResourceGroup, - } - - if record.ZoneName, err = id.PopSegment("dnszones"); err != nil { - return nil, err - } - - if record.Name, err = id.PopSegment("PTR"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &record, nil -} diff --git a/azurerm/internal/services/dns/parse/dns_ptr_record_test.go b/azurerm/internal/services/dns/parse/dns_ptr_record_test.go deleted file mode 100644 index 7dfd26213717..000000000000 --- a/azurerm/internal/services/dns/parse/dns_ptr_record_test.go +++ /dev/null @@ -1,86 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestDnsPtrRecordId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *DnsPtrRecordId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing DNS Zones Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", - Expected: nil, - }, - { - Name: "DNS Zone ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1", - Expected: nil, - }, - { - Name: "Missing PTR Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/PTR/", - Expected: nil, - }, - { - Name: "DNS PTR Record ID", - Input: 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/PTR/myrecord1", - Expected: &DnsPtrRecordId{ - ResourceGroup: "resGroup1", - ZoneName: "zone1", - Name: "myrecord1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/ptr/myrecord1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := DnsPtrRecordID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - if actual.ZoneName != v.Expected.ZoneName { - t.Fatalf("Expected %q but got %q for ZoneName", v.Expected.ZoneName, actual.ZoneName) - } - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/dns/parse/dns_srv_record.go b/azurerm/internal/services/dns/parse/dns_srv_record.go deleted file mode 100644 index ec4d9802705c..000000000000 --- a/azurerm/internal/services/dns/parse/dns_srv_record.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type DnsSrvRecordId struct { - ResourceGroup string - ZoneName string - Name string -} - -func DnsSrvRecordID(input string) (*DnsSrvRecordId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse DNS SRV Record ID %q: %+v", input, err) - } - - record := DnsSrvRecordId{ - ResourceGroup: id.ResourceGroup, - } - - if record.ZoneName, err = id.PopSegment("dnszones"); err != nil { - return nil, err - } - - if record.Name, err = id.PopSegment("SRV"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &record, nil -} diff --git a/azurerm/internal/services/dns/parse/dns_srv_record_test.go b/azurerm/internal/services/dns/parse/dns_srv_record_test.go deleted file mode 100644 index 00f6131a7c3a..000000000000 --- a/azurerm/internal/services/dns/parse/dns_srv_record_test.go +++ /dev/null @@ -1,86 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestDnsSrvRecordId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *DnsSrvRecordId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing DNS Zones Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", - Expected: nil, - }, - { - Name: "DNS Zone ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1", - Expected: nil, - }, - { - Name: "Missing SRV Value", - Input: 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/SRV/", - Expected: nil, - }, - { - Name: "DNS SRV Record ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/SRV/myrecord1", - Expected: &DnsSrvRecordId{ - ResourceGroup: "resGroup1", - ZoneName: "zone1", - Name: "myrecord1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/srv/myrecord1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := DnsSrvRecordID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - if actual.ZoneName != v.Expected.ZoneName { - t.Fatalf("Expected %q but got %q for ZoneName", v.Expected.ZoneName, actual.ZoneName) - } - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/dns/parse/dns_txt_record.go b/azurerm/internal/services/dns/parse/dns_txt_record.go deleted file mode 100644 index dc4344ac78b9..000000000000 --- a/azurerm/internal/services/dns/parse/dns_txt_record.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type DnsTxtRecordId struct { - ResourceGroup string - ZoneName string - Name string -} - -func DnsTxtRecordID(input string) (*DnsTxtRecordId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse DNS TXT Record ID %q: %+v", input, err) - } - - record := DnsTxtRecordId{ - ResourceGroup: id.ResourceGroup, - } - - if record.ZoneName, err = id.PopSegment("dnszones"); err != nil { - return nil, err - } - - if record.Name, err = id.PopSegment("TXT"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &record, nil -} diff --git a/azurerm/internal/services/dns/parse/dns_txt_record_test.go b/azurerm/internal/services/dns/parse/dns_txt_record_test.go deleted file mode 100644 index 6c32e244024d..000000000000 --- a/azurerm/internal/services/dns/parse/dns_txt_record_test.go +++ /dev/null @@ -1,86 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestDnsTxtRecordId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *DnsTxtRecordId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing DNS Zones Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", - Expected: nil, - }, - { - Name: "DNS Zone ID", - Input: 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1", - Expected: nil, - }, - { - Name: "Missing TXT Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/TXT/", - Expected: nil, - }, - { - Name: "DNS TXT Record ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/TXT/myrecord1", - Expected: &DnsTxtRecordId{ - ResourceGroup: "resGroup1", - ZoneName: "zone1", - Name: "myrecord1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/txt/myrecord1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := DnsTxtRecordID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - if actual.ZoneName != v.Expected.ZoneName { - t.Fatalf("Expected %q but got %q for ZoneName", v.Expected.ZoneName, actual.ZoneName) - } - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/dns/parse/dns_zone.go b/azurerm/internal/services/dns/parse/dns_zone.go index 9d2fb359d068..4aa63d103a04 100644 --- a/azurerm/internal/services/dns/parse/dns_zone.go +++ b/azurerm/internal/services/dns/parse/dns_zone.go @@ -1,27 +1,63 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type DnsZoneId struct { - ResourceGroup string - Name string + SubscriptionId string + ResourceGroup string + Name string +} + +func NewDnsZoneID(subscriptionId, resourceGroup, name string) DnsZoneId { + return DnsZoneId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id DnsZoneId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Dns Zone", segmentsStr) } +func (id DnsZoneId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/dnszones/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// DnsZoneID parses a DnsZone ID into an DnsZoneId struct func DnsZoneID(input string) (*DnsZoneId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse DNS Zone ID %q: %+v", input, err) + return nil, err + } + + resourceId := DnsZoneId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - zone := DnsZoneId{ - ResourceGroup: id.ResourceGroup, + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if zone.Name, err = id.PopSegment("dnszones"); err != nil { + if resourceId.Name, err = id.PopSegment("dnszones"); err 
!= nil { return nil, err } @@ -29,5 +65,5 @@ func DnsZoneID(input string) (*DnsZoneId, error) { return nil, err } - return &zone, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/dns/parse/dns_zone_test.go b/azurerm/internal/services/dns/parse/dns_zone_test.go index 3d970377aa14..79753891b0ed 100644 --- a/azurerm/internal/services/dns/parse/dns_zone_test.go +++ b/azurerm/internal/services/dns/parse/dns_zone_test.go @@ -1,73 +1,112 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" ) -func TestDnsZoneId(t *testing.T) { +var _ resourceid.Formatter = DnsZoneId{} + +func TestDnsZoneIDFormatter(t *testing.T) { + actual := NewDnsZoneID("12345678-1234-9876-4563-123456789012", "resGroup1", "zone1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestDnsZoneID(t *testing.T) { testData := []struct { - Name string Input string + Error bool Expected *DnsZoneId }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + { - Name: "Empty", - Input: "", - Expected: nil, + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, }, + { - Name: "Missing DNS Zones Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", - Expected: nil, + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", + Error: true, }, + { - Name: "DNS Zone ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1", + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1", Expected: &DnsZoneId{ - Name: "zone1", - ResourceGroup: "resGroup1", + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "zone1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/Dnszones/zone1", - Expected: nil, + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/DNSZONES/ZONE1", + Error: true, }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + 
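The new String() helper on DnsZoneId is not exercised by the generated test; as a sketch of what it produces (placeholder names, assumed to compile inside the parse package):

package parse

import "fmt"

// Example-style sketch of the String() output for the reworked DnsZoneId.
func ExampleDnsZoneId_String() {
	id := NewDnsZoneID("12345678-1234-9876-4563-123456789012", "resGroup1", "zone1")
	fmt.Println(id.String())
	// Output: Dns Zone: (Name "zone1" / Resource Group "resGroup1")
}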
t.Logf("[DEBUG] Testing %q", v.Input) actual, err := DnsZoneID(v.Input) if err != nil { - if v.Expected == nil { + if v.Error { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + if v.Error { + t.Fatal("Expect an error but didn't get one") } + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) } } } diff --git a/azurerm/internal/services/dns/parse/mx_record.go b/azurerm/internal/services/dns/parse/mx_record.go new file mode 100644 index 000000000000..7d1bdd85e4b8 --- /dev/null +++ b/azurerm/internal/services/dns/parse/mx_record.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type MxRecordId struct { + SubscriptionId string + ResourceGroup string + DnszoneName string + MXName string +} + +func NewMxRecordID(subscriptionId, resourceGroup, dnszoneName, mXName string) MxRecordId { + return MxRecordId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + DnszoneName: dnszoneName, + MXName: mXName, + } +} + +func (id MxRecordId) String() string { + segments := []string{ + fmt.Sprintf("M X Name %q", id.MXName), + fmt.Sprintf("Dnszone Name %q", id.DnszoneName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Mx Record", segmentsStr) +} + +func (id MxRecordId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/dnszones/%s/MX/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.DnszoneName, id.MXName) +} + +// MxRecordID parses a MxRecord ID into an MxRecordId struct +func MxRecordID(input string) (*MxRecordId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := MxRecordId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.DnszoneName, err = id.PopSegment("dnszones"); err != nil { + return nil, err + } + if resourceId.MXName, err = id.PopSegment("MX"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/dns/parse/mx_record_test.go b/azurerm/internal/services/dns/parse/mx_record_test.go new file mode 100644 index 000000000000..c5719838507e --- /dev/null +++ b/azurerm/internal/services/dns/parse/mx_record_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' 
- manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = MxRecordId{} + +func TestMxRecordIDFormatter(t *testing.T) { + actual := NewMxRecordID("12345678-1234-9876-4563-123456789012", "resGroup1", "zone1", "mx1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/MX/mx1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestMxRecordID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *MxRecordId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", + Error: true, + }, + + { + // missing MXName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/", + Error: true, + }, + + { + // missing value for MXName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/MX/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/MX/mx1", + Expected: &MxRecordId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + DnszoneName: "zone1", + MXName: "mx1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/DNSZONES/ZONE1/MX/MX1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := MxRecordID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.DnszoneName != v.Expected.DnszoneName { + t.Fatalf("Expected %q but got %q for DnszoneName", v.Expected.DnszoneName, actual.DnszoneName) + } + if actual.MXName != v.Expected.MXName { + t.Fatalf("Expected %q but got %q for MXName", v.Expected.MXName, actual.MXName) + } + } +} diff --git a/azurerm/internal/services/dns/parse/ns_record.go b/azurerm/internal/services/dns/parse/ns_record.go new file mode 100644 index 000000000000..32ca19c85b14 --- /dev/null +++ b/azurerm/internal/services/dns/parse/ns_record.go @@ 
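All of the generated tests expect upper-cased IDs to fail, which reflects case-sensitive segment matching in the shared ParseAzureResourceID/PopSegment helpers (an assumption here, but consistent with the test expectations). A minimal standalone check of that behaviour for the MX parser, again assuming it lives in the parse package:

package parse

import "testing"

// Minimal sketch: an upper-cased ID should be rejected rather than silently
// normalised, matching the generated test cases.
func TestMxRecordIDRejectsUpperCasedInput(t *testing.T) {
	upper := "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/DNSZONES/ZONE1/MX/MX1"

	if _, err := MxRecordID(upper); err == nil {
		t.Fatal("expected an error for an upper-cased ID, got none")
	}
}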
-0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type NsRecordId struct { + SubscriptionId string + ResourceGroup string + DnszoneName string + NSName string +} + +func NewNsRecordID(subscriptionId, resourceGroup, dnszoneName, nSName string) NsRecordId { + return NsRecordId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + DnszoneName: dnszoneName, + NSName: nSName, + } +} + +func (id NsRecordId) String() string { + segments := []string{ + fmt.Sprintf("N S Name %q", id.NSName), + fmt.Sprintf("Dnszone Name %q", id.DnszoneName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Ns Record", segmentsStr) +} + +func (id NsRecordId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/dnszones/%s/NS/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.DnszoneName, id.NSName) +} + +// NsRecordID parses a NsRecord ID into an NsRecordId struct +func NsRecordID(input string) (*NsRecordId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := NsRecordId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.DnszoneName, err = id.PopSegment("dnszones"); err != nil { + return nil, err + } + if resourceId.NSName, err = id.PopSegment("NS"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/dns/parse/ns_record_test.go b/azurerm/internal/services/dns/parse/ns_record_test.go new file mode 100644 index 000000000000..bbd50ff5e82f --- /dev/null +++ b/azurerm/internal/services/dns/parse/ns_record_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = NsRecordId{} + +func TestNsRecordIDFormatter(t *testing.T) { + actual := NewNsRecordID("12345678-1234-9876-4563-123456789012", "resGroup1", "zone1", "ns1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/NS/ns1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestNsRecordID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *NsRecordId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing DnszoneName + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", + Error: true, + }, + + { + // missing NSName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/", + Error: true, + }, + + { + // missing value for NSName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/NS/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/NS/ns1", + Expected: &NsRecordId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + DnszoneName: "zone1", + NSName: "ns1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/DNSZONES/ZONE1/NS/NS1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := NsRecordID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.DnszoneName != v.Expected.DnszoneName { + t.Fatalf("Expected %q but got %q for DnszoneName", v.Expected.DnszoneName, actual.DnszoneName) + } + if actual.NSName != v.Expected.NSName { + t.Fatalf("Expected %q but got %q for NSName", v.Expected.NSName, actual.NSName) + } + } +} diff --git a/azurerm/internal/services/dns/parse/ptr_record.go b/azurerm/internal/services/dns/parse/ptr_record.go new file mode 100644 index 000000000000..250898b575c2 --- /dev/null +++ b/azurerm/internal/services/dns/parse/ptr_record.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type PtrRecordId struct { + SubscriptionId string + ResourceGroup string + DnszoneName string + PTRName string +} + +func NewPtrRecordID(subscriptionId, resourceGroup, dnszoneName, pTRName string) PtrRecordId { + return PtrRecordId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + DnszoneName: dnszoneName, + PTRName: pTRName, + } +} + +func (id PtrRecordId) String() string { + segments := []string{ + fmt.Sprintf("P T R Name %q", id.PTRName), + fmt.Sprintf("Dnszone Name %q", id.DnszoneName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Ptr Record", segmentsStr) +} + +func (id PtrRecordId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/dnszones/%s/PTR/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.DnszoneName, id.PTRName) +} + +// PtrRecordID parses a PtrRecord ID into 
an PtrRecordId struct +func PtrRecordID(input string) (*PtrRecordId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := PtrRecordId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.DnszoneName, err = id.PopSegment("dnszones"); err != nil { + return nil, err + } + if resourceId.PTRName, err = id.PopSegment("PTR"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/dns/parse/ptr_record_test.go b/azurerm/internal/services/dns/parse/ptr_record_test.go new file mode 100644 index 000000000000..644a91f52eb1 --- /dev/null +++ b/azurerm/internal/services/dns/parse/ptr_record_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = PtrRecordId{} + +func TestPtrRecordIDFormatter(t *testing.T) { + actual := NewPtrRecordID("12345678-1234-9876-4563-123456789012", "resGroup1", "zone1", "ptr1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/PTR/ptr1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestPtrRecordID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *PtrRecordId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", + Error: true, + }, + + { + // missing PTRName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/", + Error: true, + }, + + { + // missing value for PTRName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/PTR/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/PTR/ptr1", + Expected: &PtrRecordId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + DnszoneName: "zone1", + PTRName: "ptr1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/DNSZONES/ZONE1/PTR/PTR1", + Error: 
true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := PtrRecordID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.DnszoneName != v.Expected.DnszoneName { + t.Fatalf("Expected %q but got %q for DnszoneName", v.Expected.DnszoneName, actual.DnszoneName) + } + if actual.PTRName != v.Expected.PTRName { + t.Fatalf("Expected %q but got %q for PTRName", v.Expected.PTRName, actual.PTRName) + } + } +} diff --git a/azurerm/internal/services/dns/parse/srv_record.go b/azurerm/internal/services/dns/parse/srv_record.go new file mode 100644 index 000000000000..fd3fdb98f95b --- /dev/null +++ b/azurerm/internal/services/dns/parse/srv_record.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type SrvRecordId struct { + SubscriptionId string + ResourceGroup string + DnszoneName string + SRVName string +} + +func NewSrvRecordID(subscriptionId, resourceGroup, dnszoneName, sRVName string) SrvRecordId { + return SrvRecordId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + DnszoneName: dnszoneName, + SRVName: sRVName, + } +} + +func (id SrvRecordId) String() string { + segments := []string{ + fmt.Sprintf("S R V Name %q", id.SRVName), + fmt.Sprintf("Dnszone Name %q", id.DnszoneName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Srv Record", segmentsStr) +} + +func (id SrvRecordId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/dnszones/%s/SRV/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.DnszoneName, id.SRVName) +} + +// SrvRecordID parses a SrvRecord ID into an SrvRecordId struct +func SrvRecordID(input string) (*SrvRecordId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := SrvRecordId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.DnszoneName, err = id.PopSegment("dnszones"); err != nil { + return nil, err + } + if resourceId.SRVName, err = id.PopSegment("SRV"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/dns/parse/srv_record_test.go b/azurerm/internal/services/dns/parse/srv_record_test.go new file mode 100644 index 000000000000..cad22cd6c9b8 --- /dev/null +++ b/azurerm/internal/services/dns/parse/srv_record_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be 
overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = SrvRecordId{} + +func TestSrvRecordIDFormatter(t *testing.T) { + actual := NewSrvRecordID("12345678-1234-9876-4563-123456789012", "resGroup1", "zone1", "srv1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/SRV/srv1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestSrvRecordID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *SrvRecordId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", + Error: true, + }, + + { + // missing SRVName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/", + Error: true, + }, + + { + // missing value for SRVName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/SRV/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/SRV/srv1", + Expected: &SrvRecordId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + DnszoneName: "zone1", + SRVName: "srv1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/DNSZONES/ZONE1/SRV/SRV1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := SrvRecordID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.DnszoneName != v.Expected.DnszoneName { + t.Fatalf("Expected %q but got %q for DnszoneName", v.Expected.DnszoneName, actual.DnszoneName) + } + if actual.SRVName != v.Expected.SRVName { + t.Fatalf("Expected %q but got %q for SRVName", v.Expected.SRVName, actual.SRVName) + } + } +} diff --git a/azurerm/internal/services/dns/parse/txt_record.go b/azurerm/internal/services/dns/parse/txt_record.go new file mode 100644 index 000000000000..a58fbc3ec43f --- /dev/null +++ b/azurerm/internal/services/dns/parse/txt_record.go @@ 
-0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type TxtRecordId struct { + SubscriptionId string + ResourceGroup string + DnszoneName string + TXTName string +} + +func NewTxtRecordID(subscriptionId, resourceGroup, dnszoneName, tXTName string) TxtRecordId { + return TxtRecordId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + DnszoneName: dnszoneName, + TXTName: tXTName, + } +} + +func (id TxtRecordId) String() string { + segments := []string{ + fmt.Sprintf("T X T Name %q", id.TXTName), + fmt.Sprintf("Dnszone Name %q", id.DnszoneName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Txt Record", segmentsStr) +} + +func (id TxtRecordId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/dnszones/%s/TXT/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.DnszoneName, id.TXTName) +} + +// TxtRecordID parses a TxtRecord ID into an TxtRecordId struct +func TxtRecordID(input string) (*TxtRecordId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := TxtRecordId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.DnszoneName, err = id.PopSegment("dnszones"); err != nil { + return nil, err + } + if resourceId.TXTName, err = id.PopSegment("TXT"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/dns/parse/txt_record_test.go b/azurerm/internal/services/dns/parse/txt_record_test.go new file mode 100644 index 000000000000..4f4ab472c226 --- /dev/null +++ b/azurerm/internal/services/dns/parse/txt_record_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = TxtRecordId{} + +func TestTxtRecordIDFormatter(t *testing.T) { + actual := NewTxtRecordID("12345678-1234-9876-4563-123456789012", "resGroup1", "zone1", "txt1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/TXT/txt1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestTxtRecordID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *TxtRecordId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing 
DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", + Error: true, + }, + + { + // missing TXTName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/", + Error: true, + }, + + { + // missing value for TXTName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/TXT/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/TXT/txt1", + Expected: &TxtRecordId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + DnszoneName: "zone1", + TXTName: "txt1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/DNSZONES/ZONE1/TXT/TXT1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := TxtRecordID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.DnszoneName != v.Expected.DnszoneName { + t.Fatalf("Expected %q but got %q for DnszoneName", v.Expected.DnszoneName, actual.DnszoneName) + } + if actual.TXTName != v.Expected.TXTName { + t.Fatalf("Expected %q but got %q for TXTName", v.Expected.TXTName, actual.TXTName) + } + } +} diff --git a/azurerm/internal/services/dns/registration.go b/azurerm/internal/services/dns/registration.go index 841f7e418f26..16e24982ae63 100644 --- a/azurerm/internal/services/dns/registration.go +++ b/azurerm/internal/services/dns/registration.go @@ -21,21 +21,22 @@ func (r Registration) WebsiteCategories() []string { // SupportedDataSources returns the supported Data Sources supported by this Service func (r Registration) SupportedDataSources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_dns_zone": dataSourceArmDnsZone(), + "azurerm_dns_zone": dataSourceDnsZone(), } } // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_dns_a_record": resourceArmDnsARecord(), - "azurerm_dns_aaaa_record": resourceArmDnsAAAARecord(), - "azurerm_dns_caa_record": resourceArmDnsCaaRecord(), - "azurerm_dns_cname_record": resourceArmDnsCNameRecord(), - "azurerm_dns_mx_record": resourceArmDnsMxRecord(), - "azurerm_dns_ns_record": resourceArmDnsNsRecord(), - "azurerm_dns_ptr_record": resourceArmDnsPtrRecord(), - "azurerm_dns_srv_record": resourceArmDnsSrvRecord(), - "azurerm_dns_txt_record": resourceArmDnsTxtRecord(), - "azurerm_dns_zone": resourceArmDnsZone()} + "azurerm_dns_a_record": resourceDnsARecord(), + 
"azurerm_dns_aaaa_record": resourceDnsAAAARecord(), + "azurerm_dns_caa_record": resourceDnsCaaRecord(), + "azurerm_dns_cname_record": resourceDnsCNameRecord(), + "azurerm_dns_mx_record": resourceDnsMxRecord(), + "azurerm_dns_ns_record": resourceDnsNsRecord(), + "azurerm_dns_ptr_record": resourceDnsPtrRecord(), + "azurerm_dns_srv_record": resourceDnsSrvRecord(), + "azurerm_dns_txt_record": resourceDnsTxtRecord(), + "azurerm_dns_zone": resourceDnsZone(), + } } diff --git a/azurerm/internal/services/dns/resourceids.go b/azurerm/internal/services/dns/resourceids.go new file mode 100644 index 000000000000..4ad419bec34f --- /dev/null +++ b/azurerm/internal/services/dns/resourceids.go @@ -0,0 +1,12 @@ +package dns + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=DnsZone -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=ARecord -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/A/eh1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=AaaaRecord -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/AAAA/eheh1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=CaaRecord -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/CAA/caa1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=CnameRecord -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/CNAME/name1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=MxRecord -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/MX/mx1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=NsRecord -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/NS/ns1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=PtrRecord -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/PTR/ptr1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=SrvRecord -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/SRV/srv1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=TxtRecord -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/TXT/txt1 diff --git a/azurerm/internal/services/dns/tests/dns_a_record_resource_test.go b/azurerm/internal/services/dns/tests/dns_a_record_resource_test.go deleted file mode 100644 index 8bcddeb71829..000000000000 --- a/azurerm/internal/services/dns/tests/dns_a_record_resource_test.go +++ /dev/null @@ -1,510 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/dns/mgmt/2018-05-01/dns" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" -) - -func TestAccAzureRMDnsARecord_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_a_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsARecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsARecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsARecordExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "fqdn"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDnsARecord_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_a_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsARecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsARecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsARecordExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMDnsARecord_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_dns_a_record"), - }, - }, - }) -} - -func TestAccAzureRMDnsARecord_updateRecords(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_a_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsARecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsARecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsARecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "records.#", "2"), - ), - }, - { - Config: testAccAzureRMDnsARecord_updateRecords(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsARecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "records.#", "3"), - ), - }, - }, - }) -} - -func TestAccAzureRMDnsARecord_withTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_a_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsARecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsARecord_withTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsARecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - ), - }, - { - Config: testAccAzureRMDnsARecord_withTagsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsARecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDnsARecord_withAlias(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_a_record", "test") - targetResourceName := "azurerm_public_ip.test" - targetResourceName2 := "azurerm_public_ip.test2" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: 
acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsARecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsARecord_withAlias(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsARecordExists(data.ResourceName), - resource.TestCheckResourceAttrPair(data.ResourceName, "target_resource_id", targetResourceName, "id"), - ), - }, - { - Config: testAccAzureRMDnsARecord_withAliasUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsARecordExists(data.ResourceName), - resource.TestCheckResourceAttrPair(data.ResourceName, "target_resource_id", targetResourceName2, "id"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDnsARecord_RecordsToAlias(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_a_record", "test") - targetResourceName := "azurerm_public_ip.test" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsARecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsARecord_AliasToRecordsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsARecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "records.#", "2"), - ), - }, - { - Config: testAccAzureRMDnsARecord_AliasToRecords(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsARecordExists(data.ResourceName), - resource.TestCheckResourceAttrPair(data.ResourceName, "target_resource_id", targetResourceName, "id"), - resource.TestCheckResourceAttr(data.ResourceName, "records.#", "0"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDnsARecord_AliasToRecords(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_a_record", "test") - targetResourceName := "azurerm_public_ip.test" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsARecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsARecord_AliasToRecords(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsARecordExists(data.ResourceName), - resource.TestCheckResourceAttrPair(data.ResourceName, "target_resource_id", targetResourceName, "id"), - ), - }, - { - Config: testAccAzureRMDnsARecord_AliasToRecordsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsARecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "records.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "target_resource_id", ""), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMDnsARecordExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Dns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.DnsARecordID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.A) - if err != nil { - return fmt.Errorf("Bad: Get A RecordSet: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: DNS A record %s (resource group: %s) 
does not exist", id.Name, id.ResourceGroup) - } - - return nil - } -} - -func testCheckAzureRMDnsARecordDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Dns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_dns_a_record" { - continue - } - - id, err := parse.DnsARecordID(rs.Primary.ID) - if err != nil { - return err - } - resp, err := conn.Get(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.A) - - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return nil - } - - return err - } - - return fmt.Errorf("DNS A record still exists:\n%#v", resp.RecordSetProperties) - } - - return nil -} - -func testAccAzureRMDnsARecord_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_a_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - records = ["1.2.3.4", "1.2.4.5"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsARecord_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMDnsARecord_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_dns_a_record" "import" { - name = azurerm_dns_a_record.test.name - resource_group_name = azurerm_dns_a_record.test.resource_group_name - zone_name = azurerm_dns_a_record.test.zone_name - ttl = 300 - records = ["1.2.3.4", "1.2.4.5"] -} -`, template) -} - -func testAccAzureRMDnsARecord_updateRecords(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_a_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - records = ["1.2.3.4", "1.2.4.5", "1.2.3.7"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsARecord_withTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_a_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - records = ["1.2.3.4", "1.2.4.5"] - - tags = { - environment = "Production" - cost_center = "MSFT" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsARecord_withTagsUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - 
name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_a_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - records = ["1.2.3.4", "1.2.4.5"] - - tags = { - environment = "staging" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsARecord_withAlias(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_public_ip" "test" { - name = "mypublicip%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Dynamic" - ip_version = "IPv4" -} - -resource "azurerm_dns_a_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - target_resource_id = azurerm_public_ip.test.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsARecord_withAliasUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_public_ip" "test2" { - name = "mypublicip%d2" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Dynamic" - ip_version = "IPv4" -} - -resource "azurerm_dns_a_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - target_resource_id = azurerm_public_ip.test2.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsARecord_AliasToRecords(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_public_ip" "test" { - name = "mypublicip%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Dynamic" - ip_version = "IPv4" -} - -resource "azurerm_dns_a_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - target_resource_id = azurerm_public_ip.test.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsARecord_AliasToRecordsUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - 
resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_a_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - records = ["1.2.3.4", "1.2.4.5"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/dns/tests/dns_aaaa_record_resource_test.go b/azurerm/internal/services/dns/tests/dns_aaaa_record_resource_test.go deleted file mode 100644 index 0cc4b3447e89..000000000000 --- a/azurerm/internal/services/dns/tests/dns_aaaa_record_resource_test.go +++ /dev/null @@ -1,563 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/dns/mgmt/2018-05-01/dns" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" -) - -func TestAccAzureRMDnsAAAARecord_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_aaaa_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsAaaaRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsAAAARecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsAaaaRecordExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "fqdn"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDnsAAAARecord_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_aaaa_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsAaaaRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsAAAARecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsAaaaRecordExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMDnsAAAARecord_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_dns_aaaa_record"), - }, - }, - }) -} - -func TestAccAzureRMDnsAAAARecord_updateRecords(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_aaaa_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsAaaaRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsAAAARecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsAaaaRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "records.#", "2"), - ), - }, - { - Config: testAccAzureRMDnsAAAARecord_updateRecords(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsAaaaRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "records.#", "3"), - ), - }, - }, - }) -} - -func TestAccAzureRMDnsAAAARecord_withTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_aaaa_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: 
func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsAaaaRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsAAAARecord_withTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsAaaaRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - ), - }, - { - Config: testAccAzureRMDnsAAAARecord_withTagsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsAaaaRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDnsAAAARecord_withAlias(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_aaaa_record", "test") - targetResourceName := "azurerm_public_ip.test" - targetResourceName2 := "azurerm_public_ip.test2" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsAaaaRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsAAAARecord_withAlias(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsAaaaRecordExists(data.ResourceName), - resource.TestCheckResourceAttrPair(data.ResourceName, "target_resource_id", targetResourceName, "id"), - ), - }, - { - Config: testAccAzureRMDnsAAAARecord_withAliasUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsAaaaRecordExists(data.ResourceName), - resource.TestCheckResourceAttrPair(data.ResourceName, "target_resource_id", targetResourceName2, "id"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDnsAAAARecord_RecordsToAlias(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_aaaa_record", "test") - targetResourceName := "azurerm_public_ip.test" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsAaaaRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsAaaaRecord_AliasToRecordsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsAaaaRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "records.#", "2"), - ), - }, - { - Config: testAccAzureRMDnsAaaaRecord_AliasToRecords(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsAaaaRecordExists(data.ResourceName), - resource.TestCheckResourceAttrPair(data.ResourceName, "target_resource_id", targetResourceName, "id"), - resource.TestCheckNoResourceAttr(data.ResourceName, "records"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDnsAaaaRecord_AliasToRecords(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_aaaa_record", "test") - targetResourceName := "azurerm_public_ip.test" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsAaaaRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsAaaaRecord_AliasToRecords(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsAaaaRecordExists(data.ResourceName), - resource.TestCheckResourceAttrPair(data.ResourceName, "target_resource_id", targetResourceName, "id"), - ), - }, - { - Config: testAccAzureRMDnsAaaaRecord_AliasToRecordsUpdate(data), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMDnsAaaaRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "records.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "target_resource_id", ""), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDnsAAAARecord_uncompressed(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_aaaa_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsAaaaRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsAAAARecord_uncompressed(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsAaaaRecordExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "fqdn"), - ), - }, - { - Config: testAccAzureRMDnsAAAARecord_uncompressed(data), // just use the same for updating - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsAaaaRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "records.#", "2"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMDnsAaaaRecordExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Dns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.DnsAaaaRecordID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.AAAA) - if err != nil { - return fmt.Errorf("Bad: Get AAAA RecordSet: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: DNS AAAA record %s (resource group: %s) does not exist", id.Name, id.ResourceGroup) - } - - return nil - } -} - -func testCheckAzureRMDnsAaaaRecordDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Dns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_dns_aaaa_record" { - continue - } - - id, err := parse.DnsAaaaRecordID(rs.Primary.ID) - if err != nil { - return err - } - resp, err := conn.Get(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.AAAA) - - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return nil - } - - return err - } - - return fmt.Errorf("DNS AAAA record still exists:\n%#v", resp.RecordSetProperties) - } - - return nil -} - -func testAccAzureRMDnsAAAARecord_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_aaaa_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - records = ["2607:f8b0:4009:1803::1005", "2607:f8b0:4009:1803::1006"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsAAAARecord_requiresImport(data acceptance.TestData) string { 
- template := testAccAzureRMDnsAAAARecord_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_dns_aaaa_record" "import" { - name = azurerm_dns_aaaa_record.test.name - resource_group_name = azurerm_dns_aaaa_record.test.resource_group_name - zone_name = azurerm_dns_aaaa_record.test.zone_name - ttl = 300 - records = ["2607:f8b0:4009:1803::1005", "2607:f8b0:4009:1803::1006"] -} -`, template) -} - -func testAccAzureRMDnsAAAARecord_updateRecords(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_aaaa_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - records = ["2607:f8b0:4009:1803::1005", "2607:f8b0:4009:1803::1006", "::1"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsAAAARecord_withTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_aaaa_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - records = ["2607:f8b0:4009:1803::1005", "2607:f8b0:4009:1803::1006"] - - tags = { - environment = "Production" - cost_center = "MSFT" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsAAAARecord_withTagsUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_aaaa_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - records = ["2607:f8b0:4009:1803::1005", "2607:f8b0:4009:1803::1006"] - - tags = { - environment = "staging" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsAAAARecord_withAlias(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_public_ip" "test" { - name = "mypublicip%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Dynamic" - ip_version = "IPv6" -} - -resource "azurerm_dns_aaaa_record" "test" { - name = "myaaaarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - target_resource_id = azurerm_public_ip.test.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, 
data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsAAAARecord_withAliasUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_public_ip" "test2" { - name = "mypublicip%d2" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Dynamic" - ip_version = "IPv6" -} - -resource "azurerm_dns_aaaa_record" "test" { - name = "myaaaarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - target_resource_id = azurerm_public_ip.test2.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsAaaaRecord_AliasToRecords(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_public_ip" "test" { - name = "mypublicip%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Dynamic" - ip_version = "IPv6" -} - -resource "azurerm_dns_aaaa_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - target_resource_id = azurerm_public_ip.test.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsAaaaRecord_AliasToRecordsUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_aaaa_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - records = ["3a62:353:8885:293c:a218:45cc:9ee9:4e27", "3a62:353:8885:293c:a218:45cc:9ee9:4e28"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsAAAARecord_uncompressed(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_aaaa_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - records = ["2607:f8b0:4005:0800:0000:0000:0000:1003", "2201:1234:1234::1"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/dns/tests/dns_caa_record_resource_test.go b/azurerm/internal/services/dns/tests/dns_caa_record_resource_test.go deleted 
file mode 100644 index 74dff2fc8c15..000000000000 --- a/azurerm/internal/services/dns/tests/dns_caa_record_resource_test.go +++ /dev/null @@ -1,392 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/dns/mgmt/2018-05-01/dns" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" -) - -func TestAccAzureRMDnsCaaRecord_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_caa_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsCaaRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsCaaRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsCaaRecordExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "fqdn"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDnsCaaRecord_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_caa_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsCaaRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsCaaRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsCaaRecordExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMDnsCaaRecord_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_dns_caa_record"), - }, - }, - }) -} - -func TestAccAzureRMDnsCaaRecord_updateRecords(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_caa_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsCaaRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsCaaRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsCaaRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "record.#", "4"), - ), - }, - { - Config: testAccAzureRMDnsCaaRecord_updateRecords(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsCaaRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "record.#", "5"), - ), - }, - }, - }) -} - -func TestAccAzureRMDnsCaaRecord_withTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_caa_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsCaaRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsCaaRecord_withTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsCaaRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - ), - }, - { - Config: testAccAzureRMDnsCaaRecord_withTagsUpdate(data), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMDnsCaaRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMDnsCaaRecordExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Dns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.DnsCaaRecordID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.CAA) - if err != nil { - return fmt.Errorf("Bad: Get CAA RecordSet: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: DNS CAA record %s (resource group: %s) does not exist", id.Name, id.ResourceGroup) - } - - return nil - } -} - -func testCheckAzureRMDnsCaaRecordDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Dns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_dns_caa_record" { - continue - } - - id, err := parse.DnsCaaRecordID(rs.Primary.ID) - if err != nil { - return err - } - resp, err := conn.Get(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.CAA) - - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return nil - } - - return err - } - - return fmt.Errorf("DNS CAA record still exists:\n%#v", resp.RecordSetProperties) - } - - return nil -} - -func testAccAzureRMDnsCaaRecord_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_caa_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - - record { - flags = 0 - tag = "issue" - value = "example.com" - } - - record { - flags = 0 - tag = "issue" - value = "example.net" - } - - record { - flags = 1 - tag = "issuewild" - value = ";" - } - - record { - flags = 0 - tag = "iodef" - value = "mailto:terraform@nonexist.tld" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsCaaRecord_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMDnsCaaRecord_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_dns_caa_record" "import" { - name = azurerm_dns_caa_record.test.name - resource_group_name = azurerm_dns_caa_record.test.resource_group_name - zone_name = azurerm_dns_caa_record.test.zone_name - ttl = 300 - - record { - flags = 0 - tag = "issue" - value = "example.com" - } - - record { - flags = 0 - tag = "issue" - value = "example.net" - } - - record { - flags = 1 - tag = "issuewild" - value = ";" - } - - record { - flags = 0 - tag = "iodef" - value = "mailto:terraform@nonexist.tld" - } -} -`, template) -} - -func testAccAzureRMDnsCaaRecord_updateRecords(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource 
"azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_caa_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - - record { - flags = 0 - tag = "issue" - value = "example.com" - } - - record { - flags = 0 - tag = "issue" - value = "example.net" - } - - record { - flags = 1 - tag = "issuewild" - value = ";" - } - - record { - flags = 0 - tag = "iodef" - value = "mailto:terraform@nonexist.tld" - } - - record { - flags = 0 - tag = "issue" - value = "letsencrypt.org" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsCaaRecord_withTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_caa_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - - record { - flags = 0 - tag = "issue" - value = "example.net" - } - - record { - flags = 1 - tag = "issuewild" - value = ";" - } - - tags = { - environment = "Production" - cost_center = "MSFT" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsCaaRecord_withTagsUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_caa_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - - record { - flags = 0 - tag = "issue" - value = "example.net" - } - - record { - flags = 1 - tag = "issuewild" - value = ";" - } - - tags = { - environment = "staging" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/dns/tests/dns_cname_record_resource_test.go b/azurerm/internal/services/dns/tests/dns_cname_record_resource_test.go deleted file mode 100644 index 47de7d244ff2..000000000000 --- a/azurerm/internal/services/dns/tests/dns_cname_record_resource_test.go +++ /dev/null @@ -1,553 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/dns/mgmt/2018-05-01/dns" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" -) - -func TestAccAzureRMDnsCNameRecord_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_cname_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { 
acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsCNameRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsCNameRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsCNameRecordExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "fqdn"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDnsCNameRecord_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_cname_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsCNameRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsCNameRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsCNameRecordExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMDnsCNameRecord_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_dns_cname_record"), - }, - }, - }) -} - -func TestAccAzureRMDnsCNameRecord_subdomain(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_cname_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsCNameRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsCNameRecord_subdomain(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsCNameRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "record", "test.contoso.com"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDnsCNameRecord_updateRecords(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_cname_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsCNameRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsCNameRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsCNameRecordExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMDnsCNameRecord_updateRecords(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsCNameRecordExists(data.ResourceName), - ), - }, - }, - }) -} - -func TestAccAzureRMDnsCNameRecord_withTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_cname_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsCNameRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsCNameRecord_withTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsCNameRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - ), - }, - { - Config: testAccAzureRMDnsCNameRecord_withTagsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsCNameRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDnsCNameRecord_withAlias(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_cname_record", "test") - targetResourceName := "azurerm_dns_cname_record.target" - 
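// The withAlias tests below pair an attribute on one resource with an
// attribute on another: the record under test must expose the target record's
// id as its target_resource_id. A minimal standalone sketch of that
// "attribute pair" comparison, using a plain map as a stand-in for Terraform
// state (the real steps use resource.TestCheckResourceAttrPair from the
// terraform-plugin-sdk; the state values below are illustrative only):
package main

import "fmt"

// checkAttrPair returns an error unless attribute keyA on resourceA matches
// attribute keyB on resourceB in the given fake state.
func checkAttrPair(state map[string]map[string]string, resourceA, keyA, resourceB, keyB string) error {
	a := state[resourceA][keyA]
	b := state[resourceB][keyB]
	if a != b {
		return fmt.Errorf("%s.%s = %q, want the value of %s.%s (%q)", resourceA, keyA, a, resourceB, keyB, b)
	}
	return nil
}

func main() {
	state := map[string]map[string]string{
		"azurerm_dns_cname_record.target": {"id": "/subscriptions/.../dnszones/example.com/CNAME/mycnametarget"},
		"azurerm_dns_cname_record.test":   {"target_resource_id": "/subscriptions/.../dnszones/example.com/CNAME/mycnametarget"},
	}
	// Prints <nil> because test.target_resource_id mirrors target.id.
	fmt.Println(checkAttrPair(state, "azurerm_dns_cname_record.test", "target_resource_id", "azurerm_dns_cname_record.target", "id"))
}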
targetResourceName2 := "azurerm_dns_cname_record.target2" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsCNameRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsCNameRecord_withAlias(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsCNameRecordExists(data.ResourceName), - resource.TestCheckResourceAttrPair(data.ResourceName, "target_resource_id", targetResourceName, "id"), - ), - }, - { - Config: testAccAzureRMDnsCNameRecord_withAliasUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsCNameRecordExists(data.ResourceName), - resource.TestCheckResourceAttrPair(data.ResourceName, "target_resource_id", targetResourceName2, "id"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDnsCNameRecord_RecordToAlias(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_cname_record", "test") - targetResourceName := "azurerm_dns_cname_record.target2" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsCNameRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsCNameRecord_AliasToRecordUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsCNameRecordExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMDnsCNameRecord_AliasToRecord(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsCNameRecordExists(data.ResourceName), - resource.TestCheckResourceAttrPair(data.ResourceName, "target_resource_id", targetResourceName, "id"), - resource.TestCheckResourceAttr(data.ResourceName, "record", ""), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDnsCNameRecord_AliasToRecord(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_cname_record", "test") - targetResourceName := "azurerm_dns_cname_record.target2" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsCNameRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsCNameRecord_AliasToRecord(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsCNameRecordExists(data.ResourceName), - resource.TestCheckResourceAttrPair(data.ResourceName, "target_resource_id", targetResourceName, "id"), - ), - }, - { - Config: testAccAzureRMDnsCNameRecord_AliasToRecordUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsCNameRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "target_resource_id", ""), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMDnsCNameRecordExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Dns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.DnsCNameRecordID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.CNAME) - if err != nil { - return fmt.Errorf("Bad: Get CNAME RecordSet: 
%v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: DNS CNAME record %s (resource group: %s) does not exist", id.Name, id.ResourceGroup) - } - - return nil - } -} - -func testCheckAzureRMDnsCNameRecordDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Dns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_dns_cname_record" { - continue - } - - id, err := parse.DnsCNameRecordID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.CNAME) - - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return nil - } - - return err - } - - return fmt.Errorf("DNS CNAME record still exists:\n%#v", resp.RecordSetProperties) - } - - return nil -} - -func testAccAzureRMDnsCNameRecord_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_cname_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - record = "contoso.com" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsCNameRecord_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMDnsCNameRecord_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_dns_cname_record" "import" { - name = azurerm_dns_cname_record.test.name - resource_group_name = azurerm_dns_cname_record.test.resource_group_name - zone_name = azurerm_dns_cname_record.test.zone_name - ttl = 300 - record = "contoso.com" -} -`, template) -} - -func testAccAzureRMDnsCNameRecord_subdomain(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_cname_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - record = "test.contoso.com" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsCNameRecord_updateRecords(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_cname_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - record = "contoso.co.uk" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsCNameRecord_withTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - 
name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_cname_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - record = "contoso.com" - - tags = { - environment = "Production" - cost_center = "MSFT" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsCNameRecord_withTagsUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_cname_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - record = "contoso.com" - - tags = { - environment = "staging" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsCNameRecord_withAlias(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_cname_record" "target" { - name = "mycnametarget%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - record = "contoso.com" -} - -resource "azurerm_dns_cname_record" "test" { - name = "mycnamerecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - target_resource_id = azurerm_dns_cname_record.target.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsCNameRecord_withAliasUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_cname_record" "target2" { - name = "mycnametarget%d2" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - record = "contoso.co.uk" -} - -resource "azurerm_dns_cname_record" "test" { - name = "mycnamerecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - target_resource_id = azurerm_dns_cname_record.target2.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsCNameRecord_AliasToRecord(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_cname_record" "target2" { 
- name = "mycnametarget%d2" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - record = "contoso.co.uk" -} - -resource "azurerm_dns_cname_record" "test" { - name = "mycnamerecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - target_resource_id = azurerm_dns_cname_record.target2.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsCNameRecord_AliasToRecordUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_cname_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - record = "1.2.3.4" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/dns/tests/dns_mx_record_resource_test.go b/azurerm/internal/services/dns/tests/dns_mx_record_resource_test.go deleted file mode 100644 index b4b4e722c465..000000000000 --- a/azurerm/internal/services/dns/tests/dns_mx_record_resource_test.go +++ /dev/null @@ -1,400 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/dns/mgmt/2018-05-01/dns" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" -) - -func TestAccAzureRMDnsMxRecord_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_mx_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsMxRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsMxRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsMxRecordExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "fqdn"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDnsMxRecord_rootrecord(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_mx_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsMxRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsMxRecord_rootrecord(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsMxRecordExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "fqdn"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDnsMxRecord_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_mx_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: 
testCheckAzureRMDnsMxRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsMxRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsMxRecordExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMDnsMxRecord_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_dns_mx_record"), - }, - }, - }) -} - -func TestAccAzureRMDnsMxRecord_updateRecords(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_mx_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsMxRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsMxRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsMxRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "record.#", "2"), - ), - }, - { - Config: testAccAzureRMDnsMxRecord_updateRecords(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsMxRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "record.#", "3"), - ), - }, - }, - }) -} - -func TestAccAzureRMDnsMxRecord_withTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_mx_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsMxRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsMxRecord_withTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsMxRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - ), - }, - { - Config: testAccAzureRMDnsMxRecord_withTagsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsMxRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMDnsMxRecordExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Dns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.DnsMxRecordID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.MX) - if err != nil { - return fmt.Errorf("Bad: Get MX RecordSet: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: DNS MX record %s (resource group: %s) does not exist", id.Name, id.ResourceGroup) - } - - return nil - } -} - -func testCheckAzureRMDnsMxRecordDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Dns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_dns_mx_record" { - continue - } - - id, err := parse.DnsMxRecordID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.MX) - - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return nil - } - - return err - } - - 
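// Most destroy checks in these record tests reduce to the same verdict: after
// the config is destroyed, Get the record set; a 404 means the record is
// gone, any other error is surfaced, and a successful read means the record
// still exists. A minimal standalone sketch of that decision, with plain
// values standing in for the Azure SDK client and response types
// (destroyOutcome is an illustrative helper, not part of the provider):
package main

import (
	"errors"
	"fmt"
	"net/http"
)

// destroyOutcome maps a Get call's status code and error onto the
// destroy-check result used throughout these tests.
func destroyOutcome(statusCode int, getErr error) error {
	if getErr != nil {
		if statusCode == http.StatusNotFound {
			// The record set no longer exists: destroy succeeded.
			return nil
		}
		// Some other API failure: report it as-is.
		return getErr
	}
	// The Get succeeded, so the record set was not destroyed.
	return errors.New("DNS record set still exists")
}

func main() {
	fmt.Println(destroyOutcome(http.StatusNotFound, errors.New("not found"))) // <nil>
	fmt.Println(destroyOutcome(http.StatusOK, nil))                           // DNS record set still exists
}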
return fmt.Errorf("DNS MX record still exists:\n%#v", resp.RecordSetProperties) - } - - return nil -} - -func testAccAzureRMDnsMxRecord_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_mx_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - - record { - preference = "10" - exchange = "mail1.contoso.com" - } - - record { - preference = "20" - exchange = "mail2.contoso.com" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsMxRecord_rootrecord(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_mx_record" "test" { - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - - record { - preference = "10" - exchange = "mail1.contoso.com" - } - - record { - preference = "20" - exchange = "mail2.contoso.com" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMDnsMxRecord_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMDnsMxRecord_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_dns_mx_record" "import" { - name = azurerm_dns_mx_record.test.name - resource_group_name = azurerm_dns_mx_record.test.resource_group_name - zone_name = azurerm_dns_mx_record.test.zone_name - ttl = 300 - - record { - preference = "10" - exchange = "mail1.contoso.com" - } - - record { - preference = "20" - exchange = "mail2.contoso.com" - } -} -`, template) -} - -func testAccAzureRMDnsMxRecord_updateRecords(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_mx_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - - record { - preference = "10" - exchange = "mail1.contoso.com" - } - - record { - preference = "20" - exchange = "mail2.contoso.com" - } - - record { - preference = "50" - exchange = "mail3.contoso.com" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsMxRecord_withTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_mx_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - - record { - 
preference = "10" - exchange = "mail1.contoso.com" - } - - record { - preference = "20" - exchange = "mail2.contoso.com" - } - - tags = { - environment = "Production" - cost_center = "MSFT" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsMxRecord_withTagsUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_mx_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - - record { - preference = "10" - exchange = "mail1.contoso.com" - } - - record { - preference = "20" - exchange = "mail2.contoso.com" - } - - tags = { - environment = "staging" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/dns/tests/dns_ns_record_resource_test.go b/azurerm/internal/services/dns/tests/dns_ns_record_resource_test.go deleted file mode 100644 index 0fddb70a1ed5..000000000000 --- a/azurerm/internal/services/dns/tests/dns_ns_record_resource_test.go +++ /dev/null @@ -1,299 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/dns/mgmt/2018-05-01/dns" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" -) - -func TestAccAzureRMDnsNsRecord_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_ns_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsNsRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsNsRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsNsRecordExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "fqdn"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDnsNsRecord_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_ns_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsNsRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsNsRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsNsRecordExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMDnsNsRecord_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_dns_ns_record"), - }, - }, - }) -} - -func TestAccAzureRMDnsNsRecord_updateRecords(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_ns_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsNsRecordDestroy, - 
Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsNsRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsNsRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "records.#", "2"), - ), - }, - { - Config: testAccAzureRMDnsNsRecord_updateRecords(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsNsRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "records.#", "3"), - ), - }, - }, - }) -} - -func TestAccAzureRMDnsNsRecord_withTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_ns_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsNsRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsNsRecord_withTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsNsRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - ), - }, - { - Config: testAccAzureRMDnsNsRecord_withTagsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsNsRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMDnsNsRecordExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Dns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.DnsNsRecordID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.NS) - if err != nil { - return fmt.Errorf("Bad: Get DNS NS Record: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: DNS NS record %s (resource group: %s) does not exist", id.Name, id.ResourceGroup) - } - - return nil - } -} - -func testCheckAzureRMDnsNsRecordDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Dns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_dns_ns_record" { - continue - } - - id, err := parse.DnsNsRecordID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.NS) - - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("DNS NS Record still exists:\n%#v", resp.RecordSetProperties) - } - } - - return nil -} - -func testAccAzureRMDnsNsRecord_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_ns_record" "test" { - name = "mynsrecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - - records = ["ns1.contoso.com", "ns2.contoso.com"] -} -`, data.RandomInteger, 
data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsNsRecord_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMDnsNsRecord_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_dns_ns_record" "import" { - name = azurerm_dns_ns_record.test.name - resource_group_name = azurerm_dns_ns_record.test.resource_group_name - zone_name = azurerm_dns_ns_record.test.zone_name - ttl = 300 - - records = ["ns1.contoso.com", "ns2.contoso.com"] -} -`, template) -} - -func testAccAzureRMDnsNsRecord_updateRecords(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_ns_record" "test" { - name = "mynsrecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - - records = ["ns1.contoso.com", "ns2.contoso.com", "ns3.contoso.com"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsNsRecord_withTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_ns_record" "test" { - name = "mynsrecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - - records = ["ns1.contoso.com", "ns2.contoso.com"] - - tags = { - environment = "Production" - cost_center = "MSFT" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsNsRecord_withTagsUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_ns_record" "test" { - name = "mynsrecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - - records = ["ns1.contoso.com", "ns2.contoso.com"] - - tags = { - environment = "staging" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/dns/tests/dns_ptr_record_resource_test.go b/azurerm/internal/services/dns/tests/dns_ptr_record_resource_test.go deleted file mode 100644 index 4be0b93a6f17..000000000000 --- a/azurerm/internal/services/dns/tests/dns_ptr_record_resource_test.go +++ /dev/null @@ -1,298 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/dns/mgmt/2018-05-01/dns" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" -) - -func TestAccAzureRMDnsPtrRecord_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_ptr_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsPtrRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsPtrRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsPtrRecordExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "fqdn"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDnsPtrRecord_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_ptr_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsPtrRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsPtrRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsPtrRecordExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMDnsPtrRecord_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_dns_ptr_record"), - }, - }, - }) -} - -func TestAccAzureRMDnsPtrRecord_updateRecords(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_ptr_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsPtrRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsPtrRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsPtrRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "records.#", "2"), - ), - }, - - { - Config: testAccAzureRMDnsPtrRecord_updateRecords(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsPtrRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "records.#", "3"), - ), - }, - }, - }) -} - -func TestAccAzureRMDnsPtrRecord_withTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_ptr_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsPtrRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsPtrRecord_withTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsPtrRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - ), - }, - - { - Config: testAccAzureRMDnsPtrRecord_withTagsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsPtrRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMDnsPtrRecordExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Dns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) 
- } - - id, err := parse.DnsPtrRecordID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.PTR) - if err != nil { - return fmt.Errorf("Bad: Get PTR RecordSet: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: DNS PTR record %s (resource group: %s) does not exist", id.Name, id.ResourceGroup) - } - - return nil - } -} - -func testCheckAzureRMDnsPtrRecordDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Dns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_dns_ptr_record" { - continue - } - - id, err := parse.DnsPtrRecordID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.PTR) - - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return nil - } - - return err - } - - return fmt.Errorf("DNS PTR record still exists:\n%#v", resp) - } - - return nil -} - -func testAccAzureRMDnsPtrRecord_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_ptr_record" "test" { - name = "testptrrecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - records = ["hashicorp.com", "microsoft.com"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsPtrRecord_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMDnsPtrRecord_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_dns_ptr_record" "import" { - name = azurerm_dns_ptr_record.test.name - resource_group_name = azurerm_dns_ptr_record.test.resource_group_name - zone_name = azurerm_dns_ptr_record.test.zone_name - ttl = 300 - records = ["hashicorp.com", "microsoft.com"] -} -`, template) -} - -func testAccAzureRMDnsPtrRecord_updateRecords(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_ptr_record" "test" { - name = "testptrrecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - records = ["hashicorp.com", "microsoft.com", "reddit.com"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsPtrRecord_withTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_ptr_record" "test" { - name = "testptrrecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - records = ["hashicorp.com", "microsoft.com"] - 
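// The config helpers in these files build their HCL by passing a backtick
// template through fmt.Sprintf, substituting the test data's random integer
// and primary location into the %d and %s verbs. A minimal standalone sketch
// of that expansion with made-up inputs (12345 and "westeurope" are
// illustrative sample values, not anything the suite pins):
package main

import "fmt"

func main() {
	// Expands to a resource group named acctestRG-12345 in westeurope and a
	// zone named acctestzone12345.com, mirroring how data.RandomInteger and
	// data.Locations.Primary are consumed by the helpers in these tests.
	config := fmt.Sprintf(`
resource "azurerm_resource_group" "test" {
  name     = "acctestRG-%d"
  location = "%s"
}

resource "azurerm_dns_zone" "test" {
  name                = "acctestzone%d.com"
  resource_group_name = azurerm_resource_group.test.name
}
`, 12345, "westeurope", 12345)

	fmt.Println(config)
}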
- tags = { - environment = "Dev" - cost_center = "Ops" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsPtrRecord_withTagsUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_ptr_record" "test" { - name = "testptrrecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - records = ["hashicorp.com", "microsoft.com"] - - tags = { - environment = "Stage" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/dns/tests/dns_srv_record_resource_test.go b/azurerm/internal/services/dns/tests/dns_srv_record_resource_test.go deleted file mode 100644 index 0ee7724f46a8..000000000000 --- a/azurerm/internal/services/dns/tests/dns_srv_record_resource_test.go +++ /dev/null @@ -1,368 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/dns/mgmt/2018-05-01/dns" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" -) - -func TestAccAzureRMDnsSrvRecord_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_srv_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsSrvRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsSrvRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsSrvRecordExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "fqdn"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDnsSrvRecord_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_srv_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsSrvRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsSrvRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsSrvRecordExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMDnsSrvRecord_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_dns_srv_record"), - }, - }, - }) -} - -func TestAccAzureRMDnsSrvRecord_updateRecords(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_srv_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsSrvRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsSrvRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsSrvRecordExists(data.ResourceName), - 
resource.TestCheckResourceAttr(data.ResourceName, "record.#", "2"), - ), - }, - { - Config: testAccAzureRMDnsSrvRecord_updateRecords(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsSrvRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "record.#", "3"), - ), - }, - }, - }) -} - -func TestAccAzureRMDnsSrvRecord_withTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_srv_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsSrvRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsSrvRecord_withTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsSrvRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - ), - }, - { - Config: testAccAzureRMDnsSrvRecord_withTagsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsSrvRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMDnsSrvRecordExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Dns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.DnsSrvRecordID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.SRV) - if err != nil { - return fmt.Errorf("Bad: Get SRV RecordSet: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: DNS SRV record %s (resource group: %s) does not exist", id.Name, id.ResourceGroup) - } - - return nil - } -} - -func testCheckAzureRMDnsSrvRecordDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Dns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_dns_srv_record" { - continue - } - - id, err := parse.DnsSrvRecordID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.SRV) - - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return nil - } - - return err - } - - return fmt.Errorf("DNS SRV record still exists:\n%#v", resp.RecordSetProperties) - } - - return nil -} - -func testAccAzureRMDnsSrvRecord_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_srv_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - - record { - priority = 1 - weight = 5 - port = 8080 - target = "target1.contoso.com" - } - - record { - priority = 2 - weight = 25 - port = 8080 - target = "target2.contoso.com" - } -} -`, data.RandomInteger, 
data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsSrvRecord_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMDnsSrvRecord_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_dns_srv_record" "import" { - name = azurerm_dns_srv_record.test.name - resource_group_name = azurerm_dns_srv_record.test.resource_group_name - zone_name = azurerm_dns_srv_record.test.zone_name - ttl = 300 - - record { - priority = 1 - weight = 5 - port = 8080 - target = "target1.contoso.com" - } - - record { - priority = 2 - weight = 25 - port = 8080 - target = "target2.contoso.com" - } -} -`, template) -} - -func testAccAzureRMDnsSrvRecord_updateRecords(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_srv_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - - record { - priority = 1 - weight = 5 - port = 8080 - target = "target1.contoso.com" - } - - record { - priority = 2 - weight = 25 - port = 8080 - target = "target2.contoso.com" - } - - record { - priority = 3 - weight = 100 - port = 8080 - target = "target3.contoso.com" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsSrvRecord_withTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_srv_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - - record { - priority = 1 - weight = 5 - port = 8080 - target = "target1.contoso.com" - } - - record { - priority = 2 - weight = 25 - port = 8080 - target = "target2.contoso.com" - } - - tags = { - environment = "Production" - cost_center = "MSFT" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsSrvRecord_withTagsUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_srv_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - - record { - priority = 1 - weight = 5 - port = 8080 - target = "target1.contoso.com" - } - - record { - priority = 2 - weight = 25 - port = 8080 - target = "target2.contoso.com" - } - - tags = { - environment = "staging" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/dns/tests/dns_txt_record_resource_test.go b/azurerm/internal/services/dns/tests/dns_txt_record_resource_test.go deleted file mode 100644 index 29f5d2eb63fc..000000000000 --- 
a/azurerm/internal/services/dns/tests/dns_txt_record_resource_test.go +++ /dev/null @@ -1,335 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/dns/mgmt/2018-05-01/dns" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" -) - -func TestAccAzureRMDnsTxtRecord_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_txt_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsTxtRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsTxtRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsTxtRecordExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "fqdn"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDnsTxtRecord_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_txt_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsTxtRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsTxtRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsTxtRecordExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMDnsTxtRecord_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_dns_txt_record"), - }, - }, - }) -} - -func TestAccAzureRMDnsTxtRecord_updateRecords(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_txt_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsTxtRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsTxtRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsTxtRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "record.#", "2"), - ), - }, - { - Config: testAccAzureRMDnsTxtRecord_updateRecords(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsTxtRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "record.#", "3"), - ), - }, - }, - }) -} - -func TestAccAzureRMDnsTxtRecord_withTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_txt_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsTxtRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsTxtRecord_withTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsTxtRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - ), - }, - { - Config: testAccAzureRMDnsTxtRecord_withTagsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsTxtRecordExists(data.ResourceName), - 
resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMDnsTxtRecordExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Dns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.DnsTxtRecordID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.TXT) - if err != nil { - return fmt.Errorf("Bad: Get TXT RecordSet: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: DNS TXT record %s (resource group: %s) does not exist", id.Name, id.ResourceGroup) - } - - return nil - } -} - -func testCheckAzureRMDnsTxtRecordDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Dns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_dns_txt_record" { - continue - } - - id, err := parse.DnsTxtRecordID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.ZoneName, id.Name, dns.TXT) - - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return nil - } - - return err - } - - return fmt.Errorf("DNS TXT record still exists:\n%#v", resp.RecordSetProperties) - } - - return nil -} - -func testAccAzureRMDnsTxtRecord_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_txt_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - - record { - value = "Quick brown fox" - } - - record { - value = "A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......" 
- } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsTxtRecord_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMDnsTxtRecord_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_dns_txt_record" "import" { - name = azurerm_dns_txt_record.test.name - resource_group_name = azurerm_dns_txt_record.test.resource_group_name - zone_name = azurerm_dns_txt_record.test.zone_name - ttl = 300 - - record { - value = "Quick brown fox" - } - - record { - value = "A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......" - } -} -`, template) -} - -func testAccAzureRMDnsTxtRecord_updateRecords(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_txt_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - - record { - value = "Quick brown fox" - } - - record { - value = "A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......" 
- } - - record { - value = "A wild 3rd record appears" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsTxtRecord_withTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_txt_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - - record { - value = "Quick brown fox" - } - - record { - value = "Another test txt string" - } - - tags = { - environment = "Production" - cost_center = "MSFT" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDnsTxtRecord_withTagsUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_dns_txt_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_dns_zone.test.name - ttl = 300 - - record { - value = "Quick brown fox" - } - - record { - value = "Another test txt string" - } - - tags = { - environment = "staging" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/dns/tests/dns_zone_data_source_test.go b/azurerm/internal/services/dns/tests/dns_zone_data_source_test.go deleted file mode 100644 index 736cf8accc0a..000000000000 --- a/azurerm/internal/services/dns/tests/dns_zone_data_source_test.go +++ /dev/null @@ -1,138 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMDNSZone_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_dns_zone", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsZoneDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceDNSZone_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMDNSZone_tags(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_dns_zone", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsZoneDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceDNSZone_tags(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.hello", "world"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMDNSZone_withoutResourceGroupName(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_dns_zone", "test") - // resource 
group of DNS zone is always small case - resourceGroupName := fmt.Sprintf("acctestrg-%d", data.RandomInteger) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsZoneDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceDNSZone_onlyName(data, resourceGroupName), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "resource_group_name", resourceGroupName), - ), - }, - }, - }) -} - -func testAccDataSourceDNSZone_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -data "azurerm_dns_zone" "test" { - name = azurerm_dns_zone.test.name - resource_group_name = azurerm_dns_zone.test.resource_group_name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccDataSourceDNSZone_tags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name - - tags = { - hello = "world" - } -} - -data "azurerm_dns_zone" "test" { - name = azurerm_dns_zone.test.name - resource_group_name = azurerm_dns_zone.test.resource_group_name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccDataSourceDNSZone_onlyName(data acceptance.TestData, resourceGroupName string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "%s" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -data "azurerm_dns_zone" "test" { - name = azurerm_dns_zone.test.name -} -`, resourceGroupName, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/dns/tests/dns_zone_resource_test.go b/azurerm/internal/services/dns/tests/dns_zone_resource_test.go deleted file mode 100644 index e11374886ae8..000000000000 --- a/azurerm/internal/services/dns/tests/dns_zone_resource_test.go +++ /dev/null @@ -1,214 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" -) - -func TestAccAzureRMDnsZone_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_zone", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsZoneDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsZone_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsZoneExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func 
TestAccAzureRMDnsZone_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_zone", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsZoneDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsZone_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsZoneExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMDnsZone_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_dns_zone"), - }, - }, - }) -} - -func TestAccAzureRMDnsZone_withTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dns_zone", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDnsZoneDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDnsZone_withTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsZoneExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - ), - }, - { - Config: testAccAzureRMDnsZone_withTagsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDnsZoneExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMDnsZoneExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Dns.ZonesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.DnsZoneID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.Name) - if err != nil { - return fmt.Errorf("Bad: Get DNS zone: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: DNS zone %s (resource group: %s) does not exist", id.Name, id.ResourceGroup) - } - - return nil - } -} - -func testCheckAzureRMDnsZoneDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Dns.ZonesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_dns_zone" { - continue - } - - id, err := parse.DnsZoneID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.Name) - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return nil - } - - return err - } - - return fmt.Errorf("DNS Zone still exists:\n%#v", resp) - } - - return nil -} - -func testAccAzureRMDnsZone_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMDnsZone_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMDnsZone_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_dns_zone" 
"import" { - name = azurerm_dns_zone.test.name - resource_group_name = azurerm_dns_zone.test.resource_group_name -} -`, template) -} - -func testAccAzureRMDnsZone_withTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name - - tags = { - environment = "Production" - cost_center = "MSFT" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMDnsZone_withTagsUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name - - tags = { - environment = "staging" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/dns/validate/a_record_id.go b/azurerm/internal/services/dns/validate/a_record_id.go new file mode 100644 index 000000000000..67aeff08a22a --- /dev/null +++ b/azurerm/internal/services/dns/validate/a_record_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" +) + +func ARecordID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ARecordID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/dns/validate/a_record_id_test.go b/azurerm/internal/services/dns/validate/a_record_id_test.go new file mode 100644 index 000000000000..f6ccc517f5d5 --- /dev/null +++ b/azurerm/internal/services/dns/validate/a_record_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestARecordID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", + Valid: false, + }, + + { + // missing AName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/", + Valid: false, + }, + + { + // missing value for AName + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/A/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/A/eh1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/DNSZONES/ZONE1/A/EH1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ARecordID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/dns/validate/aaaa_record_id.go b/azurerm/internal/services/dns/validate/aaaa_record_id.go new file mode 100644 index 000000000000..c19808f29ab9 --- /dev/null +++ b/azurerm/internal/services/dns/validate/aaaa_record_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" +) + +func AaaaRecordID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.AaaaRecordID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/dns/validate/aaaa_record_id_test.go b/azurerm/internal/services/dns/validate/aaaa_record_id_test.go new file mode 100644 index 000000000000..89378cfceaf9 --- /dev/null +++ b/azurerm/internal/services/dns/validate/aaaa_record_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestAaaaRecordID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", + Valid: false, + }, + + { + // missing AAAAName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/", + Valid: false, + }, + + { + // missing value for AAAAName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/AAAA/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/AAAA/eheh1", + Valid: true, + }, + + { + // upper-cased + Input: 
"/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/DNSZONES/ZONE1/AAAA/EHEH1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := AaaaRecordID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/dns/validate/caa_record_id.go b/azurerm/internal/services/dns/validate/caa_record_id.go new file mode 100644 index 000000000000..9913cfc2daf0 --- /dev/null +++ b/azurerm/internal/services/dns/validate/caa_record_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" +) + +func CaaRecordID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.CaaRecordID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/dns/validate/caa_record_id_test.go b/azurerm/internal/services/dns/validate/caa_record_id_test.go new file mode 100644 index 000000000000..aa05cd29ef88 --- /dev/null +++ b/azurerm/internal/services/dns/validate/caa_record_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestCaaRecordID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", + Valid: false, + }, + + { + // missing CAAName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/", + Valid: false, + }, + + { + // missing value for CAAName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/CAA/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/CAA/caa1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/DNSZONES/ZONE1/CAA/CAA1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := CaaRecordID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff 
--git a/azurerm/internal/services/dns/validate/cname_record_id.go b/azurerm/internal/services/dns/validate/cname_record_id.go new file mode 100644 index 000000000000..f246652d7bf7 --- /dev/null +++ b/azurerm/internal/services/dns/validate/cname_record_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" +) + +func CnameRecordID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.CnameRecordID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/dns/validate/cname_record_id_test.go b/azurerm/internal/services/dns/validate/cname_record_id_test.go new file mode 100644 index 000000000000..23c4075e7ccf --- /dev/null +++ b/azurerm/internal/services/dns/validate/cname_record_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestCnameRecordID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", + Valid: false, + }, + + { + // missing CNAMEName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/", + Valid: false, + }, + + { + // missing value for CNAMEName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/CNAME/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/CNAME/name1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/DNSZONES/ZONE1/CNAME/NAME1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := CnameRecordID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/dns/validate/dns_zone_id.go b/azurerm/internal/services/dns/validate/dns_zone_id.go new file mode 100644 index 000000000000..df219e1bd7d2 --- /dev/null +++ b/azurerm/internal/services/dns/validate/dns_zone_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + 
+import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" +) + +func DnsZoneID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.DnsZoneID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/dns/validate/dns_zone_id_test.go b/azurerm/internal/services/dns/validate/dns_zone_id_test.go new file mode 100644 index 000000000000..e98912a40947 --- /dev/null +++ b/azurerm/internal/services/dns/validate/dns_zone_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestDnsZoneID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/DNSZONES/ZONE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := DnsZoneID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/dns/validate/dns_zone_soa_record_email.go b/azurerm/internal/services/dns/validate/dns_zone_soa_record_email.go new file mode 100644 index 000000000000..9ea28c6a1ee7 --- /dev/null +++ b/azurerm/internal/services/dns/validate/dns_zone_soa_record_email.go @@ -0,0 +1,41 @@ +package validate + +import ( + "fmt" + "regexp" + "strings" +) + +func DnsZoneSOARecordEmail(v interface{}, k string) (warnings []string, errors []error) { + value := v.(string) + + if len(value) == 0 { + errors = append(errors, fmt.Errorf("%q cannot be an empty string: %q", k, v)) + return warnings, errors + } + + vSegments := strings.Split(value, ".") + if len(vSegments) < 2 || len(vSegments) > 34 { + errors = append(errors, fmt.Errorf("%q must contain between 2 and 34 segments", k)) + return warnings, errors + } + + for _, segment := range vSegments { + if segment == "" { + errors = append(errors, fmt.Errorf("%q cannot contain consecutive periods", k)) + return warnings, errors + } + + if len(segment) > 63 { + errors = append(errors, fmt.Errorf("each segment of the `email` must contain between 1 and 63 characters")) + return warnings, errors + } + } + + if 
!regexp.MustCompile(`^[a-zA-Z\d._-]+$`).MatchString(value) { + errors = append(errors, fmt.Errorf("%q may only contain letters, numbers, underscores, dashes and periods", k)) + return warnings, errors + } + + return warnings, errors +} diff --git a/azurerm/internal/services/dns/validate/dns_zone_soa_record_email_test.go b/azurerm/internal/services/dns/validate/dns_zone_soa_record_email_test.go new file mode 100644 index 000000000000..842dbe330aa0 --- /dev/null +++ b/azurerm/internal/services/dns/validate/dns_zone_soa_record_email_test.go @@ -0,0 +1,72 @@ +package validate + +import ( + "strings" + "testing" +) + +func TestDNSZoneSOARecordEmail(t *testing.T) { + cases := []struct { + Value string + Errors int + }{ + { + Value: "", + Errors: 1, + }, + { + Value: "a..com", + Errors: 1, + }, + { + Value: ".a.com", + Errors: 1, + }, + { + Value: "a.com.", + Errors: 1, + }, + { + Value: "a", + Errors: 1, + }, + { + Value: "a@.com.", + Errors: 1, + }, + { + Value: "a.com", + Errors: 0, + }, + { + Value: strings.Repeat("a.", 33) + "com", + Errors: 0, + }, + { + Value: strings.Repeat("a.", 34) + "com", + Errors: 1, + }, + { + Value: "a-b.com", + Errors: 0, + }, + { + Value: strings.Repeat("s", 63) + ".com", + Errors: 0, + }, + { + Value: strings.Repeat("s", 64) + ".com", + Errors: 1, + }, + } + + for _, tc := range cases { + t.Run(tc.Value, func(t *testing.T) { + _, errors := DnsZoneSOARecordEmail(tc.Value, "email") + + if len(errors) != tc.Errors { + t.Fatalf("Expected DNSZoneSOARecordEmail to return %d error(s) not %d", tc.Errors, len(errors)) + } + }) + } +} diff --git a/azurerm/internal/services/dns/validate/mx_record_id.go b/azurerm/internal/services/dns/validate/mx_record_id.go new file mode 100644 index 000000000000..7b725fdf91e5 --- /dev/null +++ b/azurerm/internal/services/dns/validate/mx_record_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" +) + +func MxRecordID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.MxRecordID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/dns/validate/mx_record_id_test.go b/azurerm/internal/services/dns/validate/mx_record_id_test.go new file mode 100644 index 000000000000..ec999a7d517a --- /dev/null +++ b/azurerm/internal/services/dns/validate/mx_record_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestMxRecordID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // 
missing value for DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", + Valid: false, + }, + + { + // missing MXName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/", + Valid: false, + }, + + { + // missing value for MXName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/MX/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/MX/mx1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/DNSZONES/ZONE1/MX/MX1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := MxRecordID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/dns/validate/ns_record_id.go b/azurerm/internal/services/dns/validate/ns_record_id.go new file mode 100644 index 000000000000..bcd99b99574f --- /dev/null +++ b/azurerm/internal/services/dns/validate/ns_record_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" +) + +func NsRecordID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.NsRecordID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/dns/validate/ns_record_id_test.go b/azurerm/internal/services/dns/validate/ns_record_id_test.go new file mode 100644 index 000000000000..c9710d386833 --- /dev/null +++ b/azurerm/internal/services/dns/validate/ns_record_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestNsRecordID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", + Valid: false, + }, + + { + // missing NSName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/", + Valid: false, + }, + + { + // missing value for NSName + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/NS/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/NS/ns1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/DNSZONES/ZONE1/NS/NS1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := NsRecordID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/dns/validate/ptr_record_id.go b/azurerm/internal/services/dns/validate/ptr_record_id.go new file mode 100644 index 000000000000..763b04af6721 --- /dev/null +++ b/azurerm/internal/services/dns/validate/ptr_record_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" +) + +func PtrRecordID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.PtrRecordID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/dns/validate/ptr_record_id_test.go b/azurerm/internal/services/dns/validate/ptr_record_id_test.go new file mode 100644 index 000000000000..7e526241c55c --- /dev/null +++ b/azurerm/internal/services/dns/validate/ptr_record_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestPtrRecordID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", + Valid: false, + }, + + { + // missing PTRName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/", + Valid: false, + }, + + { + // missing value for PTRName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/PTR/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/PTR/ptr1", + Valid: true, + }, + + { + // upper-cased + Input: 
"/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/DNSZONES/ZONE1/PTR/PTR1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := PtrRecordID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/dns/validate/srv_record_id.go b/azurerm/internal/services/dns/validate/srv_record_id.go new file mode 100644 index 000000000000..273eb73433c2 --- /dev/null +++ b/azurerm/internal/services/dns/validate/srv_record_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" +) + +func SrvRecordID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.SrvRecordID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/dns/validate/srv_record_id_test.go b/azurerm/internal/services/dns/validate/srv_record_id_test.go new file mode 100644 index 000000000000..d48d19c45af8 --- /dev/null +++ b/azurerm/internal/services/dns/validate/srv_record_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestSrvRecordID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", + Valid: false, + }, + + { + // missing SRVName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/", + Valid: false, + }, + + { + // missing value for SRVName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/SRV/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/SRV/srv1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/DNSZONES/ZONE1/SRV/SRV1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := SrvRecordID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff 
--git a/azurerm/internal/services/dns/validate/txt_record_id.go b/azurerm/internal/services/dns/validate/txt_record_id.go new file mode 100644 index 000000000000..f311785544aa --- /dev/null +++ b/azurerm/internal/services/dns/validate/txt_record_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/parse" +) + +func TxtRecordID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.TxtRecordID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/dns/validate/txt_record_id_test.go b/azurerm/internal/services/dns/validate/txt_record_id_test.go new file mode 100644 index 000000000000..eb23b2b72f55 --- /dev/null +++ b/azurerm/internal/services/dns/validate/txt_record_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestTxtRecordID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for DnszoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/", + Valid: false, + }, + + { + // missing TXTName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/", + Valid: false, + }, + + { + // missing value for TXTName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/TXT/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/dnszones/zone1/TXT/txt1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/DNSZONES/ZONE1/TXT/TXT1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := TxtRecordID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/eventgrid/client/client.go b/azurerm/internal/services/eventgrid/client/client.go index 21f596fa3acf..de3f324ffbd4 100644 --- a/azurerm/internal/services/eventgrid/client/client.go +++ b/azurerm/internal/services/eventgrid/client/client.go @@ -6,11 +6,12 @@ import ( ) type Client struct { - DomainsClient *eventgrid.DomainsClient - DomainTopicsClient 
*eventgrid.DomainTopicsClient - EventSubscriptionsClient *eventgrid.EventSubscriptionsClient - TopicsClient *eventgrid.TopicsClient - SystemTopicsClient *eventgrid.SystemTopicsClient + DomainsClient *eventgrid.DomainsClient + DomainTopicsClient *eventgrid.DomainTopicsClient + EventSubscriptionsClient *eventgrid.EventSubscriptionsClient + TopicsClient *eventgrid.TopicsClient + SystemTopicsClient *eventgrid.SystemTopicsClient + SystemTopicEventSubscriptionsClient *eventgrid.SystemTopicEventSubscriptionsClient } func NewClient(o *common.ClientOptions) *Client { @@ -29,11 +30,15 @@ func NewClient(o *common.ClientOptions) *Client { SystemTopicsClient := eventgrid.NewSystemTopicsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) o.ConfigureClient(&SystemTopicsClient.Client, o.ResourceManagerAuthorizer) + SystemTopicEventSubscriptionsClient := eventgrid.NewSystemTopicEventSubscriptionsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + o.ConfigureClient(&SystemTopicEventSubscriptionsClient.Client, o.ResourceManagerAuthorizer) + return &Client{ - DomainsClient: &DomainsClient, - EventSubscriptionsClient: &EventSubscriptionsClient, - DomainTopicsClient: &DomainTopicsClient, - TopicsClient: &TopicsClient, - SystemTopicsClient: &SystemTopicsClient, + DomainsClient: &DomainsClient, + EventSubscriptionsClient: &EventSubscriptionsClient, + DomainTopicsClient: &DomainTopicsClient, + TopicsClient: &TopicsClient, + SystemTopicsClient: &SystemTopicsClient, + SystemTopicEventSubscriptionsClient: &SystemTopicEventSubscriptionsClient, } } diff --git a/azurerm/internal/services/eventgrid/event_subscription.go b/azurerm/internal/services/eventgrid/event_subscription.go new file mode 100644 index 000000000000..eaba65490d63 --- /dev/null +++ b/azurerm/internal/services/eventgrid/event_subscription.go @@ -0,0 +1,1155 @@ +package eventgrid + +import ( + "fmt" + "regexp" + "time" + + "github.com/Azure/azure-sdk-for-go/services/preview/eventgrid/mgmt/2020-04-01-preview/eventgrid" + "github.com/Azure/go-autorest/autorest/date" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +// EventSubscriptionEndpointType enumerates the values for event subscription endpoint types. +type EventSubscriptionEndpointType string + +const ( + // AzureFunctionEndpoint ... + AzureFunctionEndpoint EventSubscriptionEndpointType = "azure_function_endpoint" + // EventHubEndpoint ... + EventHubEndpoint EventSubscriptionEndpointType = "eventhub_endpoint" + // EventHubEndpointID ... + EventHubEndpointID EventSubscriptionEndpointType = "eventhub_endpoint_id" + // HybridConnectionEndpoint ... + HybridConnectionEndpoint EventSubscriptionEndpointType = "hybrid_connection_endpoint" + // HybridConnectionEndpointID ... + HybridConnectionEndpointID EventSubscriptionEndpointType = "hybrid_connection_endpoint_id" + // ServiceBusQueueEndpointID ... + ServiceBusQueueEndpointID EventSubscriptionEndpointType = "service_bus_queue_endpoint_id" + // ServiceBusTopicEndpointID ... + ServiceBusTopicEndpointID EventSubscriptionEndpointType = "service_bus_topic_endpoint_id" + // StorageQueueEndpoint ... + StorageQueueEndpoint EventSubscriptionEndpointType = "storage_queue_endpoint" + // WebHookEndpoint ... 
+ WebHookEndpoint EventSubscriptionEndpointType = "webhook_endpoint" +) + +func eventSubscriptionSchemaEventSubscriptionName() *schema.Schema { + return &schema.Schema{ + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.All( + validation.StringIsNotEmpty, + validation.StringMatch( + regexp.MustCompile("^[-a-zA-Z0-9]{3,50}$"), + "EventGrid subscription name must be 3 - 50 characters long, contain only letters, numbers and hyphens.", + ), + ), + } +} + +func eventSubscriptionSchemaEventDeliverySchema() *schema.Schema { + return &schema.Schema{ + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Default: string(eventgrid.EventGridSchema), + ValidateFunc: validation.StringInSlice([]string{ + string(eventgrid.EventGridSchema), + string(eventgrid.CloudEventSchemaV10), + string(eventgrid.CustomInputSchema), + }, false), + } +} + +func eventSubscriptionSchemaExpirationTimeUTC() *schema.Schema { + return &schema.Schema{ + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + } +} + +func eventSubscriptionSchemaAzureFunctionEndpoint(conflictsWith []string) *schema.Schema { + return &schema.Schema{ + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + ConflictsWith: conflictsWith, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "function_id": { + Type: schema.TypeString, + Required: true, + ValidateFunc: azure.ValidateResourceID, + }, + "max_events_per_batch": { + Type: schema.TypeInt, + Optional: true, + }, + "preferred_batch_size_in_kilobytes": { + Type: schema.TypeInt, + Optional: true, + }, + }, + }, + } +} + +func eventSubscriptionSchemaEventHubEndpointID(conflictsWith []string) *schema.Schema { + return &schema.Schema{ + Type: schema.TypeString, + Optional: true, + Computed: true, + ConflictsWith: conflictsWith, + ValidateFunc: azure.ValidateResourceID, + } +} + +func eventSubscriptionSchemaEventHubEndpoint(conflictsWith []string) *schema.Schema { + return &schema.Schema{ + Type: schema.TypeList, + MaxItems: 1, + Deprecated: "Deprecated in favour of `" + "eventhub_endpoint_id" + "`", + Optional: true, + Computed: true, + ConflictsWith: conflictsWith, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "eventhub_id": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ValidateFunc: azure.ValidateResourceID, + }, + }, + }, + } +} + +func eventSubscriptionSchemaHybridConnectionEndpointID(conflictsWith []string) *schema.Schema { + return &schema.Schema{ + Type: schema.TypeString, + Optional: true, + Computed: true, + ConflictsWith: conflictsWith, + ValidateFunc: azure.ValidateResourceID, + } +} + +func eventSubscriptionSchemaHybridEndpoint(conflictsWith []string) *schema.Schema { + return &schema.Schema{ + Type: schema.TypeList, + MaxItems: 1, + Deprecated: "Deprecated in favour of `" + "hybrid_connection_endpoint_id" + "`", + Optional: true, + Computed: true, + ConflictsWith: conflictsWith, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "hybrid_connection_id": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ValidateFunc: azure.ValidateResourceID, + }, + }, + }, + } +} + +func eventSubscriptionSchemaServiceBusQueueEndpointID(conflictsWith []string) *schema.Schema { + return &schema.Schema{ + Type: schema.TypeString, + Optional: true, + ConflictsWith: conflictsWith, + ValidateFunc: azure.ValidateResourceID, + } +} + +func eventSubscriptionSchemaServiceBusTopicEndpointID(conflictsWith []string) *schema.Schema { + return 
&schema.Schema{ + Type: schema.TypeString, + Optional: true, + ConflictsWith: conflictsWith, + ValidateFunc: azure.ValidateResourceID, + } +} + +func eventSubscriptionSchemaStorageQueueEndpoint(conflictsWith []string) *schema.Schema { + return &schema.Schema{ + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + ConflictsWith: conflictsWith, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "storage_account_id": { + Type: schema.TypeString, + Required: true, + ValidateFunc: azure.ValidateResourceID, + }, + "queue_name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + }, + } +} + +func eventSubscriptionSchemaWebHookEndpoint(conflictsWith []string) *schema.Schema { + return &schema.Schema{ + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + ConflictsWith: conflictsWith, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "url": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.IsURLWithHTTPS, + }, + "base_url": { + Type: schema.TypeString, + Computed: true, + }, + "max_events_per_batch": { + Type: schema.TypeInt, + Optional: true, + ValidateFunc: validation.IntBetween(1, 5000), + }, + "preferred_batch_size_in_kilobytes": { + Type: schema.TypeInt, + Optional: true, + ValidateFunc: validation.IntBetween(1, 1024), + }, + "active_directory_tenant_id": { + Type: schema.TypeString, + Optional: true, + }, + "active_directory_app_id_or_uri": { + Type: schema.TypeString, + Optional: true, + }, + }, + }, + } +} + +func eventSubscriptionSchemaIncludedEventTypes() *schema.Schema { + return &schema.Schema{ + Type: schema.TypeList, + Optional: true, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringIsNotEmpty, + }, + } +} + +func eventSubscriptionSchemaSubjectFilter() *schema.Schema { + return &schema.Schema{ + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "subject_begins_with": { + Type: schema.TypeString, + Optional: true, + }, + "subject_ends_with": { + Type: schema.TypeString, + Optional: true, + }, + "case_sensitive": { + Type: schema.TypeBool, + Optional: true, + }, + }, + }, + } +} + +func eventSubscriptionSchemaAdvancedFilter() *schema.Schema { + return &schema.Schema{ + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "bool_equals": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "key": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "value": { + Type: schema.TypeBool, + Required: true, + }, + }, + }, + }, + "number_greater_than": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "key": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "value": { + Type: schema.TypeFloat, + Required: true, + }, + }, + }, + }, + "number_greater_than_or_equals": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "key": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "value": { + Type: schema.TypeFloat, + Required: true, + }, + }, + }, + }, + "number_less_than": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + 
"key": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "value": { + Type: schema.TypeFloat, + Required: true, + }, + }, + }, + }, + "number_less_than_or_equals": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "key": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "value": { + Type: schema.TypeFloat, + Required: true, + }, + }, + }, + }, + "number_in": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "key": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "values": { + Type: schema.TypeList, + Required: true, + MaxItems: 5, + Elem: &schema.Schema{ + Type: schema.TypeFloat, + }, + }, + }, + }, + }, + "number_not_in": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "key": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "values": { + Type: schema.TypeList, + Required: true, + MaxItems: 5, + Elem: &schema.Schema{ + Type: schema.TypeFloat, + }, + }, + }, + }, + }, + "string_begins_with": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "key": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "values": { + Type: schema.TypeList, + Required: true, + MaxItems: 5, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + }, + }, + "string_ends_with": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "key": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "values": { + Type: schema.TypeList, + Required: true, + MaxItems: 5, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + }, + }, + "string_contains": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "key": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "values": { + Type: schema.TypeList, + Required: true, + MaxItems: 5, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + }, + }, + "string_in": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "key": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "values": { + Type: schema.TypeList, + Required: true, + MaxItems: 5, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + }, + }, + "string_not_in": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "key": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "values": { + Type: schema.TypeList, + Required: true, + MaxItems: 5, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + }, + }, + }, + }, + } +} + +func eventSubscriptionSchemaStorageBlobDeadletterDestination() *schema.Schema { + return &schema.Schema{ + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "storage_account_id": { + Type: schema.TypeString, + Required: true, + ValidateFunc: azure.ValidateResourceID, + }, + 
"storage_blob_container_name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + }, + } +} + +func eventSubscriptionSchemaRetryPolicy() *schema.Schema { + return &schema.Schema{ + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "max_delivery_attempts": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validation.IntBetween(1, 30), + }, + "event_time_to_live": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validation.IntBetween(1, 1440), + }, + }, + }, + } +} + +func eventSubscriptionSchemaLabels() *schema.Schema { + return &schema.Schema{ + Type: schema.TypeList, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + } +} + +func expandEventGridExpirationTime(d *schema.ResourceData) (*date.Time, error) { + if expirationTimeUtc, ok := d.GetOk("expiration_time_utc"); ok { + if expirationTimeUtc == "" { + return nil, nil + } + + parsedExpirationTimeUtc, err := date.ParseTime(time.RFC3339, expirationTimeUtc.(string)) + if err != nil { + return nil, err + } + + return &date.Time{Time: parsedExpirationTimeUtc}, nil + } + + return nil, nil +} + +func expandEventGridEventSubscriptionDestination(d *schema.ResourceData) eventgrid.BasicEventSubscriptionDestination { + if v, ok := d.GetOk("azure_function_endpoint"); ok { + return expandEventGridEventSubscriptionAzureFunctionEndpoint(v) + } + + if v, ok := d.GetOk("eventhub_endpoint_id"); ok { + return &eventgrid.EventHubEventSubscriptionDestination{ + EndpointType: eventgrid.EndpointTypeEventHub, + EventHubEventSubscriptionDestinationProperties: &eventgrid.EventHubEventSubscriptionDestinationProperties{ + ResourceID: utils.String(v.(string)), + }, + } + } else if _, ok := d.GetOk("eventhub_endpoint"); ok { + return expandEventGridEventSubscriptionEventhubEndpoint(d) + } + + if v, ok := d.GetOk("hybrid_connection_endpoint_id"); ok { + return &eventgrid.HybridConnectionEventSubscriptionDestination{ + EndpointType: eventgrid.EndpointTypeHybridConnection, + HybridConnectionEventSubscriptionDestinationProperties: &eventgrid.HybridConnectionEventSubscriptionDestinationProperties{ + ResourceID: utils.String(v.(string)), + }, + } + } else if _, ok := d.GetOk("hybrid_connection_endpoint"); ok { + return expandEventGridEventSubscriptionHybridConnectionEndpoint(d) + } + + if v, ok := d.GetOk("service_bus_queue_endpoint_id"); ok { + return &eventgrid.ServiceBusQueueEventSubscriptionDestination{ + EndpointType: eventgrid.EndpointTypeServiceBusQueue, + ServiceBusQueueEventSubscriptionDestinationProperties: &eventgrid.ServiceBusQueueEventSubscriptionDestinationProperties{ + ResourceID: utils.String(v.(string)), + }, + } + } + + if v, ok := d.GetOk("service_bus_topic_endpoint_id"); ok { + return &eventgrid.ServiceBusTopicEventSubscriptionDestination{ + EndpointType: eventgrid.EndpointTypeServiceBusTopic, + ServiceBusTopicEventSubscriptionDestinationProperties: &eventgrid.ServiceBusTopicEventSubscriptionDestinationProperties{ + ResourceID: utils.String(v.(string)), + }, + } + } + + if _, ok := d.GetOk("storage_queue_endpoint"); ok { + return expandEventGridEventSubscriptionStorageQueueEndpoint(d) + } + + if v, ok := d.GetOk("webhook_endpoint"); ok { + return expandEventGridEventSubscriptionWebhookEndpoint(v) + } + + return nil +} + +func expandEventGridEventSubscriptionStorageQueueEndpoint(d *schema.ResourceData) eventgrid.BasicEventSubscriptionDestination { + props := 
d.Get("storage_queue_endpoint").([]interface{})[0].(map[string]interface{}) + storageAccountID := props["storage_account_id"].(string) + queueName := props["queue_name"].(string) + + return eventgrid.StorageQueueEventSubscriptionDestination{ + EndpointType: eventgrid.EndpointTypeStorageQueue, + StorageQueueEventSubscriptionDestinationProperties: &eventgrid.StorageQueueEventSubscriptionDestinationProperties{ + ResourceID: &storageAccountID, + QueueName: &queueName, + }, + } +} + +func expandEventGridEventSubscriptionEventhubEndpoint(d *schema.ResourceData) eventgrid.BasicEventSubscriptionDestination { + props := d.Get("eventhub_endpoint").([]interface{})[0].(map[string]interface{}) + eventHubID := props["eventhub_id"].(string) + + return eventgrid.EventHubEventSubscriptionDestination{ + EndpointType: eventgrid.EndpointTypeEventHub, + EventHubEventSubscriptionDestinationProperties: &eventgrid.EventHubEventSubscriptionDestinationProperties{ + ResourceID: &eventHubID, + }, + } +} + +func expandEventGridEventSubscriptionHybridConnectionEndpoint(d *schema.ResourceData) eventgrid.BasicEventSubscriptionDestination { + props := d.Get("hybrid_connection_endpoint").([]interface{})[0].(map[string]interface{}) + hybridConnectionID := props["hybrid_connection_id"].(string) + + return eventgrid.HybridConnectionEventSubscriptionDestination{ + EndpointType: eventgrid.EndpointTypeHybridConnection, + HybridConnectionEventSubscriptionDestinationProperties: &eventgrid.HybridConnectionEventSubscriptionDestinationProperties{ + ResourceID: &hybridConnectionID, + }, + } +} + +func expandEventGridEventSubscriptionAzureFunctionEndpoint(input interface{}) eventgrid.BasicEventSubscriptionDestination { + configs := input.([]interface{}) + + props := eventgrid.AzureFunctionEventSubscriptionDestinationProperties{} + azureFunctionDestination := &eventgrid.AzureFunctionEventSubscriptionDestination{ + EndpointType: eventgrid.EndpointTypeAzureFunction, + AzureFunctionEventSubscriptionDestinationProperties: &props, + } + + if len(configs) == 0 { + return azureFunctionDestination + } + + config := configs[0].(map[string]interface{}) + + if v, ok := config["function_id"]; ok && v != "" { + props.ResourceID = utils.String(v.(string)) + } + + if v, ok := config["max_events_per_batch"]; ok && v != 0 { + props.MaxEventsPerBatch = utils.Int32(int32(v.(int))) + } + + if v, ok := config["preferred_batch_size_in_kilobytes"]; ok && v != 0 { + props.PreferredBatchSizeInKilobytes = utils.Int32(int32(v.(int))) + } + + return azureFunctionDestination +} + +func expandEventGridEventSubscriptionWebhookEndpoint(input interface{}) eventgrid.BasicEventSubscriptionDestination { + configs := input.([]interface{}) + + props := eventgrid.WebHookEventSubscriptionDestinationProperties{} + webhookDestination := &eventgrid.WebHookEventSubscriptionDestination{ + EndpointType: eventgrid.EndpointTypeWebHook, + WebHookEventSubscriptionDestinationProperties: &props, + } + + if len(configs) == 0 { + return webhookDestination + } + + config := configs[0].(map[string]interface{}) + + if v, ok := config["url"]; ok && v != "" { + props.EndpointURL = utils.String(v.(string)) + } + + if v, ok := config["max_events_per_batch"]; ok && v != 0 { + props.MaxEventsPerBatch = utils.Int32(int32(v.(int))) + } + + if v, ok := config["preferred_batch_size_in_kilobytes"]; ok && v != 0 { + props.PreferredBatchSizeInKilobytes = utils.Int32(int32(v.(int))) + } + + if v, ok := config["active_directory_tenant_id"]; ok && v != "" { + props.AzureActiveDirectoryTenantID = 
utils.String(v.(string)) + } + + if v, ok := config["active_directory_app_id_or_uri"]; ok && v != "" { + props.AzureActiveDirectoryApplicationIDOrURI = utils.String(v.(string)) + } + + return webhookDestination +} + +func expandEventGridEventSubscriptionFilter(d *schema.ResourceData) (*eventgrid.EventSubscriptionFilter, error) { + filter := &eventgrid.EventSubscriptionFilter{} + + if includedEvents, ok := d.GetOk("included_event_types"); ok { + filter.IncludedEventTypes = utils.ExpandStringSlice(includedEvents.([]interface{})) + } + + if v, ok := d.GetOk("subject_filter"); ok { + if v.([]interface{})[0] != nil { + config := v.([]interface{})[0].(map[string]interface{}) + subjectBeginsWith := config["subject_begins_with"].(string) + subjectEndsWith := config["subject_ends_with"].(string) + caseSensitive := config["case_sensitive"].(bool) + + filter.SubjectBeginsWith = &subjectBeginsWith + filter.SubjectEndsWith = &subjectEndsWith + filter.IsSubjectCaseSensitive = &caseSensitive + } + } + + if advancedFilter, ok := d.GetOk("advanced_filter"); ok { + advancedFilters := make([]eventgrid.BasicAdvancedFilter, 0) + for filterKey, filterSchema := range advancedFilter.([]interface{})[0].(map[string]interface{}) { + for _, options := range filterSchema.([]interface{}) { + if filter, err := expandAdvancedFilter(filterKey, options.(map[string]interface{})); err == nil { + advancedFilters = append(advancedFilters, filter) + } else { + return nil, err + } + } + } + filter.AdvancedFilters = &advancedFilters + } + + return filter, nil +} + +func expandAdvancedFilter(operatorType string, config map[string]interface{}) (eventgrid.BasicAdvancedFilter, error) { + k := config["key"].(string) + + switch operatorType { + case "bool_equals": + v := config["value"].(bool) + return eventgrid.BoolEqualsAdvancedFilter{Key: &k, OperatorType: eventgrid.OperatorTypeBoolEquals, Value: &v}, nil + case "number_greater_than": + v := config["value"].(float64) + return eventgrid.NumberGreaterThanAdvancedFilter{Key: &k, OperatorType: eventgrid.OperatorTypeNumberGreaterThan, Value: &v}, nil + case "number_greater_than_or_equals": + v := config["value"].(float64) + return eventgrid.NumberGreaterThanOrEqualsAdvancedFilter{Key: &k, OperatorType: eventgrid.OperatorTypeNumberGreaterThanOrEquals, Value: &v}, nil + case "number_less_than": + v := config["value"].(float64) + return eventgrid.NumberLessThanAdvancedFilter{Key: &k, OperatorType: eventgrid.OperatorTypeNumberLessThan, Value: &v}, nil + case "number_less_than_or_equals": + v := config["value"].(float64) + return eventgrid.NumberLessThanOrEqualsAdvancedFilter{Key: &k, OperatorType: eventgrid.OperatorTypeNumberLessThanOrEquals, Value: &v}, nil + case "number_in": + v := utils.ExpandFloatSlice(config["values"].([]interface{})) + return eventgrid.NumberInAdvancedFilter{Key: &k, OperatorType: eventgrid.OperatorTypeNumberIn, Values: v}, nil + case "number_not_in": + v := utils.ExpandFloatSlice(config["values"].([]interface{})) + return eventgrid.NumberNotInAdvancedFilter{Key: &k, OperatorType: eventgrid.OperatorTypeNumberIn, Values: v}, nil + case "string_begins_with": + v := utils.ExpandStringSlice(config["values"].([]interface{})) + return eventgrid.StringBeginsWithAdvancedFilter{Key: &k, OperatorType: eventgrid.OperatorTypeStringBeginsWith, Values: v}, nil + case "string_ends_with": + v := utils.ExpandStringSlice(config["values"].([]interface{})) + return eventgrid.StringEndsWithAdvancedFilter{Key: &k, OperatorType: eventgrid.OperatorTypeStringEndsWith, Values: v}, nil + case 
"string_contains": + v := utils.ExpandStringSlice(config["values"].([]interface{})) + return eventgrid.StringContainsAdvancedFilter{Key: &k, OperatorType: eventgrid.OperatorTypeStringContains, Values: v}, nil + case "string_in": + v := utils.ExpandStringSlice(config["values"].([]interface{})) + return eventgrid.StringInAdvancedFilter{Key: &k, OperatorType: eventgrid.OperatorTypeStringIn, Values: v}, nil + case "string_not_in": + v := utils.ExpandStringSlice(config["values"].([]interface{})) + return eventgrid.StringNotInAdvancedFilter{Key: &k, OperatorType: eventgrid.OperatorTypeStringNotIn, Values: v}, nil + default: + return nil, fmt.Errorf("Invalid `advanced_filter` operator_type %q used", operatorType) + } +} + +func expandEventGridEventSubscriptionStorageBlobDeadLetterDestination(d *schema.ResourceData) eventgrid.BasicDeadLetterDestination { + if v, ok := d.GetOk("storage_blob_dead_letter_destination"); ok { + dest := v.([]interface{})[0].(map[string]interface{}) + resourceID := dest["storage_account_id"].(string) + blobName := dest["storage_blob_container_name"].(string) + return eventgrid.StorageBlobDeadLetterDestination{ + EndpointType: eventgrid.EndpointTypeStorageBlob, + StorageBlobDeadLetterDestinationProperties: &eventgrid.StorageBlobDeadLetterDestinationProperties{ + ResourceID: &resourceID, + BlobContainerName: &blobName, + }, + } + } + + return nil +} + +func expandEventGridEventSubscriptionRetryPolicy(d *schema.ResourceData) *eventgrid.RetryPolicy { + if v, ok := d.GetOk("retry_policy"); ok { + dest := v.([]interface{})[0].(map[string]interface{}) + maxDeliveryAttempts := dest["max_delivery_attempts"].(int) + eventTimeToLive := dest["event_time_to_live"].(int) + return &eventgrid.RetryPolicy{ + MaxDeliveryAttempts: utils.Int32(int32(maxDeliveryAttempts)), + EventTimeToLiveInMinutes: utils.Int32(int32(eventTimeToLive)), + } + } + + return nil +} + +func flattenEventGridEventSubscriptionEventhubEndpoint(input *eventgrid.EventHubEventSubscriptionDestination) []interface{} { + if input == nil { + return nil + } + result := make(map[string]interface{}) + + if input.ResourceID != nil { + result["eventhub_id"] = *input.ResourceID + } + + return []interface{}{result} +} + +func flattenEventGridEventSubscriptionHybridConnectionEndpoint(input *eventgrid.HybridConnectionEventSubscriptionDestination) []interface{} { + if input == nil { + return nil + } + + hybridConnectionId := "" + if input.ResourceID != nil { + hybridConnectionId = *input.ResourceID + } + + return []interface{}{ + map[string]interface{}{ + "hybrid_connection_id": hybridConnectionId, + }, + } +} + +func flattenEventGridEventSubscriptionStorageQueueEndpoint(input *eventgrid.StorageQueueEventSubscriptionDestination) []interface{} { + if input == nil { + return nil + } + result := make(map[string]interface{}) + + if input.ResourceID != nil { + result["storage_account_id"] = *input.ResourceID + } + if input.QueueName != nil { + result["queue_name"] = *input.QueueName + } + + return []interface{}{result} +} + +func flattenEventGridEventSubscriptionAzureFunctionEndpoint(input *eventgrid.AzureFunctionEventSubscriptionDestination) []interface{} { + results := make([]interface{}, 0) + + if input == nil { + return results + } + + functionID := "" + if input.ResourceID != nil { + functionID = *input.ResourceID + } + + maxEventsPerBatch := 0 + if input.MaxEventsPerBatch != nil { + maxEventsPerBatch = int(*input.MaxEventsPerBatch) + } + + preferredBatchSize := 0 + if input.PreferredBatchSizeInKilobytes != nil { + 
preferredBatchSize = int(*input.PreferredBatchSizeInKilobytes) + } + + return append(results, map[string]interface{}{ + "function_id": functionID, + "max_events_per_batch": maxEventsPerBatch, + "preferred_batch_size_in_kilobytes": preferredBatchSize, + }) +} + +func flattenEventGridEventSubscriptionWebhookEndpoint(input *eventgrid.WebHookEventSubscriptionDestination, fullURL *eventgrid.EventSubscriptionFullURL) []interface{} { + results := make([]interface{}, 0) + + if input == nil { + return results + } + + webhookURL := "" + if fullURL != nil { + webhookURL = *fullURL.EndpointURL + } + + webhookBaseURL := "" + if input.EndpointBaseURL != nil { + webhookBaseURL = *input.EndpointBaseURL + } + + maxEventsPerBatch := 0 + if input.MaxEventsPerBatch != nil { + maxEventsPerBatch = int(*input.MaxEventsPerBatch) + } + + preferredBatchSizeInKilobytes := 0 + if input.PreferredBatchSizeInKilobytes != nil { + preferredBatchSizeInKilobytes = int(*input.PreferredBatchSizeInKilobytes) + } + + azureActiveDirectoryTenantID := "" + if input.AzureActiveDirectoryTenantID != nil { + azureActiveDirectoryTenantID = *input.AzureActiveDirectoryTenantID + } + + azureActiveDirectoryApplicationIDOrURI := "" + if input.AzureActiveDirectoryApplicationIDOrURI != nil { + azureActiveDirectoryApplicationIDOrURI = *input.AzureActiveDirectoryApplicationIDOrURI + } + + return append(results, map[string]interface{}{ + "url": webhookURL, + "base_url": webhookBaseURL, + "max_events_per_batch": maxEventsPerBatch, + "preferred_batch_size_in_kilobytes": preferredBatchSizeInKilobytes, + "active_directory_tenant_id": azureActiveDirectoryTenantID, + "active_directory_app_id_or_uri": azureActiveDirectoryApplicationIDOrURI, + }) +} + +func flattenEventGridEventSubscriptionSubjectFilter(filter *eventgrid.EventSubscriptionFilter) []interface{} { + if (filter.SubjectBeginsWith != nil && *filter.SubjectBeginsWith == "") && (filter.SubjectEndsWith != nil && *filter.SubjectEndsWith == "") { + return nil + } + result := make(map[string]interface{}) + + if filter.SubjectBeginsWith != nil { + result["subject_begins_with"] = *filter.SubjectBeginsWith + } + + if filter.SubjectEndsWith != nil { + result["subject_ends_with"] = *filter.SubjectEndsWith + } + + if filter.IsSubjectCaseSensitive != nil { + result["case_sensitive"] = *filter.IsSubjectCaseSensitive + } + + return []interface{}{result} +} + +func flattenEventGridEventSubscriptionAdvancedFilter(input *eventgrid.EventSubscriptionFilter) []interface{} { + results := make([]interface{}, 0) + if input == nil || input.AdvancedFilters == nil { + return results + } + + boolEquals := make([]interface{}, 0) + numberGreaterThan := make([]interface{}, 0) + numberGreaterThanOrEquals := make([]interface{}, 0) + numberLessThan := make([]interface{}, 0) + numberLessThanOrEquals := make([]interface{}, 0) + numberIn := make([]interface{}, 0) + numberNotIn := make([]interface{}, 0) + stringBeginsWith := make([]interface{}, 0) + stringEndsWith := make([]interface{}, 0) + stringContains := make([]interface{}, 0) + stringIn := make([]interface{}, 0) + stringNotIn := make([]interface{}, 0) + + for _, item := range *input.AdvancedFilters { + switch f := item.(type) { + case eventgrid.BoolEqualsAdvancedFilter: + v := interface{}(f.Value) + boolEquals = append(boolEquals, flattenValue(f.Key, &v)) + case eventgrid.NumberGreaterThanAdvancedFilter: + v := interface{}(f.Value) + numberGreaterThan = append(numberGreaterThan, flattenValue(f.Key, &v)) + case eventgrid.NumberGreaterThanOrEqualsAdvancedFilter: + v := 
interface{}(f.Value) + numberGreaterThanOrEquals = append(numberGreaterThanOrEquals, flattenValue(f.Key, &v)) + case eventgrid.NumberLessThanAdvancedFilter: + v := interface{}(f.Value) + numberLessThan = append(numberLessThan, flattenValue(f.Key, &v)) + case eventgrid.NumberLessThanOrEqualsAdvancedFilter: + v := interface{}(f.Value) + numberLessThanOrEquals = append(numberLessThanOrEquals, flattenValue(f.Key, &v)) + case eventgrid.NumberInAdvancedFilter: + v := utils.FlattenFloatSlice(f.Values) + numberIn = append(numberIn, flattenValues(f.Key, &v)) + case eventgrid.NumberNotInAdvancedFilter: + v := utils.FlattenFloatSlice(f.Values) + numberNotIn = append(numberNotIn, flattenValues(f.Key, &v)) + case eventgrid.StringBeginsWithAdvancedFilter: + v := utils.FlattenStringSlice(f.Values) + stringBeginsWith = append(stringBeginsWith, flattenValues(f.Key, &v)) + case eventgrid.StringEndsWithAdvancedFilter: + v := utils.FlattenStringSlice(f.Values) + stringEndsWith = append(stringEndsWith, flattenValues(f.Key, &v)) + case eventgrid.StringContainsAdvancedFilter: + v := utils.FlattenStringSlice(f.Values) + stringContains = append(stringContains, flattenValues(f.Key, &v)) + case eventgrid.StringInAdvancedFilter: + v := utils.FlattenStringSlice(f.Values) + stringIn = append(stringIn, flattenValues(f.Key, &v)) + case eventgrid.StringNotInAdvancedFilter: + v := utils.FlattenStringSlice(f.Values) + stringNotIn = append(stringNotIn, flattenValues(f.Key, &v)) + } + } + + return []interface{}{ + map[string][]interface{}{ + "bool_equals": boolEquals, + "number_greater_than": numberGreaterThan, + "number_greater_than_or_equals": numberGreaterThanOrEquals, + "number_less_than": numberLessThan, + "number_less_than_or_equals": numberLessThanOrEquals, + "number_in": numberIn, + "number_not_in": numberNotIn, + "string_begins_with": stringBeginsWith, + "string_ends_with": stringEndsWith, + "string_contains": stringContains, + "string_in": stringIn, + "string_not_in": stringNotIn, + }, + } +} + +func flattenEventGridEventSubscriptionStorageBlobDeadLetterDestination(dest *eventgrid.StorageBlobDeadLetterDestination) []interface{} { + if dest == nil { + return nil + } + result := make(map[string]interface{}) + + if dest.ResourceID != nil { + result["storage_account_id"] = *dest.ResourceID + } + + if dest.BlobContainerName != nil { + result["storage_blob_container_name"] = *dest.BlobContainerName + } + + return []interface{}{result} +} + +func flattenEventGridEventSubscriptionRetryPolicy(retryPolicy *eventgrid.RetryPolicy) []interface{} { + result := make(map[string]interface{}) + + if v := retryPolicy.EventTimeToLiveInMinutes; v != nil { + result["event_time_to_live"] = int(*v) + } + + if v := retryPolicy.MaxDeliveryAttempts; v != nil { + result["max_delivery_attempts"] = int(*v) + } + + return []interface{}{result} +} + +func flattenValue(inputKey *string, inputValue *interface{}) map[string]interface{} { + key := "" + if inputKey != nil { + key = *inputKey + } + var value interface{} + if inputValue != nil { + value = inputValue + } + + return map[string]interface{}{ + "key": key, + "value": value, + } +} + +func flattenValues(inputKey *string, inputValues *[]interface{}) map[string]interface{} { + key := "" + if inputKey != nil { + key = *inputKey + } + values := make([]interface{}, 0) + if inputValues != nil { + values = *inputValues + } + + return map[string]interface{}{ + "key": key, + "values": values, + } +} diff --git a/azurerm/internal/services/eventgrid/eventgrid_domain_resource.go 
b/azurerm/internal/services/eventgrid/eventgrid_domain_resource.go index f6aa4d9ee289..4f2b5da2365a 100644 --- a/azurerm/internal/services/eventgrid/eventgrid_domain_resource.go +++ b/azurerm/internal/services/eventgrid/eventgrid_domain_resource.go @@ -3,6 +3,7 @@ package eventgrid import ( "fmt" "log" + "regexp" "time" "github.com/Azure/azure-sdk-for-go/services/preview/eventgrid/mgmt/2020-04-01-preview/eventgrid" @@ -19,12 +20,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmEventGridDomain() *schema.Resource { +func resourceEventGridDomain() *schema.Resource { return &schema.Resource{ - Create: resourceArmEventGridDomainCreateUpdate, - Read: resourceArmEventGridDomainRead, - Update: resourceArmEventGridDomainCreateUpdate, - Delete: resourceArmEventGridDomainDelete, + Create: resourceEventGridDomainCreateUpdate, + Read: resourceEventGridDomainRead, + Update: resourceEventGridDomainCreateUpdate, + Delete: resourceEventGridDomainDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -34,7 +35,7 @@ func resourceArmEventGridDomain() *schema.Resource { }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.EventGridDomainID(id) + _, err := parse.DomainID(id) return err }), @@ -43,6 +44,13 @@ func resourceArmEventGridDomain() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, + ValidateFunc: validation.All( + validation.StringIsNotEmpty, + validation.StringMatch( + regexp.MustCompile("^[-a-zA-Z0-9]{3,50}$"), + "EventGrid domain name must be 3 - 50 characters long, contain only letters, numbers and hyphens.", + ), + ), }, "location": azure.SchemaLocation(), @@ -150,7 +158,7 @@ func resourceArmEventGridDomain() *schema.Resource { } } -func resourceArmEventGridDomainCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceEventGridDomainCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).EventGrid.DomainsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -206,15 +214,15 @@ func resourceArmEventGridDomainCreateUpdate(d *schema.ResourceData, meta interfa d.SetId(*read.ID) - return resourceArmEventGridDomainRead(d, meta) + return resourceEventGridDomainRead(d, meta) } -func resourceArmEventGridDomainRead(d *schema.ResourceData, meta interface{}) error { +func resourceEventGridDomainRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).EventGrid.DomainsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.EventGridDomainID(d.Id()) + id, err := parse.DomainID(d.Id()) if err != nil { return err } @@ -268,12 +276,12 @@ func resourceArmEventGridDomainRead(d *schema.ResourceData, meta interface{}) er return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmEventGridDomainDelete(d *schema.ResourceData, meta interface{}) error { +func resourceEventGridDomainDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).EventGrid.DomainsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.EventGridDomainID(d.Id()) + id, err := parse.DomainID(d.Id()) if err != nil { return err } diff --git a/azurerm/internal/services/eventgrid/eventgrid_domain_resource_test.go b/azurerm/internal/services/eventgrid/eventgrid_domain_resource_test.go new file mode 100644 
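The domain resource now resolves its ID through parse.DomainID rather than parse.EventGridDomainID. A minimal sketch of the parser in use, assuming the ResourceGroup and Name fields exercised by the acceptance test that follows (the ID string is a placeholder):

package eventgrid

import (
	"fmt"

	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventgrid/parse"
)

// exampleParseDomainID is an illustrative helper (not part of this change) showing how the
// renamed parser is used by Read/Delete; the resource ID below is a placeholder value.
func exampleParseDomainID() error {
	id, err := parse.DomainID("/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example-resources/providers/Microsoft.EventGrid/domains/example-domain")
	if err != nil {
		return err
	}

	fmt.Printf("resource group: %s, domain: %s\n", id.ResourceGroup, id.Name)
	return nil
}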
index 000000000000..b8522c207aed --- /dev/null +++ b/azurerm/internal/services/eventgrid/eventgrid_domain_resource_test.go @@ -0,0 +1,190 @@ +package eventgrid_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventgrid/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type EventGridDomainResource struct { +} + +func TestAccEventGridDomain_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventgrid_domain", "test") + r := EventGridDomainResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("endpoint").Exists(), + check.That(data.ResourceName).Key("primary_access_key").Exists(), + check.That(data.ResourceName).Key("secondary_access_key").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventGridDomain_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventgrid_domain", "test") + r := EventGridDomainResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_eventgrid_domain"), + }, + }) +} + +func TestAccEventGridDomain_mapping(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventgrid_domain", "test") + r := EventGridDomainResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.mapping(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("input_mapping_fields.0.topic").HasValue("test"), + check.That(data.ResourceName).Key("input_mapping_fields.0.topic").HasValue("test"), + check.That(data.ResourceName).Key("input_mapping_default_values.0.data_version").HasValue("1.0"), + check.That(data.ResourceName).Key("input_mapping_default_values.0.subject").HasValue("DefaultSubject"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventGridDomain_basicWithTags(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventgrid_domain", "test") + r := EventGridDomainResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicWithTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.foo").HasValue("bar"), + ), + }, + data.ImportStep(), + }) +} + +func (EventGridDomainResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.DomainID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.EventGrid.DomainsClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving EventGrid Domain %q (resource group: %q): %+v", id.Name, id.ResourceGroup, 
err) + } + + return utils.Bool(resp.DomainProperties != nil), nil +} + +func (EventGridDomainResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_eventgrid_domain" "test" { + name = "acctesteg-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (EventGridDomainResource) requiresImport(data acceptance.TestData) string { + template := EventGridDomainResource{}.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_eventgrid_domain" "import" { + name = azurerm_eventgrid_domain.test.name + location = azurerm_eventgrid_domain.test.location + resource_group_name = azurerm_eventgrid_domain.test.resource_group_name +} +`, template) +} + +func (EventGridDomainResource) mapping(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_eventgrid_domain" "test" { + name = "acctesteg-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + input_schema = "CustomEventSchema" + + input_mapping_fields { + topic = "test" + event_type = "test" + } + + input_mapping_default_values { + data_version = "1.0" + subject = "DefaultSubject" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (EventGridDomainResource) basicWithTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_eventgrid_domain" "test" { + name = "acctesteg-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + tags = { + "foo" = "bar" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/eventgrid/eventgrid_domain_topic_data_source.go b/azurerm/internal/services/eventgrid/eventgrid_domain_topic_data_source.go new file mode 100644 index 000000000000..5af3df16b8ab --- /dev/null +++ b/azurerm/internal/services/eventgrid/eventgrid_domain_topic_data_source.go @@ -0,0 +1,65 @@ +package eventgrid + +import ( + "fmt" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func dataSourceEventGridDomainTopic() *schema.Resource { + return &schema.Resource{ + Read: dataSourceEventGridDomainTopicRead, + + Timeouts: &schema.ResourceTimeout{ + Read: schema.DefaultTimeout(5 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "domain_name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "resource_group_name": 
azure.SchemaResourceGroupNameForDataSource(), + }, + } +} + +func dataSourceEventGridDomainTopicRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).EventGrid.DomainTopicsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + domainName := d.Get("domain_name").(string) + + resp, err := client.Get(ctx, resourceGroup, domainName, name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Error: EventGrid Domain Topic %s (Resource Group %s) was not found: %+v", name, resourceGroup, err) + } + + return fmt.Errorf("Error making Read request on EventGrid Domain Topic '%s': %+v", name, err) + } + + d.SetId(*resp.ID) + d.Set("name", name) + d.Set("resource_group_name", resourceGroup) + d.Set("domain_name", domainName) + + return nil +} diff --git a/azurerm/internal/services/eventgrid/eventgrid_domain_topic_data_source_test.go b/azurerm/internal/services/eventgrid/eventgrid_domain_topic_data_source_test.go new file mode 100644 index 000000000000..988be68be3f6 --- /dev/null +++ b/azurerm/internal/services/eventgrid/eventgrid_domain_topic_data_source_test.go @@ -0,0 +1,39 @@ +package eventgrid_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type EventGridDomainTopicDataSource struct { +} + +func TestAccEventGridDomainTopicDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_eventgrid_domain_topic", "test") + r := EventGridDomainTopicDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("domain_name").Exists(), + ), + }, + }) +} + +func (EventGridDomainTopicDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_eventgrid_domain_topic" "test" { + name = azurerm_eventgrid_domain_topic.test.name + domain_name = azurerm_eventgrid_domain_topic.test.domain_name + resource_group_name = azurerm_resource_group.test.name +} +`, EventGridDomainTopicResource{}.basic(data)) +} diff --git a/azurerm/internal/services/eventgrid/eventgrid_domain_topic_resource.go b/azurerm/internal/services/eventgrid/eventgrid_domain_topic_resource.go index 2268d91b5c1d..579b6d0dd48e 100644 --- a/azurerm/internal/services/eventgrid/eventgrid_domain_topic_resource.go +++ b/azurerm/internal/services/eventgrid/eventgrid_domain_topic_resource.go @@ -3,10 +3,12 @@ package eventgrid import ( "fmt" "log" + "regexp" "time" "github.com/hashicorp/go-azure-helpers/response" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" @@ -16,11 +18,11 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmEventGridDomainTopic() *schema.Resource { +func resourceEventGridDomainTopic() *schema.Resource { return &schema.Resource{ - Create: 
resourceArmEventGridDomainTopicCreate, - Read: resourceArmEventGridDomainTopicRead, - Delete: resourceArmEventGridDomainTopicDelete, + Create: resourceEventGridDomainTopicCreate, + Read: resourceEventGridDomainTopicRead, + Delete: resourceEventGridDomainTopicDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -29,7 +31,7 @@ func resourceArmEventGridDomainTopic() *schema.Resource { }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.EventGridDomainTopicID(id) + _, err := parse.DomainTopicID(id) return err }), @@ -38,12 +40,26 @@ func resourceArmEventGridDomainTopic() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, + ValidateFunc: validation.All( + validation.StringIsNotEmpty, + validation.StringMatch( + regexp.MustCompile("^[-a-zA-Z0-9]{3,50}$"), + "EventGrid domain name must be 3 - 50 characters long, contain only letters, numbers and hyphens.", + ), + ), }, "domain_name": { Type: schema.TypeString, Required: true, ForceNew: true, + ValidateFunc: validation.All( + validation.StringIsNotEmpty, + validation.StringMatch( + regexp.MustCompile("^[-a-zA-Z0-9]{3,50}$"), + "EventGrid domain name must be 3 - 50 characters long, contain only letters, numbers and hyphens.", + ), + ), }, "resource_group_name": azure.SchemaResourceGroupName(), @@ -51,7 +67,7 @@ func resourceArmEventGridDomainTopic() *schema.Resource { } } -func resourceArmEventGridDomainTopicCreate(d *schema.ResourceData, meta interface{}) error { +func resourceEventGridDomainTopicCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).EventGrid.DomainTopicsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -92,60 +108,60 @@ func resourceArmEventGridDomainTopicCreate(d *schema.ResourceData, meta interfac d.SetId(*read.ID) - return resourceArmEventGridDomainTopicRead(d, meta) + return resourceEventGridDomainTopicRead(d, meta) } -func resourceArmEventGridDomainTopicRead(d *schema.ResourceData, meta interface{}) error { +func resourceEventGridDomainTopicRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).EventGrid.DomainTopicsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.EventGridDomainTopicID(d.Id()) + id, err := parse.DomainTopicID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.Domain, id.Name) + resp, err := client.Get(ctx, id.ResourceGroup, id.DomainName, id.TopicName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[WARN] EventGrid Domain Topic %q was not found (Resource Group %q)", id.Name, id.ResourceGroup) + log.Printf("[WARN] EventGrid Domain Topic %q was not found (Resource Group %q)", id.TopicName, id.ResourceGroup) d.SetId("") return nil } - return fmt.Errorf("Error making Read request on EventGrid Domain Topic %q: %+v", id.Name, err) + return fmt.Errorf("Error making Read request on EventGrid Domain Topic %q: %+v", id.TopicName, err) } d.Set("name", resp.Name) - d.Set("domain_name", id.Domain) + d.Set("domain_name", id.DomainName) d.Set("resource_group_name", id.ResourceGroup) return nil } -func resourceArmEventGridDomainTopicDelete(d *schema.ResourceData, meta interface{}) error { +func resourceEventGridDomainTopicDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).EventGrid.DomainTopicsClient ctx, cancel := 
timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.EventGridDomainTopicID(d.Id()) + id, err := parse.DomainTopicID(d.Id()) if err != nil { return err } - future, err := client.Delete(ctx, id.ResourceGroup, id.Domain, id.Name) + future, err := client.Delete(ctx, id.ResourceGroup, id.DomainName, id.TopicName) if err != nil { if response.WasNotFound(future.Response()) { return nil } - return fmt.Errorf("Error deleting EventGrid Domain Topic %q: %+v", id.Name, err) + return fmt.Errorf("Error deleting EventGrid Domain Topic %q: %+v", id.TopicName, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { if response.WasNotFound(future.Response()) { return nil } - return fmt.Errorf("Error deleting EventGrid Domain Topic %q: %+v", id.Name, err) + return fmt.Errorf("Error deleting EventGrid Domain Topic %q: %+v", id.TopicName, err) } return nil diff --git a/azurerm/internal/services/eventgrid/eventgrid_domain_topic_resource_test.go b/azurerm/internal/services/eventgrid/eventgrid_domain_topic_resource_test.go new file mode 100644 index 000000000000..4e4823263713 --- /dev/null +++ b/azurerm/internal/services/eventgrid/eventgrid_domain_topic_resource_test.go @@ -0,0 +1,99 @@ +package eventgrid_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventgrid/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type EventGridDomainTopicResource struct { +} + +func TestAccEventGridDomainTopic_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventgrid_domain_topic", "test") + r := EventGridDomainTopicResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventGridDomainTopic_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventgrid_domain_topic", "test") + r := EventGridDomainTopicResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_eventgrid_domain_topic"), + }, + }) +} + +func (EventGridDomainTopicResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.DomainTopicID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.EventGrid.DomainTopicsClient.Get(ctx, id.ResourceGroup, id.DomainName, id.TopicName) + if err != nil { + return nil, fmt.Errorf("retrieving EventGrid Domain Topic %q (resource group: %q): %+v", id.TopicName, id.ResourceGroup, err) + } + + return utils.Bool(resp.DomainTopicProperties != nil), nil +} + +func (EventGridDomainTopicResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} +resource "azurerm_resource_group" "test" { + 
name = "acctestRG-%d" + location = "%s" +} +resource "azurerm_eventgrid_domain" "test" { + name = "acctestegdomain-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} +resource "azurerm_eventgrid_domain_topic" "test" { + name = "acctestegtopic-%d" + domain_name = azurerm_eventgrid_domain.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (EventGridDomainTopicResource) requiresImport(data acceptance.TestData) string { + template := EventGridDomainTopicResource{}.basic(data) + return fmt.Sprintf(` +%s +resource "azurerm_eventgrid_domain_topic" "import" { + name = azurerm_eventgrid_domain_topic.test.name + domain_name = azurerm_eventgrid_domain_topic.test.domain_name + resource_group_name = azurerm_eventgrid_domain_topic.test.resource_group_name +} +`, template) +} diff --git a/azurerm/internal/services/eventgrid/eventgrid_event_subscription_resource.go b/azurerm/internal/services/eventgrid/eventgrid_event_subscription_resource.go index 3fac9a7d00d5..68c396826a98 100644 --- a/azurerm/internal/services/eventgrid/eventgrid_event_subscription_resource.go +++ b/azurerm/internal/services/eventgrid/eventgrid_event_subscription_resource.go @@ -6,11 +6,9 @@ import ( "time" "github.com/Azure/azure-sdk-for-go/services/preview/eventgrid/mgmt/2020-04-01-preview/eventgrid" - "github.com/Azure/go-autorest/autorest/date" "github.com/hashicorp/go-azure-helpers/response" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventgrid/parse" @@ -19,26 +17,26 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func enpointPropertyNames() []string { +func PossibleEventSubscriptionEndpointTypes() []string { return []string{ - "azure_function_endpoint", - "eventhub_endpoint", - "eventhub_endpoint_id", - "hybrid_connection_endpoint", - "hybrid_connection_endpoint_id", - "service_bus_queue_endpoint_id", - "service_bus_topic_endpoint_id", - "storage_queue_endpoint", - "webhook_endpoint", + string(AzureFunctionEndpoint), + string(EventHubEndpoint), + string(EventHubEndpointID), + string(HybridConnectionEndpoint), + string(HybridConnectionEndpointID), + string(ServiceBusQueueEndpointID), + string(ServiceBusTopicEndpointID), + string(StorageQueueEndpoint), + string(WebHookEndpoint), } } -func resourceArmEventGridEventSubscription() *schema.Resource { +func resourceEventGridEventSubscription() *schema.Resource { return &schema.Resource{ - Create: resourceArmEventGridEventSubscriptionCreateUpdate, - Read: resourceArmEventGridEventSubscriptionRead, - Update: resourceArmEventGridEventSubscriptionCreateUpdate, - Delete: resourceArmEventGridEventSubscriptionDelete, + Create: resourceEventGridEventSubscriptionCreateUpdate, + Read: resourceEventGridEventSubscriptionRead, + Update: resourceEventGridEventSubscriptionCreateUpdate, + Delete: resourceEventGridEventSubscriptionDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -48,17 +46,12 @@ func 
resourceArmEventGridEventSubscription() *schema.Resource { }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.EventGridEventSubscriptionID(id) + _, err := parse.EventSubscriptionID(id) return err }), Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, + "name": eventSubscriptionSchemaEventSubscriptionName(), "scope": { Type: schema.TypeString, @@ -67,23 +60,9 @@ func resourceArmEventGridEventSubscription() *schema.Resource { ValidateFunc: validation.StringIsNotEmpty, }, - "event_delivery_schema": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - Default: string(eventgrid.EventGridSchema), - ValidateFunc: validation.StringInSlice([]string{ - string(eventgrid.EventGridSchema), - string(eventgrid.CloudEventSchemaV10), - string(eventgrid.CustomInputSchema), - }, false), - }, + "event_delivery_schema": eventSubscriptionSchemaEventDeliverySchema(), - "expiration_time_utc": { - Type: schema.TypeString, - Optional: true, - ValidateFunc: validation.StringIsNotEmpty, - }, + "expiration_time_utc": eventSubscriptionSchemaExpirationTimeUTC(), "topic_name": { Type: schema.TypeString, @@ -92,483 +71,85 @@ func resourceArmEventGridEventSubscription() *schema.Resource { Deprecated: "This field has been updated to readonly field since Apr 25, 2019 so no longer has any affect and will be removed in version 3.0 of the provider.", }, - "azure_function_endpoint": { - Type: schema.TypeList, - MaxItems: 1, - Optional: true, - ConflictsWith: utils.RemoveFromStringArray(enpointPropertyNames(), "azure_function_endpoint"), - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "function_id": { - Type: schema.TypeString, - Required: true, - ValidateFunc: azure.ValidateResourceID, - }, - "max_events_per_batch": { - Type: schema.TypeInt, - Optional: true, - }, - "preferred_batch_size_in_kilobytes": { - Type: schema.TypeInt, - Optional: true, - }, - }, - }, - }, - - "eventhub_endpoint_id": { - Type: schema.TypeString, - Optional: true, - Computed: true, - ConflictsWith: utils.RemoveFromStringArray(enpointPropertyNames(), "eventhub_endpoint_id"), - ValidateFunc: azure.ValidateResourceID, - }, - - "eventhub_endpoint": { - Type: schema.TypeList, - MaxItems: 1, - Deprecated: "Deprecated in favour of `" + "eventhub_endpoint_id" + "`", - Optional: true, - Computed: true, - ConflictsWith: utils.RemoveFromStringArray(enpointPropertyNames(), "eventhub_endpoint"), - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "eventhub_id": { - Type: schema.TypeString, - Optional: true, - Computed: true, - ValidateFunc: azure.ValidateResourceID, - }, - }, - }, - }, - - "hybrid_connection_endpoint_id": { - Type: schema.TypeString, - Optional: true, - Computed: true, - ConflictsWith: utils.RemoveFromStringArray(enpointPropertyNames(), "hybrid_connection_endpoint_id"), - ValidateFunc: azure.ValidateResourceID, - }, - - "hybrid_connection_endpoint": { - Type: schema.TypeList, - MaxItems: 1, - Deprecated: "Deprecated in favour of `" + "hybrid_connection_endpoint_id" + "`", - Optional: true, - Computed: true, - ConflictsWith: utils.RemoveFromStringArray(enpointPropertyNames(), "hybrid_connection_endpoint"), - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "hybrid_connection_id": { - Type: schema.TypeString, - Optional: true, - Computed: true, - ValidateFunc: azure.ValidateResourceID, - }, - }, - }, - }, - - "service_bus_queue_endpoint_id": { 
- Type: schema.TypeString, - Optional: true, - ConflictsWith: utils.RemoveFromStringArray(enpointPropertyNames(), "service_bus_queue_endpoint_id"), - ValidateFunc: azure.ValidateResourceID, - }, - - "service_bus_topic_endpoint_id": { - Type: schema.TypeString, - Optional: true, - ConflictsWith: utils.RemoveFromStringArray(enpointPropertyNames(), "service_bus_topic_endpoint_id"), - ValidateFunc: azure.ValidateResourceID, - }, - - "storage_queue_endpoint": { - Type: schema.TypeList, - MaxItems: 1, - Optional: true, - ConflictsWith: utils.RemoveFromStringArray(enpointPropertyNames(), "storage_queue_endpoint"), - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "storage_account_id": { - Type: schema.TypeString, - Required: true, - ValidateFunc: azure.ValidateResourceID, - }, - "queue_name": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - }, - }, - }, - - "webhook_endpoint": { - Type: schema.TypeList, - MaxItems: 1, - Optional: true, - ConflictsWith: utils.RemoveFromStringArray(enpointPropertyNames(), "webhook_endpoint"), - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "url": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.IsURLWithHTTPS, - }, - "base_url": { - Type: schema.TypeString, - Computed: true, - }, - "max_events_per_batch": { - Type: schema.TypeInt, - Optional: true, - ValidateFunc: validation.IntBetween(1, 5000), - }, - "preferred_batch_size_in_kilobytes": { - Type: schema.TypeInt, - Optional: true, - ValidateFunc: validation.IntBetween(1, 1024), - }, - "active_directory_tenant_id": { - Type: schema.TypeString, - Optional: true, - }, - "active_directory_app_id_or_uri": { - Type: schema.TypeString, - Optional: true, - }, - }, - }, - }, - - "included_event_types": { - Type: schema.TypeList, - Optional: true, - Computed: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - ValidateFunc: validation.StringIsNotEmpty, - }, - }, - - "subject_filter": { - Type: schema.TypeList, - MaxItems: 1, - Optional: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "subject_begins_with": { - Type: schema.TypeString, - Optional: true, - }, - "subject_ends_with": { - Type: schema.TypeString, - Optional: true, - }, - "case_sensitive": { - Type: schema.TypeBool, - Optional: true, - }, - }, - }, - }, - - "advanced_filter": { - Type: schema.TypeList, - MaxItems: 1, - Optional: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "bool_equals": { - Type: schema.TypeList, - Optional: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "key": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - "value": { - Type: schema.TypeBool, - Required: true, - }, - }, - }, - }, - "number_greater_than": { - Type: schema.TypeList, - Optional: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "key": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - "value": { - Type: schema.TypeFloat, - Required: true, - }, - }, - }, - }, - "number_greater_than_or_equals": { - Type: schema.TypeList, - Optional: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "key": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - "value": { - Type: schema.TypeFloat, - Required: true, - }, - }, - }}, - "number_less_than": { - Type: schema.TypeList, - Optional: true, - Elem: &schema.Resource{ - Schema: 
map[string]*schema.Schema{ - "key": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - "value": { - Type: schema.TypeFloat, - Required: true, - }, - }, - }, - }, - "number_less_than_or_equals": { - Type: schema.TypeList, - Optional: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "key": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - "value": { - Type: schema.TypeFloat, - Required: true, - }, - }, - }, - }, - "number_in": { - Type: schema.TypeList, - Optional: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "key": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - "values": { - Type: schema.TypeList, - Required: true, - MaxItems: 5, - Elem: &schema.Schema{ - Type: schema.TypeFloat, - }, - }, - }, - }, - }, - "number_not_in": { - Type: schema.TypeList, - Optional: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "key": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - "values": { - Type: schema.TypeList, - Required: true, - MaxItems: 5, - Elem: &schema.Schema{ - Type: schema.TypeFloat, - }, - }, - }, - }, - }, - "string_begins_with": { - Type: schema.TypeList, - Optional: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "key": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - "values": { - Type: schema.TypeList, - Required: true, - MaxItems: 5, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - }, - }, - }, - }, - "string_ends_with": { - Type: schema.TypeList, - Optional: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "key": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - "values": { - Type: schema.TypeList, - Required: true, - MaxItems: 5, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - }, - }, - }, - }, - "string_contains": { - Type: schema.TypeList, - Optional: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "key": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - "values": { - Type: schema.TypeList, - Required: true, - MaxItems: 5, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - }, - }, - }, - }, - "string_in": { - Type: schema.TypeList, - Optional: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "key": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - "values": { - Type: schema.TypeList, - Required: true, - MaxItems: 5, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - }, - }, - }, - }, - "string_not_in": { - Type: schema.TypeList, - Optional: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "key": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - "values": { - Type: schema.TypeList, - Required: true, - MaxItems: 5, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - }, - }, - }, - }, - }, - }, - }, - - "storage_blob_dead_letter_destination": { - Type: schema.TypeList, - MaxItems: 1, - Optional: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "storage_account_id": { - Type: schema.TypeString, - Required: true, - ValidateFunc: azure.ValidateResourceID, - }, - "storage_blob_container_name": { - Type: 
schema.TypeString, - Required: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - }, - }, - }, - - "retry_policy": { - Type: schema.TypeList, - MaxItems: 1, - Optional: true, - Computed: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "max_delivery_attempts": { - Type: schema.TypeInt, - Required: true, - ValidateFunc: validation.IntBetween(1, 30), - }, - "event_time_to_live": { - Type: schema.TypeInt, - Required: true, - ValidateFunc: validation.IntBetween(1, 1440), - }, - }, - }, - }, - - "labels": { - Type: schema.TypeList, - Optional: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - }, + "azure_function_endpoint": eventSubscriptionSchemaAzureFunctionEndpoint( + utils.RemoveFromStringArray( + PossibleEventSubscriptionEndpointTypes(), + string(AzureFunctionEndpoint), + ), + ), + + "eventhub_endpoint_id": eventSubscriptionSchemaEventHubEndpointID( + utils.RemoveFromStringArray( + PossibleEventSubscriptionEndpointTypes(), + string(EventHubEndpointID), + ), + ), + + "eventhub_endpoint": eventSubscriptionSchemaEventHubEndpoint( + utils.RemoveFromStringArray( + PossibleEventSubscriptionEndpointTypes(), + string(EventHubEndpoint), + ), + ), + + "hybrid_connection_endpoint_id": eventSubscriptionSchemaHybridConnectionEndpointID( + utils.RemoveFromStringArray( + PossibleEventSubscriptionEndpointTypes(), + string(HybridConnectionEndpointID), + ), + ), + + "hybrid_connection_endpoint": eventSubscriptionSchemaHybridEndpoint( + utils.RemoveFromStringArray( + PossibleEventSubscriptionEndpointTypes(), + string(HybridConnectionEndpoint), + ), + ), + + "service_bus_queue_endpoint_id": eventSubscriptionSchemaServiceBusQueueEndpointID( + utils.RemoveFromStringArray( + PossibleEventSubscriptionEndpointTypes(), + string(ServiceBusQueueEndpointID), + ), + ), + + "service_bus_topic_endpoint_id": eventSubscriptionSchemaServiceBusTopicEndpointID( + utils.RemoveFromStringArray( + PossibleEventSubscriptionEndpointTypes(), + string(ServiceBusTopicEndpointID), + ), + ), + + "storage_queue_endpoint": eventSubscriptionSchemaStorageQueueEndpoint( + utils.RemoveFromStringArray( + PossibleEventSubscriptionEndpointTypes(), + string(StorageQueueEndpoint), + ), + ), + + "webhook_endpoint": eventSubscriptionSchemaWebHookEndpoint( + utils.RemoveFromStringArray( + PossibleEventSubscriptionEndpointTypes(), + string(WebHookEndpoint), + ), + ), + + "included_event_types": eventSubscriptionSchemaIncludedEventTypes(), + + "subject_filter": eventSubscriptionSchemaSubjectFilter(), + + "advanced_filter": eventSubscriptionSchemaAdvancedFilter(), + + "storage_blob_dead_letter_destination": eventSubscriptionSchemaStorageBlobDeadletterDestination(), + + "retry_policy": eventSubscriptionSchemaRetryPolicy(), + + "labels": eventSubscriptionSchemaLabels(), }, } } -func resourceArmEventGridEventSubscriptionCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceEventGridEventSubscriptionCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).EventGrid.EventSubscriptionsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -591,7 +172,7 @@ func resourceArmEventGridEventSubscriptionCreateUpdate(d *schema.ResourceData, m destination := expandEventGridEventSubscriptionDestination(d) if destination == nil { - return fmt.Errorf("One of the following endpoint types must be specificed to create an EventGrid Event Subscription: %q", enpointPropertyNames()) + return fmt.Errorf("One of the following 
endpoint types must be specificed to create an EventGrid Event Subscription: %q", PossibleEventSubscriptionEndpointTypes()) } filter, err := expandEventGridEventSubscriptionFilter(d) @@ -639,15 +220,15 @@ func resourceArmEventGridEventSubscriptionCreateUpdate(d *schema.ResourceData, m d.SetId(*read.ID) - return resourceArmEventGridEventSubscriptionRead(d, meta) + return resourceEventGridEventSubscriptionRead(d, meta) } -func resourceArmEventGridEventSubscriptionRead(d *schema.ResourceData, meta interface{}) error { +func resourceEventGridEventSubscriptionRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).EventGrid.EventSubscriptionsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.EventGridEventSubscriptionID(d.Id()) + id, err := parse.EventSubscriptionID(d.Id()) if err != nil { return err } @@ -753,12 +334,12 @@ func resourceArmEventGridEventSubscriptionRead(d *schema.ResourceData, meta inte return nil } -func resourceArmEventGridEventSubscriptionDelete(d *schema.ResourceData, meta interface{}) error { +func resourceEventGridEventSubscriptionDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).EventGrid.EventSubscriptionsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.EventGridEventSubscriptionID(d.Id()) + id, err := parse.EventSubscriptionID(d.Id()) if err != nil { return err } @@ -780,579 +361,3 @@ func resourceArmEventGridEventSubscriptionDelete(d *schema.ResourceData, meta in return nil } - -func expandEventGridExpirationTime(d *schema.ResourceData) (*date.Time, error) { - if expirationTimeUtc, ok := d.GetOk("expiration_time_utc"); ok { - if expirationTimeUtc == "" { - return nil, nil - } - - parsedExpirationTimeUtc, err := date.ParseTime(time.RFC3339, expirationTimeUtc.(string)) - if err != nil { - return nil, err - } - - return &date.Time{Time: parsedExpirationTimeUtc}, nil - } - - return nil, nil -} - -func expandEventGridEventSubscriptionDestination(d *schema.ResourceData) eventgrid.BasicEventSubscriptionDestination { - if v, ok := d.GetOk("azure_function_endpoint"); ok { - return expandEventGridEventSubscriptionAzureFunctionEndpoint(v) - } - - if v, ok := d.GetOk("eventhub_endpoint_id"); ok { - return &eventgrid.EventHubEventSubscriptionDestination{ - EndpointType: eventgrid.EndpointTypeEventHub, - EventHubEventSubscriptionDestinationProperties: &eventgrid.EventHubEventSubscriptionDestinationProperties{ - ResourceID: utils.String(v.(string)), - }, - } - } else if _, ok := d.GetOk("eventhub_endpoint"); ok { - return expandEventGridEventSubscriptionEventhubEndpoint(d) - } - - if v, ok := d.GetOk("hybrid_connection_endpoint_id"); ok { - return &eventgrid.HybridConnectionEventSubscriptionDestination{ - EndpointType: eventgrid.EndpointTypeHybridConnection, - HybridConnectionEventSubscriptionDestinationProperties: &eventgrid.HybridConnectionEventSubscriptionDestinationProperties{ - ResourceID: utils.String(v.(string)), - }, - } - } else if _, ok := d.GetOk("hybrid_connection_endpoint"); ok { - return expandEventGridEventSubscriptionHybridConnectionEndpoint(d) - } - - if v, ok := d.GetOk("service_bus_queue_endpoint_id"); ok { - return &eventgrid.ServiceBusQueueEventSubscriptionDestination{ - EndpointType: eventgrid.EndpointTypeServiceBusQueue, - ServiceBusQueueEventSubscriptionDestinationProperties: &eventgrid.ServiceBusQueueEventSubscriptionDestinationProperties{ - ResourceID: 
utils.String(v.(string)), - }, - } - } - - if v, ok := d.GetOk("service_bus_topic_endpoint_id"); ok { - return &eventgrid.ServiceBusTopicEventSubscriptionDestination{ - EndpointType: eventgrid.EndpointTypeServiceBusTopic, - ServiceBusTopicEventSubscriptionDestinationProperties: &eventgrid.ServiceBusTopicEventSubscriptionDestinationProperties{ - ResourceID: utils.String(v.(string)), - }, - } - } - - if _, ok := d.GetOk("storage_queue_endpoint"); ok { - return expandEventGridEventSubscriptionStorageQueueEndpoint(d) - } - - if v, ok := d.GetOk("webhook_endpoint"); ok { - return expandEventGridEventSubscriptionWebhookEndpoint(v) - } - - return nil -} - -func expandEventGridEventSubscriptionStorageQueueEndpoint(d *schema.ResourceData) eventgrid.BasicEventSubscriptionDestination { - props := d.Get("storage_queue_endpoint").([]interface{})[0].(map[string]interface{}) - storageAccountID := props["storage_account_id"].(string) - queueName := props["queue_name"].(string) - - return eventgrid.StorageQueueEventSubscriptionDestination{ - EndpointType: eventgrid.EndpointTypeStorageQueue, - StorageQueueEventSubscriptionDestinationProperties: &eventgrid.StorageQueueEventSubscriptionDestinationProperties{ - ResourceID: &storageAccountID, - QueueName: &queueName, - }, - } -} - -func expandEventGridEventSubscriptionEventhubEndpoint(d *schema.ResourceData) eventgrid.BasicEventSubscriptionDestination { - props := d.Get("eventhub_endpoint").([]interface{})[0].(map[string]interface{}) - eventHubID := props["eventhub_id"].(string) - - return eventgrid.EventHubEventSubscriptionDestination{ - EndpointType: eventgrid.EndpointTypeEventHub, - EventHubEventSubscriptionDestinationProperties: &eventgrid.EventHubEventSubscriptionDestinationProperties{ - ResourceID: &eventHubID, - }, - } -} - -func expandEventGridEventSubscriptionHybridConnectionEndpoint(d *schema.ResourceData) eventgrid.BasicEventSubscriptionDestination { - props := d.Get("hybrid_connection_endpoint").([]interface{})[0].(map[string]interface{}) - hybridConnectionID := props["hybrid_connection_id"].(string) - - return eventgrid.HybridConnectionEventSubscriptionDestination{ - EndpointType: eventgrid.EndpointTypeHybridConnection, - HybridConnectionEventSubscriptionDestinationProperties: &eventgrid.HybridConnectionEventSubscriptionDestinationProperties{ - ResourceID: &hybridConnectionID, - }, - } -} - -func expandEventGridEventSubscriptionAzureFunctionEndpoint(input interface{}) eventgrid.BasicEventSubscriptionDestination { - configs := input.([]interface{}) - - props := eventgrid.AzureFunctionEventSubscriptionDestinationProperties{} - azureFunctionDestination := &eventgrid.AzureFunctionEventSubscriptionDestination{ - EndpointType: eventgrid.EndpointTypeAzureFunction, - AzureFunctionEventSubscriptionDestinationProperties: &props, - } - - if len(configs) == 0 { - return azureFunctionDestination - } - - config := configs[0].(map[string]interface{}) - - if v, ok := config["function_id"]; ok && v != "" { - props.ResourceID = utils.String(v.(string)) - } - - if v, ok := config["max_events_per_batch"]; ok && v != 0 { - props.MaxEventsPerBatch = utils.Int32(int32(v.(int))) - } - - if v, ok := config["preferred_batch_size_in_kilobytes"]; ok && v != 0 { - props.PreferredBatchSizeInKilobytes = utils.Int32(int32(v.(int))) - } - - return azureFunctionDestination -} - -func expandEventGridEventSubscriptionWebhookEndpoint(input interface{}) eventgrid.BasicEventSubscriptionDestination { - configs := input.([]interface{}) - - props := 
eventgrid.WebHookEventSubscriptionDestinationProperties{} - webhookDestination := &eventgrid.WebHookEventSubscriptionDestination{ - EndpointType: eventgrid.EndpointTypeWebHook, - WebHookEventSubscriptionDestinationProperties: &props, - } - - if len(configs) == 0 { - return webhookDestination - } - - config := configs[0].(map[string]interface{}) - - if v, ok := config["url"]; ok && v != "" { - props.EndpointURL = utils.String(v.(string)) - } - - if v, ok := config["max_events_per_batch"]; ok && v != 0 { - props.MaxEventsPerBatch = utils.Int32(int32(v.(int))) - } - - if v, ok := config["preferred_batch_size_in_kilobytes"]; ok && v != 0 { - props.PreferredBatchSizeInKilobytes = utils.Int32(int32(v.(int))) - } - - if v, ok := config["active_directory_tenant_id"]; ok && v != "" { - props.AzureActiveDirectoryTenantID = utils.String(v.(string)) - } - - if v, ok := config["active_directory_app_id_or_uri"]; ok && v != "" { - props.AzureActiveDirectoryApplicationIDOrURI = utils.String(v.(string)) - } - - return webhookDestination -} - -func expandEventGridEventSubscriptionFilter(d *schema.ResourceData) (*eventgrid.EventSubscriptionFilter, error) { - filter := &eventgrid.EventSubscriptionFilter{} - - if includedEvents, ok := d.GetOk("included_event_types"); ok { - filter.IncludedEventTypes = utils.ExpandStringSlice(includedEvents.([]interface{})) - } - - if v, ok := d.GetOk("subject_filter"); ok { - if v.([]interface{})[0] != nil { - config := v.([]interface{})[0].(map[string]interface{}) - subjectBeginsWith := config["subject_begins_with"].(string) - subjectEndsWith := config["subject_ends_with"].(string) - caseSensitive := config["case_sensitive"].(bool) - - filter.SubjectBeginsWith = &subjectBeginsWith - filter.SubjectEndsWith = &subjectEndsWith - filter.IsSubjectCaseSensitive = &caseSensitive - } - } - - if advancedFilter, ok := d.GetOk("advanced_filter"); ok { - advancedFilters := make([]eventgrid.BasicAdvancedFilter, 0) - for filterKey, filterSchema := range advancedFilter.([]interface{})[0].(map[string]interface{}) { - for _, options := range filterSchema.([]interface{}) { - if filter, err := expandAdvancedFilter(filterKey, options.(map[string]interface{})); err == nil { - advancedFilters = append(advancedFilters, filter) - } else { - return nil, err - } - } - } - filter.AdvancedFilters = &advancedFilters - } - - return filter, nil -} - -func expandAdvancedFilter(operatorType string, config map[string]interface{}) (eventgrid.BasicAdvancedFilter, error) { - k := config["key"].(string) - - switch operatorType { - case "bool_equals": - v := config["value"].(bool) - return eventgrid.BoolEqualsAdvancedFilter{Key: &k, OperatorType: eventgrid.OperatorTypeBoolEquals, Value: &v}, nil - case "number_greater_than": - v := config["value"].(float64) - return eventgrid.NumberGreaterThanAdvancedFilter{Key: &k, OperatorType: eventgrid.OperatorTypeNumberGreaterThan, Value: &v}, nil - case "number_greater_than_or_equals": - v := config["value"].(float64) - return eventgrid.NumberGreaterThanOrEqualsAdvancedFilter{Key: &k, OperatorType: eventgrid.OperatorTypeNumberGreaterThanOrEquals, Value: &v}, nil - case "number_less_than": - v := config["value"].(float64) - return eventgrid.NumberLessThanAdvancedFilter{Key: &k, OperatorType: eventgrid.OperatorTypeNumberLessThan, Value: &v}, nil - case "number_less_than_or_equals": - v := config["value"].(float64) - return eventgrid.NumberLessThanOrEqualsAdvancedFilter{Key: &k, OperatorType: eventgrid.OperatorTypeNumberLessThanOrEquals, Value: &v}, nil - case "number_in": - v 
:= utils.ExpandFloatSlice(config["values"].([]interface{})) - return eventgrid.NumberInAdvancedFilter{Key: &k, OperatorType: eventgrid.OperatorTypeNumberIn, Values: v}, nil - case "number_not_in": - v := utils.ExpandFloatSlice(config["values"].([]interface{})) - return eventgrid.NumberNotInAdvancedFilter{Key: &k, OperatorType: eventgrid.OperatorTypeNumberIn, Values: v}, nil - case "string_begins_with": - v := utils.ExpandStringSlice(config["values"].([]interface{})) - return eventgrid.StringBeginsWithAdvancedFilter{Key: &k, OperatorType: eventgrid.OperatorTypeStringBeginsWith, Values: v}, nil - case "string_ends_with": - v := utils.ExpandStringSlice(config["values"].([]interface{})) - return eventgrid.StringEndsWithAdvancedFilter{Key: &k, OperatorType: eventgrid.OperatorTypeStringEndsWith, Values: v}, nil - case "string_contains": - v := utils.ExpandStringSlice(config["values"].([]interface{})) - return eventgrid.StringContainsAdvancedFilter{Key: &k, OperatorType: eventgrid.OperatorTypeStringContains, Values: v}, nil - case "string_in": - v := utils.ExpandStringSlice(config["values"].([]interface{})) - return eventgrid.StringInAdvancedFilter{Key: &k, OperatorType: eventgrid.OperatorTypeStringIn, Values: v}, nil - case "string_not_in": - v := utils.ExpandStringSlice(config["values"].([]interface{})) - return eventgrid.StringNotInAdvancedFilter{Key: &k, OperatorType: eventgrid.OperatorTypeStringNotIn, Values: v}, nil - default: - return nil, fmt.Errorf("Invalid `advanced_filter` operator_type %q used", operatorType) - } -} - -func expandEventGridEventSubscriptionStorageBlobDeadLetterDestination(d *schema.ResourceData) eventgrid.BasicDeadLetterDestination { - if v, ok := d.GetOk("storage_blob_dead_letter_destination"); ok { - dest := v.([]interface{})[0].(map[string]interface{}) - resourceID := dest["storage_account_id"].(string) - blobName := dest["storage_blob_container_name"].(string) - return eventgrid.StorageBlobDeadLetterDestination{ - EndpointType: eventgrid.EndpointTypeStorageBlob, - StorageBlobDeadLetterDestinationProperties: &eventgrid.StorageBlobDeadLetterDestinationProperties{ - ResourceID: &resourceID, - BlobContainerName: &blobName, - }, - } - } - - return nil -} - -func expandEventGridEventSubscriptionRetryPolicy(d *schema.ResourceData) *eventgrid.RetryPolicy { - if v, ok := d.GetOk("retry_policy"); ok { - dest := v.([]interface{})[0].(map[string]interface{}) - maxDeliveryAttempts := dest["max_delivery_attempts"].(int) - eventTimeToLive := dest["event_time_to_live"].(int) - return &eventgrid.RetryPolicy{ - MaxDeliveryAttempts: utils.Int32(int32(maxDeliveryAttempts)), - EventTimeToLiveInMinutes: utils.Int32(int32(eventTimeToLive)), - } - } - - return nil -} - -func flattenEventGridEventSubscriptionEventhubEndpoint(input *eventgrid.EventHubEventSubscriptionDestination) []interface{} { - if input == nil { - return nil - } - result := make(map[string]interface{}) - - if input.ResourceID != nil { - result["eventhub_id"] = *input.ResourceID - } - - return []interface{}{result} -} - -func flattenEventGridEventSubscriptionHybridConnectionEndpoint(input *eventgrid.HybridConnectionEventSubscriptionDestination) []interface{} { - if input == nil { - return nil - } - - hybridConnectionId := "" - if input.ResourceID != nil { - hybridConnectionId = *input.ResourceID - } - - return []interface{}{ - map[string]interface{}{ - "hybrid_connection_id": hybridConnectionId, - }, - } -} - -func flattenEventGridEventSubscriptionStorageQueueEndpoint(input 
*eventgrid.StorageQueueEventSubscriptionDestination) []interface{} { - if input == nil { - return nil - } - result := make(map[string]interface{}) - - if input.ResourceID != nil { - result["storage_account_id"] = *input.ResourceID - } - if input.QueueName != nil { - result["queue_name"] = *input.QueueName - } - - return []interface{}{result} -} - -func flattenEventGridEventSubscriptionAzureFunctionEndpoint(input *eventgrid.AzureFunctionEventSubscriptionDestination) []interface{} { - results := make([]interface{}, 0) - - if input == nil { - return results - } - - functionID := "" - if input.ResourceID != nil { - functionID = *input.ResourceID - } - - maxEventsPerBatch := 0 - if input.MaxEventsPerBatch != nil { - maxEventsPerBatch = int(*input.MaxEventsPerBatch) - } - - preferredBatchSize := 0 - if input.PreferredBatchSizeInKilobytes != nil { - preferredBatchSize = int(*input.PreferredBatchSizeInKilobytes) - } - - return append(results, map[string]interface{}{ - "function_id": functionID, - "max_events_per_batch": maxEventsPerBatch, - "preferred_batch_size_in_kilobytes": preferredBatchSize, - }) -} - -func flattenEventGridEventSubscriptionWebhookEndpoint(input *eventgrid.WebHookEventSubscriptionDestination, fullURL *eventgrid.EventSubscriptionFullURL) []interface{} { - results := make([]interface{}, 0) - - if input == nil { - return results - } - - webhookURL := "" - if fullURL != nil { - webhookURL = *fullURL.EndpointURL - } - - webhookBaseURL := "" - if input.EndpointBaseURL != nil { - webhookBaseURL = *input.EndpointBaseURL - } - - maxEventsPerBatch := 0 - if input.MaxEventsPerBatch != nil { - maxEventsPerBatch = int(*input.MaxEventsPerBatch) - } - - preferredBatchSizeInKilobytes := 0 - if input.PreferredBatchSizeInKilobytes != nil { - preferredBatchSizeInKilobytes = int(*input.PreferredBatchSizeInKilobytes) - } - - azureActiveDirectoryTenantID := "" - if input.AzureActiveDirectoryTenantID != nil { - azureActiveDirectoryTenantID = *input.AzureActiveDirectoryTenantID - } - - azureActiveDirectoryApplicationIDOrURI := "" - if input.AzureActiveDirectoryApplicationIDOrURI != nil { - azureActiveDirectoryApplicationIDOrURI = *input.AzureActiveDirectoryApplicationIDOrURI - } - - return append(results, map[string]interface{}{ - "url": webhookURL, - "base_url": webhookBaseURL, - "max_events_per_batch": maxEventsPerBatch, - "preferred_batch_size_in_kilobytes": preferredBatchSizeInKilobytes, - "active_directory_tenant_id": azureActiveDirectoryTenantID, - "active_directory_app_id_or_uri": azureActiveDirectoryApplicationIDOrURI, - }) -} - -func flattenEventGridEventSubscriptionSubjectFilter(filter *eventgrid.EventSubscriptionFilter) []interface{} { - if (filter.SubjectBeginsWith != nil && *filter.SubjectBeginsWith == "") && (filter.SubjectEndsWith != nil && *filter.SubjectEndsWith == "") { - return nil - } - result := make(map[string]interface{}) - - if filter.SubjectBeginsWith != nil { - result["subject_begins_with"] = *filter.SubjectBeginsWith - } - - if filter.SubjectEndsWith != nil { - result["subject_ends_with"] = *filter.SubjectEndsWith - } - - if filter.IsSubjectCaseSensitive != nil { - result["case_sensitive"] = *filter.IsSubjectCaseSensitive - } - - return []interface{}{result} -} - -func flattenEventGridEventSubscriptionAdvancedFilter(input *eventgrid.EventSubscriptionFilter) []interface{} { - results := make([]interface{}, 0) - if input == nil || input.AdvancedFilters == nil { - return results - } - - boolEquals := make([]interface{}, 0) - numberGreaterThan := make([]interface{}, 0) - 
numberGreaterThanOrEquals := make([]interface{}, 0) - numberLessThan := make([]interface{}, 0) - numberLessThanOrEquals := make([]interface{}, 0) - numberIn := make([]interface{}, 0) - numberNotIn := make([]interface{}, 0) - stringBeginsWith := make([]interface{}, 0) - stringEndsWith := make([]interface{}, 0) - stringContains := make([]interface{}, 0) - stringIn := make([]interface{}, 0) - stringNotIn := make([]interface{}, 0) - - for _, item := range *input.AdvancedFilters { - switch f := item.(type) { - case eventgrid.BoolEqualsAdvancedFilter: - v := interface{}(f.Value) - boolEquals = append(boolEquals, flattenValue(f.Key, &v)) - case eventgrid.NumberGreaterThanAdvancedFilter: - v := interface{}(f.Value) - numberGreaterThan = append(numberGreaterThan, flattenValue(f.Key, &v)) - case eventgrid.NumberGreaterThanOrEqualsAdvancedFilter: - v := interface{}(f.Value) - numberGreaterThanOrEquals = append(numberGreaterThanOrEquals, flattenValue(f.Key, &v)) - case eventgrid.NumberLessThanAdvancedFilter: - v := interface{}(f.Value) - numberLessThan = append(numberLessThan, flattenValue(f.Key, &v)) - case eventgrid.NumberLessThanOrEqualsAdvancedFilter: - v := interface{}(f.Value) - numberLessThanOrEquals = append(numberLessThanOrEquals, flattenValue(f.Key, &v)) - case eventgrid.NumberInAdvancedFilter: - v := utils.FlattenFloatSlice(f.Values) - numberIn = append(numberIn, flattenValues(f.Key, &v)) - case eventgrid.NumberNotInAdvancedFilter: - v := utils.FlattenFloatSlice(f.Values) - numberNotIn = append(numberNotIn, flattenValues(f.Key, &v)) - case eventgrid.StringBeginsWithAdvancedFilter: - v := utils.FlattenStringSlice(f.Values) - stringBeginsWith = append(stringBeginsWith, flattenValues(f.Key, &v)) - case eventgrid.StringEndsWithAdvancedFilter: - v := utils.FlattenStringSlice(f.Values) - stringEndsWith = append(stringEndsWith, flattenValues(f.Key, &v)) - case eventgrid.StringContainsAdvancedFilter: - v := utils.FlattenStringSlice(f.Values) - stringContains = append(stringContains, flattenValues(f.Key, &v)) - case eventgrid.StringInAdvancedFilter: - v := utils.FlattenStringSlice(f.Values) - stringIn = append(stringIn, flattenValues(f.Key, &v)) - case eventgrid.StringNotInAdvancedFilter: - v := utils.FlattenStringSlice(f.Values) - stringNotIn = append(stringNotIn, flattenValues(f.Key, &v)) - } - } - - return []interface{}{ - map[string][]interface{}{ - "bool_equals": boolEquals, - "number_greater_than": numberGreaterThan, - "number_greater_than_or_equals": numberGreaterThanOrEquals, - "number_less_than": numberLessThan, - "number_less_than_or_equals": numberLessThanOrEquals, - "number_in": numberIn, - "number_not_in": numberNotIn, - "string_begins_with": stringBeginsWith, - "string_ends_with": stringEndsWith, - "string_contains": stringContains, - "string_in": stringIn, - "string_not_in": stringNotIn, - }, - } -} - -func flattenEventGridEventSubscriptionStorageBlobDeadLetterDestination(dest *eventgrid.StorageBlobDeadLetterDestination) []interface{} { - if dest == nil { - return nil - } - result := make(map[string]interface{}) - - if dest.ResourceID != nil { - result["storage_account_id"] = *dest.ResourceID - } - - if dest.BlobContainerName != nil { - result["storage_blob_container_name"] = *dest.BlobContainerName - } - - return []interface{}{result} -} - -func flattenEventGridEventSubscriptionRetryPolicy(retryPolicy *eventgrid.RetryPolicy) []interface{} { - result := make(map[string]interface{}) - - if v := retryPolicy.EventTimeToLiveInMinutes; v != nil { - result["event_time_to_live"] = int(*v) - 
} - - if v := retryPolicy.MaxDeliveryAttempts; v != nil { - result["max_delivery_attempts"] = int(*v) - } - - return []interface{}{result} -} - -func flattenValue(inputKey *string, inputValue *interface{}) map[string]interface{} { - key := "" - if inputKey != nil { - key = *inputKey - } - var value interface{} - if inputValue != nil { - value = inputValue - } - - return map[string]interface{}{ - "key": key, - "value": value, - } -} - -func flattenValues(inputKey *string, inputValues *[]interface{}) map[string]interface{} { - key := "" - if inputKey != nil { - key = *inputKey - } - values := make([]interface{}, 0) - if inputValues != nil { - values = *inputValues - } - - return map[string]interface{}{ - "key": key, - "values": values, - } -} diff --git a/azurerm/internal/services/eventgrid/eventgrid_event_subscription_resource_test.go b/azurerm/internal/services/eventgrid/eventgrid_event_subscription_resource_test.go new file mode 100644 index 000000000000..026aa370c2f1 --- /dev/null +++ b/azurerm/internal/services/eventgrid/eventgrid_event_subscription_resource_test.go @@ -0,0 +1,601 @@ +package eventgrid_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventgrid/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type EventGridEventSubscriptionResource struct { +} + +func TestAccEventGridEventSubscription_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventgrid_event_subscription", "test") + r := EventGridEventSubscriptionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("event_delivery_schema").HasValue("EventGridSchema"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventGridEventSubscription_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventgrid_event_subscription", "test") + r := EventGridEventSubscriptionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_eventgrid_event_subscription"), + }, + }) +} + +func TestAccEventGridEventSubscription_eventHubID(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventgrid_event_subscription", "test") + r := EventGridEventSubscriptionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.eventHubID(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("event_delivery_schema").HasValue("CloudEventSchemaV1_0"), + check.That(data.ResourceName).Key("eventhub_endpoint_id").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventGridEventSubscription_serviceBusQueueID(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventgrid_event_subscription", "test") + r 
:= EventGridEventSubscriptionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.serviceBusQueueID(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("event_delivery_schema").HasValue("CloudEventSchemaV1_0"), + check.That(data.ResourceName).Key("service_bus_queue_endpoint_id").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventGridEventSubscription_serviceBusTopicID(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventgrid_event_subscription", "test") + r := EventGridEventSubscriptionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.serviceBusTopicID(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("event_delivery_schema").HasValue("CloudEventSchemaV1_0"), + check.That(data.ResourceName).Key("service_bus_topic_endpoint_id").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventGridEventSubscription_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventgrid_event_subscription", "test") + r := EventGridEventSubscriptionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("event_delivery_schema").HasValue("EventGridSchema"), + check.That(data.ResourceName).Key("storage_queue_endpoint.#").HasValue("1"), + check.That(data.ResourceName).Key("storage_blob_dead_letter_destination.#").HasValue("1"), + check.That(data.ResourceName).Key("included_event_types.0").HasValue("Microsoft.Resources.ResourceWriteSuccess"), + check.That(data.ResourceName).Key("retry_policy.0.max_delivery_attempts").HasValue("11"), + check.That(data.ResourceName).Key("retry_policy.0.event_time_to_live").HasValue("11"), + check.That(data.ResourceName).Key("labels.0").HasValue("test"), + check.That(data.ResourceName).Key("labels.2").HasValue("test2"), + ), + }, + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("included_event_types.0").HasValue("Microsoft.Storage.BlobCreated"), + check.That(data.ResourceName).Key("included_event_types.1").HasValue("Microsoft.Storage.BlobDeleted"), + check.That(data.ResourceName).Key("subject_filter.0.subject_ends_with").HasValue(".jpg"), + check.That(data.ResourceName).Key("subject_filter.0.subject_begins_with").HasValue("test/test"), + check.That(data.ResourceName).Key("retry_policy.0.max_delivery_attempts").HasValue("10"), + check.That(data.ResourceName).Key("retry_policy.0.event_time_to_live").HasValue("12"), + check.That(data.ResourceName).Key("labels.0").HasValue("test4"), + check.That(data.ResourceName).Key("labels.2").HasValue("test6"), + ), + }, + }) +} + +func TestAccEventGridEventSubscription_filter(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventgrid_event_subscription", "test") + r := EventGridEventSubscriptionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.filter(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("included_event_types.0").HasValue("Microsoft.Storage.BlobCreated"), + check.That(data.ResourceName).Key("included_event_types.1").HasValue("Microsoft.Storage.BlobDeleted"), + 
check.That(data.ResourceName).Key("subject_filter.0.subject_ends_with").HasValue(".jpg"), + check.That(data.ResourceName).Key("subject_filter.0.subject_begins_with").HasValue("test/test"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventGridEventSubscription_advancedFilter(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventgrid_event_subscription", "test") + r := EventGridEventSubscriptionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.advancedFilter(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("advanced_filter.0.bool_equals.0.key").HasValue("subject"), + check.That(data.ResourceName).Key("advanced_filter.0.bool_equals.0.value").HasValue("true"), + check.That(data.ResourceName).Key("advanced_filter.0.number_greater_than.0.key").HasValue("data.metadataVersion"), + check.That(data.ResourceName).Key("advanced_filter.0.number_greater_than.0.value").HasValue("1"), + check.That(data.ResourceName).Key("advanced_filter.0.number_greater_than_or_equals.0.key").HasValue("data.contentLength"), + check.That(data.ResourceName).Key("advanced_filter.0.number_greater_than_or_equals.0.value").HasValue("42"), + check.That(data.ResourceName).Key("advanced_filter.0.number_less_than.0.key").HasValue("data.contentLength"), + check.That(data.ResourceName).Key("advanced_filter.0.number_less_than.0.value").HasValue("42.1"), + check.That(data.ResourceName).Key("advanced_filter.0.number_less_than_or_equals.0.key").HasValue("data.metadataVersion"), + check.That(data.ResourceName).Key("advanced_filter.0.number_less_than_or_equals.0.value").HasValue("2"), + check.That(data.ResourceName).Key("advanced_filter.0.number_in.0.key").HasValue("data.contentLength"), + check.That(data.ResourceName).Key("advanced_filter.0.number_in.0.values.0").HasValue("0"), + check.That(data.ResourceName).Key("advanced_filter.0.number_not_in.0.key").HasValue("data.contentLength"), + check.That(data.ResourceName).Key("advanced_filter.0.number_not_in.0.values.0").HasValue("5"), + check.That(data.ResourceName).Key("advanced_filter.0.string_begins_with.0.key").HasValue("subject"), + check.That(data.ResourceName).Key("advanced_filter.0.string_begins_with.0.values.0").HasValue("foo"), + check.That(data.ResourceName).Key("advanced_filter.0.string_ends_with.0.key").HasValue("subject"), + check.That(data.ResourceName).Key("advanced_filter.0.string_ends_with.0.values.0").HasValue("bar"), + check.That(data.ResourceName).Key("advanced_filter.0.string_contains.0.key").HasValue("data.contentType"), + check.That(data.ResourceName).Key("advanced_filter.0.string_contains.0.values.0").HasValue("application"), + check.That(data.ResourceName).Key("advanced_filter.0.string_in.0.key").HasValue("data.blobType"), + check.That(data.ResourceName).Key("advanced_filter.0.string_in.0.values.0").HasValue("Block"), + check.That(data.ResourceName).Key("advanced_filter.0.string_not_in.0.key").HasValue("data.blobType"), + check.That(data.ResourceName).Key("advanced_filter.0.string_not_in.0.values.0").HasValue("Page"), + ), + }, + data.ImportStep(), + }) +} + +func (EventGridEventSubscriptionResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.EventSubscriptionID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.EventGrid.EventSubscriptionsClient.Get(ctx, id.Scope, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving 
EventGrid Event Subscription %q (scope: %q): %+v", id.Name, id.Scope, err) + } + + return utils.Bool(resp.EventSubscriptionProperties != nil), nil +} + +func (EventGridEventSubscriptionResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eg-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestacc%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" + + tags = { + environment = "staging" + } +} + +resource "azurerm_storage_queue" "test" { + name = "mysamplequeue-%d" + storage_account_name = azurerm_storage_account.test.name +} + +resource "azurerm_storage_container" "test" { + name = "vhds" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource "azurerm_storage_blob" "test" { + name = "herpderp1.vhd" + + storage_account_name = azurerm_storage_account.test.name + storage_container_name = azurerm_storage_container.test.name + + type = "Page" + size = 5120 +} + +resource "azurerm_eventgrid_event_subscription" "test" { + name = "acctesteg-%d" + scope = azurerm_resource_group.test.id + + storage_queue_endpoint { + storage_account_id = azurerm_storage_account.test.id + queue_name = azurerm_storage_queue.test.name + } + + storage_blob_dead_letter_destination { + storage_account_id = azurerm_storage_account.test.id + storage_blob_container_name = azurerm_storage_container.test.name + } + + retry_policy { + event_time_to_live = 11 + max_delivery_attempts = 11 + } + + labels = ["test", "test1", "test2"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger) +} + +func (EventGridEventSubscriptionResource) requiresImport(data acceptance.TestData) string { + template := EventGridEventSubscriptionResource{}.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_eventgrid_event_subscription" "import" { + name = azurerm_eventgrid_event_subscription.test.name + scope = azurerm_eventgrid_event_subscription.test.scope +} +`, template) +} + +func (EventGridEventSubscriptionResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eg-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestacc%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" + + tags = { + environment = "staging" + } +} + +resource "azurerm_storage_queue" "test" { + name = "mysamplequeue-%d" + storage_account_name = azurerm_storage_account.test.name +} + +resource "azurerm_storage_container" "test" { + name = "vhds" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource "azurerm_storage_blob" "test" { + name = "herpderp1.vhd" + + storage_account_name = azurerm_storage_account.test.name + storage_container_name = azurerm_storage_container.test.name + + type = "Page" + size = 5120 +} + +resource "azurerm_eventgrid_event_subscription" "test" { + name = "acctest-eg-%d" + scope = azurerm_resource_group.test.id + + storage_queue_endpoint { + storage_account_id = azurerm_storage_account.test.id + queue_name = 
azurerm_storage_queue.test.name + } + + storage_blob_dead_letter_destination { + storage_account_id = azurerm_storage_account.test.id + storage_blob_container_name = azurerm_storage_container.test.name + } + + retry_policy { + event_time_to_live = 12 + max_delivery_attempts = 10 + } + + subject_filter { + subject_begins_with = "test/test" + subject_ends_with = ".jpg" + } + + included_event_types = ["Microsoft.Storage.BlobCreated", "Microsoft.Storage.BlobDeleted"] + labels = ["test4", "test5", "test6"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger) +} + +func (EventGridEventSubscriptionResource) eventHubID(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eg-%d" + location = "%s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Basic" +} + +resource "azurerm_eventhub" "test" { + name = "acctesteventhub-%d" + namespace_name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + partition_count = 2 + message_retention = 1 +} + +resource "azurerm_eventgrid_event_subscription" "test" { + name = "acctest-eg-%d" + scope = azurerm_resource_group.test.id + event_delivery_schema = "CloudEventSchemaV1_0" + + eventhub_endpoint_id = azurerm_eventhub.test.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (EventGridEventSubscriptionResource) serviceBusQueueID(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} +resource "azurerm_resource_group" "test" { + name = "acctestRG-eg-%d" + location = "%s" +} + +resource "azurerm_servicebus_namespace" "example" { + name = "acctestservicebusnamespace-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Basic" +} +resource "azurerm_servicebus_queue" "test" { + name = "acctestservicebusqueue-%d" + resource_group_name = azurerm_resource_group.test.name + namespace_name = azurerm_servicebus_namespace.example.name + enable_partitioning = true +} +resource "azurerm_eventgrid_event_subscription" "test" { + name = "acctest-eg-%d" + scope = azurerm_resource_group.test.id + event_delivery_schema = "CloudEventSchemaV1_0" + service_bus_queue_endpoint_id = azurerm_servicebus_queue.test.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (EventGridEventSubscriptionResource) serviceBusTopicID(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} +resource "azurerm_resource_group" "test" { + name = "acctestRG-eg-%d" + location = "%s" +} +resource "azurerm_servicebus_namespace" "example" { + name = "acctestservicebusnamespace-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" +} +resource "azurerm_servicebus_topic" "test" { + name = "acctestservicebustopic-%d" + resource_group_name = azurerm_resource_group.test.name + namespace_name = azurerm_servicebus_namespace.example.name + enable_partitioning = true +} +resource "azurerm_eventgrid_event_subscription" "test" { + name = "acctest-eg-%d" + scope = azurerm_resource_group.test.id + 
event_delivery_schema = "CloudEventSchemaV1_0" + service_bus_topic_endpoint_id = azurerm_servicebus_topic.test.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (EventGridEventSubscriptionResource) filter(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eg-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestacc%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" + + tags = { + environment = "staging" + } +} + +resource "azurerm_storage_queue" "test" { + name = "mysamplequeue-%d" + storage_account_name = azurerm_storage_account.test.name +} + +resource "azurerm_eventgrid_event_subscription" "test" { + name = "acctest-eg-%d" + scope = azurerm_resource_group.test.id + + storage_queue_endpoint { + storage_account_id = azurerm_storage_account.test.id + queue_name = azurerm_storage_queue.test.name + } + + included_event_types = ["Microsoft.Storage.BlobCreated", "Microsoft.Storage.BlobDeleted"] + + subject_filter { + subject_begins_with = "test/test" + subject_ends_with = ".jpg" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger) +} + +func (EventGridEventSubscriptionResource) advancedFilter(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eg-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestacc%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" + + tags = { + environment = "staging" + } +} + +resource "azurerm_storage_queue" "test" { + name = "mysamplequeue-%d" + storage_account_name = azurerm_storage_account.test.name +} + +resource "azurerm_eventgrid_event_subscription" "test" { + name = "acctesteg-%d" + scope = azurerm_storage_account.test.id + + storage_queue_endpoint { + storage_account_id = azurerm_storage_account.test.id + queue_name = azurerm_storage_queue.test.name + } + + advanced_filter { + bool_equals { + key = "subject" + value = true + } + number_greater_than { + key = "data.metadataVersion" + value = 1 + } + number_greater_than_or_equals { + key = "data.contentLength" + value = 42.0 + } + number_less_than { + key = "data.contentLength" + value = 42.1 + } + number_less_than_or_equals { + key = "data.metadataVersion" + value = 2 + } + number_in { + key = "data.contentLength" + values = [0, 1, 1, 2, 3] + } + number_not_in { + key = "data.contentLength" + values = [5, 8, 13, 21, 34] + } + string_begins_with { + key = "subject" + values = ["foo"] + } + string_ends_with { + key = "subject" + values = ["bar"] + } + string_contains { + key = "data.contentType" + values = ["application", "octet-stream"] + } + string_in { + key = "data.blobType" + values = ["Block"] + } + string_not_in { + key = "data.blobType" + values = ["Page"] + } + } + +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/eventgrid/eventgrid_system_topic_event_subscription_resource.go 
b/azurerm/internal/services/eventgrid/eventgrid_system_topic_event_subscription_resource.go new file mode 100644 index 000000000000..c9828ae9d961 --- /dev/null +++ b/azurerm/internal/services/eventgrid/eventgrid_system_topic_event_subscription_resource.go @@ -0,0 +1,337 @@ +package eventgrid + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/preview/eventgrid/mgmt/2020-04-01-preview/eventgrid" + "github.com/hashicorp/go-azure-helpers/response" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventgrid/parse" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func PossibleSystemTopicEventSubscriptionEndpointTypes() []string { + return []string{ + string(AzureFunctionEndpoint), + string(EventHubEndpointID), + string(HybridConnectionEndpointID), + string(ServiceBusQueueEndpointID), + string(ServiceBusTopicEndpointID), + string(StorageQueueEndpoint), + string(WebHookEndpoint), + } +} + +func resourceEventGridSystemTopicEventSubscription() *schema.Resource { + return &schema.Resource{ + Create: resourceEventGridSystemTopicEventSubscriptionCreateUpdate, + Read: resourceEventGridSystemTopicEventSubscriptionRead, + Update: resourceEventGridSystemTopicEventSubscriptionCreateUpdate, + Delete: resourceEventGridSystemTopicEventSubscriptionDelete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.SystemTopicEventSubscriptionID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": eventSubscriptionSchemaEventSubscriptionName(), + + "system_topic": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "event_delivery_schema": eventSubscriptionSchemaEventDeliverySchema(), + + "expiration_time_utc": eventSubscriptionSchemaExpirationTimeUTC(), + + "azure_function_endpoint": eventSubscriptionSchemaAzureFunctionEndpoint( + utils.RemoveFromStringArray( + PossibleSystemTopicEventSubscriptionEndpointTypes(), + string(AzureFunctionEndpoint), + ), + ), + + "eventhub_endpoint_id": eventSubscriptionSchemaEventHubEndpointID( + utils.RemoveFromStringArray( + PossibleSystemTopicEventSubscriptionEndpointTypes(), + string(EventHubEndpointID), + ), + ), + + "hybrid_connection_endpoint_id": eventSubscriptionSchemaHybridConnectionEndpointID( + utils.RemoveFromStringArray( + PossibleSystemTopicEventSubscriptionEndpointTypes(), + string(HybridConnectionEndpointID), + ), + ), + + "service_bus_queue_endpoint_id": eventSubscriptionSchemaServiceBusQueueEndpointID( + utils.RemoveFromStringArray( + PossibleSystemTopicEventSubscriptionEndpointTypes(), + 
string(ServiceBusQueueEndpointID), + ), + ), + + "service_bus_topic_endpoint_id": eventSubscriptionSchemaServiceBusTopicEndpointID( + utils.RemoveFromStringArray( + PossibleSystemTopicEventSubscriptionEndpointTypes(), + string(ServiceBusTopicEndpointID), + ), + ), + + "storage_queue_endpoint": eventSubscriptionSchemaStorageQueueEndpoint( + utils.RemoveFromStringArray( + PossibleSystemTopicEventSubscriptionEndpointTypes(), + string(StorageQueueEndpoint), + ), + ), + + "webhook_endpoint": eventSubscriptionSchemaWebHookEndpoint( + utils.RemoveFromStringArray( + PossibleSystemTopicEventSubscriptionEndpointTypes(), + string(WebHookEndpoint), + ), + ), + + "included_event_types": eventSubscriptionSchemaIncludedEventTypes(), + + "subject_filter": eventSubscriptionSchemaSubjectFilter(), + + "advanced_filter": eventSubscriptionSchemaAdvancedFilter(), + + "storage_blob_dead_letter_destination": eventSubscriptionSchemaStorageBlobDeadletterDestination(), + + "retry_policy": eventSubscriptionSchemaRetryPolicy(), + + "labels": eventSubscriptionSchemaLabels(), + }, + } +} + +func resourceEventGridSystemTopicEventSubscriptionCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).EventGrid.SystemTopicEventSubscriptionsClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + systemTopic := d.Get("system_topic").(string) + + if d.IsNewResource() { + existing, err := client.Get(ctx, resourceGroup, systemTopic, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("Error checking for presence of existing EventGrid System Topic Event Subscription %q (System Topic %q): %s", name, systemTopic, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_eventgrid_system_topic_event_subscription", *existing.ID) + } + } + + destination := expandEventGridEventSubscriptionDestination(d) + if destination == nil { + return fmt.Errorf("One of the following endpoint types must be specificed to create an EventGrid System Topic Event Subscription: %q", PossibleSystemTopicEventSubscriptionEndpointTypes()) + } + + filter, err := expandEventGridEventSubscriptionFilter(d) + if err != nil { + return fmt.Errorf("expanding filters for EventGrid System Topic Event Subscription %q (System Topic %q): %+v", name, systemTopic, err) + } + + expirationTime, err := expandEventGridExpirationTime(d) + if err != nil { + return fmt.Errorf("Error creating/updating EventGrid System Topic Event Subscription %q (System Topic %q): %s", name, systemTopic, err) + } + + eventSubscriptionProperties := eventgrid.EventSubscriptionProperties{ + Destination: destination, + Filter: filter, + DeadLetterDestination: expandEventGridEventSubscriptionStorageBlobDeadLetterDestination(d), + RetryPolicy: expandEventGridEventSubscriptionRetryPolicy(d), + Labels: utils.ExpandStringSlice(d.Get("labels").([]interface{})), + EventDeliverySchema: eventgrid.EventDeliverySchema(d.Get("event_delivery_schema").(string)), + ExpirationTimeUtc: expirationTime, + } + + eventSubscription := eventgrid.EventSubscription{ + EventSubscriptionProperties: &eventSubscriptionProperties, + } + + log.Printf("[INFO] preparing arguments for AzureRM EventGrid System Topic Event Subscription creation with Properties: %+v.", eventSubscription) + + future, err := client.CreateOrUpdate(ctx, resourceGroup, systemTopic, name, 
eventSubscription) + if err != nil { + return fmt.Errorf("Error creating/updating EventGrid System Topic Event Subscription %q (System Topic %q): %s", name, systemTopic, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error waiting for EventGrid System Topic Event Subscription %q (System Topic %q) to become available: %s", name, systemTopic, err) + } + + read, err := client.Get(ctx, resourceGroup, systemTopic, name) + if err != nil { + return fmt.Errorf("Error retrieving EventGrid System Topic Event Subscription %q (System Topic %q): %s", name, systemTopic, err) + } + if read.ID == nil { + return fmt.Errorf("Cannot read EventGrid System Topic Event Subscription %s (System Topic %s) ID", name, systemTopic) + } + + d.SetId(*read.ID) + + return resourceEventGridSystemTopicEventSubscriptionRead(d, meta) +} + +func resourceEventGridSystemTopicEventSubscriptionRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).EventGrid.SystemTopicEventSubscriptionsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.SystemTopicEventSubscriptionID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.SystemTopic, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[WARN] EventGrid System Topic Event Subscription '%q' was not found (System Topic %q)", id.Name, id.SystemTopic) + d.SetId("") + return nil + } + + return fmt.Errorf("Error making Read request on EventGrid System Topic Event Subscription '%q' (System Topic %q): %+v", id.Name, id.SystemTopic, err) + } + + d.Set("name", resp.Name) + d.Set("system_topic", id.SystemTopic) + d.Set("resource_group_name", id.ResourceGroup) + + if props := resp.EventSubscriptionProperties; props != nil { + if props.ExpirationTimeUtc != nil { + d.Set("expiration_time_utc", props.ExpirationTimeUtc.Format(time.RFC3339)) + } + + d.Set("event_delivery_schema", string(props.EventDeliverySchema)) + + if azureFunctionEndpoint, ok := props.Destination.AsAzureFunctionEventSubscriptionDestination(); ok { + if err := d.Set("azure_function_endpoint", flattenEventGridEventSubscriptionAzureFunctionEndpoint(azureFunctionEndpoint)); err != nil { + return fmt.Errorf("Error setting `%q` for EventGrid System Topic Event Subscription %q (System Topic %q): %s", "azure_function_endpoint", id.Name, id.SystemTopic, err) + } + } + if v, ok := props.Destination.AsEventHubEventSubscriptionDestination(); ok { + if err := d.Set("eventhub_endpoint_id", v.ResourceID); err != nil { + return fmt.Errorf("Error setting `%q` for EventGrid System Topic Event Subscription %q (System Topic %q): %s", "eventhub_endpoint_id", id.Name, id.SystemTopic, err) + } + } + if v, ok := props.Destination.AsHybridConnectionEventSubscriptionDestination(); ok { + if err := d.Set("hybrid_connection_endpoint_id", v.ResourceID); err != nil { + return fmt.Errorf("Error setting `%q` for EventGrid System Topic Event Subscription %q (System Topic %q): %s", "hybrid_connection_endpoint_id", id.Name, id.SystemTopic, err) + } + } + if serviceBusQueueEndpoint, ok := props.Destination.AsServiceBusQueueEventSubscriptionDestination(); ok { + if err := d.Set("service_bus_queue_endpoint_id", serviceBusQueueEndpoint.ResourceID); err != nil { + return fmt.Errorf("Error setting `%q` for EventGrid System Topic Event Subscription %q (System Topic %q): %s", "service_bus_queue_endpoint_id", id.Name, id.SystemTopic, 
err) + } + } + if serviceBusTopicEndpoint, ok := props.Destination.AsServiceBusTopicEventSubscriptionDestination(); ok { + if err := d.Set("service_bus_topic_endpoint_id", serviceBusTopicEndpoint.ResourceID); err != nil { + return fmt.Errorf("Error setting `%q` for EventGrid System Topic Event Subscription %q (System Topic %q): %s", "service_bus_topic_endpoint_id", id.Name, id.SystemTopic, err) + } + } + if v, ok := props.Destination.AsStorageQueueEventSubscriptionDestination(); ok { + if err := d.Set("storage_queue_endpoint", flattenEventGridEventSubscriptionStorageQueueEndpoint(v)); err != nil { + return fmt.Errorf("Error setting `%q` for EventGrid System Topic Event Subscription %q (System Topic %q): %s", "storage_queue_endpoint", id.Name, id.SystemTopic, err) + } + } + if v, ok := props.Destination.AsWebHookEventSubscriptionDestination(); ok { + fullURL, err := client.GetFullURL(ctx, id.ResourceGroup, id.SystemTopic, id.Name) + if err != nil { + return fmt.Errorf("Error making Read request on EventGrid System Topic Event Subscription full URL '%s': %+v", id.Name, err) + } + if err := d.Set("webhook_endpoint", flattenEventGridEventSubscriptionWebhookEndpoint(v, &fullURL)); err != nil { + return fmt.Errorf("Error setting `%q` for EventGrid System Topic Event Subscription %q (System Topic %q): %s", "webhook_endpoint", id.Name, id.SystemTopic, err) + } + } + + if filter := props.Filter; filter != nil { + d.Set("included_event_types", filter.IncludedEventTypes) + if err := d.Set("subject_filter", flattenEventGridEventSubscriptionSubjectFilter(filter)); err != nil { + return fmt.Errorf("Error setting `subject_filter` for EventGrid System Topic Event Subscription %q (System Topic %q): %s", id.Name, id.SystemTopic, err) + } + if err := d.Set("advanced_filter", flattenEventGridEventSubscriptionAdvancedFilter(filter)); err != nil { + return fmt.Errorf("Error setting `advanced_filter` for EventGrid System Topic Event Subscription %q (System Topic %q): %s", id.Name, id.SystemTopic, err) + } + } + + if props.DeadLetterDestination != nil { + if storageBlobDeadLetterDestination, ok := props.DeadLetterDestination.AsStorageBlobDeadLetterDestination(); ok { + if err := d.Set("storage_blob_dead_letter_destination", flattenEventGridEventSubscriptionStorageBlobDeadLetterDestination(storageBlobDeadLetterDestination)); err != nil { + return fmt.Errorf("Error setting `storage_blob_dead_letter_destination` for EventGrid System Topic Event Subscription %q (System Topic %q): %s", id.Name, id.SystemTopic, err) + } + } + } + + if retryPolicy := props.RetryPolicy; retryPolicy != nil { + if err := d.Set("retry_policy", flattenEventGridEventSubscriptionRetryPolicy(retryPolicy)); err != nil { + return fmt.Errorf("Error setting `retry_policy` for EventGrid System Topic Event Subscription %q (System Topic %q): %s", id.Name, id.SystemTopic, err) + } + } + + if err := d.Set("labels", props.Labels); err != nil { + return fmt.Errorf("Error setting `labels` for EventGrid System Topic Event Subscription %q (System Topic %q): %s", id.Name, id.SystemTopic, err) + } + } + + return nil +} + +func resourceEventGridSystemTopicEventSubscriptionDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).EventGrid.SystemTopicEventSubscriptionsClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.SystemTopicEventSubscriptionID(d.Id()) + if err != nil { + return err + } + + future, err := client.Delete(ctx, id.ResourceGroup, id.SystemTopic, 
id.Name) + if err != nil { + if response.WasNotFound(future.Response()) { + return nil + } + return fmt.Errorf("Error deleting Event Grid System Topic Event Subscription %q (System Topic %q): %+v", id.Name, id.SystemTopic, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + if response.WasNotFound(future.Response()) { + return nil + } + return fmt.Errorf("Error deleting Event Grid System Topic Event Subscription %q (System Topic %q): %+v", id.Name, id.SystemTopic, err) + } + + return nil +} diff --git a/azurerm/internal/services/eventgrid/eventgrid_system_topic_event_subscription_resource_test.go b/azurerm/internal/services/eventgrid/eventgrid_system_topic_event_subscription_resource_test.go new file mode 100644 index 000000000000..657f86ec3817 --- /dev/null +++ b/azurerm/internal/services/eventgrid/eventgrid_system_topic_event_subscription_resource_test.go @@ -0,0 +1,675 @@ +package eventgrid_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventgrid/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type EventGridSystemTopicEventSubscriptionResource struct { +} + +func TestAccEventGridSystemTopicEventSubscription_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventgrid_system_topic_event_subscription", "test") + r := EventGridSystemTopicEventSubscriptionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("event_delivery_schema").HasValue("EventGridSchema"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventGridSystemTopicEventSubscription_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventgrid_system_topic_event_subscription", "test") + r := EventGridSystemTopicEventSubscriptionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_eventgrid_system_topic_event_subscription"), + }, + }) +} + +func TestAccEventGridSystemTopicEventSubscription_eventHubID(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventgrid_system_topic_event_subscription", "test") + r := EventGridSystemTopicEventSubscriptionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.eventHubID(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("event_delivery_schema").HasValue("CloudEventSchemaV1_0"), + check.That(data.ResourceName).Key("eventhub_endpoint_id").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventGridSystemTopicEventSubscription_serviceBusQueueID(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventgrid_system_topic_event_subscription", "test") + r := 
EventGridSystemTopicEventSubscriptionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.serviceBusQueueID(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("event_delivery_schema").HasValue("CloudEventSchemaV1_0"), + check.That(data.ResourceName).Key("service_bus_queue_endpoint_id").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventGridSystemTopicEventSubscription_serviceBusTopicID(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventgrid_system_topic_event_subscription", "test") + r := EventGridSystemTopicEventSubscriptionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.serviceBusTopicID(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("event_delivery_schema").HasValue("CloudEventSchemaV1_0"), + check.That(data.ResourceName).Key("service_bus_topic_endpoint_id").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventGridSystemTopicEventSubscription_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventgrid_system_topic_event_subscription", "test") + r := EventGridSystemTopicEventSubscriptionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("event_delivery_schema").HasValue("EventGridSchema"), + check.That(data.ResourceName).Key("storage_queue_endpoint.#").HasValue("1"), + check.That(data.ResourceName).Key("storage_blob_dead_letter_destination.#").HasValue("1"), + check.That(data.ResourceName).Key("included_event_types.0").HasValue("Microsoft.Resources.ResourceWriteSuccess"), + check.That(data.ResourceName).Key("retry_policy.0.max_delivery_attempts").HasValue("11"), + check.That(data.ResourceName).Key("retry_policy.0.event_time_to_live").HasValue("11"), + check.That(data.ResourceName).Key("labels.0").HasValue("test"), + check.That(data.ResourceName).Key("labels.2").HasValue("test2"), + ), + }, + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("included_event_types.0").HasValue("Microsoft.Storage.BlobCreated"), + check.That(data.ResourceName).Key("included_event_types.1").HasValue("Microsoft.Storage.BlobDeleted"), + check.That(data.ResourceName).Key("subject_filter.0.subject_ends_with").HasValue(".jpg"), + check.That(data.ResourceName).Key("subject_filter.0.subject_begins_with").HasValue("test/test"), + check.That(data.ResourceName).Key("retry_policy.0.max_delivery_attempts").HasValue("10"), + check.That(data.ResourceName).Key("retry_policy.0.event_time_to_live").HasValue("12"), + check.That(data.ResourceName).Key("labels.0").HasValue("test4"), + check.That(data.ResourceName).Key("labels.2").HasValue("test6"), + ), + }, + }) +} + +func TestAccEventGridSystemTopicEventSubscription_filter(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventgrid_system_topic_event_subscription", "test") + r := EventGridSystemTopicEventSubscriptionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.filter(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("included_event_types.0").HasValue("Microsoft.Storage.BlobCreated"), + 
check.That(data.ResourceName).Key("included_event_types.1").HasValue("Microsoft.Storage.BlobDeleted"), + check.That(data.ResourceName).Key("subject_filter.0.subject_ends_with").HasValue(".jpg"), + check.That(data.ResourceName).Key("subject_filter.0.subject_begins_with").HasValue("test/test"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventGridSystemTopicEventSubscription_advancedFilter(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventgrid_system_topic_event_subscription", "test") + r := EventGridSystemTopicEventSubscriptionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.advancedFilter(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("advanced_filter.0.bool_equals.0.key").HasValue("subject"), + check.That(data.ResourceName).Key("advanced_filter.0.bool_equals.0.value").HasValue("true"), + check.That(data.ResourceName).Key("advanced_filter.0.number_greater_than.0.key").HasValue("data.metadataVersion"), + check.That(data.ResourceName).Key("advanced_filter.0.number_greater_than.0.value").HasValue("1"), + check.That(data.ResourceName).Key("advanced_filter.0.number_greater_than_or_equals.0.key").HasValue("data.contentLength"), + check.That(data.ResourceName).Key("advanced_filter.0.number_greater_than_or_equals.0.value").HasValue("42"), + check.That(data.ResourceName).Key("advanced_filter.0.number_less_than.0.key").HasValue("data.contentLength"), + check.That(data.ResourceName).Key("advanced_filter.0.number_less_than.0.value").HasValue("42.1"), + check.That(data.ResourceName).Key("advanced_filter.0.number_less_than_or_equals.0.key").HasValue("data.metadataVersion"), + check.That(data.ResourceName).Key("advanced_filter.0.number_less_than_or_equals.0.value").HasValue("2"), + check.That(data.ResourceName).Key("advanced_filter.0.number_in.0.key").HasValue("data.contentLength"), + check.That(data.ResourceName).Key("advanced_filter.0.number_in.0.values.0").HasValue("0"), + check.That(data.ResourceName).Key("advanced_filter.0.number_not_in.0.key").HasValue("data.contentLength"), + check.That(data.ResourceName).Key("advanced_filter.0.number_not_in.0.values.0").HasValue("5"), + check.That(data.ResourceName).Key("advanced_filter.0.string_begins_with.0.key").HasValue("subject"), + check.That(data.ResourceName).Key("advanced_filter.0.string_begins_with.0.values.0").HasValue("foo"), + check.That(data.ResourceName).Key("advanced_filter.0.string_ends_with.0.key").HasValue("subject"), + check.That(data.ResourceName).Key("advanced_filter.0.string_ends_with.0.values.0").HasValue("bar"), + check.That(data.ResourceName).Key("advanced_filter.0.string_contains.0.key").HasValue("data.contentType"), + check.That(data.ResourceName).Key("advanced_filter.0.string_contains.0.values.0").HasValue("application"), + check.That(data.ResourceName).Key("advanced_filter.0.string_in.0.key").HasValue("data.blobType"), + check.That(data.ResourceName).Key("advanced_filter.0.string_in.0.values.0").HasValue("Block"), + check.That(data.ResourceName).Key("advanced_filter.0.string_not_in.0.key").HasValue("data.blobType"), + check.That(data.ResourceName).Key("advanced_filter.0.string_not_in.0.values.0").HasValue("Page"), + ), + }, + data.ImportStep(), + }) +} + +func (EventGridSystemTopicEventSubscriptionResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.SystemTopicEventSubscriptionID(state.ID) + if err != nil { + return 
nil, err + } + + resp, err := clients.EventGrid.SystemTopicEventSubscriptionsClient.Get(ctx, id.ResourceGroup, id.SystemTopic, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving EventGrid System Topic Event Subscription %q (System Topic: %q): %+v", id.Name, id.SystemTopic, err) + } + + return utils.Bool(resp.EventSubscriptionProperties != nil), nil +} + +func (EventGridSystemTopicEventSubscriptionResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eg-%[1]d" + location = "%[2]s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestacc%[3]s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" + + tags = { + environment = "staging" + } +} + +resource "azurerm_storage_queue" "test" { + name = "mysamplequeue-%[1]d" + storage_account_name = azurerm_storage_account.test.name +} + +resource "azurerm_storage_container" "test" { + name = "vhds" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource "azurerm_storage_blob" "test" { + name = "herpderp1.vhd" + + storage_account_name = azurerm_storage_account.test.name + storage_container_name = azurerm_storage_container.test.name + + type = "Page" + size = 5120 +} + +resource "azurerm_eventgrid_system_topic" "test" { + name = "acctesteg-%[1]d" + location = "Global" + resource_group_name = azurerm_resource_group.test.name + source_arm_resource_id = azurerm_resource_group.test.id + topic_type = "Microsoft.Resources.ResourceGroups" +} + +resource "azurerm_eventgrid_system_topic_event_subscription" "test" { + name = "acctesteg-%[1]d" + system_topic = azurerm_eventgrid_system_topic.test.name + resource_group_name = azurerm_resource_group.test.name + + storage_queue_endpoint { + storage_account_id = azurerm_storage_account.test.id + queue_name = azurerm_storage_queue.test.name + } + + storage_blob_dead_letter_destination { + storage_account_id = azurerm_storage_account.test.id + storage_blob_container_name = azurerm_storage_container.test.name + } + + retry_policy { + event_time_to_live = 11 + max_delivery_attempts = 11 + } + + labels = ["test", "test1", "test2"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (EventGridSystemTopicEventSubscriptionResource) requiresImport(data acceptance.TestData) string { + template := EventGridSystemTopicEventSubscriptionResource{}.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_eventgrid_system_topic_event_subscription" "import" { + name = azurerm_eventgrid_system_topic_event_subscription.test.name + system_topic = azurerm_eventgrid_system_topic_event_subscription.test.system_topic + resource_group_name = azurerm_eventgrid_system_topic_event_subscription.test.resource_group_name +} +`, template) +} + +func (EventGridSystemTopicEventSubscriptionResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eg-%[1]d" + location = "%[2]s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestacc%[3]s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" + + tags = { + environment = "staging" + } +} + +resource 
"azurerm_storage_queue" "test" { + name = "mysamplequeue-%[1]d" + storage_account_name = azurerm_storage_account.test.name +} + +resource "azurerm_storage_container" "test" { + name = "vhds" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource "azurerm_storage_blob" "test" { + name = "herpderp1.vhd" + + storage_account_name = azurerm_storage_account.test.name + storage_container_name = azurerm_storage_container.test.name + + type = "Page" + size = 5120 +} + +resource "azurerm_eventgrid_system_topic" "test" { + name = "acctesteg-%[1]d" + location = "Global" + resource_group_name = azurerm_resource_group.test.name + source_arm_resource_id = azurerm_resource_group.test.id + topic_type = "Microsoft.Resources.ResourceGroups" +} + +resource "azurerm_eventgrid_system_topic_event_subscription" "test" { + name = "acctesteg-%[1]d" + system_topic = azurerm_eventgrid_system_topic.test.name + resource_group_name = azurerm_resource_group.test.name + + storage_queue_endpoint { + storage_account_id = azurerm_storage_account.test.id + queue_name = azurerm_storage_queue.test.name + } + + storage_blob_dead_letter_destination { + storage_account_id = azurerm_storage_account.test.id + storage_blob_container_name = azurerm_storage_container.test.name + } + + retry_policy { + event_time_to_live = 12 + max_delivery_attempts = 10 + } + + subject_filter { + subject_begins_with = "test/test" + subject_ends_with = ".jpg" + } + + included_event_types = ["Microsoft.Storage.BlobCreated", "Microsoft.Storage.BlobDeleted"] + labels = ["test4", "test5", "test6"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (EventGridSystemTopicEventSubscriptionResource) eventHubID(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eg-%[1]d" + location = "%[2]s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Basic" +} + +resource "azurerm_eventhub" "test" { + name = "acctesteventhub-%[1]d" + namespace_name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + partition_count = 2 + message_retention = 1 +} + +resource "azurerm_eventgrid_system_topic" "test" { + name = "acctesteg-%[1]d" + location = "Global" + resource_group_name = azurerm_resource_group.test.name + source_arm_resource_id = azurerm_resource_group.test.id + topic_type = "Microsoft.Resources.ResourceGroups" +} + +resource "azurerm_eventgrid_system_topic_event_subscription" "test" { + name = "acctesteg-%[1]d" + system_topic = azurerm_eventgrid_system_topic.test.name + resource_group_name = azurerm_resource_group.test.name + + event_delivery_schema = "CloudEventSchemaV1_0" + + eventhub_endpoint_id = azurerm_eventhub.test.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (EventGridSystemTopicEventSubscriptionResource) serviceBusQueueID(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eg-%[1]d" + location = "%[2]s" +} + +resource "azurerm_servicebus_namespace" "example" { + name = "acctestservicebusnamespace-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = 
"Basic" +} + +resource "azurerm_servicebus_queue" "test" { + name = "acctestservicebusqueue-%[1]d" + resource_group_name = azurerm_resource_group.test.name + namespace_name = azurerm_servicebus_namespace.example.name + enable_partitioning = true +} + +resource "azurerm_eventgrid_system_topic" "test" { + name = "acctesteg-%[1]d" + location = "Global" + resource_group_name = azurerm_resource_group.test.name + source_arm_resource_id = azurerm_resource_group.test.id + topic_type = "Microsoft.Resources.ResourceGroups" +} + +resource "azurerm_eventgrid_system_topic_event_subscription" "test" { + name = "acctesteg-%[1]d" + system_topic = azurerm_eventgrid_system_topic.test.name + resource_group_name = azurerm_resource_group.test.name + + event_delivery_schema = "CloudEventSchemaV1_0" + service_bus_queue_endpoint_id = azurerm_servicebus_queue.test.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (EventGridSystemTopicEventSubscriptionResource) serviceBusTopicID(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eg-%[1]d" + location = "%[2]s" +} + +resource "azurerm_servicebus_namespace" "example" { + name = "acctestservicebusnamespace-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" +} + +resource "azurerm_servicebus_topic" "test" { + name = "acctestservicebustopic-%[1]d" + resource_group_name = azurerm_resource_group.test.name + namespace_name = azurerm_servicebus_namespace.example.name + enable_partitioning = true +} + +resource "azurerm_eventgrid_system_topic" "test" { + name = "acctesteg-%[1]d" + location = "Global" + resource_group_name = azurerm_resource_group.test.name + source_arm_resource_id = azurerm_resource_group.test.id + topic_type = "Microsoft.Resources.ResourceGroups" +} + +resource "azurerm_eventgrid_system_topic_event_subscription" "test" { + name = "acctesteg-%[1]d" + system_topic = azurerm_eventgrid_system_topic.test.name + resource_group_name = azurerm_resource_group.test.name + + event_delivery_schema = "CloudEventSchemaV1_0" + service_bus_topic_endpoint_id = azurerm_servicebus_topic.test.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (EventGridSystemTopicEventSubscriptionResource) filter(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eg-%[1]d" + location = "%[2]s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestacc%[3]s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" + + tags = { + environment = "staging" + } +} + +resource "azurerm_storage_queue" "test" { + name = "mysamplequeue-%[1]d" + storage_account_name = azurerm_storage_account.test.name +} + +resource "azurerm_eventgrid_system_topic" "test" { + name = "acctesteg-%[1]d" + location = "Global" + resource_group_name = azurerm_resource_group.test.name + source_arm_resource_id = azurerm_resource_group.test.id + topic_type = "Microsoft.Resources.ResourceGroups" +} + +resource "azurerm_eventgrid_system_topic_event_subscription" "test" { + name = "acctesteg-%[1]d" + system_topic = azurerm_eventgrid_system_topic.test.name + resource_group_name = azurerm_resource_group.test.name + + storage_queue_endpoint { + 
storage_account_id = azurerm_storage_account.test.id + queue_name = azurerm_storage_queue.test.name + } + + included_event_types = ["Microsoft.Storage.BlobCreated", "Microsoft.Storage.BlobDeleted"] + + subject_filter { + subject_begins_with = "test/test" + subject_ends_with = ".jpg" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (EventGridSystemTopicEventSubscriptionResource) advancedFilter(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eg-%[1]d" + location = "%[2]s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestacc%[3]s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" + + tags = { + environment = "staging" + } +} + +resource "azurerm_storage_queue" "test" { + name = "mysamplequeue-%[1]d" + storage_account_name = azurerm_storage_account.test.name +} + +resource "azurerm_eventgrid_system_topic" "test" { + name = "acctesteg-%[1]d" + location = "Global" + resource_group_name = azurerm_resource_group.test.name + source_arm_resource_id = azurerm_resource_group.test.id + topic_type = "Microsoft.Resources.ResourceGroups" +} + +resource "azurerm_eventgrid_system_topic_event_subscription" "test" { + name = "acctesteg-%[1]d" + system_topic = azurerm_eventgrid_system_topic.test.name + resource_group_name = azurerm_resource_group.test.name + + storage_queue_endpoint { + storage_account_id = azurerm_storage_account.test.id + queue_name = azurerm_storage_queue.test.name + } + + advanced_filter { + bool_equals { + key = "subject" + value = true + } + number_greater_than { + key = "data.metadataVersion" + value = 1 + } + number_greater_than_or_equals { + key = "data.contentLength" + value = 42.0 + } + number_less_than { + key = "data.contentLength" + value = 42.1 + } + number_less_than_or_equals { + key = "data.metadataVersion" + value = 2 + } + number_in { + key = "data.contentLength" + values = [0, 1, 1, 2, 3] + } + number_not_in { + key = "data.contentLength" + values = [5, 8, 13, 21, 34] + } + string_begins_with { + key = "subject" + values = ["foo"] + } + string_ends_with { + key = "subject" + values = ["bar"] + } + string_contains { + key = "data.contentType" + values = ["application", "octet-stream"] + } + string_in { + key = "data.blobType" + values = ["Block"] + } + string_not_in { + key = "data.blobType" + values = ["Page"] + } + } + +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} diff --git a/azurerm/internal/services/eventgrid/eventgrid_system_topic_resource.go b/azurerm/internal/services/eventgrid/eventgrid_system_topic_resource.go index 4250ffb296f2..61477c03e243 100644 --- a/azurerm/internal/services/eventgrid/eventgrid_system_topic_resource.go +++ b/azurerm/internal/services/eventgrid/eventgrid_system_topic_resource.go @@ -3,6 +3,7 @@ package eventgrid import ( "fmt" "log" + "regexp" "time" "github.com/Azure/azure-sdk-for-go/services/preview/eventgrid/mgmt/2020-04-01-preview/eventgrid" @@ -19,12 +20,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmEventGridSystemTopic() *schema.Resource { +func resourceEventGridSystemTopic() *schema.Resource { return &schema.Resource{ - Create: resourceArmEventGridSystemTopicCreateUpdate, - Read: resourceArmEventGridSystemTopicRead, - Update: 
resourceArmEventGridSystemTopicCreateUpdate, - Delete: resourceArmEventGridSystemTopicDelete, + Create: resourceEventGridSystemTopicCreateUpdate, + Read: resourceEventGridSystemTopicRead, + Update: resourceEventGridSystemTopicCreateUpdate, + Delete: resourceEventGridSystemTopicDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -34,16 +35,22 @@ func resourceArmEventGridSystemTopic() *schema.Resource { }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.EventGridSystemTopicID(id) + _, err := parse.SystemTopicID(id) return err }), Schema: map[string]*schema.Schema{ "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.All( + validation.StringIsNotEmpty, + validation.StringMatch( + regexp.MustCompile("^[-a-zA-Z0-9]{3,128}$"), + "EventGrid Topics name must be 3 - 128 characters long, contain only letters, numbers and hyphens.", + ), + ), }, "location": azure.SchemaLocation(), @@ -93,7 +100,7 @@ func resourceArmEventGridSystemTopic() *schema.Resource { } } -func resourceArmEventGridSystemTopicCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceEventGridSystemTopicCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).EventGrid.SystemTopicsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -149,15 +156,15 @@ func resourceArmEventGridSystemTopicCreateUpdate(d *schema.ResourceData, meta in d.SetId(*read.ID) - return resourceArmEventGridSystemTopicRead(d, meta) + return resourceEventGridSystemTopicRead(d, meta) } -func resourceArmEventGridSystemTopicRead(d *schema.ResourceData, meta interface{}) error { +func resourceEventGridSystemTopicRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).EventGrid.SystemTopicsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.EventGridSystemTopicID(d.Id()) + id, err := parse.SystemTopicID(d.Id()) if err != nil { return err } @@ -188,12 +195,12 @@ func resourceArmEventGridSystemTopicRead(d *schema.ResourceData, meta interface{ return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmEventGridSystemTopicDelete(d *schema.ResourceData, meta interface{}) error { +func resourceEventGridSystemTopicDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).EventGrid.SystemTopicsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.EventGridSystemTopicID(d.Id()) + id, err := parse.SystemTopicID(d.Id()) if err != nil { return err } diff --git a/azurerm/internal/services/eventgrid/eventgrid_system_topic_resource_test.go b/azurerm/internal/services/eventgrid/eventgrid_system_topic_resource_test.go new file mode 100644 index 000000000000..d3f3929484a1 --- /dev/null +++ b/azurerm/internal/services/eventgrid/eventgrid_system_topic_resource_test.go @@ -0,0 +1,164 @@ +package eventgrid_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventgrid/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type EventGridSystemTopicResource struct { +} + +func TestAccEventGridSystemTopic_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventgrid_system_topic", "test") + r := EventGridSystemTopicResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("source_arm_resource_id").Exists(), + check.That(data.ResourceName).Key("topic_type").Exists(), + check.That(data.ResourceName).Key("metric_arm_resource_id").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventGridSystemTopic_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventgrid_system_topic", "test") + r := EventGridSystemTopicResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_eventgrid_system_topic"), + }, + }) +} + +func TestAccEventGridSystemTopic_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventgrid_system_topic", "test") + r := EventGridSystemTopicResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.Foo").HasValue("Bar"), + check.That(data.ResourceName).Key("source_arm_resource_id").Exists(), + check.That(data.ResourceName).Key("topic_type").Exists(), + check.That(data.ResourceName).Key("metric_arm_resource_id").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func (EventGridSystemTopicResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.SystemTopicID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.EventGrid.SystemTopicsClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Event Grid System Topic %q (resource group: %q): %+v", id.Name, id.ResourceGroup, err) + } + + return utils.Bool(resp.SystemTopicProperties != nil), nil +} + +func (EventGridSystemTopicResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eg-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestegst%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_eventgrid_system_topic" "test" { + name = "acctestEGST%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + source_arm_resource_id = azurerm_storage_account.test.id + topic_type = "Microsoft.Storage.StorageAccounts" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomIntOfLength(12), data.RandomIntOfLength(10)) +} + +func (r 
EventGridSystemTopicResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_eventgrid_system_topic" "import" { + name = azurerm_eventgrid_system_topic.test.name + location = azurerm_eventgrid_system_topic.test.location + resource_group_name = azurerm_eventgrid_system_topic.test.resource_group_name + source_arm_resource_id = azurerm_eventgrid_system_topic.test.source_arm_resource_id + topic_type = azurerm_eventgrid_system_topic.test.topic_type +} +`, r.basic(data)) +} + +func (EventGridSystemTopicResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eg-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestegst%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_eventgrid_system_topic" "test" { + name = "acctestEGST%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + source_arm_resource_id = azurerm_storage_account.test.id + topic_type = "Microsoft.Storage.StorageAccounts" + + tags = { + "Foo" = "Bar" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomIntOfLength(12), data.RandomIntOfLength(10)) +} diff --git a/azurerm/internal/services/eventgrid/eventgrid_topic_data_source.go b/azurerm/internal/services/eventgrid/eventgrid_topic_data_source.go index 773440f225f0..de86287b023c 100644 --- a/azurerm/internal/services/eventgrid/eventgrid_topic_data_source.go +++ b/azurerm/internal/services/eventgrid/eventgrid_topic_data_source.go @@ -13,9 +13,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmEventGridTopic() *schema.Resource { +func dataSourceEventGridTopic() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmEventGridTopicRead, + Read: dataSourceEventGridTopicRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -54,7 +54,7 @@ func dataSourceArmEventGridTopic() *schema.Resource { } } -func dataSourceArmEventGridTopicRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceEventGridTopicRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).EventGrid.TopicsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/eventgrid/eventgrid_topic_data_source_test.go b/azurerm/internal/services/eventgrid/eventgrid_topic_data_source_test.go new file mode 100644 index 000000000000..430bc1cebe51 --- /dev/null +++ b/azurerm/internal/services/eventgrid/eventgrid_topic_data_source_test.go @@ -0,0 +1,40 @@ +package eventgrid_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type EventGridTopicDataSource struct { +} + +func TestAccEventGridTopicDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_eventgrid_topic", "test") + r := EventGridTopicDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + 
check.That(data.ResourceName).Key("endpoint").Exists(), + check.That(data.ResourceName).Key("primary_access_key").Exists(), + check.That(data.ResourceName).Key("secondary_access_key").Exists(), + ), + }, + }) +} + +func (EventGridTopicDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_eventgrid_topic" "test" { + name = azurerm_eventgrid_topic.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, EventGridTopicResource{}.basic(data)) +} diff --git a/azurerm/internal/services/eventgrid/eventgrid_topic_resource.go b/azurerm/internal/services/eventgrid/eventgrid_topic_resource.go index 62cb088f4cd9..c66b9755a9ca 100644 --- a/azurerm/internal/services/eventgrid/eventgrid_topic_resource.go +++ b/azurerm/internal/services/eventgrid/eventgrid_topic_resource.go @@ -3,6 +3,7 @@ package eventgrid import ( "fmt" "log" + "regexp" "time" "github.com/Azure/azure-sdk-for-go/services/preview/eventgrid/mgmt/2020-04-01-preview/eventgrid" @@ -19,12 +20,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmEventGridTopic() *schema.Resource { +func resourceEventGridTopic() *schema.Resource { return &schema.Resource{ - Create: resourceArmEventGridTopicCreateUpdate, - Read: resourceArmEventGridTopicRead, - Update: resourceArmEventGridTopicCreateUpdate, - Delete: resourceArmEventGridTopicDelete, + Create: resourceEventGridTopicCreateUpdate, + Read: resourceEventGridTopicRead, + Update: resourceEventGridTopicCreateUpdate, + Delete: resourceEventGridTopicDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -34,16 +35,22 @@ func resourceArmEventGridTopic() *schema.Resource { }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.EventGridTopicID(id) + _, err := parse.TopicID(id) return err }), Schema: map[string]*schema.Schema{ "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.All( + validation.StringIsNotEmpty, + validation.StringMatch( + regexp.MustCompile("^[-a-zA-Z0-9]{3,50}$"), + "EventGrid topic name must be 3 - 50 characters long, contain only letters, numbers and hyphens.", + ), + ), }, "location": azure.SchemaLocation(), @@ -151,7 +158,7 @@ func resourceArmEventGridTopic() *schema.Resource { } } -func resourceArmEventGridTopicCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceEventGridTopicCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).EventGrid.TopicsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -207,15 +214,15 @@ func resourceArmEventGridTopicCreateUpdate(d *schema.ResourceData, meta interfac d.SetId(*read.ID) - return resourceArmEventGridTopicRead(d, meta) + return resourceEventGridTopicRead(d, meta) } -func resourceArmEventGridTopicRead(d *schema.ResourceData, meta interface{}) error { +func resourceEventGridTopicRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).EventGrid.TopicsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.EventGridTopicID(d.Id()) + id, err := parse.TopicID(d.Id()) if err != nil { return err } @@ -273,12 +280,12 @@ func resourceArmEventGridTopicRead(d *schema.ResourceData, meta interface{}) err return 
tags.FlattenAndSet(d, resp.Tags) } -func resourceArmEventGridTopicDelete(d *schema.ResourceData, meta interface{}) error { +func resourceEventGridTopicDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).EventGrid.TopicsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.EventGridTopicID(d.Id()) + id, err := parse.TopicID(d.Id()) if err != nil { return err } diff --git a/azurerm/internal/services/eventgrid/eventgrid_topic_resource_test.go b/azurerm/internal/services/eventgrid/eventgrid_topic_resource_test.go new file mode 100644 index 000000000000..8a3d0a6182d3 --- /dev/null +++ b/azurerm/internal/services/eventgrid/eventgrid_topic_resource_test.go @@ -0,0 +1,193 @@ +package eventgrid_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventgrid/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type EventGridTopicResource struct { +} + +func TestAccEventGridTopic_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventgrid_topic", "test") + r := EventGridTopicResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("endpoint").Exists(), + check.That(data.ResourceName).Key("primary_access_key").Exists(), + check.That(data.ResourceName).Key("secondary_access_key").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventGridTopic_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventgrid_topic", "test") + r := EventGridTopicResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_eventgrid_topic"), + }, + }) +} + +func TestAccEventGridTopic_mapping(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventgrid_topic", "test") + r := EventGridTopicResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.mapping(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("input_mapping_fields.0.topic").HasValue("test"), + check.That(data.ResourceName).Key("input_mapping_fields.0.topic").HasValue("test"), + check.That(data.ResourceName).Key("input_mapping_default_values.0.data_version").HasValue("1.0"), + check.That(data.ResourceName).Key("input_mapping_default_values.0.subject").HasValue("DefaultSubject"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventGridTopic_basicWithTags(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventgrid_topic", "test") + r := EventGridTopicResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicWithTags(data), + Check: resource.ComposeTestCheckFunc( + 
check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.foo").HasValue("bar"), + check.That(data.ResourceName).Key("endpoint").Exists(), + check.That(data.ResourceName).Key("primary_access_key").Exists(), + check.That(data.ResourceName).Key("secondary_access_key").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func (EventGridTopicResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.TopicID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.EventGrid.TopicsClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving EventGrid Topic %q (resource group: %q): %+v", id.Name, id.ResourceGroup, err) + } + + return utils.Bool(resp.TopicProperties != nil), nil +} + +func (EventGridTopicResource) basic(data acceptance.TestData) string { + // TODO: confirm if this is still the case + // currently only supported in "West Central US" & "West US 2" + location := "westus2" + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_eventgrid_topic" "test" { + name = "acctesteg-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} +`, data.RandomInteger, location, data.RandomInteger) +} + +func (EventGridTopicResource) requiresImport(data acceptance.TestData) string { + template := EventGridTopicResource{}.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_eventgrid_topic" "import" { + name = azurerm_eventgrid_topic.test.name + location = azurerm_eventgrid_topic.test.location + resource_group_name = azurerm_eventgrid_topic.test.resource_group_name +} +`, template) +} + +func (EventGridTopicResource) mapping(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} +resource "azurerm_eventgrid_topic" "test" { + name = "acctesteg-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + input_schema = "CustomEventSchema" + input_mapping_fields { + topic = "test" + event_type = "test" + } + input_mapping_default_values { + data_version = "1.0" + subject = "DefaultSubject" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (EventGridTopicResource) basicWithTags(data acceptance.TestData) string { + // currently only supported in "West Central US" & "West US 2" + location := "westus2" + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_eventgrid_topic" "test" { + name = "acctesteg-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + tags = { + "foo" = "bar" + } +} +`, data.RandomInteger, location, data.RandomInteger) +} diff --git a/azurerm/internal/services/eventgrid/parse/domain.go b/azurerm/internal/services/eventgrid/parse/domain.go new file mode 100644 index 000000000000..9d47a67b8643 --- /dev/null +++ b/azurerm/internal/services/eventgrid/parse/domain.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + 
"strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type DomainId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewDomainID(subscriptionId, resourceGroup, name string) DomainId { + return DomainId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id DomainId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Domain", segmentsStr) +} + +func (id DomainId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.EventGrid/domains/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// DomainID parses a Domain ID into an DomainId struct +func DomainID(input string) (*DomainId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := DomainId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("domains"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/eventgrid/parse/domain_test.go b/azurerm/internal/services/eventgrid/parse/domain_test.go new file mode 100644 index 000000000000..a5b7926e6474 --- /dev/null +++ b/azurerm/internal/services/eventgrid/parse/domain_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = DomainId{} + +func TestDomainIDFormatter(t *testing.T) { + actual := NewDomainID("12345678-1234-9876-4563-123456789012", "resGroup1", "domain1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/domains/domain1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestDomainID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *DomainId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/domains/", + Error: true, + }, + + { + // valid + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/domains/domain1", + Expected: &DomainId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "domain1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.EVENTGRID/DOMAINS/DOMAIN1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := DomainID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/eventgrid/parse/domain_topic.go b/azurerm/internal/services/eventgrid/parse/domain_topic.go new file mode 100644 index 000000000000..4ef1019e8005 --- /dev/null +++ b/azurerm/internal/services/eventgrid/parse/domain_topic.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type DomainTopicId struct { + SubscriptionId string + ResourceGroup string + DomainName string + TopicName string +} + +func NewDomainTopicID(subscriptionId, resourceGroup, domainName, topicName string) DomainTopicId { + return DomainTopicId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + DomainName: domainName, + TopicName: topicName, + } +} + +func (id DomainTopicId) String() string { + segments := []string{ + fmt.Sprintf("Topic Name %q", id.TopicName), + fmt.Sprintf("Domain Name %q", id.DomainName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Domain Topic", segmentsStr) +} + +func (id DomainTopicId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.EventGrid/domains/%s/topics/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.DomainName, id.TopicName) +} + +// DomainTopicID parses a DomainTopic ID into an DomainTopicId struct +func DomainTopicID(input string) (*DomainTopicId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := DomainTopicId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.DomainName, err = id.PopSegment("domains"); err != nil { + return nil, err + } + if resourceId.TopicName, err = id.PopSegment("topics"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git 
a/azurerm/internal/services/eventgrid/parse/domain_topic_test.go b/azurerm/internal/services/eventgrid/parse/domain_topic_test.go new file mode 100644 index 000000000000..3d964235f83d --- /dev/null +++ b/azurerm/internal/services/eventgrid/parse/domain_topic_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = DomainTopicId{} + +func TestDomainTopicIDFormatter(t *testing.T) { + actual := NewDomainTopicID("12345678-1234-9876-4563-123456789012", "resGroup1", "domain1", "topic1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/domains/domain1/topics/topic1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestDomainTopicID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *DomainTopicId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing DomainName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/", + Error: true, + }, + + { + // missing value for DomainName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/domains/", + Error: true, + }, + + { + // missing TopicName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/domains/domain1/", + Error: true, + }, + + { + // missing value for TopicName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/domains/domain1/topics/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/domains/domain1/topics/topic1", + Expected: &DomainTopicId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + DomainName: "domain1", + TopicName: "topic1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.EVENTGRID/DOMAINS/DOMAIN1/TOPICS/TOPIC1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := DomainTopicID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.DomainName != v.Expected.DomainName { + t.Fatalf("Expected %q but got %q for 
DomainName", v.Expected.DomainName, actual.DomainName) + } + if actual.TopicName != v.Expected.TopicName { + t.Fatalf("Expected %q but got %q for TopicName", v.Expected.TopicName, actual.TopicName) + } + } +} diff --git a/azurerm/internal/services/eventgrid/parse/event_subscription.go b/azurerm/internal/services/eventgrid/parse/event_subscription.go new file mode 100644 index 000000000000..99f6a6f4e897 --- /dev/null +++ b/azurerm/internal/services/eventgrid/parse/event_subscription.go @@ -0,0 +1,32 @@ +package parse + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type EventSubscriptionId struct { + Scope string + Name string +} + +func EventSubscriptionID(input string) (*EventSubscriptionId, error) { + _, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, fmt.Errorf("[ERROR] Unable to parse EventGrid Event Subscription ID %q: %+v", input, err) + } + + segments := strings.Split(input, "/providers/Microsoft.EventGrid/eventSubscriptions/") + if len(segments) != 2 { + return nil, fmt.Errorf("Expected ID to be in the format `{scope}/providers/Microsoft.EventGrid/eventSubscriptions/{name} - got %d segments", len(segments)) + } + + eventSubscription := EventSubscriptionId{ + Scope: segments[0], + Name: segments[1], + } + + return &eventSubscription, nil +} diff --git a/azurerm/internal/services/eventgrid/parse/event_subscription_test.go b/azurerm/internal/services/eventgrid/parse/event_subscription_test.go new file mode 100644 index 000000000000..e5c61fda0a2d --- /dev/null +++ b/azurerm/internal/services/eventgrid/parse/event_subscription_test.go @@ -0,0 +1,85 @@ +package parse + +import ( + "testing" +) + +func TestEventGridEventSubscriptionId(t *testing.T) { + testData := []struct { + Name string + Input string + Expected *EventSubscriptionId + }{ + { + Name: "Empty", + Input: "", + Expected: nil, + }, + { + Name: "No Resource Groups Value", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/providers/Microsoft.EventGrid/eventSubscriptions/subscription1", + Expected: nil, + }, + { + Name: "Subscription Scope", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.EventGrid/eventSubscriptions/subscription1", + Expected: &EventSubscriptionId{ + Name: "subscription1", + Scope: "/subscriptions/00000000-0000-0000-0000-000000000000", + }, + }, + { + Name: "Resource Group", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.EventGrid/eventSubscriptions/subscription1", + Expected: &EventSubscriptionId{ + Name: "subscription1", + Scope: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1", + }, + }, + { + Name: "Storage Account Scope", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Storage/storageAccounts/storage1/providers/Microsoft.EventGrid/eventSubscriptions/subscription1", + Expected: &EventSubscriptionId{ + Name: "subscription1", + Scope: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Storage/storageAccounts/storage1", + }, + }, + { + Name: "Event Grid Domain Scope", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.EventGrid/domains/domain1/providers/Microsoft.EventGrid/eventSubscriptions/subscription1", + Expected: &EventSubscriptionId{ + Name: "subscription1", + Scope: 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.EventGrid/domains/domain1", + }, + }, + { + Name: "Event Grid Topic Scope", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.EventGrid/topics/topic1/providers/Microsoft.EventGrid/eventSubscriptions/subscription1", + Expected: &EventSubscriptionId{ + Name: "subscription1", + Scope: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.EventGrid/topics/topic1", + }, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Name) + + actual, err := EventSubscriptionID(v.Input) + if err != nil { + if v.Expected == nil { + continue + } + + t.Fatalf("Expected a value but got an error: %s", err) + } + + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + + if actual.Scope != v.Expected.Scope { + t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.Scope, actual.Scope) + } + } +} diff --git a/azurerm/internal/services/eventgrid/parse/eventgrid_domain.go b/azurerm/internal/services/eventgrid/parse/eventgrid_domain.go deleted file mode 100644 index f6db8c651fb8..000000000000 --- a/azurerm/internal/services/eventgrid/parse/eventgrid_domain.go +++ /dev/null @@ -1,33 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type EventGridDomainId struct { - ResourceGroup string - Name string -} - -func EventGridDomainID(input string) (*EventGridDomainId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse EventGrid Domain ID %q: %+v", input, err) - } - - domain := EventGridDomainId{ - ResourceGroup: id.ResourceGroup, - } - - if domain.Name, err = id.PopSegment("domains"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &domain, nil -} diff --git a/azurerm/internal/services/eventgrid/parse/eventgrid_domain_test.go b/azurerm/internal/services/eventgrid/parse/eventgrid_domain_test.go deleted file mode 100644 index c27fcbc313dc..000000000000 --- a/azurerm/internal/services/eventgrid/parse/eventgrid_domain_test.go +++ /dev/null @@ -1,53 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestEventGridDomainId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *EventGridDomainId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Domain", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.EventGrid/topics/topic1", - Expected: nil, - }, - { - Name: "EventGrid Domain ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.EventGrid/domains/domain1", - Expected: &EventGridDomainId{ - Name: "domain1", - ResourceGroup: "resGroup1", - }, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := EventGridDomainID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", 
v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/eventgrid/parse/eventgrid_domain_topic.go b/azurerm/internal/services/eventgrid/parse/eventgrid_domain_topic.go deleted file mode 100644 index f11961171e95..000000000000 --- a/azurerm/internal/services/eventgrid/parse/eventgrid_domain_topic.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type EventGridDomainTopicId struct { - ResourceGroup string - Name string - Domain string -} - -func EventGridDomainTopicID(input string) (*EventGridDomainTopicId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse EventGrid Domain Topic ID %q: %+v", input, err) - } - - domainTopic := EventGridDomainTopicId{ - ResourceGroup: id.ResourceGroup, - } - - if domainTopic.Name, err = id.PopSegment("topics"); err != nil { - return nil, err - } - - if domainTopic.Domain, err = id.PopSegment("domains"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &domainTopic, nil -} diff --git a/azurerm/internal/services/eventgrid/parse/eventgrid_domain_topic_test.go b/azurerm/internal/services/eventgrid/parse/eventgrid_domain_topic_test.go deleted file mode 100644 index bc2a1c849f01..000000000000 --- a/azurerm/internal/services/eventgrid/parse/eventgrid_domain_topic_test.go +++ /dev/null @@ -1,53 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestEventGridDomainTopicId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *EventGridDomainTopicId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "Missing Domain", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/providers/Microsoft.EventGrid/topics/topic1", - Expected: nil, - }, - { - Name: "Domain Topic ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.EventGrid/domains/domain1/topics/topic1", - Expected: &EventGridDomainTopicId{ - Name: "topic1", - ResourceGroup: "resGroup1", - }, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := EventGridDomainTopicID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/eventgrid/parse/eventgrid_event_subscription.go b/azurerm/internal/services/eventgrid/parse/eventgrid_event_subscription.go deleted file mode 100644 index 26efe025973b..000000000000 --- a/azurerm/internal/services/eventgrid/parse/eventgrid_event_subscription.go +++ /dev/null @@ -1,32 +0,0 @@ -package parse - -import ( - "fmt" - "strings" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type EventGridEventSubscriptionId struct { - Scope string - Name string -} - -func EventGridEventSubscriptionID(input string) (*EventGridEventSubscriptionId, error) { - _, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse EventGrid Event 
Subscription ID %q: %+v", input, err) - } - - segments := strings.Split(input, "/providers/Microsoft.EventGrid/eventSubscriptions/") - if len(segments) != 2 { - return nil, fmt.Errorf("Expected ID to be in the format `{scope}/providers/Microsoft.EventGrid/eventSubscriptions/{name} - got %d segments", len(segments)) - } - - eventSubscription := EventGridEventSubscriptionId{ - Scope: segments[0], - Name: segments[1], - } - - return &eventSubscription, nil -} diff --git a/azurerm/internal/services/eventgrid/parse/eventgrid_event_subscription_test.go b/azurerm/internal/services/eventgrid/parse/eventgrid_event_subscription_test.go deleted file mode 100644 index e2f21c807e41..000000000000 --- a/azurerm/internal/services/eventgrid/parse/eventgrid_event_subscription_test.go +++ /dev/null @@ -1,85 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestEventGridEventSubscriptionId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *EventGridEventSubscriptionId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/providers/Microsoft.EventGrid/eventSubscriptions/subscription1", - Expected: nil, - }, - { - Name: "Subscription Scope", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.EventGrid/eventSubscriptions/subscription1", - Expected: &EventGridEventSubscriptionId{ - Name: "subscription1", - Scope: "/subscriptions/00000000-0000-0000-0000-000000000000", - }, - }, - { - Name: "Resource Group", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.EventGrid/eventSubscriptions/subscription1", - Expected: &EventGridEventSubscriptionId{ - Name: "subscription1", - Scope: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1", - }, - }, - { - Name: "Storage Account Scope", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Storage/storageAccounts/storage1/providers/Microsoft.EventGrid/eventSubscriptions/subscription1", - Expected: &EventGridEventSubscriptionId{ - Name: "subscription1", - Scope: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Storage/storageAccounts/storage1", - }, - }, - { - Name: "Event Grid Domain Scope", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.EventGrid/domains/domain1/providers/Microsoft.EventGrid/eventSubscriptions/subscription1", - Expected: &EventGridEventSubscriptionId{ - Name: "subscription1", - Scope: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.EventGrid/domains/domain1", - }, - }, - { - Name: "Event Grid Topic Scope", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.EventGrid/topics/topic1/providers/Microsoft.EventGrid/eventSubscriptions/subscription1", - Expected: &EventGridEventSubscriptionId{ - Name: "subscription1", - Scope: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.EventGrid/topics/topic1", - }, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := EventGridEventSubscriptionID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != 
v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.Scope != v.Expected.Scope { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.Scope, actual.Scope) - } - } -} diff --git a/azurerm/internal/services/eventgrid/parse/eventgrid_system_topic.go b/azurerm/internal/services/eventgrid/parse/eventgrid_system_topic.go deleted file mode 100644 index 8ddb67959709..000000000000 --- a/azurerm/internal/services/eventgrid/parse/eventgrid_system_topic.go +++ /dev/null @@ -1,33 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type EventGridSystemTopicId struct { - ResourceGroup string - Name string -} - -func EventGridSystemTopicID(input string) (*EventGridSystemTopicId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse EventGrid System Topic ID %q: %+v", input, err) - } - - topic := EventGridSystemTopicId{ - ResourceGroup: id.ResourceGroup, - } - - if topic.Name, err = id.PopSegment("systemTopics"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &topic, nil -} diff --git a/azurerm/internal/services/eventgrid/parse/eventgrid_system_topic_test.go b/azurerm/internal/services/eventgrid/parse/eventgrid_system_topic_test.go deleted file mode 100644 index e7d1167bdd60..000000000000 --- a/azurerm/internal/services/eventgrid/parse/eventgrid_system_topic_test.go +++ /dev/null @@ -1,53 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestEventGridSystemTopicId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *EventGridSystemTopicId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No System Topic", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/providers/Microsoft.EventGrid", - Expected: nil, - }, - { - Name: "EventGrid System Topic ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.EventGrid/systemTopics/systemTopic1", - Expected: &EventGridSystemTopicId{ - Name: "systemTopic1", - ResourceGroup: "resGroup1", - }, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := EventGridSystemTopicID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/eventgrid/parse/eventgrid_topic.go b/azurerm/internal/services/eventgrid/parse/eventgrid_topic.go deleted file mode 100644 index ad95e0312846..000000000000 --- a/azurerm/internal/services/eventgrid/parse/eventgrid_topic.go +++ /dev/null @@ -1,33 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type EventGridTopicId struct { - ResourceGroup string - Name string -} - -func EventGridTopicID(input string) (*EventGridTopicId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse EventGrid Topic ID %q: %+v", input, 
err) - } - - topic := EventGridTopicId{ - ResourceGroup: id.ResourceGroup, - } - - if topic.Name, err = id.PopSegment("topics"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &topic, nil -} diff --git a/azurerm/internal/services/eventgrid/parse/eventgrid_topic_test.go b/azurerm/internal/services/eventgrid/parse/eventgrid_topic_test.go deleted file mode 100644 index f20306a4b06b..000000000000 --- a/azurerm/internal/services/eventgrid/parse/eventgrid_topic_test.go +++ /dev/null @@ -1,53 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestEventGridTopicId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *EventGridTopicId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Topic", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/providers/Microsoft.EventGrid/domains/domain1", - Expected: nil, - }, - { - Name: "EventGrid Topic ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.EventGrid/topics/topic1", - Expected: &EventGridTopicId{ - Name: "topic1", - ResourceGroup: "resGroup1", - }, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := EventGridTopicID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/eventgrid/parse/system_topic.go b/azurerm/internal/services/eventgrid/parse/system_topic.go new file mode 100644 index 000000000000..fed7eeae84b4 --- /dev/null +++ b/azurerm/internal/services/eventgrid/parse/system_topic.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type SystemTopicId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewSystemTopicID(subscriptionId, resourceGroup, name string) SystemTopicId { + return SystemTopicId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id SystemTopicId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "System Topic", segmentsStr) +} + +func (id SystemTopicId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.EventGrid/systemTopics/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// SystemTopicID parses a SystemTopic ID into an SystemTopicId struct +func SystemTopicID(input string) (*SystemTopicId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := SystemTopicId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + 
return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("systemTopics"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/eventgrid/parse/system_topic_event_subscription.go b/azurerm/internal/services/eventgrid/parse/system_topic_event_subscription.go new file mode 100644 index 000000000000..a9958078b11e --- /dev/null +++ b/azurerm/internal/services/eventgrid/parse/system_topic_event_subscription.go @@ -0,0 +1,38 @@ +package parse + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type SystemTopicEventSubscriptionId struct { + ResourceGroup string + SystemTopic string + Name string +} + +func SystemTopicEventSubscriptionID(input string) (*SystemTopicEventSubscriptionId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, fmt.Errorf("[ERROR] Unable to parse EventGrid System Topic Event Subscription ID %q: %+v", input, err) + } + + systemTopicEventSubscriptionID := SystemTopicEventSubscriptionId{ + ResourceGroup: id.ResourceGroup, + } + + if systemTopicEventSubscriptionID.SystemTopic, err = id.PopSegment("systemTopics"); err != nil { + return nil, err + } + + if systemTopicEventSubscriptionID.Name, err = id.PopSegment("eventSubscriptions"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &systemTopicEventSubscriptionID, nil +} diff --git a/azurerm/internal/services/eventgrid/parse/system_topic_event_subscription_test.go b/azurerm/internal/services/eventgrid/parse/system_topic_event_subscription_test.go new file mode 100644 index 000000000000..a4b72d6f154f --- /dev/null +++ b/azurerm/internal/services/eventgrid/parse/system_topic_event_subscription_test.go @@ -0,0 +1,58 @@ +package parse + +import ( + "testing" +) + +func TestSystenTopicEventGridEventSubscriptionId(t *testing.T) { + testData := []struct { + Name string + Input string + Expected *SystemTopicEventSubscriptionId + }{ + { + Name: "Empty", + Input: "", + Expected: nil, + }, + { + Name: "No Resource Groups Value", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/providers/Microsoft.EventGrid/systemTopics/topic1/eventSubscriptions/subscription1", + Expected: nil, + }, + { + Name: "Event Grid System Topic Scope", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.EventGrid/systemTopics/topic1/eventSubscriptions/subscription1", + Expected: &SystemTopicEventSubscriptionId{ + Name: "subscription1", + SystemTopic: "topic1", + ResourceGroup: "resGroup1", + }, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Name) + + actual, err := SystemTopicEventSubscriptionID(v.Input) + if err != nil { + if v.Expected == nil { + continue + } + + t.Fatalf("Expected a value but got an error: %s", err) + } + + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + + if actual.SystemTopic != v.Expected.SystemTopic { + t.Fatalf("Expected %q but got %q for System Topic", v.Expected.SystemTopic, actual.SystemTopic) + } + + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) + } + } +} diff --git 
a/azurerm/internal/services/eventgrid/parse/system_topic_test.go b/azurerm/internal/services/eventgrid/parse/system_topic_test.go new file mode 100644 index 000000000000..165cd5cf33bf --- /dev/null +++ b/azurerm/internal/services/eventgrid/parse/system_topic_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = SystemTopicId{} + +func TestSystemTopicIDFormatter(t *testing.T) { + actual := NewSystemTopicID("12345678-1234-9876-4563-123456789012", "resGroup1", "systemTopic1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/systemTopics/systemTopic1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestSystemTopicID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *SystemTopicId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/systemTopics/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/systemTopics/systemTopic1", + Expected: &SystemTopicId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "systemTopic1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.EVENTGRID/SYSTEMTOPICS/SYSTEMTOPIC1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := SystemTopicID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/eventgrid/parse/topic.go b/azurerm/internal/services/eventgrid/parse/topic.go new file mode 100644 index 000000000000..ebc9fa2868b6 --- /dev/null +++ b/azurerm/internal/services/eventgrid/parse/topic.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type TopicId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewTopicID(subscriptionId, resourceGroup, name string) TopicId { + return TopicId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id TopicId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Topic", segmentsStr) +} + +func (id TopicId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.EventGrid/topics/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// TopicID parses a Topic ID into an TopicId struct +func TopicID(input string) (*TopicId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := TopicId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("topics"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/eventgrid/parse/topic_test.go b/azurerm/internal/services/eventgrid/parse/topic_test.go new file mode 100644 index 000000000000..2e88f0a677d9 --- /dev/null +++ b/azurerm/internal/services/eventgrid/parse/topic_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = TopicId{} + +func TestTopicIDFormatter(t *testing.T) { + actual := NewTopicID("12345678-1234-9876-4563-123456789012", "resGroup1", "topic1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/topics/topic1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestTopicID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *TopicId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/topics/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/topics/topic1", + Expected: 
&TopicId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "topic1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.EVENTGRID/TOPICS/TOPIC1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := TopicID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/eventgrid/registration.go b/azurerm/internal/services/eventgrid/registration.go index 9734f9869874..cad10fdd74cc 100644 --- a/azurerm/internal/services/eventgrid/registration.go +++ b/azurerm/internal/services/eventgrid/registration.go @@ -21,17 +21,19 @@ func (r Registration) WebsiteCategories() []string { // SupportedDataSources returns the supported Data Sources supported by this Service func (r Registration) SupportedDataSources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_eventgrid_topic": dataSourceArmEventGridTopic(), + "azurerm_eventgrid_topic": dataSourceEventGridTopic(), + "azurerm_eventgrid_domain_topic": dataSourceEventGridDomainTopic(), } } // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_eventgrid_domain": resourceArmEventGridDomain(), - "azurerm_eventgrid_domain_topic": resourceArmEventGridDomainTopic(), - "azurerm_eventgrid_event_subscription": resourceArmEventGridEventSubscription(), - "azurerm_eventgrid_topic": resourceArmEventGridTopic(), - "azurerm_eventgrid_system_topic": resourceArmEventGridSystemTopic(), + "azurerm_eventgrid_domain": resourceEventGridDomain(), + "azurerm_eventgrid_domain_topic": resourceEventGridDomainTopic(), + "azurerm_eventgrid_event_subscription": resourceEventGridEventSubscription(), + "azurerm_eventgrid_topic": resourceEventGridTopic(), + "azurerm_eventgrid_system_topic": resourceEventGridSystemTopic(), + "azurerm_eventgrid_system_topic_event_subscription": resourceEventGridSystemTopicEventSubscription(), } } diff --git a/azurerm/internal/services/eventgrid/resourceids.go b/azurerm/internal/services/eventgrid/resourceids.go new file mode 100644 index 000000000000..3518c16b658a --- /dev/null +++ b/azurerm/internal/services/eventgrid/resourceids.go @@ -0,0 +1,8 @@ +package eventgrid + +// EventSubscription can't be generated (today) + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Domain -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/domains/domain1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=DomainTopic -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/domains/domain1/topics/topic1 +//go:generate go run 
../../tools/generator-resource-id/main.go -path=./ -name=SystemTopic -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/systemTopics/systemTopic1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Topic -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/topics/topic1 diff --git a/azurerm/internal/services/eventgrid/tests/eventgrid_domain_resource_test.go b/azurerm/internal/services/eventgrid/tests/eventgrid_domain_resource_test.go deleted file mode 100644 index 31b28728ebdf..000000000000 --- a/azurerm/internal/services/eventgrid/tests/eventgrid_domain_resource_test.go +++ /dev/null @@ -1,246 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMEventGridDomain_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventgrid_domain", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventGridDomainDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventGridDomain_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventGridDomainExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_access_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_access_key"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventGridDomain_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventgrid_domain", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventGridDomainDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventGridDomain_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventGridDomainExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMEventGridDomain_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_eventgrid_domain"), - }, - }, - }) -} - -func TestAccAzureRMEventGridDomain_mapping(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventgrid_domain", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventGridDomainDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventGridDomain_mapping(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventGridDomainExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "input_mapping_fields.0.topic", "test"), - resource.TestCheckResourceAttr(data.ResourceName, "input_mapping_fields.0.topic", "test"), - resource.TestCheckResourceAttr(data.ResourceName, "input_mapping_default_values.0.data_version", "1.0"), - resource.TestCheckResourceAttr(data.ResourceName, 
"input_mapping_default_values.0.subject", "DefaultSubject"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventGridDomain_basicWithTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventgrid_domain", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventGridDomainDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventGridDomain_basicWithTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventGridDomainExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.foo", "bar"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMEventGridDomainDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).EventGrid.DomainsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_eventgrid_domain" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("EventGrid Domain still exists:\n%#v", resp) - } - } - - return nil -} - -func testCheckAzureRMEventGridDomainExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).EventGrid.DomainsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for EventGrid Domain: %s", name) - } - - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: EventGrid Domain %q (resource group: %s) does not exist", name, resourceGroup) - } - - return fmt.Errorf("Bad: Get on eventGridDomainsClient: %s", err) - } - - return nil - } -} - -func testAccAzureRMEventGridDomain_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_eventgrid_domain" "test" { - name = "acctesteg-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMEventGridDomain_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMEventGridDomain_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_eventgrid_domain" "import" { - name = azurerm_eventgrid_domain.test.name - location = azurerm_eventgrid_domain.test.location - resource_group_name = azurerm_eventgrid_domain.test.resource_group_name -} -`, template) -} - -func 
testAccAzureRMEventGridDomain_mapping(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_eventgrid_domain" "test" { - name = "acctesteg-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - input_schema = "CustomEventSchema" - - input_mapping_fields { - topic = "test" - event_type = "test" - } - - input_mapping_default_values { - data_version = "1.0" - subject = "DefaultSubject" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMEventGridDomain_basicWithTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_eventgrid_domain" "test" { - name = "acctesteg-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - tags = { - "foo" = "bar" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/eventgrid/tests/eventgrid_domain_topic_resource_test.go b/azurerm/internal/services/eventgrid/tests/eventgrid_domain_topic_resource_test.go deleted file mode 100644 index 62eef498b5a9..000000000000 --- a/azurerm/internal/services/eventgrid/tests/eventgrid_domain_topic_resource_test.go +++ /dev/null @@ -1,149 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMEventGridDomainTopic_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventgrid_domain_topic", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventGridDomainTopicDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventGridDomainTopic_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventGridDomainTopicExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventGridDomainTopic_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventgrid_domain_topic", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventGridTopicDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventGridDomainTopic_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventGridDomainTopicExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMEventGridDomainTopic_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_eventgrid_domain_topic"), - }, - }, - }) -} - -func testCheckAzureRMEventGridDomainTopicDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).EventGrid.DomainTopicsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - 
for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_eventgrid_domain_topic" { - continue - } - - name := rs.Primary.Attributes["name"] - domainName := rs.Primary.Attributes["domain_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, domainName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("EventGrid Domain Topic still exists:\n%#v", resp) - } - } - - return nil -} - -func testCheckAzureRMEventGridDomainTopicExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).EventGrid.DomainTopicsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - domainName := rs.Primary.Attributes["domain_name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for EventGrid Domain Topic: %s", name) - } - - resp, err := client.Get(ctx, resourceGroup, domainName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: EventGrid Domain Topic %q (resource group: %s) does not exist", name, resourceGroup) - } - - return fmt.Errorf("Bad: Get on EventGrid.DomainTopicsClient: %s", err) - } - - return nil - } -} - -func testAccAzureRMEventGridDomainTopic_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} -resource "azurerm_eventgrid_domain" "test" { - name = "acctestegdomain-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} -resource "azurerm_eventgrid_domain_topic" "test" { - name = "acctestegtopic-%d" - domain_name = azurerm_eventgrid_domain.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMEventGridDomainTopic_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMEventGridDomainTopic_basic(data) - return fmt.Sprintf(` -%s -resource "azurerm_eventgrid_domain_topic" "import" { - name = azurerm_eventgrid_domain_topic.test.name - domain_name = azurerm_eventgrid_domain_topic.test.domain_name - resource_group_name = azurerm_eventgrid_domain_topic.test.resource_group_name -} -`, template) -} diff --git a/azurerm/internal/services/eventgrid/tests/eventgrid_event_subscription_resource_test.go b/azurerm/internal/services/eventgrid/tests/eventgrid_event_subscription_resource_test.go deleted file mode 100644 index a6e796ff44fa..000000000000 --- a/azurerm/internal/services/eventgrid/tests/eventgrid_event_subscription_resource_test.go +++ /dev/null @@ -1,673 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMEventGridEventSubscription_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventgrid_event_subscription", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventGridEventSubscriptionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventGridEventSubscription_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventGridEventSubscriptionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "event_delivery_schema", "EventGridSchema"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventGridEventSubscription_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventgrid_event_subscription", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventGridEventSubscriptionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventGridEventSubscription_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventGridEventSubscriptionExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMEventGridEventSubscription_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_eventgrid_event_subscription"), - }, - }, - }) -} - -func TestAccAzureRMEventGridEventSubscription_eventHubID(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventgrid_event_subscription", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventGridEventSubscriptionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventGridEventSubscription_eventHubID(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventGridEventSubscriptionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "event_delivery_schema", "CloudEventSchemaV1_0"), - resource.TestCheckResourceAttrSet(data.ResourceName, "eventhub_endpoint_id"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventGridEventSubscription_serviceBusQueueID(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventgrid_event_subscription", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventGridEventSubscriptionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventGridEventSubscription_serviceBusQueueID(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventGridEventSubscriptionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "event_delivery_schema", "CloudEventSchemaV1_0"), - resource.TestCheckResourceAttrSet(data.ResourceName, "service_bus_queue_endpoint_id"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventGridEventSubscription_serviceBusTopicID(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventgrid_event_subscription", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - 
Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventGridEventSubscriptionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventGridEventSubscription_serviceBusTopicID(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventGridEventSubscriptionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "event_delivery_schema", "CloudEventSchemaV1_0"), - resource.TestCheckResourceAttrSet(data.ResourceName, "service_bus_topic_endpoint_id"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventGridEventSubscription_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventgrid_event_subscription", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventGridEventSubscriptionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventGridEventSubscription_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventGridEventSubscriptionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "event_delivery_schema", "EventGridSchema"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_queue_endpoint.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_blob_dead_letter_destination.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "included_event_types.0", "Microsoft.Resources.ResourceWriteSuccess"), - resource.TestCheckResourceAttr(data.ResourceName, "retry_policy.0.max_delivery_attempts", "11"), - resource.TestCheckResourceAttr(data.ResourceName, "retry_policy.0.event_time_to_live", "11"), - resource.TestCheckResourceAttr(data.ResourceName, "labels.0", "test"), - resource.TestCheckResourceAttr(data.ResourceName, "labels.2", "test2"), - ), - }, - { - Config: testAccAzureRMEventGridEventSubscription_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventGridEventSubscriptionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "included_event_types.0", "Microsoft.Storage.BlobCreated"), - resource.TestCheckResourceAttr(data.ResourceName, "included_event_types.1", "Microsoft.Storage.BlobDeleted"), - resource.TestCheckResourceAttr(data.ResourceName, "subject_filter.0.subject_ends_with", ".jpg"), - resource.TestCheckResourceAttr(data.ResourceName, "subject_filter.0.subject_begins_with", "test/test"), - resource.TestCheckResourceAttr(data.ResourceName, "retry_policy.0.max_delivery_attempts", "10"), - resource.TestCheckResourceAttr(data.ResourceName, "retry_policy.0.event_time_to_live", "12"), - resource.TestCheckResourceAttr(data.ResourceName, "labels.0", "test4"), - resource.TestCheckResourceAttr(data.ResourceName, "labels.2", "test6"), - ), - }, - }, - }) -} - -func TestAccAzureRMEventGridEventSubscription_filter(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventgrid_event_subscription", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventGridEventSubscriptionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventGridEventSubscription_filter(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventGridEventSubscriptionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "included_event_types.0", 
"Microsoft.Storage.BlobCreated"), - resource.TestCheckResourceAttr(data.ResourceName, "included_event_types.1", "Microsoft.Storage.BlobDeleted"), - resource.TestCheckResourceAttr(data.ResourceName, "subject_filter.0.subject_ends_with", ".jpg"), - resource.TestCheckResourceAttr(data.ResourceName, "subject_filter.0.subject_begins_with", "test/test"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventGridEventSubscription_advancedFilter(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventgrid_event_subscription", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventGridEventSubscriptionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventGridEventSubscription_advancedFilter(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventGridEventSubscriptionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "advanced_filter.0.bool_equals.0.key", "subject"), - resource.TestCheckResourceAttr(data.ResourceName, "advanced_filter.0.bool_equals.0.value", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "advanced_filter.0.number_greater_than.0.key", "data.metadataVersion"), - resource.TestCheckResourceAttr(data.ResourceName, "advanced_filter.0.number_greater_than.0.value", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "advanced_filter.0.number_greater_than_or_equals.0.key", "data.contentLength"), - resource.TestCheckResourceAttr(data.ResourceName, "advanced_filter.0.number_greater_than_or_equals.0.value", "42"), - resource.TestCheckResourceAttr(data.ResourceName, "advanced_filter.0.number_less_than.0.key", "data.contentLength"), - resource.TestCheckResourceAttr(data.ResourceName, "advanced_filter.0.number_less_than.0.value", "42.1"), - resource.TestCheckResourceAttr(data.ResourceName, "advanced_filter.0.number_less_than_or_equals.0.key", "data.metadataVersion"), - resource.TestCheckResourceAttr(data.ResourceName, "advanced_filter.0.number_less_than_or_equals.0.value", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "advanced_filter.0.number_in.0.key", "data.contentLength"), - resource.TestCheckResourceAttr(data.ResourceName, "advanced_filter.0.number_in.0.values.0", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "advanced_filter.0.number_not_in.0.key", "data.contentLength"), - resource.TestCheckResourceAttr(data.ResourceName, "advanced_filter.0.number_not_in.0.values.0", "5"), - resource.TestCheckResourceAttr(data.ResourceName, "advanced_filter.0.string_begins_with.0.key", "subject"), - resource.TestCheckResourceAttr(data.ResourceName, "advanced_filter.0.string_begins_with.0.values.0", "foo"), - resource.TestCheckResourceAttr(data.ResourceName, "advanced_filter.0.string_ends_with.0.key", "subject"), - resource.TestCheckResourceAttr(data.ResourceName, "advanced_filter.0.string_ends_with.0.values.0", "bar"), - resource.TestCheckResourceAttr(data.ResourceName, "advanced_filter.0.string_contains.0.key", "data.contentType"), - resource.TestCheckResourceAttr(data.ResourceName, "advanced_filter.0.string_contains.0.values.0", "application"), - resource.TestCheckResourceAttr(data.ResourceName, "advanced_filter.0.string_in.0.key", "data.blobType"), - resource.TestCheckResourceAttr(data.ResourceName, "advanced_filter.0.string_in.0.values.0", "Block"), - resource.TestCheckResourceAttr(data.ResourceName, "advanced_filter.0.string_not_in.0.key", 
"data.blobType"), - resource.TestCheckResourceAttr(data.ResourceName, "advanced_filter.0.string_not_in.0.values.0", "Page"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMEventGridEventSubscriptionDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).EventGrid.EventSubscriptionsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_eventgrid_event_subscription" { - continue - } - - name := rs.Primary.Attributes["name"] - scope := rs.Primary.Attributes["scope"] - - resp, err := client.Get(ctx, scope, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("EventGrid Event Subscription still exists:\n%#v", resp) - } - } - - return nil -} - -func testCheckAzureRMEventGridEventSubscriptionExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).EventGrid.EventSubscriptionsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - scope, hasScope := rs.Primary.Attributes["scope"] - if !hasScope { - return fmt.Errorf("Bad: no scope found in state for EventGrid Event Subscription: %s", name) - } - - resp, err := client.Get(ctx, scope, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: EventGrid Event Subscription %q (scope: %s) does not exist", name, scope) - } - - return fmt.Errorf("Bad: Get on eventGridEventSubscriptionsClient: %s", err) - } - - return nil - } -} - -func testAccAzureRMEventGridEventSubscription_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-eg-%d" - location = "%s" -} - -resource "azurerm_storage_account" "test" { - name = "acctestacc%s" - resource_group_name = "${azurerm_resource_group.test.name}" - location = "${azurerm_resource_group.test.location}" - account_tier = "Standard" - account_replication_type = "LRS" - - tags = { - environment = "staging" - } -} - -resource "azurerm_storage_queue" "test" { - name = "mysamplequeue-%d" - storage_account_name = "${azurerm_storage_account.test.name}" -} - -resource "azurerm_storage_container" "test" { - name = "vhds" - storage_account_name = "${azurerm_storage_account.test.name}" - container_access_type = "private" -} - -resource "azurerm_storage_blob" "test" { - name = "herpderp1.vhd" - - storage_account_name = "${azurerm_storage_account.test.name}" - storage_container_name = "${azurerm_storage_container.test.name}" - - type = "Page" - size = 5120 -} - -resource "azurerm_eventgrid_event_subscription" "test" { - name = "acctesteg-%d" - scope = "${azurerm_resource_group.test.id}" - - storage_queue_endpoint { - storage_account_id = "${azurerm_storage_account.test.id}" - queue_name = "${azurerm_storage_queue.test.name}" - } - - storage_blob_dead_letter_destination { - storage_account_id = "${azurerm_storage_account.test.id}" - storage_blob_container_name = "${azurerm_storage_container.test.name}" - } - - retry_policy { - event_time_to_live = 11 - 
max_delivery_attempts = 11 - } - - labels = ["test", "test1", "test2"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMEventGridEventSubscription_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMEventGridEventSubscription_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_eventgrid_event_subscription" "import" { - name = azurerm_eventgrid_event_subscription.test.name - scope = azurerm_eventgrid_event_subscription.test.scope -} -`, template) -} - -func testAccAzureRMEventGridEventSubscription_update(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-eg-%d" - location = "%s" -} - -resource "azurerm_storage_account" "test" { - name = "acctestacc%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" - - tags = { - environment = "staging" - } -} - -resource "azurerm_storage_queue" "test" { - name = "mysamplequeue-%d" - storage_account_name = azurerm_storage_account.test.name -} - -resource "azurerm_storage_container" "test" { - name = "vhds" - storage_account_name = azurerm_storage_account.test.name - container_access_type = "private" -} - -resource "azurerm_storage_blob" "test" { - name = "herpderp1.vhd" - - storage_account_name = azurerm_storage_account.test.name - storage_container_name = azurerm_storage_container.test.name - - type = "Page" - size = 5120 -} - -resource "azurerm_eventgrid_event_subscription" "test" { - name = "acctest-eg-%d" - scope = azurerm_resource_group.test.id - - storage_queue_endpoint { - storage_account_id = azurerm_storage_account.test.id - queue_name = azurerm_storage_queue.test.name - } - - storage_blob_dead_letter_destination { - storage_account_id = azurerm_storage_account.test.id - storage_blob_container_name = azurerm_storage_container.test.name - } - - retry_policy { - event_time_to_live = 12 - max_delivery_attempts = 10 - } - - subject_filter { - subject_begins_with = "test/test" - subject_ends_with = ".jpg" - } - - included_event_types = ["Microsoft.Storage.BlobCreated", "Microsoft.Storage.BlobDeleted"] - labels = ["test4", "test5", "test6"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMEventGridEventSubscription_eventHubID(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-eg-%d" - location = "%s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Basic" -} - -resource "azurerm_eventhub" "test" { - name = "acctesteventhub-%d" - namespace_name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - partition_count = 2 - message_retention = 1 -} - -resource "azurerm_eventgrid_event_subscription" "test" { - name = "acctest-eg-%d" - scope = azurerm_resource_group.test.id - event_delivery_schema = "CloudEventSchemaV1_0" - - eventhub_endpoint_id = azurerm_eventhub.test.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func 
testAccAzureRMEventGridEventSubscription_serviceBusQueueID(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} -resource "azurerm_resource_group" "test" { - name = "acctestRG-eg-%d" - location = "%s" -} - -resource "azurerm_servicebus_namespace" "example" { - name = "acctestservicebusnamespace-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Basic" -} -resource "azurerm_servicebus_queue" "test" { - name = "acctestservicebusqueue-%d" - resource_group_name = azurerm_resource_group.test.name - namespace_name = azurerm_servicebus_namespace.example.name - enable_partitioning = true -} -resource "azurerm_eventgrid_event_subscription" "test" { - name = "acctest-eg-%d" - scope = azurerm_resource_group.test.id - event_delivery_schema = "CloudEventSchemaV1_0" - service_bus_queue_endpoint_id = azurerm_servicebus_queue.test.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMEventGridEventSubscription_serviceBusTopicID(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} -resource "azurerm_resource_group" "test" { - name = "acctestRG-eg-%d" - location = "%s" -} -resource "azurerm_servicebus_namespace" "example" { - name = "acctestservicebusnamespace-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" -} -resource "azurerm_servicebus_topic" "test" { - name = "acctestservicebustopic-%d" - resource_group_name = azurerm_resource_group.test.name - namespace_name = azurerm_servicebus_namespace.example.name - enable_partitioning = true -} -resource "azurerm_eventgrid_event_subscription" "test" { - name = "acctest-eg-%d" - scope = azurerm_resource_group.test.id - event_delivery_schema = "CloudEventSchemaV1_0" - service_bus_topic_endpoint_id = azurerm_servicebus_topic.test.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMEventGridEventSubscription_filter(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-eg-%d" - location = "%s" -} - -resource "azurerm_storage_account" "test" { - name = "acctestacc%s" - resource_group_name = "${azurerm_resource_group.test.name}" - location = "${azurerm_resource_group.test.location}" - account_tier = "Standard" - account_replication_type = "LRS" - - tags = { - environment = "staging" - } -} - -resource "azurerm_storage_queue" "test" { - name = "mysamplequeue-%d" - storage_account_name = "${azurerm_storage_account.test.name}" -} - -resource "azurerm_eventgrid_event_subscription" "test" { - name = "acctest-eg-%d" - scope = "${azurerm_resource_group.test.id}" - - storage_queue_endpoint { - storage_account_id = "${azurerm_storage_account.test.id}" - queue_name = "${azurerm_storage_queue.test.name}" - } - - included_event_types = ["Microsoft.Storage.BlobCreated", "Microsoft.Storage.BlobDeleted"] - - subject_filter { - subject_begins_with = "test/test" - subject_ends_with = ".jpg" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMEventGridEventSubscription_advancedFilter(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource 
"azurerm_resource_group" "test" { - name = "acctestRG-eg-%d" - location = "%s" -} - -resource "azurerm_storage_account" "test" { - name = "acctestacc%s" - resource_group_name = "${azurerm_resource_group.test.name}" - location = "${azurerm_resource_group.test.location}" - account_tier = "Standard" - account_replication_type = "LRS" - - tags = { - environment = "staging" - } -} - -resource "azurerm_storage_queue" "test" { - name = "mysamplequeue-%d" - storage_account_name = "${azurerm_storage_account.test.name}" -} - -resource "azurerm_eventgrid_event_subscription" "test" { - name = "acctesteg-%d" - scope = "${azurerm_storage_account.test.id}" - - storage_queue_endpoint { - storage_account_id = "${azurerm_storage_account.test.id}" - queue_name = "${azurerm_storage_queue.test.name}" - } - - advanced_filter { - bool_equals { - key = "subject" - value = true - } - number_greater_than { - key = "data.metadataVersion" - value = 1 - } - number_greater_than_or_equals { - key = "data.contentLength" - value = 42.0 - } - number_less_than { - key = "data.contentLength" - value = 42.1 - } - number_less_than_or_equals { - key = "data.metadataVersion" - value = 2 - } - number_in { - key = "data.contentLength" - values = [0, 1, 1, 2, 3] - } - number_not_in { - key = "data.contentLength" - values = [5, 8, 13, 21, 34] - } - string_begins_with { - key = "subject" - values = ["foo"] - } - string_ends_with { - key = "subject" - values = ["bar"] - } - string_contains { - key = "data.contentType" - values = ["application", "octet-stream"] - } - string_in { - key = "data.blobType" - values = ["Block"] - } - string_not_in { - key = "data.blobType" - values = ["Page"] - } - } - -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/eventgrid/tests/eventgrid_system_topic_resource_test.go b/azurerm/internal/services/eventgrid/tests/eventgrid_system_topic_resource_test.go deleted file mode 100644 index 194590b7194e..000000000000 --- a/azurerm/internal/services/eventgrid/tests/eventgrid_system_topic_resource_test.go +++ /dev/null @@ -1,217 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMEventGridSystemTopic_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventgrid_system_topic", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventGridSystemTopicDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventGridSystemTopic_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventGridSystemTopicExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "source_arm_resource_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "topic_type"), - resource.TestCheckResourceAttrSet(data.ResourceName, "metric_arm_resource_id"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventGridSystemTopic_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventgrid_system_topic", 
"test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventGridSystemTopicDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventGridSystemTopic_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventGridSystemTopicExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMEventGridSystemTopic_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_eventgrid_system_topic"), - }, - }, - }) -} - -func TestAccAzureRMEventGridSystemTopic_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventgrid_system_topic", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventGridSystemTopicDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventGridSystemTopic_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventGridSystemTopicExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.Foo", "Bar"), - resource.TestCheckResourceAttrSet(data.ResourceName, "source_arm_resource_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "topic_type"), - resource.TestCheckResourceAttrSet(data.ResourceName, "metric_arm_resource_id"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMEventGridSystemTopicDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).EventGrid.SystemTopicsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_eventgrid_system_topic" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Event Grid System Topic still exists:\n%#v", resp) - } - } - - return nil -} - -func testCheckAzureRMEventGridSystemTopicExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).EventGrid.SystemTopicsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Event Grid System Topic: %s", name) - } - - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Event Grid System Topic %q (resource group: %s) does not exist", name, resourceGroup) - } - - return fmt.Errorf("Bad: Get on EventGridSystemTopicsClient: %s", err) - } - - return nil - } -} - -func testAccAzureRMEventGridSystemTopic_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource 
"azurerm_resource_group" "test" { - name = "acctestRG-eg-%d" - location = "%s" -} - -resource "azurerm_storage_account" "test" { - name = "acctestegst%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_eventgrid_system_topic" "test" { - name = "acctestEGST%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - source_arm_resource_id = azurerm_storage_account.test.id - topic_type = "Microsoft.Storage.StorageAccounts" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomIntOfLength(12), data.RandomIntOfLength(10)) -} - -func testAccAzureRMEventGridSystemTopic_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMEventGridSystemTopic_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_eventgrid_system_topic" "import" { - name = azurerm_eventgrid_system_topic.test.name - location = azurerm_eventgrid_system_topic.test.location - resource_group_name = azurerm_eventgrid_system_topic.test.resource_group_name - source_arm_resource_id = azurerm_eventgrid_system_topic.test.source_arm_resource_id - topic_type = azurerm_eventgrid_system_topic.test.topic_type -} -`, template) -} - -func testAccAzureRMEventGridSystemTopic_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-eg-%d" - location = "%s" -} - -resource "azurerm_storage_account" "test" { - name = "acctestegst%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_eventgrid_system_topic" "test" { - name = "acctestEGST%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - source_arm_resource_id = azurerm_storage_account.test.id - topic_type = "Microsoft.Storage.StorageAccounts" - - tags = { - "Foo" = "Bar" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomIntOfLength(12), data.RandomIntOfLength(10)) -} diff --git a/azurerm/internal/services/eventgrid/tests/eventgrid_topic_data_source_test.go b/azurerm/internal/services/eventgrid/tests/eventgrid_topic_data_source_test.go deleted file mode 100644 index ddfe6b24e22a..000000000000 --- a/azurerm/internal/services/eventgrid/tests/eventgrid_topic_data_source_test.go +++ /dev/null @@ -1,41 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMEventGridTopic_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_eventgrid_topic", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventGridTopicDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMEventGridTopic_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_access_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_access_key"), - ), - }, - }, - }) -} - -func 
testAccDataSourceAzureRMEventGridTopic_basic(data acceptance.TestData) string { - template := testAccAzureRMEventGridTopic_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_eventgrid_topic" "test" { - name = azurerm_eventgrid_topic.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, template) -} diff --git a/azurerm/internal/services/eventgrid/tests/eventgrid_topic_resource_test.go b/azurerm/internal/services/eventgrid/tests/eventgrid_topic_resource_test.go deleted file mode 100644 index 5b06f0fbf10d..000000000000 --- a/azurerm/internal/services/eventgrid/tests/eventgrid_topic_resource_test.go +++ /dev/null @@ -1,249 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMEventGridTopic_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventgrid_topic", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventGridTopicDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventGridTopic_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventGridTopicExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_access_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_access_key"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventGridTopic_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventgrid_topic", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventGridTopicDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventGridTopic_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventGridTopicExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMEventGridTopic_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_eventgrid_topic"), - }, - }, - }) -} - -func TestAccAzureRMEventGridTopic_mapping(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventgrid_topic", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventGridTopicDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventGridTopic_mapping(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventGridTopicExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "input_mapping_fields.0.topic", "test"), - resource.TestCheckResourceAttr(data.ResourceName, "input_mapping_fields.0.topic", "test"), - resource.TestCheckResourceAttr(data.ResourceName, "input_mapping_default_values.0.data_version", "1.0"), - resource.TestCheckResourceAttr(data.ResourceName, "input_mapping_default_values.0.subject", "DefaultSubject"), - ), - }, - data.ImportStep(), - }, - }) -} - -func 
TestAccAzureRMEventGridTopic_basicWithTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventgrid_topic", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventGridTopicDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventGridTopic_basicWithTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventGridTopicExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.foo", "bar"), - resource.TestCheckResourceAttrSet(data.ResourceName, "endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_access_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_access_key"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMEventGridTopicDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).EventGrid.TopicsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_eventgrid_topic" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("EventGrid Topic still exists:\n%#v", resp) - } - } - - return nil -} - -func testCheckAzureRMEventGridTopicExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).EventGrid.TopicsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for EventGrid Topic: %s", name) - } - - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: EventGrid Topic %q (resource group: %s) does not exist", name, resourceGroup) - } - - return fmt.Errorf("Bad: Get on eventGridTopicsClient: %s", err) - } - - return nil - } -} - -func testAccAzureRMEventGridTopic_basic(data acceptance.TestData) string { - // TODO: confirm if this is still the case - // currently only supported in "West Central US" & "West US 2" - location := "westus2" - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_eventgrid_topic" "test" { - name = "acctesteg-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} -`, data.RandomInteger, location, data.RandomInteger) -} - -func testAccAzureRMEventGridTopic_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMEventGridTopic_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_eventgrid_topic" "import" { - name = 
azurerm_eventgrid_topic.test.name - location = azurerm_eventgrid_topic.test.location - resource_group_name = azurerm_eventgrid_topic.test.resource_group_name -} -`, template) -} - -func testAccAzureRMEventGridTopic_mapping(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} -resource "azurerm_eventgrid_topic" "test" { - name = "acctesteg-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - input_schema = "CustomEventSchema" - input_mapping_fields { - topic = "test" - event_type = "test" - } - input_mapping_default_values { - data_version = "1.0" - subject = "DefaultSubject" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMEventGridTopic_basicWithTags(data acceptance.TestData) string { - // currently only supported in "West Central US" & "West US 2" - location := "westus2" - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_eventgrid_topic" "test" { - name = "acctesteg-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - tags = { - "foo" = "bar" - } -} -`, data.RandomInteger, location, data.RandomInteger) -} diff --git a/azurerm/internal/services/eventgrid/validate/domain_id.go b/azurerm/internal/services/eventgrid/validate/domain_id.go new file mode 100644 index 000000000000..68d6866f177a --- /dev/null +++ b/azurerm/internal/services/eventgrid/validate/domain_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventgrid/parse" +) + +func DomainID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.DomainID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/eventgrid/validate/domain_id_test.go b/azurerm/internal/services/eventgrid/validate/domain_id_test.go new file mode 100644 index 000000000000..0dbf8caac395 --- /dev/null +++ b/azurerm/internal/services/eventgrid/validate/domain_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestDomainID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/", + Valid: false, + }, + + { + // missing value for Name + Input: 
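// NOTE (editorial): a minimal sketch of how a generated validator such as
// validate.DomainID is typically attached to a schema attribute. The package
// name, helper name and the "domain_id" attribute below are illustrative
// placeholders and are not part of this diff; only validate.DomainID and the
// SDK schema types are taken from it.
package example

import (
	"github.com/hashicorp/terraform-plugin-sdk/helper/schema"

	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventgrid/validate"
)

// exampleDomainIDSchema declares a string attribute whose value must parse as
// an EventGrid Domain resource ID before the plan is accepted.
func exampleDomainIDSchema() map[string]*schema.Schema {
	return map[string]*schema.Schema{
		"domain_id": {
			Type:         schema.TypeString,
			Required:     true,
			ForceNew:     true,
			ValidateFunc: validate.DomainID,
		},
	}
}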
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/domains/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/domains/domain1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.EVENTGRID/DOMAINS/DOMAIN1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := DomainID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/eventgrid/validate/domain_topic_id.go b/azurerm/internal/services/eventgrid/validate/domain_topic_id.go new file mode 100644 index 000000000000..5f8257c7bc7b --- /dev/null +++ b/azurerm/internal/services/eventgrid/validate/domain_topic_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventgrid/parse" +) + +func DomainTopicID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.DomainTopicID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/eventgrid/validate/domain_topic_id_test.go b/azurerm/internal/services/eventgrid/validate/domain_topic_id_test.go new file mode 100644 index 000000000000..3f083c07d996 --- /dev/null +++ b/azurerm/internal/services/eventgrid/validate/domain_topic_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestDomainTopicID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing DomainName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/", + Valid: false, + }, + + { + // missing value for DomainName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/domains/", + Valid: false, + }, + + { + // missing TopicName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/domains/domain1/", + Valid: false, + }, + + { + // missing value for TopicName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/domains/domain1/topics/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/domains/domain1/topics/topic1", + Valid: true, + }, + + { + // 
upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.EVENTGRID/DOMAINS/DOMAIN1/TOPICS/TOPIC1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := DomainTopicID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/eventgrid/validate/system_topic_id.go b/azurerm/internal/services/eventgrid/validate/system_topic_id.go new file mode 100644 index 000000000000..b94159fa29d5 --- /dev/null +++ b/azurerm/internal/services/eventgrid/validate/system_topic_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventgrid/parse" +) + +func SystemTopicID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.SystemTopicID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/eventgrid/validate/system_topic_id_test.go b/azurerm/internal/services/eventgrid/validate/system_topic_id_test.go new file mode 100644 index 000000000000..055334dad7b7 --- /dev/null +++ b/azurerm/internal/services/eventgrid/validate/system_topic_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestSystemTopicID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/systemTopics/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/systemTopics/systemTopic1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.EVENTGRID/SYSTEMTOPICS/SYSTEMTOPIC1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := SystemTopicID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/eventgrid/validate/topic_id.go b/azurerm/internal/services/eventgrid/validate/topic_id.go new file mode 100644 index 000000000000..29a88772575f --- /dev/null +++ b/azurerm/internal/services/eventgrid/validate/topic_id.go @@ -0,0 +1,23 @@ +package 
validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventgrid/parse" +) + +func TopicID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.TopicID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/eventgrid/validate/topic_id_test.go b/azurerm/internal/services/eventgrid/validate/topic_id_test.go new file mode 100644 index 000000000000..029b23c13e7d --- /dev/null +++ b/azurerm/internal/services/eventgrid/validate/topic_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestTopicID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/topics/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.EventGrid/topics/topic1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.EVENTGRID/TOPICS/TOPIC1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := TopicID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/eventhub/eventhub_authorization_rule_data_source.go b/azurerm/internal/services/eventhub/eventhub_authorization_rule_data_source.go index 180c4184bbbc..6bc6d142a88f 100644 --- a/azurerm/internal/services/eventhub/eventhub_authorization_rule_data_source.go +++ b/azurerm/internal/services/eventhub/eventhub_authorization_rule_data_source.go @@ -11,9 +11,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceEventHubAuthorizationRule() *schema.Resource { +func EventHubAuthorizationRuleDataSource() *schema.Resource { return &schema.Resource{ - Read: dataSourceEventHubAuthorizationRuleRead, + Read: EventHubAuthorizationRuleDataSourceRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -69,7 +69,7 @@ func dataSourceEventHubAuthorizationRule() *schema.Resource { } } -func dataSourceEventHubAuthorizationRuleRead(d *schema.ResourceData, meta interface{}) error { +func EventHubAuthorizationRuleDataSourceRead(d *schema.ResourceData, meta interface{}) error { client := 
meta.(*clients.Client).Eventhub.EventHubsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/eventhub/eventhub_authorization_rule_data_source_test.go b/azurerm/internal/services/eventhub/eventhub_authorization_rule_data_source_test.go new file mode 100644 index 000000000000..fd156692cc50 --- /dev/null +++ b/azurerm/internal/services/eventhub/eventhub_authorization_rule_data_source_test.go @@ -0,0 +1,74 @@ +package eventhub_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type EventHubAuthorizationRuleDataSource struct { +} + +func TestAccEventHubAuthorizationRuleDataSource(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_eventhub_authorization_rule", "test") + r := EventHubAuthorizationRuleDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.base(data, true, true, true), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("name").Exists(), + check.That(data.ResourceName).Key("namespace_name").Exists(), + check.That(data.ResourceName).Key("eventhub_name").Exists(), + check.That(data.ResourceName).Key("primary_key").Exists(), + check.That(data.ResourceName).Key("secondary_key").Exists(), + check.That(data.ResourceName).Key("primary_connection_string").Exists(), + check.That(data.ResourceName).Key("secondary_connection_string").Exists(), + ), + }, + }) +} + +func TestAccEventHubAuthorizationRuleDataSource_withAliasConnectionString(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_eventhub_authorization_rule", "test") + r := EventHubAuthorizationRuleDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.withAliasConnectionString(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("primary_connection_string_alias").Exists(), + check.That(data.ResourceName).Key("secondary_connection_string_alias").Exists(), + ), + }, + }) +} + +func (EventHubAuthorizationRuleDataSource) base(data acceptance.TestData, listen, send, manage bool) string { + return fmt.Sprintf(` +%s + +data "azurerm_eventhub_authorization_rule" "test" { + name = azurerm_eventhub_authorization_rule.test.name + namespace_name = azurerm_eventhub_namespace.test.name + eventhub_name = azurerm_eventhub.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, EventHubAuthorizationRuleResource{}.base(data, listen, send, manage)) +} + +func (EventHubAuthorizationRuleDataSource) withAliasConnectionString(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_eventhub_authorization_rule" "test" { + name = azurerm_eventhub_authorization_rule.test.name + namespace_name = azurerm_eventhub_namespace.test.name + eventhub_name = azurerm_eventhub.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, EventHubAuthorizationRuleResource{}.withAliasConnectionString(data)) +} diff --git a/azurerm/internal/services/eventhub/eventhub_authorization_rule_resource.go b/azurerm/internal/services/eventhub/eventhub_authorization_rule_resource.go index c49e271bb615..a0a08873d501 100644 --- a/azurerm/internal/services/eventhub/eventhub_authorization_rule_resource.go +++ b/azurerm/internal/services/eventhub/eventhub_authorization_rule_resource.go @@ 
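// NOTE (editorial): the data-source test added above shows the newer test
// helpers in use: data.DataSourceTest(...) drives the steps and
// check.That(data.ResourceName).Key("...").Exists() replaces the older
// resource.TestCheckResourceAttrSet(...) assertions seen in the deleted
// EventGrid tests earlier in this diff.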
-16,12 +16,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmEventHubAuthorizationRule() *schema.Resource { +func resourceEventHubAuthorizationRule() *schema.Resource { return &schema.Resource{ - Create: resourceArmEventHubAuthorizationRuleCreateUpdate, - Read: resourceArmEventHubAuthorizationRuleRead, - Update: resourceArmEventHubAuthorizationRuleCreateUpdate, - Delete: resourceArmEventHubAuthorizationRuleDelete, + Create: resourceEventHubAuthorizationRuleCreateUpdate, + Read: resourceEventHubAuthorizationRuleRead, + Update: resourceEventHubAuthorizationRuleCreateUpdate, + Delete: resourceEventHubAuthorizationRuleDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -63,7 +63,7 @@ func resourceArmEventHubAuthorizationRule() *schema.Resource { } } -func resourceArmEventHubAuthorizationRuleCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceEventHubAuthorizationRuleCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Eventhub.EventHubsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -120,11 +120,11 @@ func resourceArmEventHubAuthorizationRuleCreateUpdate(d *schema.ResourceData, me d.SetId(*read.ID) - return resource.NonRetryableError(resourceArmEventHubAuthorizationRuleRead(d, meta)) + return resource.NonRetryableError(resourceEventHubAuthorizationRuleRead(d, meta)) }) } -func resourceArmEventHubAuthorizationRuleRead(d *schema.ResourceData, meta interface{}) error { +func resourceEventHubAuthorizationRuleRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Eventhub.EventHubsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -175,7 +175,7 @@ func resourceArmEventHubAuthorizationRuleRead(d *schema.ResourceData, meta inter return nil } -func resourceArmEventHubAuthorizationRuleDelete(d *schema.ResourceData, meta interface{}) error { +func resourceEventHubAuthorizationRuleDelete(d *schema.ResourceData, meta interface{}) error { eventhubClient := meta.(*clients.Client).Eventhub.EventHubsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/eventhub/eventhub_authorization_rule_resource_test.go b/azurerm/internal/services/eventhub/eventhub_authorization_rule_resource_test.go new file mode 100644 index 000000000000..a8193e72e568 --- /dev/null +++ b/azurerm/internal/services/eventhub/eventhub_authorization_rule_resource_test.go @@ -0,0 +1,345 @@ +package eventhub_test + +import ( + "context" + "fmt" + "strconv" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type EventHubAuthorizationRuleResource struct { +} + +func TestAccEventHubAuthorizationRule_listen(t *testing.T) { + testAccEventHubAuthorizationRule(t, true, false, false) +} + +func TestAccEventHubAuthorizationRule_send(t *testing.T) { + 
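// NOTE (editorial): the eventhub_authorization_rule_resource.go hunk above is a
// mechanical rename (resourceArmEventHubAuthorizationRule* ->
// resourceEventHubAuthorizationRule*); the CRUD wiring and behaviour are
// otherwise unchanged.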
testAccEventHubAuthorizationRule(t, false, true, false) +} + +func TestAccEventHubAuthorizationRule_listensend(t *testing.T) { + testAccEventHubAuthorizationRule(t, true, true, false) +} + +func TestAccEventHubAuthorizationRule_manage(t *testing.T) { + testAccEventHubAuthorizationRule(t, true, true, true) +} + +func testAccEventHubAuthorizationRule(t *testing.T, listen, send, manage bool) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_authorization_rule", "test") + r := EventHubAuthorizationRuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.base(data, listen, send, manage), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").Exists(), + check.That(data.ResourceName).Key("namespace_name").Exists(), + check.That(data.ResourceName).Key("eventhub_name").Exists(), + check.That(data.ResourceName).Key("primary_key").Exists(), + check.That(data.ResourceName).Key("secondary_key").Exists(), + check.That(data.ResourceName).Key("primary_connection_string").Exists(), + check.That(data.ResourceName).Key("secondary_connection_string").Exists(), + check.That(data.ResourceName).Key("listen").HasValue(strconv.FormatBool(listen)), + check.That(data.ResourceName).Key("send").HasValue(strconv.FormatBool(send)), + check.That(data.ResourceName).Key("manage").HasValue(strconv.FormatBool(manage)), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventHubAuthorizationRule_multi(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_authorization_rule", "test1") + r := EventHubAuthorizationRuleResource{} + resourceTwoName := "azurerm_eventhub_authorization_rule.test2" + resourceThreeName := "azurerm_eventhub_authorization_rule.test3" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.multi(data, true, true, true), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("manage").HasValue("false"), + check.That(data.ResourceName).Key("send").HasValue("true"), + check.That(data.ResourceName).Key("listen").HasValue("true"), + check.That(data.ResourceName).Key("primary_connection_string").Exists(), + check.That(data.ResourceName).Key("secondary_connection_string").Exists(), + check.That(resourceTwoName).ExistsInAzure(r), + resource.TestCheckResourceAttr(resourceTwoName, "manage", "false"), + resource.TestCheckResourceAttr(resourceTwoName, "send", "true"), + resource.TestCheckResourceAttr(resourceTwoName, "listen", "true"), + resource.TestCheckResourceAttrSet(resourceTwoName, "primary_connection_string"), + resource.TestCheckResourceAttrSet(resourceTwoName, "secondary_connection_string"), + check.That(resourceThreeName).ExistsInAzure(r), + resource.TestCheckResourceAttr(resourceThreeName, "manage", "false"), + resource.TestCheckResourceAttr(resourceThreeName, "send", "true"), + resource.TestCheckResourceAttr(resourceThreeName, "listen", "true"), + resource.TestCheckResourceAttrSet(resourceThreeName, "primary_connection_string"), + resource.TestCheckResourceAttrSet(resourceThreeName, "secondary_connection_string"), + ), + }, + data.ImportStep(), + { + ResourceName: resourceTwoName, + ImportState: true, + ImportStateVerify: true, + }, + { + ResourceName: resourceThreeName, + ImportState: true, + ImportStateVerify: true, + }, + }) +} + +func TestAccEventHubAuthorizationRule_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_authorization_rule", "test") + r := 
EventHubAuthorizationRuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.base(data, true, true, true), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data, true, true, true), + ExpectError: acceptance.RequiresImportError("azurerm_eventhub_authorization_rule"), + }, + }) +} + +func TestAccEventHubAuthorizationRule_rightsUpdate(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_authorization_rule", "test") + r := EventHubAuthorizationRuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.base(data, true, false, false), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("listen").HasValue("true"), + check.That(data.ResourceName).Key("send").HasValue("false"), + check.That(data.ResourceName).Key("manage").HasValue("false"), + ), + }, + { + Config: r.base(data, true, true, true), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").Exists(), + check.That(data.ResourceName).Key("namespace_name").Exists(), + check.That(data.ResourceName).Key("primary_key").Exists(), + check.That(data.ResourceName).Key("secondary_key").Exists(), + check.That(data.ResourceName).Key("primary_connection_string").Exists(), + check.That(data.ResourceName).Key("secondary_connection_string").Exists(), + check.That(data.ResourceName).Key("listen").HasValue("true"), + check.That(data.ResourceName).Key("send").HasValue("true"), + check.That(data.ResourceName).Key("manage").HasValue("true"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventHubAuthorizationRule_withAliasConnectionString(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_authorization_rule", "test") + r := EventHubAuthorizationRuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withAliasConnectionString(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("primary_connection_string_alias").Exists(), + check.That(data.ResourceName).Key("secondary_connection_string_alias").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func (EventHubAuthorizationRuleResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + name := id.Path["authorizationRules"] + namespaceName := id.Path["namespaces"] + eventHubName := id.Path["eventhubs"] + + resp, err := clients.Eventhub.EventHubsClient.GetAuthorizationRule(ctx, id.ResourceGroup, namespaceName, eventHubName, name) + if err != nil { + return nil, fmt.Errorf("retrieving Event Hub Authorization Rule %q (eventhub %s / namespace %s / resource group: %s) does not exist", name, eventHubName, namespaceName, id.ResourceGroup) + } + + return utils.Bool(resp.AuthorizationRuleProperties != nil), nil +} + +func (EventHubAuthorizationRuleResource) base(data acceptance.TestData, listen, send, manage bool) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%[1]d" + location = "%[2]s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = 
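// NOTE (editorial): a minimal, self-contained sketch of the acceptance-test
// pattern the added EventHub test files use: a typed resource struct provides
// an Exists check, steps are driven by data.ResourceTest, and state is
// asserted via check.That(...). "ExampleResource", "azurerm_example" and the
// stub config below are placeholders, not part of this diff.
package eventhub_test

import (
	"context"
	"fmt"
	"testing"

	"github.com/hashicorp/terraform-plugin-sdk/helper/resource"
	"github.com/hashicorp/terraform-plugin-sdk/terraform"
	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance"
	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check"
	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
)

type ExampleResource struct{}

func TestAccExample_basic(t *testing.T) {
	data := acceptance.BuildTestData(t, "azurerm_example", "test")
	r := ExampleResource{}

	data.ResourceTest(t, r, []resource.TestStep{
		{
			Config: r.basic(data),
			Check: resource.ComposeTestCheckFunc(
				check.That(data.ResourceName).ExistsInAzure(r),
			),
		},
		data.ImportStep(),
	})
}

// Exists mirrors the shape used above: a real implementation would call the
// service's API client and report whether the remote object was found.
func (ExampleResource) Exists(ctx context.Context, c *clients.Client, state *terraform.InstanceState) (*bool, error) {
	if state.ID == "" {
		return nil, fmt.Errorf("no resource ID set in state")
	}
	return utils.Bool(true), nil
}

// basic returns a stub configuration; a real test would declare the resource
// under test here so the ExistsInAzure check has something to look up.
func (ExampleResource) basic(data acceptance.TestData) string {
	return fmt.Sprintf(`
provider "azurerm" {
  features {}
}

resource "azurerm_resource_group" "test" {
  name     = "acctestRG-%d"
  location = "%s"
}
`, data.RandomInteger, data.Locations.Primary)
}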
azurerm_resource_group.test.name + + sku = "Standard" +} + +resource "azurerm_eventhub" "test" { + name = "acctesteventhub-%[1]d" + namespace_name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + + partition_count = 2 + message_retention = 1 +} + +resource "azurerm_eventhub_authorization_rule" "test" { + name = "acctest-%[1]d" + namespace_name = azurerm_eventhub_namespace.test.name + eventhub_name = azurerm_eventhub.test.name + resource_group_name = azurerm_resource_group.test.name + + listen = %[3]t + send = %[4]t + manage = %[5]t +} +`, data.RandomInteger, data.Locations.Primary, listen, send, manage) +} + +func (EventHubAuthorizationRuleResource) multi(data acceptance.TestData, listen, send, manage bool) string { + template := EventHubAuthorizationRuleResource{}.base(data, listen, send, manage) + return fmt.Sprintf(` +%s + +resource "azurerm_eventhub_authorization_rule" "test1" { + name = "acctestruleone-%d" + eventhub_name = azurerm_eventhub.test.name + namespace_name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + send = true + listen = true +} + +resource "azurerm_eventhub_authorization_rule" "test2" { + name = "acctestruletwo-%d" + eventhub_name = azurerm_eventhub.test.name + namespace_name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + send = true + listen = true +} + +resource "azurerm_eventhub_authorization_rule" "test3" { + name = "acctestrulethree-%d" + eventhub_name = azurerm_eventhub.test.name + namespace_name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + send = true + listen = true +} +`, template, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (EventHubAuthorizationRuleResource) requiresImport(data acceptance.TestData, listen, send, manage bool) string { + template := EventHubAuthorizationRuleResource{}.base(data, listen, send, manage) + return fmt.Sprintf(` +%s + +resource "azurerm_eventhub_authorization_rule" "import" { + name = azurerm_eventhub_authorization_rule.test.name + namespace_name = azurerm_eventhub_authorization_rule.test.namespace_name + eventhub_name = azurerm_eventhub_authorization_rule.test.eventhub_name + resource_group_name = azurerm_eventhub_authorization_rule.test.resource_group_name + listen = azurerm_eventhub_authorization_rule.test.listen + send = azurerm_eventhub_authorization_rule.test.send + manage = azurerm_eventhub_authorization_rule.test.manage +} +`, template) +} + +func (EventHubAuthorizationRuleResource) withAliasConnectionString(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-ehar-%[1]d" + location = "%[2]s" +} + +resource "azurerm_resource_group" "test2" { + name = "acctestRG2-ehar-%[1]d" + location = "%[3]s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku = "Standard" +} + +resource "azurerm_eventhub_namespace" "test2" { + name = "acctesteventhubnamespace2-%[1]d" + location = azurerm_resource_group.test2.location + resource_group_name = azurerm_resource_group.test2.name + + sku = "Standard" +} + +resource "azurerm_eventhub_namespace_disaster_recovery_config" "test" { + name = "acctest-EHN-DRC-%[1]d" + resource_group_name = 
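// NOTE (editorial): the "requiresImport" configuration above re-declares an
// identical resource named "import" that reads every argument from the
// resource created in the previous step; the test then expects
// acceptance.RequiresImportError("azurerm_eventhub_authorization_rule"),
// verifying the provider refuses to create a resource that already exists
// rather than silently adopting it.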
azurerm_resource_group.test.name + namespace_name = azurerm_eventhub_namespace.test.name + partner_namespace_id = azurerm_eventhub_namespace.test2.id +} + +resource "azurerm_eventhub" "test" { + name = "acctesteventhub-%[1]d" + namespace_name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + + partition_count = 2 + message_retention = 1 +} + +resource "azurerm_eventhub_authorization_rule" "test" { + name = "acctest-%[1]d" + namespace_name = azurerm_eventhub_namespace.test.name + eventhub_name = azurerm_eventhub.test.name + resource_group_name = azurerm_resource_group.test.name + + listen = true + send = true + manage = true + + depends_on = [azurerm_eventhub_namespace_disaster_recovery_config.test] +} +`, data.RandomInteger, data.Locations.Primary, data.Locations.Secondary) +} diff --git a/azurerm/internal/services/eventhub/eventhub_cluster_resource.go b/azurerm/internal/services/eventhub/eventhub_cluster_resource.go index 2cb8361f1732..0857f047adb3 100644 --- a/azurerm/internal/services/eventhub/eventhub_cluster_resource.go +++ b/azurerm/internal/services/eventhub/eventhub_cluster_resource.go @@ -3,6 +3,7 @@ package eventhub import ( "fmt" "log" + "regexp" "strings" "time" @@ -20,12 +21,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmEventHubCluster() *schema.Resource { +func resourceEventHubCluster() *schema.Resource { return &schema.Resource{ - Create: resourceArmEventHubClusterCreateUpdate, - Read: resourceArmEventHubClusterRead, - Update: resourceArmEventHubClusterCreateUpdate, - Delete: resourceArmEventHubClusterDelete, + Create: resourceEventHubClusterCreateUpdate, + Read: resourceEventHubClusterRead, + Update: resourceEventHubClusterCreateUpdate, + Delete: resourceEventHubClusterDelete, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { _, err := parse.ClusterID(id) return err @@ -55,9 +56,10 @@ func resourceArmEventHubCluster() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validation.StringInSlice([]string{ - "Dedicated_1", - }, false), + ValidateFunc: validation.StringMatch( + regexp.MustCompile(`^Dedicated_[1-9][0-9]*$`), + "SKU name must match /^Dedicated_[1-9][0-9]*$/.", + ), }, "tags": tags.Schema(), @@ -65,7 +67,7 @@ func resourceArmEventHubCluster() *schema.Resource { } } -func resourceArmEventHubClusterCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceEventHubClusterCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Eventhub.ClusterClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -100,10 +102,10 @@ func resourceArmEventHubClusterCreateUpdate(d *schema.ResourceData, meta interfa d.SetId(*read.ID) - return resourceArmEventHubClusterRead(d, meta) + return resourceEventHubClusterRead(d, meta) } -func resourceArmEventHubClusterRead(d *schema.ResourceData, meta interface{}) error { +func resourceEventHubClusterRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Eventhub.ClusterClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -131,7 +133,7 @@ func resourceArmEventHubClusterRead(d *schema.ResourceData, meta interface{}) er return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmEventHubClusterDelete(d *schema.ResourceData, meta interface{}) error { +func resourceEventHubClusterDelete(d 
*schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Eventhub.ClusterClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/eventhub/eventhub_cluster_resource_test.go b/azurerm/internal/services/eventhub/eventhub_cluster_resource_test.go new file mode 100644 index 000000000000..d3bc7846489a --- /dev/null +++ b/azurerm/internal/services/eventhub/eventhub_cluster_resource_test.go @@ -0,0 +1,120 @@ +package eventhub_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventhub/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type EventHubClusterResource struct { +} + +func TestAccEventHubCluster_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_cluster", "test") + r := EventHubClusterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventHubCluster_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_cluster", "test") + r := EventHubClusterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (EventHubClusterResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ClusterID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Eventhub.ClusterClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving %s: %v", id.String(), err) + } + + return utils.Bool(resp.ClusterProperties != nil), nil +} + +func (EventHubClusterResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eventhub-%d" + location = "%s" +} + +resource "azurerm_eventhub_cluster" "test" { + name = "acctesteventhubclusTER-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + sku_name = "Dedicated_1" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (EventHubClusterResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eventhub-%d" + location = "%s" +} + +resource "azurerm_eventhub_cluster" "test" { + name = "acctesteventhubclusTER-%d" + resource_group_name = 
azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + sku_name = "Dedicated_1" + + tags = { + environment = "Production" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/eventhub/eventhub_consumer_group_data_source.go b/azurerm/internal/services/eventhub/eventhub_consumer_group_data_source.go index 60bf2c12ca4f..11f3dac1e8bb 100644 --- a/azurerm/internal/services/eventhub/eventhub_consumer_group_data_source.go +++ b/azurerm/internal/services/eventhub/eventhub_consumer_group_data_source.go @@ -12,9 +12,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceEventHubConsumerGroup() *schema.Resource { +func EventHubConsumerGroupDataSource() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmEventHubConsumerGroupRead, + Read: EventHubConsumerGroupDataSourceRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -54,7 +54,7 @@ func dataSourceEventHubConsumerGroup() *schema.Resource { } } -func dataSourceArmEventHubConsumerGroupRead(d *schema.ResourceData, meta interface{}) error { +func EventHubConsumerGroupDataSourceRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Eventhub.ConsumerGroupClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/eventhub/eventhub_consumer_group_data_source_test.go b/azurerm/internal/services/eventhub/eventhub_consumer_group_data_source_test.go new file mode 100644 index 000000000000..c84457154ab5 --- /dev/null +++ b/azurerm/internal/services/eventhub/eventhub_consumer_group_data_source_test.go @@ -0,0 +1,119 @@ +package eventhub_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type EventHubConsumerGroupDataSource struct { +} + +func TestAccEventHubConsumerGroupDataSource_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_eventhub_consumer_group", "test") + r := EventHubConsumerGroupDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("user_metadata").HasValue("some-meta-data"), + ), + }, + }) +} + +func TestAccEventHubConsumerGroupDataSource_completeDefault(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_eventhub_consumer_group", "test") + r := EventHubConsumerGroupDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.completeDefault(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("user_metadata").HasValue("some-meta-data"), + ), + }, + }) +} + +func (EventHubConsumerGroupDataSource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eh-%d" + location = "%s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" +} + +resource "azurerm_eventhub" "test" { + name = "acctesteventhub-%d" + namespace_name = 
azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + partition_count = 2 + message_retention = 7 +} + +resource "azurerm_eventhub_consumer_group" "test" { + name = "acctesteventhubcg-%d" + namespace_name = azurerm_eventhub_namespace.test.name + eventhub_name = azurerm_eventhub.test.name + resource_group_name = azurerm_resource_group.test.name + user_metadata = "some-meta-data" +} + +data "azurerm_eventhub_consumer_group" "test" { + name = azurerm_eventhub_consumer_group.test.name + namespace_name = azurerm_eventhub_namespace.test.name + eventhub_name = azurerm_eventhub.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (EventHubConsumerGroupDataSource) completeDefault(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eh-%d" + location = "%s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" +} + +resource "azurerm_eventhub" "test" { + name = "acctesteventhub-%d" + namespace_name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + partition_count = 2 + message_retention = 7 +} + +data "azurerm_eventhub_consumer_group" "test" { + name = "$Default" + namespace_name = azurerm_eventhub_namespace.test.name + eventhub_name = azurerm_eventhub.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/eventhub/eventhub_consumer_group_resource.go b/azurerm/internal/services/eventhub/eventhub_consumer_group_resource.go index da984e9a7c01..fb69ddfdbc67 100644 --- a/azurerm/internal/services/eventhub/eventhub_consumer_group_resource.go +++ b/azurerm/internal/services/eventhub/eventhub_consumer_group_resource.go @@ -1,178 +1,207 @@ package eventhub import ( + "context" "fmt" - "log" "time" "github.com/Azure/azure-sdk-for-go/services/preview/eventhub/mgmt/2018-01-01-preview/eventhub" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/sdk" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventhub/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventhub/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmEventHubConsumerGroup() *schema.Resource { - return &schema.Resource{ - Create: resourceArmEventHubConsumerGroupCreateUpdate, - Read: resourceArmEventHubConsumerGroupRead, - Update: resourceArmEventHubConsumerGroupCreateUpdate, - Delete: resourceArmEventHubConsumerGroupDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, +type 
ConsumerGroupObject struct { + Name string `tfschema:"name"` + NamespaceName string `tfschema:"namespace_name"` + EventHubName string `tfschema:"eventhub_name"` + ResourceGroupName string `tfschema:"resource_group_name"` + UserMetadata string `tfschema:"user_metadata"` +} + +var _ sdk.Resource = ConsumerGroupResource{} +var _ sdk.ResourceWithUpdate = ConsumerGroupResource{} + +type ConsumerGroupResource struct { +} + +func (r ConsumerGroupResource) ResourceType() string { + return "azurerm_eventhub_consumer_group" +} + +func (r ConsumerGroupResource) Arguments() map[string]*schema.Schema { + return map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateEventHubConsumerName(), + }, + + "namespace_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateEventHubNamespaceName(), }, - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), + "eventhub_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateEventHubName(), }, - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateEventHubConsumerName(), - }, - - "namespace_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateEventHubNamespaceName(), - }, - - "eventhub_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateEventHubName(), - }, - - "resource_group_name": azure.SchemaResourceGroupName(), - - "user_metadata": { - Type: schema.TypeString, - Optional: true, - ValidateFunc: validation.StringLenBetween(1, 1024), - }, + "resource_group_name": azure.SchemaResourceGroupName(), + + "user_metadata": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringLenBetween(1, 1024), }, } } -func resourceArmEventHubConsumerGroupCreateUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Eventhub.ConsumerGroupClient - ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - log.Printf("[INFO] preparing arguments for AzureRM EventHub Consumer Group creation.") +func (r ConsumerGroupResource) Attributes() map[string]*schema.Schema { + return map[string]*schema.Schema{} +} + +func (r ConsumerGroupResource) Create() sdk.ResourceFunc { + return sdk.ResourceFunc{ + Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { + metadata.Logger.Info("Decoding state..") + var state ConsumerGroupObject + if err := metadata.Decode(&state); err != nil { + return err + } - name := d.Get("name").(string) - namespaceName := d.Get("namespace_name").(string) - eventHubName := d.Get("eventhub_name").(string) - resGroup := d.Get("resource_group_name").(string) + metadata.Logger.Infof("creating Consumer Group %q..", state.Name) + client := metadata.Client.Eventhub.ConsumerGroupClient + subscriptionId := metadata.Client.Account.SubscriptionId - if d.IsNewResource() { - existing, err := client.Get(ctx, resGroup, namespaceName, eventHubName, name) - if err != nil { + id := parse.NewEventHubConsumerGroupID(subscriptionId, state.ResourceGroupName, state.NamespaceName, state.EventHubName, state.Name) + existing, err := client.Get(ctx, 
state.ResourceGroupName, state.NamespaceName, state.EventHubName, state.Name) + if err != nil && !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for the presence of an existing Consumer Group %q: %+v", state.Name, err) + } if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing EventHub Consumer Group %q (EventHub %q / Namespace %q / Resource Group %q): %s", name, eventHubName, namespaceName, resGroup, err) + return metadata.ResourceRequiresImport(r.ResourceType(), id) } - } - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_eventhub_consumer_group", *existing.ID) - } - } + parameters := eventhub.ConsumerGroup{ + Name: utils.String(state.Name), + ConsumerGroupProperties: &eventhub.ConsumerGroupProperties{ + UserMetadata: utils.String(state.UserMetadata), + }, + } - userMetaData := d.Get("user_metadata").(string) + if _, err := client.CreateOrUpdate(ctx, state.ResourceGroupName, state.NamespaceName, state.EventHubName, state.Name, parameters); err != nil { + return fmt.Errorf("creating Consumer Group %q (EventHub %q / Namespace %q / Resource Group %q): %+v", state.Name, state.EventHubName, state.NamespaceName, state.ResourceGroupName, err) + } - parameters := eventhub.ConsumerGroup{ - Name: &name, - ConsumerGroupProperties: &eventhub.ConsumerGroupProperties{ - UserMetadata: &userMetaData, + metadata.SetID(id) + return nil }, + Timeout: 30 * time.Minute, } +} - if _, err := client.CreateOrUpdate(ctx, resGroup, namespaceName, eventHubName, name, parameters); err != nil { - return err - } - - read, err := client.Get(ctx, resGroup, namespaceName, eventHubName, name) - - if err != nil { - return err - } +func (r ConsumerGroupResource) Update() sdk.ResourceFunc { + return sdk.ResourceFunc{ + Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { + id, err := parse.EventHubConsumerGroupID(metadata.ResourceData.Id()) + if err != nil { + return err + } - if read.ID == nil { - return fmt.Errorf("Cannot read EventHub Consumer Group %s (resource group %s) ID", name, resGroup) - } + metadata.Logger.Info("Decoding state..") + var state ConsumerGroupObject + if err := metadata.Decode(&state); err != nil { + return err + } - d.SetId(*read.ID) + metadata.Logger.Infof("updating Consumer Group %q..", state.Name) + client := metadata.Client.Eventhub.ConsumerGroupClient - return resourceArmEventHubConsumerGroupRead(d, meta) -} + parameters := eventhub.ConsumerGroup{ + Name: utils.String(id.ConsumergroupName), + ConsumerGroupProperties: &eventhub.ConsumerGroupProperties{ + UserMetadata: utils.String(state.UserMetadata), + }, + } -func resourceArmEventHubConsumerGroupRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Eventhub.ConsumerGroupClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() + if _, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.NamespaceName, id.EventhubName, id.ConsumergroupName, parameters); err != nil { + return fmt.Errorf("updating Consumer Group %q (EventHub %q / Namespace %q / Resource Group %q): %+v", id.ConsumergroupName, id.EventhubName, id.NamespaceName, id.ResourceGroup, err) + } - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - resGroup := id.ResourceGroup - namespaceName := id.Path["namespaces"] - eventHubName := id.Path["eventhubs"] - name := id.Path["consumergroups"] - - resp, err := client.Get(ctx, resGroup, 
namespaceName, eventHubName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - d.SetId("") return nil - } - return fmt.Errorf("Error making Read request on Azure EventHub Consumer Group %s: %+v", name, err) + }, + Timeout: 30 * time.Minute, } +} - d.Set("name", name) - d.Set("eventhub_name", eventHubName) - d.Set("namespace_name", namespaceName) - d.Set("resource_group_name", resGroup) +func (r ConsumerGroupResource) Read() sdk.ResourceFunc { + return sdk.ResourceFunc{ + Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { + client := metadata.Client.Eventhub.ConsumerGroupClient + id, err := parse.EventHubConsumerGroupID(metadata.ResourceData.Id()) + if err != nil { + return err + } - if resp.ConsumerGroupProperties != nil { - d.Set("user_metadata", resp.ConsumerGroupProperties.UserMetadata) - } + metadata.Logger.Infof("retrieving Consumer Group %q..", id.ConsumergroupName) + resp, err := client.Get(ctx, id.ResourceGroup, id.NamespaceName, id.EventhubName, id.ConsumergroupName) + if err != nil { + return fmt.Errorf("reading Consumer Group %q (EventHub %q / Namespace %q / Resource Group %q): %+v", id.ConsumergroupName, id.EventhubName, id.NamespaceName, id.ResourceGroup, err) + } - return nil -} + state := ConsumerGroupObject{ + Name: id.ConsumergroupName, + NamespaceName: id.NamespaceName, + EventHubName: id.EventhubName, + ResourceGroupName: id.ResourceGroup, + } -func resourceArmEventHubConsumerGroupDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Eventhub.ConsumerGroupClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() + if props := resp.ConsumerGroupProperties; props != nil { + state.UserMetadata = utils.NormalizeNilableString(props.UserMetadata) + } - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err + return metadata.Encode(&state) + }, + Timeout: 5 * time.Minute, } - resGroup := id.ResourceGroup - namespaceName := id.Path["namespaces"] - eventHubName := id.Path["eventhubs"] - name := id.Path["consumergroups"] +} + +func (r ConsumerGroupResource) Delete() sdk.ResourceFunc { + return sdk.ResourceFunc{ + Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { + client := metadata.Client.Eventhub.ConsumerGroupClient + id, err := parse.EventHubConsumerGroupID(metadata.ResourceData.Id()) + if err != nil { + return err + } - resp, err := client.Delete(ctx, resGroup, namespaceName, eventHubName, name) + metadata.Logger.Infof("deleting Consumer Group %q..", id.ConsumergroupName) + if resp, err := client.Delete(ctx, id.ResourceGroup, id.NamespaceName, id.EventhubName, id.ConsumergroupName); err != nil { + if !utils.ResponseWasNotFound(resp) { + return fmt.Errorf("deleting Consumer Group %q (EventHub %q / Namespace %q / Resource Group %q): %+v", id.ConsumergroupName, id.EventhubName, id.NamespaceName, id.ResourceGroup, err) + } + } - if err != nil { - if !utils.ResponseWasNotFound(resp) { - return fmt.Errorf("Error issuing Azure ARM delete request of EventHub Consumer Group '%s': %+v", name, err) - } + return nil + }, + Timeout: 30 * time.Minute, } +} + +func (r ConsumerGroupResource) ModelObject() interface{} { + return ConsumerGroupObject{} +} - return nil +func (r ConsumerGroupResource) IDValidationFunc() schema.SchemaValidateFunc { + return validate.EventHubConsumerGroupID } diff --git a/azurerm/internal/services/eventhub/eventhub_consumer_group_resource_test.go 
b/azurerm/internal/services/eventhub/eventhub_consumer_group_resource_test.go new file mode 100644 index 000000000000..5f2ad1ba2d5c --- /dev/null +++ b/azurerm/internal/services/eventhub/eventhub_consumer_group_resource_test.go @@ -0,0 +1,187 @@ +package eventhub_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventhub/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type EventHubConsumerGroupResource struct { +} + +func TestAccEventHubConsumerGroup_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_consumer_group", "test") + r := EventHubConsumerGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventHubConsumerGroup_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_consumer_group", "test") + r := EventHubConsumerGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_eventhub_consumer_group"), + }, + }) +} + +func TestAccEventHubConsumerGroup_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_consumer_group", "test") + r := EventHubConsumerGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventHubConsumerGroup_userMetadataUpdate(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_consumer_group", "test") + r := EventHubConsumerGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("user_metadata").HasValue("some-meta-data"), + ), + }, + data.ImportStep(), + }) +} + +func (EventHubConsumerGroupResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.EventHubConsumerGroupID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Eventhub.ConsumerGroupClient.Get(ctx, id.ResourceGroup, id.NamespaceName, id.EventhubName, id.ConsumergroupName) + if err != nil { + return nil, fmt.Errorf("retrieving %s: %v", id.String(), err) + } + + return utils.Bool(resp.ConsumerGroupProperties != nil), nil +} + +func (EventHubConsumerGroupResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = 
"acctestRG-eh-%d" + location = "%s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" +} + +resource "azurerm_eventhub" "test" { + name = "acctesteventhub-%d" + namespace_name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + partition_count = 2 + message_retention = 7 +} + +resource "azurerm_eventhub_consumer_group" "test" { + name = "acctesteventhubcg-%d" + namespace_name = azurerm_eventhub_namespace.test.name + eventhub_name = azurerm_eventhub.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (EventHubConsumerGroupResource) requiresImport(data acceptance.TestData) string { + template := EventHubConsumerGroupResource{}.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_eventhub_consumer_group" "import" { + name = azurerm_eventhub_consumer_group.test.name + namespace_name = azurerm_eventhub_consumer_group.test.namespace_name + eventhub_name = azurerm_eventhub_consumer_group.test.eventhub_name + resource_group_name = azurerm_eventhub_consumer_group.test.resource_group_name +} +`, template) +} + +func (EventHubConsumerGroupResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eh-%d" + location = "%s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" +} + +resource "azurerm_eventhub" "test" { + name = "acctesteventhub-%d" + namespace_name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + partition_count = 2 + message_retention = 7 +} + +resource "azurerm_eventhub_consumer_group" "test" { + name = "acctesteventhubcg-%d" + namespace_name = azurerm_eventhub_namespace.test.name + eventhub_name = azurerm_eventhub.test.name + resource_group_name = azurerm_resource_group.test.name + user_metadata = "some-meta-data" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/eventhub/eventhub_data_source_test.go b/azurerm/internal/services/eventhub/eventhub_data_source_test.go new file mode 100644 index 000000000000..feba670fb53d --- /dev/null +++ b/azurerm/internal/services/eventhub/eventhub_data_source_test.go @@ -0,0 +1,63 @@ +package eventhub_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type EventHubDataSource struct { +} + +func TestAccEventHubDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_eventhub", "test") + r := EventHubDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("partition_count").HasValue("2"), + check.That(data.ResourceName).Key("partition_ids.#").HasValue("2"), + ), + }, + }) +} + +func 
(EventHubDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eventhub-%[1]d" + location = "%[2]s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctest-EHN-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku = "Basic" +} + +resource "azurerm_eventhub" "test" { + name = "acctest-eh-%[1]d" + resource_group_name = azurerm_resource_group.test.name + namespace_name = azurerm_eventhub_namespace.test.name + partition_count = 2 + message_retention = 1 +} + +data "azurerm_eventhub" "test" { + name = azurerm_eventhub.test.name + namespace_name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, data.RandomInteger, data.Locations.Primary) +} diff --git a/azurerm/internal/services/eventhub/eventhub_namespace_authorization_rule_data_source.go b/azurerm/internal/services/eventhub/eventhub_namespace_authorization_rule_data_source.go index 38990f1ae47b..49372ef20d05 100644 --- a/azurerm/internal/services/eventhub/eventhub_namespace_authorization_rule_data_source.go +++ b/azurerm/internal/services/eventhub/eventhub_namespace_authorization_rule_data_source.go @@ -11,9 +11,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceEventHubNamespaceAuthorizationRule() *schema.Resource { +func EventHubNamespaceDataSourceAuthorizationRule() *schema.Resource { return &schema.Resource{ - Read: dataSourceEventHubNamespaceAuthorizationRuleRead, + Read: EventHubNamespaceDataSourceAuthorizationRuleRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -88,7 +88,7 @@ func dataSourceEventHubNamespaceAuthorizationRule() *schema.Resource { } } -func dataSourceEventHubNamespaceAuthorizationRuleRead(d *schema.ResourceData, meta interface{}) error { +func EventHubNamespaceDataSourceAuthorizationRuleRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Eventhub.NamespacesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/eventhub/eventhub_namespace_authorization_rule_data_source_test.go b/azurerm/internal/services/eventhub/eventhub_namespace_authorization_rule_data_source_test.go new file mode 100644 index 000000000000..1ed6e0c9505e --- /dev/null +++ b/azurerm/internal/services/eventhub/eventhub_namespace_authorization_rule_data_source_test.go @@ -0,0 +1,100 @@ +package eventhub_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type EventHubNamespaceAuthorizationRuleDataSource struct { +} + +func TestAccEventHubNamespaceAuthorizationRuleDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_eventhub_namespace_authorization_rule", "test") + r := EventHubNamespaceAuthorizationRuleDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data, true, true, true), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("listen").Exists(), + check.That(data.ResourceName).Key("manage").Exists(), + check.That(data.ResourceName).Key("send").Exists(), + ), + }, + }) 
+}
+
+func TestAccEventHubNamespaceAuthorizationRuleDataSource_withAliasConnectionString(t *testing.T) {
+	data := acceptance.BuildTestData(t, "data.azurerm_eventhub_namespace_authorization_rule", "test")
+	r := EventHubNamespaceAuthorizationRuleDataSource{}
+
+	data.DataSourceTest(t, []resource.TestStep{
+		{
+			// `primary_connection_string_alias` and `secondary_connection_string_alias` are still `nil` while `data.azurerm_eventhub_namespace_authorization_rule` is retrieving the resource, since `azurerm_eventhub_namespace_disaster_recovery_config` hasn't been created yet.
+			// So these two properties should be checked in the second run.
+			// And `depends_on` cannot be applied to `azurerm_eventhub_namespace_authorization_rule`.
+			// Because it would throw the error message `BreakPairing operation is only allowed on primary namespace with valid secondary namespace.` while destroying `azurerm_eventhub_namespace_disaster_recovery_config` if `depends_on` is applied.
+			Config: r.withAliasConnectionString(data),
+		},
+		{
+			Config: r.withAliasConnectionString(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).Key("primary_connection_string_alias").Exists(),
+				check.That(data.ResourceName).Key("secondary_connection_string_alias").Exists(),
+			),
+		},
+	})
+}
+
+func (EventHubNamespaceAuthorizationRuleDataSource) basic(data acceptance.TestData, listen, send, manage bool) string {
+	return fmt.Sprintf(`
+provider "azurerm" {
+  features {}
+}
+
+resource "azurerm_resource_group" "test" {
+  name     = "acctestRG-eventhub-%[1]d"
+  location = "%[2]s"
+}
+
+resource "azurerm_eventhub_namespace" "test" {
+  name                = "acctest-EHN-%[1]d"
+  location            = azurerm_resource_group.test.location
+  resource_group_name = azurerm_resource_group.test.name
+
+  sku = "Standard"
+}
+
+resource "azurerm_eventhub_namespace_authorization_rule" "test" {
+  name                = "acctest-EHN-AR%[1]d"
+  namespace_name      = azurerm_eventhub_namespace.test.name
+  resource_group_name = azurerm_resource_group.test.name
+
+  listen = %[3]t
+  send   = %[4]t
+  manage = %[5]t
+}
+
+data "azurerm_eventhub_namespace_authorization_rule" "test" {
+  name                = azurerm_eventhub_namespace_authorization_rule.test.name
+  namespace_name      = azurerm_eventhub_namespace.test.name
+  resource_group_name = azurerm_resource_group.test.name
+}
+`, data.RandomInteger, data.Locations.Primary, listen, send, manage)
+}
+
+func (EventHubNamespaceAuthorizationRuleDataSource) withAliasConnectionString(data acceptance.TestData) string {
+	return fmt.Sprintf(`
+%s
+
+data "azurerm_eventhub_namespace_authorization_rule" "test" {
+  name                = azurerm_eventhub_namespace_authorization_rule.test.name
+  namespace_name      = azurerm_eventhub_namespace.test.name
+  resource_group_name = azurerm_resource_group.test.name
+}
+`, EventHubNamespaceAuthorizationRuleResource{}.withAliasConnectionString(data))
+}
diff --git a/azurerm/internal/services/eventhub/eventhub_namespace_authorization_rule_resource.go b/azurerm/internal/services/eventhub/eventhub_namespace_authorization_rule_resource.go
index a988b8ffa6f0..0fff73d196de 100644
--- a/azurerm/internal/services/eventhub/eventhub_namespace_authorization_rule_resource.go
+++ b/azurerm/internal/services/eventhub/eventhub_namespace_authorization_rule_resource.go
@@ -17,12 +17,12 @@ import (
 	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
 )
 
-func resourceArmEventHubNamespaceAuthorizationRule() *schema.Resource {
+func resourceEventHubNamespaceAuthorizationRule() *schema.Resource {
 	return &schema.Resource{
-		Create: 
resourceArmEventHubNamespaceAuthorizationRuleCreateUpdate, - Read: resourceArmEventHubNamespaceAuthorizationRuleRead, - Update: resourceArmEventHubNamespaceAuthorizationRuleCreateUpdate, - Delete: resourceArmEventHubNamespaceAuthorizationRuleDelete, + Create: resourceEventHubNamespaceAuthorizationRuleCreateUpdate, + Read: resourceEventHubNamespaceAuthorizationRuleRead, + Update: resourceEventHubNamespaceAuthorizationRuleCreateUpdate, + Delete: resourceEventHubNamespaceAuthorizationRuleDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -71,7 +71,7 @@ func resourceArmEventHubNamespaceAuthorizationRule() *schema.Resource { } } -func resourceArmEventHubNamespaceAuthorizationRuleCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceEventHubNamespaceAuthorizationRuleCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Eventhub.NamespacesClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -120,10 +120,10 @@ func resourceArmEventHubNamespaceAuthorizationRuleCreateUpdate(d *schema.Resourc d.SetId(*read.ID) - return resourceArmEventHubNamespaceAuthorizationRuleRead(d, meta) + return resourceEventHubNamespaceAuthorizationRuleRead(d, meta) } -func resourceArmEventHubNamespaceAuthorizationRuleRead(d *schema.ResourceData, meta interface{}) error { +func resourceEventHubNamespaceAuthorizationRuleRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Eventhub.NamespacesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -133,16 +133,16 @@ func resourceArmEventHubNamespaceAuthorizationRuleRead(d *schema.ResourceData, m return err } - resp, err := client.GetAuthorizationRule(ctx, id.ResourceGroup, id.NamespaceName, id.Name) + resp, err := client.GetAuthorizationRule(ctx, id.ResourceGroup, id.NamespaceName, id.AuthorizationRuleName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("retrieving Authorization Rule %q (EventHub Namespace %q / Resource Group %q) : %+v", id.Name, id.NamespaceName, id.ResourceGroup, err) + return fmt.Errorf("retrieving Authorization Rule %q (EventHub Namespace %q / Resource Group %q) : %+v", id.AuthorizationRuleName, id.NamespaceName, id.ResourceGroup, err) } - d.Set("name", id.Name) + d.Set("name", id.AuthorizationRuleName) d.Set("namespace_name", id.NamespaceName) d.Set("resource_group_name", id.ResourceGroup) @@ -153,9 +153,9 @@ func resourceArmEventHubNamespaceAuthorizationRuleRead(d *schema.ResourceData, m d.Set("send", send) } - keysResp, err := client.ListKeys(ctx, id.ResourceGroup, id.NamespaceName, id.Name) + keysResp, err := client.ListKeys(ctx, id.ResourceGroup, id.NamespaceName, id.AuthorizationRuleName) if err != nil { - return fmt.Errorf("retrieving Keys for Authorization Rule %q (EventHub Namespace %q / Resource Group %q): %+v", id.Name, id.NamespaceName, id.ResourceGroup, err) + return fmt.Errorf("retrieving Keys for Authorization Rule %q (EventHub Namespace %q / Resource Group %q): %+v", id.AuthorizationRuleName, id.NamespaceName, id.ResourceGroup, err) } d.Set("primary_key", keysResp.PrimaryKey) @@ -168,7 +168,7 @@ func resourceArmEventHubNamespaceAuthorizationRuleRead(d *schema.ResourceData, m return nil } -func resourceArmEventHubNamespaceAuthorizationRuleDelete(d *schema.ResourceData, meta interface{}) error { +func resourceEventHubNamespaceAuthorizationRuleDelete(d 
*schema.ResourceData, meta interface{}) error { eventhubClient := meta.(*clients.Client).Eventhub.NamespacesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() @@ -181,8 +181,8 @@ func resourceArmEventHubNamespaceAuthorizationRuleDelete(d *schema.ResourceData, locks.ByName(id.NamespaceName, eventHubNamespaceResourceName) defer locks.UnlockByName(id.NamespaceName, eventHubNamespaceResourceName) - if _, err := eventhubClient.DeleteAuthorizationRule(ctx, id.ResourceGroup, id.NamespaceName, id.Name); err != nil { - return fmt.Errorf("deleting Authorization Rule %q (EventHub Namespace %q / Resource Group %q): %+v", id.Name, id.NamespaceName, id.ResourceGroup, err) + if _, err := eventhubClient.DeleteAuthorizationRule(ctx, id.ResourceGroup, id.NamespaceName, id.AuthorizationRuleName); err != nil { + return fmt.Errorf("deleting Authorization Rule %q (EventHub Namespace %q / Resource Group %q): %+v", id.AuthorizationRuleName, id.NamespaceName, id.ResourceGroup, err) } return nil diff --git a/azurerm/internal/services/eventhub/eventhub_namespace_authorization_rule_resource_test.go b/azurerm/internal/services/eventhub/eventhub_namespace_authorization_rule_resource_test.go new file mode 100644 index 000000000000..3f3a5ea20ad3 --- /dev/null +++ b/azurerm/internal/services/eventhub/eventhub_namespace_authorization_rule_resource_test.go @@ -0,0 +1,319 @@ +package eventhub_test + +import ( + "context" + "fmt" + "strconv" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventhub/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type EventHubNamespaceAuthorizationRuleResource struct { +} + +func TestAccEventHubNamespaceAuthorizationRule_listen(t *testing.T) { + testAccEventHubNamespaceAuthorizationRule(t, true, false, false) +} + +func TestAccEventHubNamespaceAuthorizationRule_send(t *testing.T) { + testAccEventHubNamespaceAuthorizationRule(t, false, true, false) +} + +func TestAccEventHubNamespaceAuthorizationRule_listensend(t *testing.T) { + testAccEventHubNamespaceAuthorizationRule(t, true, true, false) +} + +func TestAccEventHubNamespaceAuthorizationRule_manage(t *testing.T) { + testAccEventHubNamespaceAuthorizationRule(t, true, true, true) +} + +func testAccEventHubNamespaceAuthorizationRule(t *testing.T, listen, send, manage bool) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace_authorization_rule", "test") + r := EventHubNamespaceAuthorizationRuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.base(data, listen, send, manage), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").Exists(), + check.That(data.ResourceName).Key("namespace_name").Exists(), + check.That(data.ResourceName).Key("primary_key").Exists(), + check.That(data.ResourceName).Key("secondary_key").Exists(), + check.That(data.ResourceName).Key("primary_connection_string").Exists(), + check.That(data.ResourceName).Key("secondary_connection_string").Exists(), + 
check.That(data.ResourceName).Key("listen").HasValue(strconv.FormatBool(listen)),
+				check.That(data.ResourceName).Key("send").HasValue(strconv.FormatBool(send)),
+				check.That(data.ResourceName).Key("manage").HasValue(strconv.FormatBool(manage)),
+			),
+		},
+		data.ImportStep(),
+	})
+}
+
+func TestAccEventHubNamespaceAuthorizationRule_requiresImport(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace_authorization_rule", "test")
+	r := EventHubNamespaceAuthorizationRuleResource{}
+
+	data.ResourceTest(t, r, []resource.TestStep{
+		{
+			Config: r.base(data, true, true, true),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		{
+			Config:      r.requiresImport(data, true, true, true),
+			ExpectError: acceptance.RequiresImportError("azurerm_eventhub_namespace_authorization_rule"),
+		},
+	})
+}
+
+func TestAccEventHubNamespaceAuthorizationRule_rightsUpdate(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace_authorization_rule", "test")
+	r := EventHubNamespaceAuthorizationRuleResource{}
+
+	data.ResourceTest(t, r, []resource.TestStep{
+		{
+			Config: r.base(data, true, false, false),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+				check.That(data.ResourceName).Key("listen").HasValue("true"),
+				check.That(data.ResourceName).Key("send").HasValue("false"),
+				check.That(data.ResourceName).Key("manage").HasValue("false"),
+			),
+		},
+		{
+			Config: r.base(data, true, true, true),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+				check.That(data.ResourceName).Key("name").Exists(),
+				check.That(data.ResourceName).Key("namespace_name").Exists(),
+				check.That(data.ResourceName).Key("primary_key").Exists(),
+				check.That(data.ResourceName).Key("secondary_key").Exists(),
+				check.That(data.ResourceName).Key("primary_connection_string").Exists(),
+				check.That(data.ResourceName).Key("secondary_connection_string").Exists(),
+				check.That(data.ResourceName).Key("listen").HasValue("true"),
+				check.That(data.ResourceName).Key("send").HasValue("true"),
+				check.That(data.ResourceName).Key("manage").HasValue("true"),
+			),
+		},
+		data.ImportStep(),
+	})
+}
+
+func TestAccEventHubNamespaceAuthorizationRule_withAliasConnectionString(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace_authorization_rule", "test")
+	r := EventHubNamespaceAuthorizationRuleResource{}
+
+	data.ResourceTest(t, r, []resource.TestStep{
+		{
+			// `primary_connection_string_alias` and `secondary_connection_string_alias` are still `nil` in `azurerm_eventhub_namespace_authorization_rule` after `azurerm_eventhub_namespace` has been created successfully, since `azurerm_eventhub_namespace_disaster_recovery_config` hasn't been created yet.
+			// So these two properties should be checked in the second run.
+			// And `depends_on` cannot be applied to `azurerm_eventhub_namespace_authorization_rule`.
+			// Because it would throw the error message `BreakPairing operation is only allowed on primary namespace with valid secondary namespace.` while destroying `azurerm_eventhub_namespace_disaster_recovery_config` if `depends_on` is applied.
+ Config: r.withAliasConnectionString(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.withAliasConnectionString(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("primary_connection_string_alias").Exists(), + check.That(data.ResourceName).Key("secondary_connection_string_alias").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventHubNamespaceAuthorizationRule_multi(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace_authorization_rule", "test1") + r := EventHubNamespaceAuthorizationRuleResource{} + resourceTwoName := "azurerm_eventhub_namespace_authorization_rule.test2" + resourceThreeName := "azurerm_eventhub_namespace_authorization_rule.test3" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.multi(data, true, true, true), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("manage").HasValue("false"), + check.That(data.ResourceName).Key("send").HasValue("true"), + check.That(data.ResourceName).Key("listen").HasValue("true"), + check.That(data.ResourceName).Key("primary_connection_string").Exists(), + check.That(data.ResourceName).Key("secondary_connection_string").Exists(), + check.That(resourceTwoName).ExistsInAzure(r), + resource.TestCheckResourceAttr(resourceTwoName, "manage", "false"), + resource.TestCheckResourceAttr(resourceTwoName, "send", "true"), + resource.TestCheckResourceAttr(resourceTwoName, "listen", "true"), + resource.TestCheckResourceAttrSet(resourceTwoName, "primary_connection_string"), + resource.TestCheckResourceAttrSet(resourceTwoName, "secondary_connection_string"), + check.That(resourceThreeName).ExistsInAzure(r), + resource.TestCheckResourceAttr(resourceThreeName, "manage", "false"), + resource.TestCheckResourceAttr(resourceThreeName, "send", "true"), + resource.TestCheckResourceAttr(resourceThreeName, "listen", "true"), + resource.TestCheckResourceAttrSet(resourceThreeName, "primary_connection_string"), + resource.TestCheckResourceAttrSet(resourceThreeName, "secondary_connection_string"), + ), + }, + data.ImportStep(), + data.ImportStepFor(resourceTwoName), + data.ImportStepFor(resourceThreeName), + }) +} + +func (EventHubNamespaceAuthorizationRuleResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.NamespaceAuthorizationRuleID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Eventhub.NamespacesClient.GetAuthorizationRule(ctx, id.ResourceGroup, id.NamespaceName, id.AuthorizationRuleName) + if err != nil { + return nil, fmt.Errorf("retrieving %s: %v", id.String(), err) + } + + return utils.Bool(resp.AuthorizationRuleProperties != nil), nil +} + +func (EventHubNamespaceAuthorizationRuleResource) base(data acceptance.TestData, listen, send, manage bool) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eventhub-%[1]d" + location = "%[2]s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctest-EHN-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku = "Standard" +} + +resource "azurerm_eventhub_namespace_authorization_rule" "test" { + name = "acctest-EHN-AR%[1]d" + namespace_name = azurerm_eventhub_namespace.test.name + resource_group_name = 
azurerm_resource_group.test.name + + listen = %[3]t + send = %[4]t + manage = %[5]t +} +`, data.RandomInteger, data.Locations.Primary, listen, send, manage) +} + +func (EventHubNamespaceAuthorizationRuleResource) withAliasConnectionString(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-ehnar-%[1]d" + location = "%[2]s" +} + +resource "azurerm_resource_group" "test2" { + name = "acctestRG2-ehnar-%[1]d" + location = "%[3]s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" +} + +resource "azurerm_eventhub_namespace" "test2" { + name = "acctesteventhubnamespace2-%[1]d" + location = azurerm_resource_group.test2.location + resource_group_name = azurerm_resource_group.test2.name + sku = "Standard" +} + +resource "azurerm_eventhub_namespace_disaster_recovery_config" "test" { + name = "acctest-EHN-DRC-%[1]d" + resource_group_name = azurerm_resource_group.test.name + namespace_name = azurerm_eventhub_namespace.test.name + partner_namespace_id = azurerm_eventhub_namespace.test2.id +} + +resource "azurerm_eventhub_namespace_authorization_rule" "test" { + name = "acctest-EHN-AR%[1]d" + namespace_name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + + listen = true + send = true + manage = true +} +`, data.RandomInteger, data.Locations.Primary, data.Locations.Secondary) +} + +func (EventHubNamespaceAuthorizationRuleResource) requiresImport(data acceptance.TestData, listen, send, manage bool) string { + template := EventHubNamespaceAuthorizationRuleResource{}.base(data, listen, send, manage) + return fmt.Sprintf(` +%s + +resource "azurerm_eventhub_namespace_authorization_rule" "import" { + name = azurerm_eventhub_namespace_authorization_rule.test.name + namespace_name = azurerm_eventhub_namespace_authorization_rule.test.namespace_name + resource_group_name = azurerm_eventhub_namespace_authorization_rule.test.resource_group_name + listen = azurerm_eventhub_namespace_authorization_rule.test.listen + send = azurerm_eventhub_namespace_authorization_rule.test.send + manage = azurerm_eventhub_namespace_authorization_rule.test.manage +} +`, template) +} + +func (EventHubNamespaceAuthorizationRuleResource) multi(data acceptance.TestData, listen, send, manage bool) string { + template := EventHubNamespaceAuthorizationRuleResource{}.base(data, listen, send, manage) + return fmt.Sprintf(` +%s + +resource "azurerm_eventhub_namespace_authorization_rule" "test1" { + name = "acctestruleone-%d" + namespace_name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + + send = true + listen = true + manage = false +} + +resource "azurerm_eventhub_namespace_authorization_rule" "test2" { + name = "acctestruletwo-%d" + namespace_name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + + send = true + listen = true + manage = false +} + +resource "azurerm_eventhub_namespace_authorization_rule" "test3" { + name = "acctestrulethree-%d" + namespace_name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + + send = true + listen = true + manage = false +} +`, template, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} diff --git 
a/azurerm/internal/services/eventhub/eventhub_namespace_data_source.go b/azurerm/internal/services/eventhub/eventhub_namespace_data_source.go
index b383494ff9a7..8dee650bc277 100644
--- a/azurerm/internal/services/eventhub/eventhub_namespace_data_source.go
+++ b/azurerm/internal/services/eventhub/eventhub_namespace_data_source.go
@@ -13,9 +13,9 @@ import (
 	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
 )
 
-func dataSourceEventHubNamespace() *schema.Resource {
+func EventHubNamespaceDataSource() *schema.Resource {
 	return &schema.Resource{
-		Read: dataSourceEventHubNamespaceRead,
+		Read: EventHubNamespaceDataSourceRead,
 
 		Timeouts: &schema.ResourceTimeout{
 			Read: schema.DefaultTimeout(5 * time.Minute),
@@ -107,7 +107,7 @@ func dataSourceEventHubNamespace() *schema.Resource {
 	}
 }
 
-func dataSourceEventHubNamespaceRead(d *schema.ResourceData, meta interface{}) error {
+func EventHubNamespaceDataSourceRead(d *schema.ResourceData, meta interface{}) error {
 	client := meta.(*clients.Client).Eventhub.NamespacesClient
 	ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d)
 	defer cancel()
diff --git a/azurerm/internal/services/eventhub/eventhub_namespace_data_source_test.go b/azurerm/internal/services/eventhub/eventhub_namespace_data_source_test.go
new file mode 100644
index 000000000000..efd8cb0918f4
--- /dev/null
+++ b/azurerm/internal/services/eventhub/eventhub_namespace_data_source_test.go
@@ -0,0 +1,128 @@
+package eventhub_test
+
+import (
+	"fmt"
+	"testing"
+
+	"github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance"
+	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check"
+)
+
+type EventHubNamespaceDataSource struct {
+}
+
+func TestAccEventHubNamespaceDataSource_basic(t *testing.T) {
+	data := acceptance.BuildTestData(t, "data.azurerm_eventhub_namespace", "test")
+	r := EventHubNamespaceDataSource{}
+
+	data.DataSourceTest(t, []resource.TestStep{
+		{
+			Config: r.basic(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).Key("sku").HasValue("Basic"),
+			),
+		},
+	})
+}
+
+func TestAccEventHubNamespaceDataSource_complete(t *testing.T) {
+	data := acceptance.BuildTestData(t, "data.azurerm_eventhub_namespace", "test")
+	r := EventHubNamespaceDataSource{}
+
+	data.DataSourceTest(t, []resource.TestStep{
+		{
+			Config: r.complete(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).Key("sku").HasValue("Standard"),
+				check.That(data.ResourceName).Key("capacity").HasValue("2"),
+				check.That(data.ResourceName).Key("auto_inflate_enabled").HasValue("true"),
+				check.That(data.ResourceName).Key("maximum_throughput_units").HasValue("20"),
+			),
+		},
+	})
+}
+
+func TestAccEventHubNamespaceDataSource_withAliasConnectionString(t *testing.T) {
+	data := acceptance.BuildTestData(t, "data.azurerm_eventhub_namespace", "test")
+	r := EventHubNamespaceDataSource{}
+
+	data.DataSourceTest(t, []resource.TestStep{
+		{
+			// `default_primary_connection_string_alias` and `default_secondary_connection_string_alias` are still `nil` while `data.azurerm_eventhub_namespace` is retrieving the resource, since `azurerm_eventhub_namespace_disaster_recovery_config` hasn't been created yet.
+			// So these two properties should be checked in the second run.
+ Config: r.withAliasConnectionString(data), + }, + { + Config: r.withAliasConnectionString(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("default_primary_connection_string_alias").Exists(), + check.That(data.ResourceName).Key("default_secondary_connection_string_alias").Exists(), + ), + }, + }) +} + +func (EventHubNamespaceDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eh-%d" + location = "%s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Basic" +} + +data "azurerm_eventhub_namespace" "test" { + name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_eventhub_namespace.test.resource_group_name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (EventHubNamespaceDataSource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eh-%d" + location = "%s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + capacity = "2" + auto_inflate_enabled = true + maximum_throughput_units = 20 +} + +data "azurerm_eventhub_namespace" "test" { + name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_eventhub_namespace.test.resource_group_name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (EventHubNamespaceDataSource) withAliasConnectionString(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_eventhub_namespace" "test" { + name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_eventhub_namespace.test.resource_group_name +} +`, EventHubNamespaceResource{}.withAliasConnectionString(data)) +} diff --git a/azurerm/internal/services/eventhub/eventhub_namespace_disaster_recovery_config_resource.go b/azurerm/internal/services/eventhub/eventhub_namespace_disaster_recovery_config_resource.go index 62d6d5d338c9..773a34219411 100644 --- a/azurerm/internal/services/eventhub/eventhub_namespace_disaster_recovery_config_resource.go +++ b/azurerm/internal/services/eventhub/eventhub_namespace_disaster_recovery_config_resource.go @@ -18,12 +18,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmEventHubNamespaceDisasterRecoveryConfig() *schema.Resource { +func resourceEventHubNamespaceDisasterRecoveryConfig() *schema.Resource { return &schema.Resource{ - Create: resourceArmEventHubNamespaceDisasterRecoveryConfigCreate, - Read: resourceArmEventHubNamespaceDisasterRecoveryConfigRead, - Update: resourceArmEventHubNamespaceDisasterRecoveryConfigUpdate, - Delete: resourceArmEventHubNamespaceDisasterRecoveryConfigDelete, + Create: resourceEventHubNamespaceDisasterRecoveryConfigCreate, + Read: resourceEventHubNamespaceDisasterRecoveryConfigRead, + Update: resourceEventHubNamespaceDisasterRecoveryConfigUpdate, + Delete: resourceEventHubNamespaceDisasterRecoveryConfigDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -68,7 +68,7 @@ func 
resourceArmEventHubNamespaceDisasterRecoveryConfig() *schema.Resource { } } -func resourceArmEventHubNamespaceDisasterRecoveryConfigCreate(d *schema.ResourceData, meta interface{}) error { +func resourceEventHubNamespaceDisasterRecoveryConfigCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Eventhub.DisasterRecoveryConfigsClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -106,7 +106,7 @@ func resourceArmEventHubNamespaceDisasterRecoveryConfigCreate(d *schema.Resource return fmt.Errorf("Error creating/updating EventHub Namespace Disaster Recovery Configs %q (Namespace %q / Resource Group %q): %s", name, namespaceName, resourceGroup, err) } - if err := resourceArmEventHubNamespaceDisasterRecoveryConfigWaitForState(ctx, client, resourceGroup, namespaceName, name, d.Timeout(schema.TimeoutCreate)); err != nil { + if err := resourceEventHubNamespaceDisasterRecoveryConfigWaitForState(ctx, client, resourceGroup, namespaceName, name, d.Timeout(schema.TimeoutCreate)); err != nil { return fmt.Errorf("Error waiting for replication to complete for EventHub Namespace Disaster Recovery Configs %q (Namespace %q / Resource Group %q): %s", name, namespaceName, resourceGroup, err) } @@ -121,10 +121,10 @@ func resourceArmEventHubNamespaceDisasterRecoveryConfigCreate(d *schema.Resource d.SetId(*read.ID) - return resourceArmEventHubNamespaceDisasterRecoveryConfigRead(d, meta) + return resourceEventHubNamespaceDisasterRecoveryConfigRead(d, meta) } -func resourceArmEventHubNamespaceDisasterRecoveryConfigUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceEventHubNamespaceDisasterRecoveryConfigUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Eventhub.DisasterRecoveryConfigsClient ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -145,7 +145,7 @@ func resourceArmEventHubNamespaceDisasterRecoveryConfigUpdate(d *schema.Resource return fmt.Errorf("Error issuing break pairing request for EventHub Namespace Disaster Recovery Configs %q (Namespace %q / Resource Group %q): %s", name, namespaceName, resourceGroup, err) } - if err := resourceArmEventHubNamespaceDisasterRecoveryConfigWaitForState(ctx, client, resourceGroup, namespaceName, name, d.Timeout(schema.TimeoutUpdate)); err != nil { + if err := resourceEventHubNamespaceDisasterRecoveryConfigWaitForState(ctx, client, resourceGroup, namespaceName, name, d.Timeout(schema.TimeoutUpdate)); err != nil { return fmt.Errorf("Error waiting for break pairing request to complete for EventHub Namespace Disaster Recovery Configs %q (Namespace %q / Resource Group %q): %s", name, namespaceName, resourceGroup, err) } } @@ -164,14 +164,14 @@ func resourceArmEventHubNamespaceDisasterRecoveryConfigUpdate(d *schema.Resource return fmt.Errorf("Error creating/updating EventHub Namespace Disaster Recovery Configs %q (Namespace %q / Resource Group %q): %s", name, namespaceName, resourceGroup, err) } - if err := resourceArmEventHubNamespaceDisasterRecoveryConfigWaitForState(ctx, client, resourceGroup, namespaceName, name, d.Timeout(schema.TimeoutUpdate)); err != nil { + if err := resourceEventHubNamespaceDisasterRecoveryConfigWaitForState(ctx, client, resourceGroup, namespaceName, name, d.Timeout(schema.TimeoutUpdate)); err != nil { return fmt.Errorf("Error waiting for replication to complete for EventHub Namespace Disaster Recovery Configs %q (Namespace %q / Resource Group %q): %s", name, namespaceName, 
resourceGroup, err) } - return resourceArmEventHubNamespaceDisasterRecoveryConfigRead(d, meta) + return resourceEventHubNamespaceDisasterRecoveryConfigRead(d, meta) } -func resourceArmEventHubNamespaceDisasterRecoveryConfigRead(d *schema.ResourceData, meta interface{}) error { +func resourceEventHubNamespaceDisasterRecoveryConfigRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Eventhub.DisasterRecoveryConfigsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -206,7 +206,7 @@ func resourceArmEventHubNamespaceDisasterRecoveryConfigRead(d *schema.ResourceDa return nil } -func resourceArmEventHubNamespaceDisasterRecoveryConfigDelete(d *schema.ResourceData, meta interface{}) error { +func resourceEventHubNamespaceDisasterRecoveryConfigDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Eventhub.DisasterRecoveryConfigsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() @@ -228,7 +228,7 @@ func resourceArmEventHubNamespaceDisasterRecoveryConfigDelete(d *schema.Resource return fmt.Errorf("Error breaking pairing for EventHub Namespace Disaster Recovery Configs %q (Namespace %q / Resource Group %q): %s", name, namespaceName, resourceGroup, err) } - if err := resourceArmEventHubNamespaceDisasterRecoveryConfigWaitForState(ctx, client, resourceGroup, namespaceName, name, d.Timeout(schema.TimeoutDelete)); err != nil { + if err := resourceEventHubNamespaceDisasterRecoveryConfigWaitForState(ctx, client, resourceGroup, namespaceName, name, d.Timeout(schema.TimeoutDelete)); err != nil { return fmt.Errorf("Error waiting for break pairing request to complete for EventHub Namespace Disaster Recovery Configs %q (Namespace %q / Resource Group %q): %s", name, namespaceName, resourceGroup, err) } @@ -244,7 +244,6 @@ func resourceArmEventHubNamespaceDisasterRecoveryConfigDelete(d *schema.Resource Timeout: d.Timeout(schema.TimeoutDelete), Refresh: func() (interface{}, string, error) { resp, err := client.Get(ctx, resourceGroup, namespaceName, name) - if err != nil { if utils.ResponseWasNotFound(resp.Response) { return resp, strconv.Itoa(resp.StatusCode), nil @@ -284,7 +283,7 @@ func resourceArmEventHubNamespaceDisasterRecoveryConfigDelete(d *schema.Resource return nil } -func resourceArmEventHubNamespaceDisasterRecoveryConfigWaitForState(ctx context.Context, client *eventhub.DisasterRecoveryConfigsClient, resourceGroup, namespaceName, name string, timeout time.Duration) error { +func resourceEventHubNamespaceDisasterRecoveryConfigWaitForState(ctx context.Context, client *eventhub.DisasterRecoveryConfigsClient, resourceGroup, namespaceName, name string, timeout time.Duration) error { stateConf := &resource.StateChangeConf{ Pending: []string{string(eventhub.Accepted)}, Target: []string{string(eventhub.Succeeded)}, diff --git a/azurerm/internal/services/eventhub/eventhub_namespace_disaster_recovery_config_resource_test.go b/azurerm/internal/services/eventhub/eventhub_namespace_disaster_recovery_config_resource_test.go new file mode 100644 index 000000000000..d55469d12fbf --- /dev/null +++ b/azurerm/internal/services/eventhub/eventhub_namespace_disaster_recovery_config_resource_test.go @@ -0,0 +1,240 @@ +package eventhub_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type EventHubNamespaceDisasterRecoveryConfigResource struct { +} + +func TestAccEventHubNamespaceDisasterRecoveryConfig_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace_disaster_recovery_config", "test") + r := EventHubNamespaceDisasterRecoveryConfigResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventHubNamespaceDisasterRecoveryConfig_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace_disaster_recovery_config", "test") + + // skipping due to there being no way to delete a DRC once an alternate name has been set + // sdk bug: https://github.com/Azure/azure-sdk-for-go/issues/5893 + t.Skip() + + r := EventHubNamespaceDisasterRecoveryConfigResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventHubNamespaceDisasterRecoveryConfig_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace_disaster_recovery_config", "test") + r := EventHubNamespaceDisasterRecoveryConfigResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.updated(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.updated_removed(data), + }, + }) +} + +func (EventHubNamespaceDisasterRecoveryConfigResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + name := id.Path["disasterRecoveryConfigs"] + resourceGroup := id.ResourceGroup + namespaceName := id.Path["namespaces"] + + resp, err := clients.Eventhub.DisasterRecoveryConfigsClient.Get(ctx, resourceGroup, namespaceName, name) + if err != nil { + return nil, fmt.Errorf("retrieving EventHub Namespace Disaster Recovery Configs %q (namespace %q / resource group: %q): %v", name, namespaceName, id.ResourceGroup, err) + } + + return utils.Bool(resp.ArmDisasterRecoveryProperties != nil), nil +} + +func (EventHubNamespaceDisasterRecoveryConfigResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eventhub-%[1]d" + location = "%[2]s" +} + +resource "azurerm_eventhub_namespace" "testa" { + name = "acctest-EHN-%[1]d-a" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" +} + +resource "azurerm_eventhub_namespace" "testb" { + name = "acctest-EHN-%[1]d-b" + location = "%[3]s" + resource_group_name = 
azurerm_resource_group.test.name + sku = "Standard" +} + +resource "azurerm_eventhub_namespace_disaster_recovery_config" "test" { + name = "acctest-EHN-DRC-%[1]d" + resource_group_name = azurerm_resource_group.test.name + namespace_name = azurerm_eventhub_namespace.testa.name + partner_namespace_id = azurerm_eventhub_namespace.testb.id +} +`, data.RandomInteger, data.Locations.Primary, data.Locations.Secondary) +} + +// nolint unused - mistakenly marked as unused +func (EventHubNamespaceDisasterRecoveryConfigResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eventhub-%[1]d" + location = "%[2]s" +} + +resource "azurerm_eventhub_namespace" "testa" { + name = "acctest-EHN-%[1]d-a" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" +} + +resource "azurerm_eventhub_namespace" "testb" { + name = "acctest-EHN-%[1]d-b" + location = "%[3]s" + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" +} + +resource "azurerm_eventhub_namespace_disaster_recovery_config" "test" { + name = "${azurerm_eventhub_namespace.testa.name}-111" + resource_group_name = azurerm_resource_group.test.name + namespace_name = azurerm_eventhub_namespace.testa.name + partner_namespace_id = azurerm_eventhub_namespace.testb.id + alternate_name = "acctest-EHN-DRC-%[1]d-alt" +} +`, data.RandomInteger, data.Locations.Primary, data.Locations.Secondary) +} + +func (EventHubNamespaceDisasterRecoveryConfigResource) updated(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eventhub-%[1]d" + location = "%[2]s" +} + +resource "azurerm_eventhub_namespace" "testa" { + name = "acctest-EHN-%[1]d-a" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" +} + +resource "azurerm_eventhub_namespace" "testb" { + name = "acctest-EHN-%[1]d-b" + location = "%[3]s" + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" +} + +resource "azurerm_eventhub_namespace" "testc" { + name = "acctest-EHN-%[1]d-c" + location = "%[3]s" + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" +} + +resource "azurerm_eventhub_namespace_disaster_recovery_config" "test" { + name = "acctest-EHN-DRC-%[1]d" + resource_group_name = azurerm_resource_group.test.name + namespace_name = azurerm_eventhub_namespace.testa.name + partner_namespace_id = azurerm_eventhub_namespace.testc.id +} +`, data.RandomInteger, data.Locations.Primary, data.Locations.Secondary) +} + +func (EventHubNamespaceDisasterRecoveryConfigResource) updated_removed(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eventhub-%[1]d" + location = "%[2]s" +} + +resource "azurerm_eventhub_namespace" "testa" { + name = "acctest-EHN-%[1]d-a" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" +} + +resource "azurerm_eventhub_namespace" "testb" { + name = "acctest-EHN-%[1]d-b" + location = "%[3]s" + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" +} + +resource "azurerm_eventhub_namespace" "testc" { + name = "acctest-EHN-%[1]d-c" + location = "%[3]s" + 
resource_group_name = azurerm_resource_group.test.name + sku = "Standard" +} +`, data.RandomInteger, data.Locations.Primary, data.Locations.Secondary) +} diff --git a/azurerm/internal/services/eventhub/eventhub_namespace_resource.go b/azurerm/internal/services/eventhub/eventhub_namespace_resource.go index 14137c115e4f..9dd09c54bb1d 100644 --- a/azurerm/internal/services/eventhub/eventhub_namespace_resource.go +++ b/azurerm/internal/services/eventhub/eventhub_namespace_resource.go @@ -26,15 +26,17 @@ import ( // Default Authorization Rule/Policy created by Azure, used to populate the // default connection strings and keys -var eventHubNamespaceDefaultAuthorizationRule = "RootManageSharedAccessKey" -var eventHubNamespaceResourceName = "azurerm_eventhub_namespace" +var ( + eventHubNamespaceDefaultAuthorizationRule = "RootManageSharedAccessKey" + eventHubNamespaceResourceName = "azurerm_eventhub_namespace" +) -func resourceArmEventHubNamespace() *schema.Resource { +func resourceEventHubNamespace() *schema.Resource { return &schema.Resource{ - Create: resourceArmEventHubNamespaceCreateUpdate, - Read: resourceArmEventHubNamespaceRead, - Update: resourceArmEventHubNamespaceCreateUpdate, - Delete: resourceArmEventHubNamespaceDelete, + Create: resourceEventHubNamespaceCreateUpdate, + Read: resourceEventHubNamespaceRead, + Update: resourceEventHubNamespaceCreateUpdate, + Delete: resourceEventHubNamespaceDelete, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { _, err := parse.NamespaceID(id) @@ -94,7 +96,7 @@ func resourceArmEventHubNamespace() *schema.Resource { Type: schema.TypeString, Optional: true, ForceNew: true, - ValidateFunc: validate.ValidateEventHubDedicatedClusterID, + ValidateFunc: validate.ClusterID, }, "identity": { @@ -244,7 +246,7 @@ func resourceArmEventHubNamespace() *schema.Resource { } } -func resourceArmEventHubNamespaceCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceEventHubNamespaceCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Eventhub.NamespacesClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -337,10 +339,10 @@ func resourceArmEventHubNamespaceCreateUpdate(d *schema.ResourceData, meta inter } } - return resourceArmEventHubNamespaceRead(d, meta) + return resourceEventHubNamespaceRead(d, meta) } -func resourceArmEventHubNamespaceRead(d *schema.ResourceData, meta interface{}) error { +func resourceEventHubNamespaceRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Eventhub.NamespacesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -405,7 +407,7 @@ func resourceArmEventHubNamespaceRead(d *schema.ResourceData, meta interface{}) return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmEventHubNamespaceDelete(d *schema.ResourceData, meta interface{}) error { +func resourceEventHubNamespaceDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Eventhub.NamespacesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/eventhub/eventhub_namespace_resource_test.go b/azurerm/internal/services/eventhub/eventhub_namespace_resource_test.go new file mode 100644 index 000000000000..dc6b55eb0cd6 --- /dev/null +++ b/azurerm/internal/services/eventhub/eventhub_namespace_resource_test.go @@ -0,0 +1,902 @@ +package eventhub_test + +import ( + 
"context" + "fmt" + "regexp" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventhub/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type EventHubNamespaceResource struct { +} + +func TestAccEventHubNamespace_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") + r := EventHubNamespaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventHubNamespace_basicWithIdentity(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") + r := EventHubNamespaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicWithIdentity(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventHubNamespace_basicUpdateIdentity(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") + r := EventHubNamespaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basicWithIdentity(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventHubNamespace_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") + r := EventHubNamespaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_eventhub_namespace"), + }, + }) +} + +func TestAccEventHubNamespace_standard(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") + r := EventHubNamespaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.standard(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventHubNamespace_standardWithIdentity(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") + r := EventHubNamespaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.standardWithIdentity(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventHubNamespace_standardUpdateIdentity(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") + r := EventHubNamespaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.standard(data), + Check: resource.ComposeTestCheckFunc( + 
check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		data.ImportStep(),
+		{
+			Config: r.standardWithIdentity(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		data.ImportStep(),
+	})
+}
+
+func TestAccEventHubNamespace_networkrule_iprule(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test")
+	r := EventHubNamespaceResource{}
+
+	data.ResourceTest(t, r, []resource.TestStep{
+		{
+			Config: r.networkrule_iprule(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		data.ImportStep(),
+	})
+}
+
+func TestAccEventHubNamespace_networkrule_vnet(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test")
+	r := EventHubNamespaceResource{}
+
+	data.ResourceTest(t, r, []resource.TestStep{
+		{
+			Config: r.networkrule_vnet(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		data.ImportStep(),
+	})
+}
+
+func TestAccEventHubNamespace_networkruleVnetIpRule(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test")
+	r := EventHubNamespaceResource{}
+
+	data.ResourceTest(t, r, []resource.TestStep{
+		{
+			Config: r.networkruleVnetIpRule(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+				check.That(data.ResourceName).Key("network_rulesets.0.virtual_network_rule.#").HasValue("2"),
+				check.That(data.ResourceName).Key("network_rulesets.0.ip_rule.#").HasValue("2"),
+			),
+		},
+		data.ImportStep(),
+	})
+}
+
+func TestAccEventHubNamespace_readDefaultKeys(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test")
+	r := EventHubNamespaceResource{}
+
+	data.ResourceTest(t, r, []resource.TestStep{
+		{
+			Config: r.basic(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+				resource.TestMatchResourceAttr(data.ResourceName, "default_primary_connection_string", regexp.MustCompile("Endpoint=.+")),
+				resource.TestMatchResourceAttr(data.ResourceName, "default_secondary_connection_string", regexp.MustCompile("Endpoint=.+")),
+				resource.TestMatchResourceAttr(data.ResourceName, "default_primary_key", regexp.MustCompile(".+")),
+				resource.TestMatchResourceAttr(data.ResourceName, "default_secondary_key", regexp.MustCompile(".+")),
+			),
+		},
+	})
+}
+
+func TestAccEventHubNamespace_withAliasConnectionString(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test")
+	r := EventHubNamespaceResource{}
+
+	data.ResourceTest(t, r, []resource.TestStep{
+		{
+			// `default_primary_connection_string_alias` and `default_secondary_connection_string_alias` are still `nil` immediately after `azurerm_eventhub_namespace` is created, since `azurerm_eventhub_namespace_disaster_recovery_config` hasn't been created yet.
+			// So these two properties are checked in the second step instead.
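+			// (Note: the first step below only verifies that the namespace exists; the alias connection strings are asserted on the second apply of the same configuration.)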
+ Config: r.withAliasConnectionString(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.withAliasConnectionString(data), + Check: resource.ComposeTestCheckFunc( + resource.TestMatchResourceAttr(data.ResourceName, "default_primary_connection_string_alias", regexp.MustCompile("Endpoint=.+")), + resource.TestMatchResourceAttr(data.ResourceName, "default_secondary_connection_string_alias", regexp.MustCompile("Endpoint=.+")), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventHubNamespace_maximumThroughputUnits(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") + r := EventHubNamespaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.maximumThroughputUnits(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventHubNamespace_zoneRedundant(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") + r := EventHubNamespaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.zoneRedundant(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventHubNamespace_dedicatedClusterID(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") + r := EventHubNamespaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.dedicatedClusterID(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventHubNamespace_NonStandardCasing(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") + r := EventHubNamespaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.nonStandardCasing(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.nonStandardCasing(data), + PlanOnly: true, + ExpectNonEmptyPlan: false, + }, + }) +} + +func TestAccEventHubNamespace_BasicWithTagsUpdate(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") + r := EventHubNamespaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.basicWithTagsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + ), + }, + }) +} + +func TestAccEventHubNamespace_BasicWithCapacity(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") + r := EventHubNamespaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.capacity(data, 20), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("capacity").HasValue("20"), + ), + }, + }) +} + +func TestAccEventHubNamespace_BasicWithCapacityUpdate(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") + r := EventHubNamespaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.capacity(data, 20), + Check: resource.ComposeTestCheckFunc( + 
check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("capacity").HasValue("20"), + ), + }, + { + Config: r.capacity(data, 2), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("capacity").HasValue("2"), + ), + }, + }) +} + +func TestAccEventHubNamespace_BasicWithSkuUpdate(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") + r := EventHubNamespaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("sku").HasValue("Basic"), + ), + }, + { + Config: r.standard(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("sku").HasValue("Standard"), + check.That(data.ResourceName).Key("capacity").HasValue("2"), + ), + }, + }) +} + +func TestAccEventHubNamespace_maximumThroughputUnitsUpdate(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") + r := EventHubNamespaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.maximumThroughputUnits(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("sku").HasValue("Standard"), + check.That(data.ResourceName).Key("capacity").HasValue("2"), + check.That(data.ResourceName).Key("maximum_throughput_units").HasValue("20"), + ), + }, + { + Config: r.maximumThroughputUnitsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("sku").HasValue("Standard"), + check.That(data.ResourceName).Key("capacity").HasValue("1"), + check.That(data.ResourceName).Key("maximum_throughput_units").HasValue("1"), + ), + }, + }) +} + +func TestAccEventHubNamespace_autoInfalteDisabledWithAutoInflateUnits(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") + r := EventHubNamespaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.autoInfalteDisabledWithAutoInflateUnits(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + }) +} + +func (EventHubNamespaceResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.NamespaceID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Eventhub.NamespacesClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving %s: %v", id.String(), err) + } + + return utils.Bool(resp.EHNamespaceProperties != nil), nil +} + +func (EventHubNamespaceResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eh-%d" + location = "%s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Basic" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (EventHubNamespaceResource) basicWithIdentity(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource 
"azurerm_resource_group" "test" { + name = "acctestRG-eh-%d" + location = "%s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Basic" + + identity { + type = "SystemAssigned" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (EventHubNamespaceResource) withAliasConnectionString(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-ehn-%[1]d" + location = "%[2]s" +} + +resource "azurerm_resource_group" "test2" { + name = "acctestRG2-ehn-%[1]d" + location = "%[3]s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku = "Standard" +} + +resource "azurerm_eventhub_namespace" "test2" { + name = "acctesteventhubnamespace2-%[1]d" + location = azurerm_resource_group.test2.location + resource_group_name = azurerm_resource_group.test2.name + + sku = "Standard" +} + +resource "azurerm_eventhub_namespace_disaster_recovery_config" "test" { + name = "acctest-EHN-DRC-%[1]d" + resource_group_name = azurerm_resource_group.test.name + namespace_name = azurerm_eventhub_namespace.test.name + partner_namespace_id = azurerm_eventhub_namespace.test2.id +} +`, data.RandomInteger, data.Locations.Primary, data.Locations.Secondary) +} + +func (EventHubNamespaceResource) requiresImport(data acceptance.TestData) string { + template := EventHubNamespaceResource{}.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_eventhub_namespace" "import" { + name = azurerm_eventhub_namespace.test.name + location = azurerm_eventhub_namespace.test.location + resource_group_name = azurerm_eventhub_namespace.test.resource_group_name + sku = azurerm_eventhub_namespace.test.sku +} +`, template) +} + +func (EventHubNamespaceResource) standard(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eh-%d" + location = "%s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + capacity = "2" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (EventHubNamespaceResource) standardWithIdentity(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eh-%d" + location = "%s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + capacity = "2" + + identity { + type = "SystemAssigned" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (EventHubNamespaceResource) networkrule_iprule(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eh-%d" + location = "%s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%d" + location = 
azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + capacity = "2" + + network_rulesets { + default_action = "Deny" + ip_rule { + ip_mask = "10.0.0.0/16" + } + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (EventHubNamespaceResource) networkrule_vnet(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%[1]d" + location = "%[2]s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctvn-%[1]d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctsub-%[1]d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + capacity = "2" + + network_rulesets { + default_action = "Deny" + virtual_network_rule { + subnet_id = azurerm_subnet.test.id + + ignore_missing_virtual_network_service_endpoint = true + } + } +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (EventHubNamespaceResource) networkruleVnetIpRule(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%[1]d" + location = "%[2]s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctvn1-%[1]d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctsub1-%[1]d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.1.0/24" + service_endpoints = ["Microsoft.EventHub"] +} + +resource "azurerm_virtual_network" "test2" { + name = "acctvn2-%[1]d" + address_space = ["10.1.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test2" { + name = "acctsub2-%[1]d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test2.name + address_prefix = "10.1.1.0/24" + service_endpoints = ["Microsoft.EventHub"] +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + capacity = "2" + + network_rulesets { + default_action = "Deny" + + virtual_network_rule { + subnet_id = azurerm_subnet.test.id + } + + virtual_network_rule { + subnet_id = azurerm_subnet.test2.id + } + + ip_rule { + ip_mask = "10.0.1.0/24" + } + + ip_rule { + ip_mask = "10.1.1.0/24" + } + } +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (EventHubNamespaceResource) nonStandardCasing(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%d" + 
location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "basic" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (EventHubNamespaceResource) maximumThroughputUnits(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + capacity = "2" + auto_inflate_enabled = true + maximum_throughput_units = 20 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (EventHubNamespaceResource) zoneRedundant(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + capacity = "2" + zone_redundant = true +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (EventHubNamespaceResource) dedicatedClusterID(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_eventhub_cluster" "test" { + name = "acctesteventhubcluster-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + sku_name = "Dedicated_1" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + capacity = "2" + dedicated_cluster_id = azurerm_eventhub_cluster.test.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (EventHubNamespaceResource) basicWithTagsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Basic" + + tags = { + environment = "Production" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (EventHubNamespaceResource) capacity(data acceptance.TestData, capacity int) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Basic" + capacity = %d +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, capacity) +} + +func (EventHubNamespaceResource) maximumThroughputUnitsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource 
"azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + capacity = 1 + auto_inflate_enabled = true + maximum_throughput_units = 1 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (EventHubNamespaceResource) autoInfalteDisabledWithAutoInflateUnits(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + capacity = 1 + auto_inflate_enabled = false + maximum_throughput_units = 0 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/eventhub/eventhub_resource.go b/azurerm/internal/services/eventhub/eventhub_resource.go index 6543a22bd7a9..6a42ea243cd6 100644 --- a/azurerm/internal/services/eventhub/eventhub_resource.go +++ b/azurerm/internal/services/eventhub/eventhub_resource.go @@ -3,9 +3,10 @@ package eventhub import ( "fmt" "log" - "strings" "time" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventhub/validate" + "github.com/Azure/azure-sdk-for-go/services/preview/eventhub/mgmt/2018-01-01-preview/eventhub" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" @@ -19,12 +20,12 @@ import ( var eventHubResourceName = "azurerm_eventhub" -func resourceArmEventHub() *schema.Resource { +func resourceEventHub() *schema.Resource { return &schema.Resource{ - Create: resourceArmEventHubCreateUpdate, - Read: resourceArmEventHubRead, - Update: resourceArmEventHubCreateUpdate, - Delete: resourceArmEventHubDelete, + Create: resourceEventHubCreateUpdate, + Read: resourceEventHubRead, + Update: resourceEventHubCreateUpdate, + Delete: resourceEventHubDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -57,13 +58,13 @@ func resourceArmEventHub() *schema.Resource { Type: schema.TypeInt, Required: true, ForceNew: true, - ValidateFunc: ValidateEventHubPartitionCount, + ValidateFunc: validate.ValidateEventHubPartitionCount, }, "message_retention": { Type: schema.TypeInt, Required: true, - ValidateFunc: ValidateEventHubMessageRetentionCount, + ValidateFunc: validate.ValidateEventHubMessageRetentionCount, }, "capture_description": { @@ -121,7 +122,7 @@ func resourceArmEventHub() *schema.Resource { "archive_name_format": { Type: schema.TypeString, Required: true, - ValidateFunc: ValidateEventHubArchiveNameFormat, + ValidateFunc: validate.ValidateEventHubArchiveNameFormat, }, "blob_container_name": { Type: schema.TypeString, @@ -149,7 +150,7 @@ func resourceArmEventHub() *schema.Resource { } } -func resourceArmEventHubCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceEventHubCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Eventhub.EventHubsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -201,10 +202,10 @@ func resourceArmEventHubCreateUpdate(d *schema.ResourceData, meta 
interface{}) e d.SetId(*read.ID) - return resourceArmEventHubRead(d, meta) + return resourceEventHubRead(d, meta) } -func resourceArmEventHubRead(d *schema.ResourceData, meta interface{}) error { +func resourceEventHubRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Eventhub.EventHubsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -244,7 +245,7 @@ func resourceArmEventHubRead(d *schema.ResourceData, meta interface{}) error { return nil } -func resourceArmEventHubDelete(d *schema.ResourceData, meta interface{}) error { +func resourceEventHubDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Eventhub.EventHubsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() @@ -257,7 +258,6 @@ func resourceArmEventHubDelete(d *schema.ResourceData, meta interface{}) error { namespaceName := id.Path["namespaces"] name := id.Path["eventhubs"] resp, err := client.Delete(ctx, resourceGroup, namespaceName, name) - if err != nil { if utils.ResponseWasNotFound(resp) { return nil @@ -269,50 +269,6 @@ func resourceArmEventHubDelete(d *schema.ResourceData, meta interface{}) error { return nil } -func ValidateEventHubPartitionCount(v interface{}, _ string) (warnings []string, errors []error) { - value := v.(int) - - if !(1024 >= value && value >= 1) { - errors = append(errors, fmt.Errorf("EventHub Partition Count has to be between 1 and 32 or between 1 and 1024 if using a dedicated Event Hubs Cluster")) - } - - return warnings, errors -} - -func ValidateEventHubMessageRetentionCount(v interface{}, _ string) (warnings []string, errors []error) { - value := v.(int) - - if !(90 >= value && value >= 1) { - errors = append(errors, fmt.Errorf("EventHub Retention Count has to be between 1 and 7 or between 1 and 90 if using a dedicated Event Hubs Cluster")) - } - - return warnings, errors -} - -func ValidateEventHubArchiveNameFormat(v interface{}, k string) (warnings []string, errors []error) { - value := v.(string) - - requiredComponents := []string{ - "{Namespace}", - "{EventHub}", - "{PartitionId}", - "{Year}", - "{Month}", - "{Day}", - "{Hour}", - "{Minute}", - "{Second}", - } - - for _, component := range requiredComponents { - if !strings.Contains(value, component) { - errors = append(errors, fmt.Errorf("%s needs to contain %q", k, component)) - } - } - - return warnings, errors -} - func expandEventHubCaptureDescription(d *schema.ResourceData) *eventhub.CaptureDescription { inputs := d.Get("capture_description").([]interface{}) input := inputs[0].(map[string]interface{}) diff --git a/azurerm/internal/services/eventhub/eventhub_resource_test.go b/azurerm/internal/services/eventhub/eventhub_resource_test.go new file mode 100644 index 000000000000..514eb34fe40f --- /dev/null +++ b/azurerm/internal/services/eventhub/eventhub_resource_test.go @@ -0,0 +1,526 @@ +package eventhub_test + +import ( + "context" + "fmt" + "strconv" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventhub/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" +) + +type EventHubResource struct { +} + +func TestAccEventHubPartitionCount_validation(t *testing.T) { + cases := []struct { + Value int + ErrCount int + }{ + { + Value: 0, + ErrCount: 1, + }, + { + Value: 1, + ErrCount: 0, + }, + { + Value: 2, + ErrCount: 0, + }, + { + Value: 3, + ErrCount: 0, + }, + { + Value: 21, + ErrCount: 0, + }, + { + Value: 1024, + ErrCount: 0, + }, + { + Value: 1025, + ErrCount: 1, + }, + } + + for _, tc := range cases { + _, errors := validate.ValidateEventHubPartitionCount(tc.Value, "azurerm_eventhub") + + if len(errors) != tc.ErrCount { + t.Fatalf("Expected the Azure RM EventHub Partition Count to trigger a validation error") + } + } +} + +func TestAccEventHubMessageRetentionCount_validation(t *testing.T) { + cases := []struct { + Value int + ErrCount int + }{ + { + Value: 0, + ErrCount: 1, + }, { + Value: 1, + ErrCount: 0, + }, { + Value: 2, + ErrCount: 0, + }, { + Value: 3, + ErrCount: 0, + }, { + Value: 4, + ErrCount: 0, + }, { + Value: 5, + ErrCount: 0, + }, { + Value: 6, + ErrCount: 0, + }, { + Value: 90, + ErrCount: 0, + }, { + Value: 91, + ErrCount: 1, + }, + } + + for _, tc := range cases { + _, errors := validate.ValidateEventHubMessageRetentionCount(tc.Value, "azurerm_eventhub") + + if len(errors) != tc.ErrCount { + t.Fatalf("Expected the Azure RM EventHub Message Retention Count to trigger a validation error") + } + } +} + +func TestAccEventHubArchiveNameFormat_validation(t *testing.T) { + cases := []struct { + Value string + ErrCount int + }{ + { + Value: "", + ErrCount: 9, + }, + { + Value: "Prod_{EventHub}/{Namespace}\\{PartitionId}_{Year}_{Month}/{Day}/{Hour}/{Minute}/{Second}", + ErrCount: 0, + }, + { + Value: "Prod_{Eventub}/{Namespace}\\{PartitionId}_{Year}_{Month}/{Day}/{Hour}/{Minute}/{Second}", + ErrCount: 1, + }, + { + Value: "{Namespace}\\{PartitionId}_{Year}_{Month}/{Day}/{Hour}/{Minute}/{Second}", + ErrCount: 1, + }, + { + Value: "{Namespace}\\{PartitionId}_{Year}_{Month}/{Day}/{Hour}/{Minute}/{Second}", + ErrCount: 1, + }, + { + Value: "Prod_{EventHub}/{PartitionId}_{Year}_{Month}/{Day}/{Hour}/{Minute}/{Second}", + ErrCount: 1, + }, + { + Value: "Prod_{EventHub}/{Namespace}\\{Year}_{Month}/{Day}/{Hour}/{Minute}/{Second}", + ErrCount: 1, + }, + { + Value: "Prod_{EventHub}/{Namespace}\\{PartitionId}_{Month}/{Day}/{Hour}/{Minute}/{Second}", + ErrCount: 1, + }, + { + Value: "Prod_{EventHub}/{Namespace}\\{PartitionId}_{Year}/{Day}/{Hour}/{Minute}/{Second}", + ErrCount: 1, + }, + { + Value: "Prod_{EventHub}/{Namespace}\\{PartitionId}_{Year}_{Month}/{Hour}/{Minute}/{Second}", + ErrCount: 1, + }, + { + Value: "Prod_{EventHub}/{Namespace}\\{PartitionId}_{Year}_{Month}/{Day}/{Minute}/{Second}", + ErrCount: 1, + }, + { + Value: "Prod_{EventHub}/{Namespace}\\{PartitionId}_{Year}_{Month}/{Day}/{Hour}/{Second}", + ErrCount: 1, + }, + { + Value: "Prod_{EventHub}/{Namespace}\\{PartitionId}_{Year}_{Month}/{Day}/{Hour}/{Minute}", + ErrCount: 1, + }, + } + + for _, tc := range cases { + _, errors := validate.ValidateEventHubArchiveNameFormat(tc.Value, "azurerm_eventhub") + + if len(errors) != tc.ErrCount { + t.Fatalf("Expected %q to trigger a validation error", tc.Value) + } + } +} + +func TestAccEventHub_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub", "test") + r := EventHubResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + 
Config: r.basic(data, 2), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventHub_basicOnePartition(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub", "test") + r := EventHubResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, 1), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("partition_count").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventHub_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub", "test") + r := EventHubResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, 2), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_eventhub"), + }, + }) +} + +func TestAccEventHub_partitionCountUpdate(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub", "test") + r := EventHubResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, 2), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("partition_count").HasValue("2"), + ), + }, + { + Config: r.partitionCountUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("partition_count").HasValue("10"), + ), + }, + }) +} + +func TestAccEventHub_standard(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub", "test") + r := EventHubResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.standard(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventHub_captureDescription(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub", "test") + r := EventHubResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.captureDescription(data, true), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("capture_description.0.enabled").HasValue("true"), + check.That(data.ResourceName).Key("capture_description.0.skip_empty_archives").HasValue("true"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccEventHub_captureDescriptionDisabled(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub", "test") + r := EventHubResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.captureDescription(data, true), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("capture_description.0.enabled").HasValue("true"), + ), + }, + { + Config: r.captureDescription(data, false), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("capture_description.0.enabled").HasValue("false"), + ), + }, + }) +} + +func TestAccEventHub_messageRetentionUpdate(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_eventhub", "test") + r := EventHubResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.standard(data), + Check: resource.ComposeTestCheckFunc( + 
check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("message_retention").HasValue("7"), + ), + }, + { + Config: r.messageRetentionUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("message_retention").HasValue("5"), + ), + }, + }) +} + +func (EventHubResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + namespaceName := id.Path["namespaces"] + name := id.Path["eventhubs"] + + resp, err := clients.Eventhub.EventHubsClient.Get(ctx, id.ResourceGroup, namespaceName, name) + if err != nil { + return nil, fmt.Errorf("retrieving Event Hub %q (namespace %q / resource group: %q): %v", name, namespaceName, id.ResourceGroup, err) + } + + return utils.Bool(resp.Properties != nil), nil +} + +func (EventHubResource) basic(data acceptance.TestData, partitionCount int) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eventhub-%d" + location = "%s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Basic" +} + +resource "azurerm_eventhub" "test" { + name = "acctesteventhub-%d" + namespace_name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + partition_count = %d + message_retention = 1 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, partitionCount) +} + +func (EventHubResource) requiresImport(data acceptance.TestData) string { + template := EventHubResource{}.basic(data, 2) + return fmt.Sprintf(` +%s + +resource "azurerm_eventhub" "import" { + name = azurerm_eventhub.test.name + namespace_name = azurerm_eventhub.test.namespace_name + resource_group_name = azurerm_eventhub.test.resource_group_name + partition_count = azurerm_eventhub.test.partition_count + message_retention = azurerm_eventhub.test.message_retention +} +`, template) +} + +func (EventHubResource) partitionCountUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eventhub-%d" + location = "%s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Basic" +} + +resource "azurerm_eventhub" "test" { + name = "acctesteventhub-%d" + namespace_name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + partition_count = 10 + message_retention = 1 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (EventHubResource) standard(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eventhub-%d" + location = "%s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctest-EHN-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" +} + +resource "azurerm_eventhub" "test" { + name = "acctest-EH-%d" + 
namespace_name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + partition_count = 2 + message_retention = 7 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (EventHubResource) captureDescription(data acceptance.TestData, enabled bool) string { + enabledString := strconv.FormatBool(enabled) + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eventhub-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestsa%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_container" "test" { + name = "acctest" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctest-EHN%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" +} + +resource "azurerm_eventhub" "test" { + name = "acctest-EH%d" + namespace_name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + partition_count = 2 + message_retention = 7 + + capture_description { + enabled = %s + encoding = "Avro" + interval_in_seconds = 60 + size_limit_in_bytes = 10485760 + skip_empty_archives = true + + destination { + name = "EventHubArchive.AzureBlockBlob" + archive_name_format = "Prod_{EventHub}/{Namespace}\\{PartitionId}_{Year}_{Month}/{Day}/{Hour}/{Minute}/{Second}" + blob_container_name = azurerm_storage_container.test.name + storage_account_id = azurerm_storage_account.test.id + } + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, enabledString) +} + +func (EventHubResource) messageRetentionUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-eventhub-%d" + location = "%s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctest-EHN-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" +} + +resource "azurerm_eventhub" "test" { + name = "acctest-EH-%d" + namespace_name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + partition_count = 2 + message_retention = 5 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/eventhub/parse/cluster.go b/azurerm/internal/services/eventhub/parse/cluster.go index 26d20d4aa44d..60f9513056bc 100644 --- a/azurerm/internal/services/eventhub/parse/cluster.go +++ b/azurerm/internal/services/eventhub/parse/cluster.go @@ -1,23 +1,63 @@ package parse -import "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) type ClusterId struct { - ResourceGroup string - Name string + SubscriptionId string + ResourceGroup string + Name string +} + +func NewClusterID(subscriptionId, resourceGroup, name string) ClusterId { + return 
ClusterId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id ClusterId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Cluster", segmentsStr) +} + +func (id ClusterId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.EventHub/clusters/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) } +// ClusterID parses a Cluster ID into an ClusterId struct func ClusterID(input string) (*ClusterId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { return nil, err } - cluster := ClusterId{ - ResourceGroup: id.ResourceGroup, + resourceId := ClusterId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if cluster.Name, err = id.PopSegment("clusters"); err != nil { + if resourceId.Name, err = id.PopSegment("clusters"); err != nil { return nil, err } @@ -25,5 +65,5 @@ func ClusterID(input string) (*ClusterId, error) { return nil, err } - return &cluster, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/eventhub/parse/cluster_test.go b/azurerm/internal/services/eventhub/parse/cluster_test.go index 345e20f0426e..4c3bfb87fea2 100644 --- a/azurerm/internal/services/eventhub/parse/cluster_test.go +++ b/azurerm/internal/services/eventhub/parse/cluster_test.go @@ -1,59 +1,91 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" ) +var _ resourceid.Formatter = ClusterId{} + +func TestClusterIDFormatter(t *testing.T) { + actual := NewClusterID("12345678-1234-9876-4563-123456789012", "group1", "cluster1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/clusters/cluster1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + func TestClusterID(t *testing.T) { testData := []struct { - Name string - Input string - Error bool - Expect *ClusterId + Input string + Error bool + Expected *ClusterId }{ + { - Name: "Empty", + // empty Input: "", Error: true, }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/", Error: true, }, + { - Name: "Missing Clusters Key", - Input: 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.EventHub/clusters/", + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/clusters/", Error: true, }, + { - Name: "Clusters ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.EventHub/clusters/cluster1", - Error: false, - Expect: &ClusterId{ - ResourceGroup: "group1", - Name: "cluster1", + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/clusters/cluster1", + Expected: &ClusterId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "group1", + Name: "cluster1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.EventHub/Clusters/cluster1", + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.EVENTHUB/CLUSTERS/CLUSTER1", Error: true, }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := ClusterID(v.Input) if err != nil { @@ -61,15 +93,20 @@ func TestClusterID(t *testing.T) { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) } } } diff --git a/azurerm/internal/services/eventhub/parse/event_hub_consumer_group.go b/azurerm/internal/services/eventhub/parse/event_hub_consumer_group.go new file mode 100644 index 000000000000..caff057a3922 --- /dev/null +++ b/azurerm/internal/services/eventhub/parse/event_hub_consumer_group.go @@ -0,0 +1,81 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type EventHubConsumerGroupId struct { + SubscriptionId string + ResourceGroup string + NamespaceName string + EventhubName string + ConsumergroupName string +} + +func NewEventHubConsumerGroupID(subscriptionId, resourceGroup, namespaceName, eventhubName, consumergroupName string) EventHubConsumerGroupId { + return EventHubConsumerGroupId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + NamespaceName: namespaceName, + EventhubName: eventhubName, + ConsumergroupName: consumergroupName, + } +} + +func (id EventHubConsumerGroupId) String() string { + segments := []string{ + fmt.Sprintf("Consumergroup Name %q", id.ConsumergroupName), + fmt.Sprintf("Eventhub Name %q", id.EventhubName), + fmt.Sprintf("Namespace Name %q", 
id.NamespaceName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Event Hub Consumer Group", segmentsStr) +} + +func (id EventHubConsumerGroupId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.EventHub/namespaces/%s/eventhubs/%s/consumergroups/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.NamespaceName, id.EventhubName, id.ConsumergroupName) +} + +// EventHubConsumerGroupID parses a EventHubConsumerGroup ID into an EventHubConsumerGroupId struct +func EventHubConsumerGroupID(input string) (*EventHubConsumerGroupId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := EventHubConsumerGroupId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.NamespaceName, err = id.PopSegment("namespaces"); err != nil { + return nil, err + } + if resourceId.EventhubName, err = id.PopSegment("eventhubs"); err != nil { + return nil, err + } + if resourceId.ConsumergroupName, err = id.PopSegment("consumergroups"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/eventhub/parse/event_hub_consumer_group_test.go b/azurerm/internal/services/eventhub/parse/event_hub_consumer_group_test.go new file mode 100644 index 000000000000..a49dcafd7617 --- /dev/null +++ b/azurerm/internal/services/eventhub/parse/event_hub_consumer_group_test.go @@ -0,0 +1,144 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = EventHubConsumerGroupId{} + +func TestEventHubConsumerGroupIDFormatter(t *testing.T) { + actual := NewEventHubConsumerGroupID("12345678-1234-9876-4563-123456789012", "group1", "namespace1", "eventhub1", "consumergroup1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/namespace1/eventhubs/eventhub1/consumergroups/consumergroup1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestEventHubConsumerGroupID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *EventHubConsumerGroupId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing NamespaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/", + Error: true, + }, + + { + // missing value for NamespaceName + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/", + Error: true, + }, + + { + // missing EventhubName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/namespace1/", + Error: true, + }, + + { + // missing value for EventhubName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/namespace1/eventhubs/", + Error: true, + }, + + { + // missing ConsumergroupName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/namespace1/eventhubs/eventhub1/", + Error: true, + }, + + { + // missing value for ConsumergroupName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/namespace1/eventhubs/eventhub1/consumergroups/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/namespace1/eventhubs/eventhub1/consumergroups/consumergroup1", + Expected: &EventHubConsumerGroupId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "group1", + NamespaceName: "namespace1", + EventhubName: "eventhub1", + ConsumergroupName: "consumergroup1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.EVENTHUB/NAMESPACES/NAMESPACE1/EVENTHUBS/EVENTHUB1/CONSUMERGROUPS/CONSUMERGROUP1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := EventHubConsumerGroupID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.NamespaceName != v.Expected.NamespaceName { + t.Fatalf("Expected %q but got %q for NamespaceName", v.Expected.NamespaceName, actual.NamespaceName) + } + if actual.EventhubName != v.Expected.EventhubName { + t.Fatalf("Expected %q but got %q for EventhubName", v.Expected.EventhubName, actual.EventhubName) + } + if actual.ConsumergroupName != v.Expected.ConsumergroupName { + t.Fatalf("Expected %q but got %q for ConsumergroupName", v.Expected.ConsumergroupName, actual.ConsumergroupName) + } + } +} diff --git a/azurerm/internal/services/eventhub/parse/namespace.go b/azurerm/internal/services/eventhub/parse/namespace.go index ed59ae445ba1..cef7fb0be913 100644 --- a/azurerm/internal/services/eventhub/parse/namespace.go +++ b/azurerm/internal/services/eventhub/parse/namespace.go @@ -1,23 +1,63 @@ package parse -import "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) type NamespaceId struct { - Name string - ResourceGroup string + SubscriptionId string + ResourceGroup string + Name string +} + 
+func NewNamespaceID(subscriptionId, resourceGroup, name string) NamespaceId { + return NamespaceId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id NamespaceId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Namespace", segmentsStr) +} + +func (id NamespaceId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.EventHub/namespaces/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) } +// NamespaceID parses a Namespace ID into an NamespaceId struct func NamespaceID(input string) (*NamespaceId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { return nil, err } - rule := NamespaceId{ - ResourceGroup: id.ResourceGroup, + resourceId := NamespaceId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if rule.Name, err = id.PopSegment("namespaces"); err != nil { + if resourceId.Name, err = id.PopSegment("namespaces"); err != nil { return nil, err } @@ -25,5 +65,5 @@ func NamespaceID(input string) (*NamespaceId, error) { return nil, err } - return &rule, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/eventhub/parse/namespace_authorization_rule.go b/azurerm/internal/services/eventhub/parse/namespace_authorization_rule.go index c3453785a204..f7e83c3168e4 100644 --- a/azurerm/internal/services/eventhub/parse/namespace_authorization_rule.go +++ b/azurerm/internal/services/eventhub/parse/namespace_authorization_rule.go @@ -1,28 +1,69 @@ package parse -import "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) type NamespaceAuthorizationRuleId struct { - ResourceGroup string - NamespaceName string - Name string + SubscriptionId string + ResourceGroup string + NamespaceName string + AuthorizationRuleName string +} + +func NewNamespaceAuthorizationRuleID(subscriptionId, resourceGroup, namespaceName, authorizationRuleName string) NamespaceAuthorizationRuleId { + return NamespaceAuthorizationRuleId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + NamespaceName: namespaceName, + AuthorizationRuleName: authorizationRuleName, + } +} + +func (id NamespaceAuthorizationRuleId) String() string { + segments := []string{ + fmt.Sprintf("Authorization Rule Name %q", id.AuthorizationRuleName), + fmt.Sprintf("Namespace Name %q", id.NamespaceName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Namespace Authorization Rule", segmentsStr) } +func (id NamespaceAuthorizationRuleId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.EventHub/namespaces/%s/authorizationRules/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.NamespaceName, id.AuthorizationRuleName) +} + +// NamespaceAuthorizationRuleID parses a NamespaceAuthorizationRule ID into an 
NamespaceAuthorizationRuleId struct func NamespaceAuthorizationRuleID(input string) (*NamespaceAuthorizationRuleId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { return nil, err } - rule := NamespaceAuthorizationRuleId{ - ResourceGroup: id.ResourceGroup, + resourceId := NamespaceAuthorizationRuleId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, } - if rule.NamespaceName, err = id.PopSegment("namespaces"); err != nil { - return nil, err + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - if rule.Name, err = id.PopSegment("authorizationRules"); err != nil { + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.NamespaceName, err = id.PopSegment("namespaces"); err != nil { + return nil, err + } + if resourceId.AuthorizationRuleName, err = id.PopSegment("authorizationRules"); err != nil { return nil, err } @@ -30,5 +71,5 @@ func NamespaceAuthorizationRuleID(input string) (*NamespaceAuthorizationRuleId, return nil, err } - return &rule, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/eventhub/parse/namespace_authorization_rule_test.go b/azurerm/internal/services/eventhub/parse/namespace_authorization_rule_test.go index eae18be19847..654158ad9eeb 100644 --- a/azurerm/internal/services/eventhub/parse/namespace_authorization_rule_test.go +++ b/azurerm/internal/services/eventhub/parse/namespace_authorization_rule_test.go @@ -1,70 +1,104 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" ) +var _ resourceid.Formatter = NamespaceAuthorizationRuleId{} + +func TestNamespaceAuthorizationRuleIDFormatter(t *testing.T) { + actual := NewNamespaceAuthorizationRuleID("12345678-1234-9876-4563-123456789012", "group1", "namespace1", "rule1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/namespace1/authorizationRules/rule1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + func TestNamespaceAuthorizationRuleID(t *testing.T) { testData := []struct { - Name string - Input string - Error bool - Expect *NamespaceAuthorizationRuleId + Input string + Error bool + Expected *NamespaceAuthorizationRuleId }{ + { - Name: "Empty", + // empty Input: "", Error: true, }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + // missing SubscriptionId + Input: "/", Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + // missing value for SubscriptionId + Input: "/subscriptions/", Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", Error: true, }, + { - Name: "Missing Namespaces Key", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/", + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", Error: true, }, + { - Name: "Missing Namespaces Value", - Input: 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/namespace1", + // missing NamespaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/", Error: true, }, + { - Name: "Missing authorizationRules Key", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/namespace1/authorizationRules", + // missing value for NamespaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/", Error: true, }, + { - Name: "Namespace Authorization Rule ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/namespace1/authorizationRules/rule1", - Error: false, - Expect: &NamespaceAuthorizationRuleId{ - ResourceGroup: "group1", - NamespaceName: "namespace1", - Name: "rule1", + // missing AuthorizationRuleName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/namespace1/", + Error: true, + }, + + { + // missing value for AuthorizationRuleName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/namespace1/authorizationRules/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/namespace1/authorizationRules/rule1", + Expected: &NamespaceAuthorizationRuleId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "group1", + NamespaceName: "namespace1", + AuthorizationRuleName: "rule1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/namespace1/AuthorizationRules/rule1", + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.EVENTHUB/NAMESPACES/NAMESPACE1/AUTHORIZATIONRULES/RULE1", Error: true, }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := NamespaceAuthorizationRuleID(v.Input) if err != nil { @@ -72,19 +106,23 @@ func TestNamespaceAuthorizationRuleID(t *testing.T) { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.NamespaceName != v.Expect.NamespaceName { - t.Fatalf("Expected %q but got %q for Name", v.Expect.NamespaceName, actual.NamespaceName) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) } - - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.NamespaceName != v.Expected.NamespaceName { + t.Fatalf("Expected %q but got %q for NamespaceName", v.Expected.NamespaceName, actual.NamespaceName) + } + if 
actual.AuthorizationRuleName != v.Expected.AuthorizationRuleName { + t.Fatalf("Expected %q but got %q for AuthorizationRuleName", v.Expected.AuthorizationRuleName, actual.AuthorizationRuleName) } } } diff --git a/azurerm/internal/services/eventhub/parse/namespace_test.go b/azurerm/internal/services/eventhub/parse/namespace_test.go index a45d82d90df1..015d6a8711f5 100644 --- a/azurerm/internal/services/eventhub/parse/namespace_test.go +++ b/azurerm/internal/services/eventhub/parse/namespace_test.go @@ -1,56 +1,91 @@ package parse -import "testing" +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = NamespaceId{} + +func TestNamespaceIDFormatter(t *testing.T) { + actual := NewNamespaceID("12345678-1234-9876-4563-123456789012", "group1", "namespace1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/namespace1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} func TestNamespaceID(t *testing.T) { testData := []struct { - Name string - Input string - Error bool - Expect *NamespaceId + Input string + Error bool + Expected *NamespaceId }{ + { - Name: "Empty", + // empty Input: "", Error: true, }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/", Error: true, }, + { - Name: "Missing Namespaces Key", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/", + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/", Error: true, }, + { - Name: "Namespaces Value ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/namespace1", - Expect: &NamespaceId{ - Name: "namespace1", - ResourceGroup: "group1", + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/namespace1", + Expected: &NamespaceId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "group1", + Name: "namespace1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.EventHub/Namespaces/namespace1", + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.EVENTHUB/NAMESPACES/NAMESPACE1", Error: true, }, } for _, v := range testData { - 
t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := NamespaceID(v.Input) if err != nil { @@ -58,15 +93,20 @@ func TestNamespaceID(t *testing.T) { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) } } } diff --git a/azurerm/internal/services/eventhub/registration.go b/azurerm/internal/services/eventhub/registration.go index 9992df41ec4e..2da44f69c885 100644 --- a/azurerm/internal/services/eventhub/registration.go +++ b/azurerm/internal/services/eventhub/registration.go @@ -2,6 +2,7 @@ package eventhub import ( "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/sdk" ) type Registration struct{} @@ -22,21 +23,38 @@ func (r Registration) WebsiteCategories() []string { func (r Registration) SupportedDataSources() map[string]*schema.Resource { return map[string]*schema.Resource{ "azurerm_eventhub": dataSourceEventHub(), - "azurerm_eventhub_authorization_rule": dataSourceEventHubAuthorizationRule(), - "azurerm_eventhub_consumer_group": dataSourceEventHubConsumerGroup(), - "azurerm_eventhub_namespace": dataSourceEventHubNamespace(), - "azurerm_eventhub_namespace_authorization_rule": dataSourceEventHubNamespaceAuthorizationRule(), + "azurerm_eventhub_authorization_rule": EventHubAuthorizationRuleDataSource(), + "azurerm_eventhub_consumer_group": EventHubConsumerGroupDataSource(), + "azurerm_eventhub_namespace": EventHubNamespaceDataSource(), + "azurerm_eventhub_namespace_authorization_rule": EventHubNamespaceDataSourceAuthorizationRule(), } } // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_eventhub_authorization_rule": resourceArmEventHubAuthorizationRule(), - "azurerm_eventhub_cluster": resourceArmEventHubCluster(), - "azurerm_eventhub_consumer_group": resourceArmEventHubConsumerGroup(), - "azurerm_eventhub_namespace_authorization_rule": resourceArmEventHubNamespaceAuthorizationRule(), - "azurerm_eventhub_namespace_disaster_recovery_config": resourceArmEventHubNamespaceDisasterRecoveryConfig(), - "azurerm_eventhub_namespace": resourceArmEventHubNamespace(), - "azurerm_eventhub": resourceArmEventHub()} + "azurerm_eventhub_authorization_rule": resourceEventHubAuthorizationRule(), + "azurerm_eventhub_cluster": resourceEventHubCluster(), + "azurerm_eventhub_namespace_authorization_rule": resourceEventHubNamespaceAuthorizationRule(), + "azurerm_eventhub_namespace_disaster_recovery_config": resourceEventHubNamespaceDisasterRecoveryConfig(), + "azurerm_eventhub_namespace": 
resourceEventHubNamespace(), + "azurerm_eventhub": resourceEventHub(), + } +} + +// PackagePath is the relative path to this package +func (r Registration) PackagePath() string { + return "TODO" +} + +// DataSources returns a list of Data Sources supported by this Service +func (r Registration) DataSources() []sdk.DataSource { + return []sdk.DataSource{} +} + +// Resources returns a list of Resources supported by this Service +func (r Registration) Resources() []sdk.Resource { + return []sdk.Resource{ + ConsumerGroupResource{}, + } } diff --git a/azurerm/internal/services/eventhub/resourceids.go b/azurerm/internal/services/eventhub/resourceids.go new file mode 100644 index 000000000000..03e09ce22b41 --- /dev/null +++ b/azurerm/internal/services/eventhub/resourceids.go @@ -0,0 +1,6 @@ +package eventhub + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Cluster -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/clusters/cluster1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=EventHubConsumerGroup -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/namespace1/eventhubs/eventhub1/consumergroups/consumergroup1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Namespace -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/namespace1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=NamespaceAuthorizationRule -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/namespace1/authorizationRules/rule1 diff --git a/azurerm/internal/services/eventhub/tests/eventhub_authorization_rule_data_source_test.go b/azurerm/internal/services/eventhub/tests/eventhub_authorization_rule_data_source_test.go deleted file mode 100644 index 4ddb26fac7b3..000000000000 --- a/azurerm/internal/services/eventhub/tests/eventhub_authorization_rule_data_source_test.go +++ /dev/null @@ -1,82 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMEventHubAuthorizationRule(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_eventhub_authorization_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubAuthorizationRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMEventHubAuthorizationRule_base(data, true, true, true), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubAuthorizationRuleExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "namespace_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "eventhub_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_connection_string"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_connection_string"), - ), - }, - }, - }) -} - -func 
TestAccDataSourceAzureRMEventHubAuthorizationRule_withAliasConnectionString(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_eventhub_authorization_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubAuthorizationRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMEventHubAuthorizationRule_withAliasConnectionString(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubAuthorizationRuleExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_connection_string_alias"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_connection_string_alias"), - ), - }, - }, - }) -} - -func testAccDataSourceAzureRMEventHubAuthorizationRule_base(data acceptance.TestData, listen, send, manage bool) string { - template := testAccAzureRMEventHubAuthorizationRule_base(data, listen, send, manage) - return fmt.Sprintf(` -%s - -data "azurerm_eventhub_authorization_rule" "test" { - name = azurerm_eventhub_authorization_rule.test.name - namespace_name = azurerm_eventhub_namespace.test.name - eventhub_name = azurerm_eventhub.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, template) -} - -func testAccDataSourceAzureRMEventHubAuthorizationRule_withAliasConnectionString(data acceptance.TestData) string { - template := testAccAzureRMEventHubAuthorizationRule_withAliasConnectionString(data) - return fmt.Sprintf(` -%s - -data "azurerm_eventhub_authorization_rule" "test" { - name = azurerm_eventhub_authorization_rule.test.name - namespace_name = azurerm_eventhub_namespace.test.name - eventhub_name = azurerm_eventhub.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, template) -} diff --git a/azurerm/internal/services/eventhub/tests/eventhub_authorization_rule_resource_test.go b/azurerm/internal/services/eventhub/tests/eventhub_authorization_rule_resource_test.go deleted file mode 100644 index d0071dfd3836..000000000000 --- a/azurerm/internal/services/eventhub/tests/eventhub_authorization_rule_resource_test.go +++ /dev/null @@ -1,398 +0,0 @@ -package tests - -import ( - "fmt" - "strconv" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMEventHubAuthorizationRule_listen(t *testing.T) { - testAccAzureRMEventHubAuthorizationRule(t, true, false, false) -} - -func TestAccAzureRMEventHubAuthorizationRule_send(t *testing.T) { - testAccAzureRMEventHubAuthorizationRule(t, false, true, false) -} - -func TestAccAzureRMEventHubAuthorizationRule_listensend(t *testing.T) { - testAccAzureRMEventHubAuthorizationRule(t, true, true, false) -} - -func TestAccAzureRMEventHubAuthorizationRule_manage(t *testing.T) { - testAccAzureRMEventHubAuthorizationRule(t, true, true, true) -} - -func testAccAzureRMEventHubAuthorizationRule(t *testing.T, listen, send, manage bool) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_authorization_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - 
CheckDestroy: testCheckAzureRMEventHubAuthorizationRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubAuthorizationRule_base(data, listen, send, manage), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubAuthorizationRuleExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "namespace_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "eventhub_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_connection_string"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_connection_string"), - resource.TestCheckResourceAttr(data.ResourceName, "listen", strconv.FormatBool(listen)), - resource.TestCheckResourceAttr(data.ResourceName, "send", strconv.FormatBool(send)), - resource.TestCheckResourceAttr(data.ResourceName, "manage", strconv.FormatBool(manage)), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventHubAuthorizationRule_multi(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_authorization_rule", "test1") - resourceTwoName := "azurerm_eventhub_authorization_rule.test2" - resourceThreeName := "azurerm_eventhub_authorization_rule.test3" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubAuthorizationRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAzureRMEventHubAuthorizationRule_multi(data, true, true, true), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubAuthorizationRuleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "manage", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "send", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "listen", "true"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_connection_string"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_connection_string"), - testCheckAzureRMEventHubAuthorizationRuleExists(resourceTwoName), - resource.TestCheckResourceAttr(resourceTwoName, "manage", "false"), - resource.TestCheckResourceAttr(resourceTwoName, "send", "true"), - resource.TestCheckResourceAttr(resourceTwoName, "listen", "true"), - resource.TestCheckResourceAttrSet(resourceTwoName, "primary_connection_string"), - resource.TestCheckResourceAttrSet(resourceTwoName, "secondary_connection_string"), - testCheckAzureRMEventHubAuthorizationRuleExists(resourceThreeName), - resource.TestCheckResourceAttr(resourceThreeName, "manage", "false"), - resource.TestCheckResourceAttr(resourceThreeName, "send", "true"), - resource.TestCheckResourceAttr(resourceThreeName, "listen", "true"), - resource.TestCheckResourceAttrSet(resourceThreeName, "primary_connection_string"), - resource.TestCheckResourceAttrSet(resourceThreeName, "secondary_connection_string"), - ), - }, - data.ImportStep(), - { - ResourceName: resourceTwoName, - ImportState: true, - ImportStateVerify: true, - }, - { - ResourceName: resourceThreeName, - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func TestAccAzureRMEventHubAuthorizationRule_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_authorization_rule", "test") - - resource.ParallelTest(t, 
resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubAuthorizationRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubAuthorizationRule_base(data, true, true, true), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubAuthorizationRuleExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMEventHubAuthorizationRule_requiresImport(data, true, true, true), - ExpectError: acceptance.RequiresImportError("azurerm_eventhub_authorization_rule"), - }, - }, - }) -} - -func TestAccAzureRMEventHubAuthorizationRule_rightsUpdate(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_authorization_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubAuthorizationRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubAuthorizationRule_base(data, true, false, false), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubAuthorizationRuleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "listen", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "send", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "manage", "false"), - ), - }, - { - Config: testAccAzureRMEventHubAuthorizationRule_base(data, true, true, true), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubAuthorizationRuleExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "namespace_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_connection_string"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_connection_string"), - resource.TestCheckResourceAttr(data.ResourceName, "listen", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "send", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "manage", "true"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventHubAuthorizationRule_withAliasConnectionString(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_authorization_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubAuthorizationRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubAuthorizationRule_withAliasConnectionString(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubAuthorizationRuleExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_connection_string_alias"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_connection_string_alias"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMEventHubAuthorizationRuleDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Eventhub.EventHubsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_eventhub_authorization_rule" { - continue - } - - name := 
rs.Primary.Attributes["name"] - namespaceName := rs.Primary.Attributes["namespace_name"] - eventHubName := rs.Primary.Attributes["eventhub_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.GetAuthorizationRule(ctx, resourceGroup, namespaceName, eventHubName, name) - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return err - } - } - } - - return nil -} - -func testCheckAzureRMEventHubAuthorizationRuleExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Eventhub.EventHubsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - namespaceName := rs.Primary.Attributes["namespace_name"] - eventHubName := rs.Primary.Attributes["eventhub_name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Event Hub: %s", name) - } - - resp, err := conn.GetAuthorizationRule(ctx, resourceGroup, namespaceName, eventHubName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Event Hub Authorization Rule %q (eventhub %s / namespace %s / resource group: %s) does not exist", name, eventHubName, namespaceName, resourceGroup) - } - - return fmt.Errorf("Bad: Get on eventHubClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMEventHubAuthorizationRule_base(data acceptance.TestData, listen, send, manage bool) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%[1]d" - location = "%[2]s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku = "Standard" -} - -resource "azurerm_eventhub" "test" { - name = "acctesteventhub-%[1]d" - namespace_name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - - partition_count = 2 - message_retention = 1 -} - -resource "azurerm_eventhub_authorization_rule" "test" { - name = "acctest-%[1]d" - namespace_name = azurerm_eventhub_namespace.test.name - eventhub_name = azurerm_eventhub.test.name - resource_group_name = azurerm_resource_group.test.name - - listen = %[3]t - send = %[4]t - manage = %[5]t -} -`, data.RandomInteger, data.Locations.Primary, listen, send, manage) -} - -func testAzureRMEventHubAuthorizationRule_multi(data acceptance.TestData, listen, send, manage bool) string { - template := testAccAzureRMEventHubAuthorizationRule_base(data, listen, send, manage) - return fmt.Sprintf(` -%s - -resource "azurerm_eventhub_authorization_rule" "test1" { - name = "acctestruleone-%d" - eventhub_name = azurerm_eventhub.test.name - namespace_name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - send = true - listen = true -} - -resource "azurerm_eventhub_authorization_rule" "test2" { - name = "acctestruletwo-%d" - eventhub_name = azurerm_eventhub.test.name - namespace_name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - send = 
true - listen = true -} - -resource "azurerm_eventhub_authorization_rule" "test3" { - name = "acctestrulethree-%d" - eventhub_name = azurerm_eventhub.test.name - namespace_name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - send = true - listen = true -} -`, template, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMEventHubAuthorizationRule_requiresImport(data acceptance.TestData, listen, send, manage bool) string { - template := testAccAzureRMEventHubAuthorizationRule_base(data, listen, send, manage) - return fmt.Sprintf(` -%s - -resource "azurerm_eventhub_authorization_rule" "import" { - name = azurerm_eventhub_authorization_rule.test.name - namespace_name = azurerm_eventhub_authorization_rule.test.namespace_name - eventhub_name = azurerm_eventhub_authorization_rule.test.eventhub_name - resource_group_name = azurerm_eventhub_authorization_rule.test.resource_group_name - listen = azurerm_eventhub_authorization_rule.test.listen - send = azurerm_eventhub_authorization_rule.test.send - manage = azurerm_eventhub_authorization_rule.test.manage -} -`, template) -} - -func testAccAzureRMEventHubAuthorizationRule_withAliasConnectionString(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-ehar-%[1]d" - location = "%[2]s" -} - -resource "azurerm_resource_group" "test2" { - name = "acctestRG2-ehar-%[1]d" - location = "%[3]s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku = "Standard" -} - -resource "azurerm_eventhub_namespace" "test2" { - name = "acctesteventhubnamespace2-%[1]d" - location = azurerm_resource_group.test2.location - resource_group_name = azurerm_resource_group.test2.name - - sku = "Standard" -} - -resource "azurerm_eventhub_namespace_disaster_recovery_config" "test" { - name = "acctest-EHN-DRC-%[1]d" - resource_group_name = azurerm_resource_group.test.name - namespace_name = azurerm_eventhub_namespace.test.name - partner_namespace_id = azurerm_eventhub_namespace.test2.id -} - -resource "azurerm_eventhub" "test" { - name = "acctesteventhub-%[1]d" - namespace_name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - - partition_count = 2 - message_retention = 1 -} - -resource "azurerm_eventhub_authorization_rule" "test" { - name = "acctest-%[1]d" - namespace_name = azurerm_eventhub_namespace.test.name - eventhub_name = azurerm_eventhub.test.name - resource_group_name = azurerm_resource_group.test.name - - listen = true - send = true - manage = true - - depends_on = [azurerm_eventhub_namespace_disaster_recovery_config.test] -} -`, data.RandomInteger, data.Locations.Primary, data.Locations.Secondary) -} diff --git a/azurerm/internal/services/eventhub/tests/eventhub_cluster_resource_test.go b/azurerm/internal/services/eventhub/tests/eventhub_cluster_resource_test.go deleted file mode 100644 index 4bc26126fe63..000000000000 --- a/azurerm/internal/services/eventhub/tests/eventhub_cluster_resource_test.go +++ /dev/null @@ -1,164 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventhub/parse" -) - -func TestAccAzureRMEventHubCluster_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_cluster", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubClusterDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubClusterExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventHubCluster_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_cluster", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubClusterDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubClusterExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMEventHubCluster_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubClusterExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMEventHubCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubClusterExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMEventHubClusterDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Eventhub.ClusterClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_eventhub_cluster" { - continue - } - - id, err := parse.ClusterID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.Name) - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("EventHub Cluster still exists:\n%#v", resp.ClusterProperties) - } - } - - return nil -} - -func testCheckAzureRMEventHubClusterExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Eventhub.ClusterClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.ClusterID(rs.Primary.ID) - if err != nil { - return err - } - resp, err := conn.Get(ctx, id.ResourceGroup, id.Name) - if err != nil { - return fmt.Errorf("Bad: Get on clustersClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Event Hub Cluster %q (resource group: %q) does not exist", id.Name, id.ResourceGroup) - } - - return nil - } -} - -func testAccAzureRMEventHubCluster_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-eventhub-%d" - 
location = "%s" -} - -resource "azurerm_eventhub_cluster" "test" { - name = "acctesteventhubclusTER-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - sku_name = "Dedicated_1" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMEventHubCluster_update(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-eventhub-%d" - location = "%s" -} - -resource "azurerm_eventhub_cluster" "test" { - name = "acctesteventhubclusTER-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - sku_name = "Dedicated_1" - - tags = { - environment = "Production" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/eventhub/tests/eventhub_consumer_group_data_source_test.go b/azurerm/internal/services/eventhub/tests/eventhub_consumer_group_data_source_test.go deleted file mode 100644 index edd20c779fb4..000000000000 --- a/azurerm/internal/services/eventhub/tests/eventhub_consumer_group_data_source_test.go +++ /dev/null @@ -1,125 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMEventHubConsumerGroup_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_eventhub_consumer_group", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubConsumerGroupDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMEventHubConsumerGroup_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubConsumerGroupExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "user_metadata", "some-meta-data"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMEventHubConsumerGroupDefault_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_eventhub_consumer_group", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubConsumerGroupDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMEventHubConsumerGroupDefault_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubConsumerGroupExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "user_metadata", "some-meta-data"), - ), - }, - }, - }) -} - -func testAccDataSourceAzureRMEventHubConsumerGroup_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" -} - -resource "azurerm_eventhub" "test" { - name = "acctesteventhub-%d" - namespace_name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - partition_count = 2 - message_retention = 7 
-} - -resource "azurerm_eventhub_consumer_group" "test" { - name = "acctesteventhubcg-%d" - namespace_name = azurerm_eventhub_namespace.test.name - eventhub_name = azurerm_eventhub.test.name - resource_group_name = azurerm_resource_group.test.name - user_metadata = "some-meta-data" -} - -data "azurerm_eventhub_consumer_group" "test" { - name = azurerm_eventhub_consumer_group.test.name - namespace_name = azurerm_eventhub_namespace.test.name - eventhub_name = azurerm_eventhub.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccDataSourceAzureRMEventHubConsumerGroupDefault_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" -} - -resource "azurerm_eventhub" "test" { - name = "acctesteventhub-%d" - namespace_name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - partition_count = 2 - message_retention = 7 -} - -data "azurerm_eventhub_consumer_group" "test" { - name = "$Default" - namespace_name = azurerm_eventhub_namespace.test.name - eventhub_name = azurerm_eventhub.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/eventhub/tests/eventhub_consumer_group_resource_test.go b/azurerm/internal/services/eventhub/tests/eventhub_consumer_group_resource_test.go deleted file mode 100644 index 3964098e35c3..000000000000 --- a/azurerm/internal/services/eventhub/tests/eventhub_consumer_group_resource_test.go +++ /dev/null @@ -1,242 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMEventHubConsumerGroup_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_consumer_group", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubConsumerGroupDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubConsumerGroup_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubConsumerGroupExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventHubConsumerGroup_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_consumer_group", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubConsumerGroupDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubConsumerGroup_basic(data), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMEventHubConsumerGroupExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMEventHubConsumerGroup_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_eventhub_consumer_group"), - }, - }, - }) -} - -func TestAccAzureRMEventHubConsumerGroup_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_consumer_group", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubConsumerGroupDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubConsumerGroup_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubConsumerGroupExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventHubConsumerGroup_userMetadataUpdate(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_consumer_group", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubConsumerGroupDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubConsumerGroup_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubConsumerGroupExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMEventHubConsumerGroup_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubConsumerGroupExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "user_metadata", "some-meta-data"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMEventHubConsumerGroupDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Eventhub.ConsumerGroupClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_eventhub_consumer_group" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - namespaceName := rs.Primary.Attributes["namespace_name"] - eventHubName := rs.Primary.Attributes["eventhub_name"] - - resp, err := conn.Get(ctx, resourceGroup, namespaceName, eventHubName, name) - - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return err - } - } - } - - return nil -} - -func testCheckAzureRMEventHubConsumerGroupExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Eventhub.ConsumerGroupClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Event Hub Consumer Group: %s", name) - } - - namespaceName := rs.Primary.Attributes["namespace_name"] - eventHubName := rs.Primary.Attributes["eventhub_name"] - - resp, err := conn.Get(ctx, resourceGroup, namespaceName, eventHubName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Event Hub Consumer Group %q 
(resource group: %q) does not exist", name, resourceGroup) - } - - return fmt.Errorf("Bad: Get on eventHubConsumerGroupClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMEventHubConsumerGroup_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" -} - -resource "azurerm_eventhub" "test" { - name = "acctesteventhub-%d" - namespace_name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - partition_count = 2 - message_retention = 7 -} - -resource "azurerm_eventhub_consumer_group" "test" { - name = "acctesteventhubcg-%d" - namespace_name = azurerm_eventhub_namespace.test.name - eventhub_name = azurerm_eventhub.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMEventHubConsumerGroup_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMEventHubConsumerGroup_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_eventhub_consumer_group" "import" { - name = azurerm_eventhub_consumer_group.test.name - namespace_name = azurerm_eventhub_consumer_group.test.namespace_name - eventhub_name = azurerm_eventhub_consumer_group.test.eventhub_name - resource_group_name = azurerm_eventhub_consumer_group.test.resource_group_name -} -`, template) -} - -func testAccAzureRMEventHubConsumerGroup_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" -} - -resource "azurerm_eventhub" "test" { - name = "acctesteventhub-%d" - namespace_name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - partition_count = 2 - message_retention = 7 -} - -resource "azurerm_eventhub_consumer_group" "test" { - name = "acctesteventhubcg-%d" - namespace_name = azurerm_eventhub_namespace.test.name - eventhub_name = azurerm_eventhub.test.name - resource_group_name = azurerm_resource_group.test.name - user_metadata = "some-meta-data" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/eventhub/tests/eventhub_data_source_test.go b/azurerm/internal/services/eventhub/tests/eventhub_data_source_test.go deleted file mode 100644 index 79fcd0a12231..000000000000 --- a/azurerm/internal/services/eventhub/tests/eventhub_data_source_test.go +++ /dev/null @@ -1,62 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMEventHub_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_eventhub", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { 
acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceEventHub_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "partition_count", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "partition_ids.#", "2"), - ), - }, - }, - }) -} - -func testAccDataSourceEventHub_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-eventhub-%[1]d" - location = "%[2]s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctest-EHN-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku = "Basic" -} - -resource "azurerm_eventhub" "test" { - name = "acctest-eh-%[1]d" - resource_group_name = azurerm_resource_group.test.name - namespace_name = azurerm_eventhub_namespace.test.name - partition_count = 2 - message_retention = 1 -} - -data "azurerm_eventhub" "test" { - name = azurerm_eventhub.test.name - namespace_name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, data.RandomInteger, data.Locations.Primary) -} diff --git a/azurerm/internal/services/eventhub/tests/eventhub_namespace_authorization_rule_data_source_test.go b/azurerm/internal/services/eventhub/tests/eventhub_namespace_authorization_rule_data_source_test.go deleted file mode 100644 index 1d7086c02478..000000000000 --- a/azurerm/internal/services/eventhub/tests/eventhub_namespace_authorization_rule_data_source_test.go +++ /dev/null @@ -1,103 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMEventHubNamespaceAuthorizationRule_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_eventhub_namespace_authorization_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceEventHubNamespaceAuthorizationRule_basic(data, true, true, true), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "listen"), - resource.TestCheckResourceAttrSet(data.ResourceName, "manage"), - resource.TestCheckResourceAttrSet(data.ResourceName, "send"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMEventHubNamespaceAuthorizationRule_withAliasConnectionString(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_eventhub_namespace_authorization_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - // `primary_connection_string_alias` and `secondary_connection_string_alias` are still `nil` while `data.azurerm_eventhub_namespace_authorization_rule` is retrieving resource. since `azurerm_eventhub_namespace_disaster_recovery_config` hasn't been created. - // So these two properties should be checked in the second run. - // And `depends_on` cannot be applied to `azurerm_eventhub_namespace_authorization_rule`. 
- // Because it would throw error message `BreakPairing operation is only allowed on primary namespace with valid secondary namespace.` while destroying `azurerm_eventhub_namespace_disaster_recovery_config` if `depends_on` is applied. - Config: testAccAzureRMEventHubNamespaceAuthorizationRule_withAliasConnectionString(data), - }, - { - Config: testAccDataSourceEventHubNamespaceAuthorizationRule_withAliasConnectionString(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_connection_string_alias"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_connection_string_alias"), - ), - }, - }, - }) -} - -func testAccDataSourceEventHubNamespaceAuthorizationRule_basic(data acceptance.TestData, listen, send, manage bool) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-eventhub-%[1]d" - location = "%[2]s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctest-EHN-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku = "Standard" -} - -resource "azurerm_eventhub_namespace_authorization_rule" "test" { - name = "acctest-EHN-AR%[1]d" - namespace_name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - - listen = %[3]t - send = %[4]t - manage = %[5]t -} - -data "azurerm_eventhub_namespace_authorization_rule" "test" { - name = azurerm_eventhub_namespace_authorization_rule.test.name - namespace_name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, data.RandomInteger, data.Locations.Primary, listen, send, manage) -} - -func testAccDataSourceEventHubNamespaceAuthorizationRule_withAliasConnectionString(data acceptance.TestData) string { - template := testAccAzureRMEventHubNamespaceAuthorizationRule_withAliasConnectionString(data) - return fmt.Sprintf(` -%s - -data "azurerm_eventhub_namespace_authorization_rule" "test" { - name = azurerm_eventhub_namespace_authorization_rule.test.name - namespace_name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, template) -} diff --git a/azurerm/internal/services/eventhub/tests/eventhub_namespace_authorization_rule_resource_test.go b/azurerm/internal/services/eventhub/tests/eventhub_namespace_authorization_rule_resource_test.go deleted file mode 100644 index d4c63a2b8b2e..000000000000 --- a/azurerm/internal/services/eventhub/tests/eventhub_namespace_authorization_rule_resource_test.go +++ /dev/null @@ -1,382 +0,0 @@ -package tests - -import ( - "fmt" - "strconv" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMEventHubNamespaceAuthorizationRule_listen(t *testing.T) { - testAccAzureRMEventHubNamespaceAuthorizationRule(t, true, false, false) -} - -func TestAccAzureRMEventHubNamespaceAuthorizationRule_send(t *testing.T) { - testAccAzureRMEventHubNamespaceAuthorizationRule(t, false, true, false) -} - -func TestAccAzureRMEventHubNamespaceAuthorizationRule_listensend(t *testing.T) { - 
testAccAzureRMEventHubNamespaceAuthorizationRule(t, true, true, false) -} - -func TestAccAzureRMEventHubNamespaceAuthorizationRule_manage(t *testing.T) { - testAccAzureRMEventHubNamespaceAuthorizationRule(t, true, true, true) -} - -func testAccAzureRMEventHubNamespaceAuthorizationRule(t *testing.T, listen, send, manage bool) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace_authorization_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceAuthorizationRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubNamespaceAuthorizationRule_base(data, listen, send, manage), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceAuthorizationRuleExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "namespace_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_connection_string"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_connection_string"), - resource.TestCheckResourceAttr(data.ResourceName, "listen", strconv.FormatBool(listen)), - resource.TestCheckResourceAttr(data.ResourceName, "send", strconv.FormatBool(send)), - resource.TestCheckResourceAttr(data.ResourceName, "manage", strconv.FormatBool(manage)), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventHubNamespaceAuthorizationRule_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace_authorization_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceAuthorizationRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubNamespaceAuthorizationRule_base(data, true, true, true), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceAuthorizationRuleExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMEventHubNamespaceAuthorizationRule_requiresImport(data, true, true, true), - ExpectError: acceptance.RequiresImportError("azurerm_eventhub_namespace_authorization_rule"), - }, - }, - }) -} - -func TestAccAzureRMEventHubNamespaceAuthorizationRule_rightsUpdate(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace_authorization_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceAuthorizationRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubNamespaceAuthorizationRule_base(data, true, false, false), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceAuthorizationRuleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "listen", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "send", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "manage", "false"), - ), - }, - { - Config: testAccAzureRMEventHubNamespaceAuthorizationRule_base(data, true, true, true), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMEventHubNamespaceAuthorizationRuleExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "namespace_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_connection_string"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_connection_string"), - resource.TestCheckResourceAttr(data.ResourceName, "listen", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "send", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "manage", "true"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventHubNamespaceAuthorizationRule_withAliasConnectionString(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace_authorization_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceAuthorizationRuleDestroy, - Steps: []resource.TestStep{ - { - // `primary_connection_string_alias` and `secondary_connection_string_alias` are still `nil` in `azurerm_eventhub_namespace_authorization_rule` after created `azurerm_eventhub_namespace` successfully since `azurerm_eventhub_namespace_disaster_recovery_config` hasn't been created. - // So these two properties should be checked in the second run. - // And `depends_on` cannot be applied to `azurerm_eventhub_namespace_authorization_rule`. - // Because it would throw error message `BreakPairing operation is only allowed on primary namespace with valid secondary namespace.` while destroying `azurerm_eventhub_namespace_disaster_recovery_config` if `depends_on` is applied. 
- Config: testAccAzureRMEventHubNamespaceAuthorizationRule_withAliasConnectionString(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceAuthorizationRuleExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMEventHubNamespaceAuthorizationRule_withAliasConnectionString(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_connection_string_alias"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_connection_string_alias"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventHubNamespaceAuthorizationRule_multi(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace_authorization_rule", "test1") - resourceTwoName := "azurerm_eventhub_namespace_authorization_rule.test2" - resourceThreeName := "azurerm_eventhub_namespace_authorization_rule.test3" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceAuthorizationRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAzureRMEventHubNamespaceAuthorizationRule_multi(data, true, true, true), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceAuthorizationRuleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "manage", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "send", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "listen", "true"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_connection_string"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_connection_string"), - testCheckAzureRMEventHubNamespaceAuthorizationRuleExists(resourceTwoName), - resource.TestCheckResourceAttr(resourceTwoName, "manage", "false"), - resource.TestCheckResourceAttr(resourceTwoName, "send", "true"), - resource.TestCheckResourceAttr(resourceTwoName, "listen", "true"), - resource.TestCheckResourceAttrSet(resourceTwoName, "primary_connection_string"), - resource.TestCheckResourceAttrSet(resourceTwoName, "secondary_connection_string"), - testCheckAzureRMEventHubNamespaceAuthorizationRuleExists(resourceThreeName), - resource.TestCheckResourceAttr(resourceThreeName, "manage", "false"), - resource.TestCheckResourceAttr(resourceThreeName, "send", "true"), - resource.TestCheckResourceAttr(resourceThreeName, "listen", "true"), - resource.TestCheckResourceAttrSet(resourceThreeName, "primary_connection_string"), - resource.TestCheckResourceAttrSet(resourceThreeName, "secondary_connection_string"), - ), - }, - data.ImportStep(), - { - ResourceName: resourceTwoName, - ImportState: true, - ImportStateVerify: true, - }, - { - ResourceName: resourceThreeName, - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func testCheckAzureRMEventHubNamespaceAuthorizationRuleDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Eventhub.NamespacesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_eventhub_authorization_rule" { - continue - } - - name := rs.Primary.Attributes["name"] - namespaceName := rs.Primary.Attributes["namespace_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.GetAuthorizationRule(ctx, resourceGroup, namespaceName, name) - if err != nil { - if 
!utils.ResponseWasNotFound(resp.Response) { - return err - } - } - } - - return nil -} - -func testCheckAzureRMEventHubNamespaceAuthorizationRuleExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Eventhub.NamespacesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - namespaceName := rs.Primary.Attributes["namespace_name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Event Hub: %s", name) - } - - resp, err := client.GetAuthorizationRule(ctx, resourceGroup, namespaceName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Event Hub Namespace Authorization Rule %q (namespace %q / resource group: %q) does not exist", name, namespaceName, resourceGroup) - } - - return fmt.Errorf("Bad: Get on eventHubClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMEventHubNamespaceAuthorizationRule_base(data acceptance.TestData, listen, send, manage bool) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-eventhub-%[1]d" - location = "%[2]s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctest-EHN-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku = "Standard" -} - -resource "azurerm_eventhub_namespace_authorization_rule" "test" { - name = "acctest-EHN-AR%[1]d" - namespace_name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - - listen = %[3]t - send = %[4]t - manage = %[5]t -} -`, data.RandomInteger, data.Locations.Primary, listen, send, manage) -} - -func testAccAzureRMEventHubNamespaceAuthorizationRule_withAliasConnectionString(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-ehnar-%[1]d" - location = "%[2]s" -} - -resource "azurerm_resource_group" "test2" { - name = "acctestRG2-ehnar-%[1]d" - location = "%[3]s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" -} - -resource "azurerm_eventhub_namespace" "test2" { - name = "acctesteventhubnamespace2-%[1]d" - location = azurerm_resource_group.test2.location - resource_group_name = azurerm_resource_group.test2.name - sku = "Standard" -} - -resource "azurerm_eventhub_namespace_disaster_recovery_config" "test" { - name = "acctest-EHN-DRC-%[1]d" - resource_group_name = azurerm_resource_group.test.name - namespace_name = azurerm_eventhub_namespace.test.name - partner_namespace_id = azurerm_eventhub_namespace.test2.id -} - -resource "azurerm_eventhub_namespace_authorization_rule" "test" { - name = "acctest-EHN-AR%[1]d" - namespace_name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - - listen = true - send = true - manage = true -} -`, data.RandomInteger, data.Locations.Primary, 
data.Locations.Secondary) -} - -func testAccAzureRMEventHubNamespaceAuthorizationRule_requiresImport(data acceptance.TestData, listen, send, manage bool) string { - template := testAccAzureRMEventHubNamespaceAuthorizationRule_base(data, listen, send, manage) - return fmt.Sprintf(` -%s - -resource "azurerm_eventhub_namespace_authorization_rule" "import" { - name = azurerm_eventhub_namespace_authorization_rule.test.name - namespace_name = azurerm_eventhub_namespace_authorization_rule.test.namespace_name - resource_group_name = azurerm_eventhub_namespace_authorization_rule.test.resource_group_name - listen = azurerm_eventhub_namespace_authorization_rule.test.listen - send = azurerm_eventhub_namespace_authorization_rule.test.send - manage = azurerm_eventhub_namespace_authorization_rule.test.manage -} -`, template) -} - -func testAzureRMEventHubNamespaceAuthorizationRule_multi(data acceptance.TestData, listen, send, manage bool) string { - template := testAccAzureRMEventHubNamespaceAuthorizationRule_base(data, listen, send, manage) - return fmt.Sprintf(` -%s - -resource "azurerm_eventhub_namespace_authorization_rule" "test1" { - name = "acctestruleone-%d" - namespace_name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - - send = true - listen = true - manage = false -} - -resource "azurerm_eventhub_namespace_authorization_rule" "test2" { - name = "acctestruletwo-%d" - namespace_name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - - send = true - listen = true - manage = false -} - -resource "azurerm_eventhub_namespace_authorization_rule" "test3" { - name = "acctestrulethree-%d" - namespace_name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - - send = true - listen = true - manage = false -} -`, template, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/eventhub/tests/eventhub_namespace_data_source_test.go b/azurerm/internal/services/eventhub/tests/eventhub_namespace_data_source_test.go deleted file mode 100644 index 12020c172f8f..000000000000 --- a/azurerm/internal/services/eventhub/tests/eventhub_namespace_data_source_test.go +++ /dev/null @@ -1,134 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMEventHubNamespace_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_eventhub_namespace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceEventHubNamespace_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "sku", "Basic"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMEventHubNamespace_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_eventhub_namespace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceEventHubNamespace_complete(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "sku", "Standard"), - 
resource.TestCheckResourceAttr(data.ResourceName, "capacity", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "auto_inflate_enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "maximum_throughput_units", "20"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMEventHubNamespace_withAliasConnectionString(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_eventhub_namespace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - // `default_primary_connection_string_alias` and `default_secondary_connection_string_alias` are still `nil` while `data.azurerm_eventhub_namespace` is retrieving resource. since `azurerm_eventhub_namespace_disaster_recovery_config` hasn't been created. - // So these two properties should be checked in the second run. - Config: testAccAzureRMEventHubNamespace_withAliasConnectionString(data), - }, - { - Config: testAccDataSourceEventHubNamespace_withAliasConnectionString(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "default_primary_connection_string_alias"), - resource.TestCheckResourceAttrSet(data.ResourceName, "default_secondary_connection_string_alias"), - ), - }, - }, - }) -} - -func testAccDataSourceEventHubNamespace_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Basic" -} - -data "azurerm_eventhub_namespace" "test" { - name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_eventhub_namespace.test.resource_group_name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccDataSourceEventHubNamespace_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - capacity = "2" - auto_inflate_enabled = true - maximum_throughput_units = 20 -} - -data "azurerm_eventhub_namespace" "test" { - name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_eventhub_namespace.test.resource_group_name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccDataSourceEventHubNamespace_withAliasConnectionString(data acceptance.TestData) string { - template := testAccAzureRMEventHubNamespace_withAliasConnectionString(data) - return fmt.Sprintf(` -%s - -data "azurerm_eventhub_namespace" "test" { - name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_eventhub_namespace.test.resource_group_name -} -`, template) -} diff --git a/azurerm/internal/services/eventhub/tests/eventhub_namespace_disaster_recovery_config_resource_test.go b/azurerm/internal/services/eventhub/tests/eventhub_namespace_disaster_recovery_config_resource_test.go deleted file mode 100644 index 056debd1a0ce..000000000000 --- 
a/azurerm/internal/services/eventhub/tests/eventhub_namespace_disaster_recovery_config_resource_test.go +++ /dev/null @@ -1,279 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMEventHubNamespaceDisasterRecoveryConfig_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace_disaster_recovery_config", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceDisasterRecoveryConfigDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubNamespaceDisasterRecoveryConfig_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceDisasterRecoveryConfigExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventHubNamespaceDisasterRecoveryConfig_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace_disaster_recovery_config", "test") - - // skipping due to there being no way to delete a DRC once an alternate name has been set - // sdk bug: https://github.com/Azure/azure-sdk-for-go/issues/5893 - t.Skip() - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceDisasterRecoveryConfigDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubNamespaceDisasterRecoveryConfig_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceDisasterRecoveryConfigExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventHubNamespaceDisasterRecoveryConfig_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace_disaster_recovery_config", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceDisasterRecoveryConfigDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubNamespaceDisasterRecoveryConfig_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceDisasterRecoveryConfigExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMEventHubNamespaceDisasterRecoveryConfig_updated(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceDisasterRecoveryConfigExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMEventHubNamespaceDisasterRecoveryConfig_updated_removed(data), - }, - }, - }) -} - -func testCheckAzureRMEventHubNamespaceDisasterRecoveryConfigDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Eventhub.DisasterRecoveryConfigsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_eventhub_namespace_disaster_recovery_config" { - continue - } - - name := rs.Primary.Attributes["name"] - 
namespaceName := rs.Primary.Attributes["namespace_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, namespaceName, name) - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: EventHub Namespace Disaster Recovery Configs %q (namespace %q / resource group: %q): %+v", name, namespaceName, resourceGroup, err) - } - } - } - - return nil -} - -func testCheckAzureRMEventHubNamespaceDisasterRecoveryConfigExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Eventhub.DisasterRecoveryConfigsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - namespaceName := rs.Primary.Attributes["namespace_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, namespaceName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: EventHub Namespace Disaster Recovery Configs %q (namespace %q / resource group: %q) does not exist", name, namespaceName, resourceGroup) - } - - return fmt.Errorf("Bad: EventHub Namespace Disaster Recovery Configs %q (namespace %q / resource group: %q): %+v", name, namespaceName, resourceGroup, err) - } - - return nil - } -} - -func testAccAzureRMEventHubNamespaceDisasterRecoveryConfig_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-eventhub-%[1]d" - location = "%[2]s" -} - -resource "azurerm_eventhub_namespace" "testa" { - name = "acctest-EHN-%[1]d-a" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" -} - -resource "azurerm_eventhub_namespace" "testb" { - name = "acctest-EHN-%[1]d-b" - location = "%[3]s" - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" -} - -resource "azurerm_eventhub_namespace_disaster_recovery_config" "test" { - name = "acctest-EHN-DRC-%[1]d" - resource_group_name = azurerm_resource_group.test.name - namespace_name = azurerm_eventhub_namespace.testa.name - partner_namespace_id = azurerm_eventhub_namespace.testb.id -} -`, data.RandomInteger, data.Locations.Primary, data.Locations.Secondary) -} - -// nolint unused - mistakenly marked as unused -func testAccAzureRMEventHubNamespaceDisasterRecoveryConfig_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-eventhub-%[1]d" - location = "%[2]s" -} - -resource "azurerm_eventhub_namespace" "testa" { - name = "acctest-EHN-%[1]d-a" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" -} - -resource "azurerm_eventhub_namespace" "testb" { - name = "acctest-EHN-%[1]d-b" - location = "%[3]s" - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" -} - -resource "azurerm_eventhub_namespace_disaster_recovery_config" "test" { - name = "${azurerm_eventhub_namespace.testa.name}-111" - resource_group_name = azurerm_resource_group.test.name - 
namespace_name = azurerm_eventhub_namespace.testa.name - partner_namespace_id = azurerm_eventhub_namespace.testb.id - alternate_name = "acctest-EHN-DRC-%[1]d-alt" -} -`, data.RandomInteger, data.Locations.Primary, data.Locations.Secondary) -} - -func testAccAzureRMEventHubNamespaceDisasterRecoveryConfig_updated(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-eventhub-%[1]d" - location = "%[2]s" -} - -resource "azurerm_eventhub_namespace" "testa" { - name = "acctest-EHN-%[1]d-a" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" -} - -resource "azurerm_eventhub_namespace" "testb" { - name = "acctest-EHN-%[1]d-b" - location = "%[3]s" - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" -} - -resource "azurerm_eventhub_namespace" "testc" { - name = "acctest-EHN-%[1]d-c" - location = "%[3]s" - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" -} - -resource "azurerm_eventhub_namespace_disaster_recovery_config" "test" { - name = "acctest-EHN-DRC-%[1]d" - resource_group_name = azurerm_resource_group.test.name - namespace_name = azurerm_eventhub_namespace.testa.name - partner_namespace_id = azurerm_eventhub_namespace.testc.id -} -`, data.RandomInteger, data.Locations.Primary, data.Locations.Secondary) -} - -func testAccAzureRMEventHubNamespaceDisasterRecoveryConfig_updated_removed(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-eventhub-%[1]d" - location = "%[2]s" -} - -resource "azurerm_eventhub_namespace" "testa" { - name = "acctest-EHN-%[1]d-a" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" -} - -resource "azurerm_eventhub_namespace" "testb" { - name = "acctest-EHN-%[1]d-b" - location = "%[3]s" - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" -} - -resource "azurerm_eventhub_namespace" "testc" { - name = "acctest-EHN-%[1]d-c" - location = "%[3]s" - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" -} -`, data.RandomInteger, data.Locations.Primary, data.Locations.Secondary) -} diff --git a/azurerm/internal/services/eventhub/tests/eventhub_namespace_resource_test.go b/azurerm/internal/services/eventhub/tests/eventhub_namespace_resource_test.go deleted file mode 100644 index e156104de76a..000000000000 --- a/azurerm/internal/services/eventhub/tests/eventhub_namespace_resource_test.go +++ /dev/null @@ -1,1024 +0,0 @@ -package tests - -import ( - "fmt" - "regexp" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMEventHubNamespace_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceDestroy, - Steps: []resource.TestStep{ - { - Config: 
testAccAzureRMEventHubNamespace_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventHubNamespace_basicWithIdentity(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubNamespace_basicWithIdentity(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventHubNamespace_basicUpdateIdentity(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubNamespace_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMEventHubNamespace_basicWithIdentity(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventHubNamespace_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubNamespace_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMEventHubNamespace_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_eventhub_namespace"), - }, - }, - }) -} - -func TestAccAzureRMEventHubNamespace_standard(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubNamespace_standard(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventHubNamespace_standardWithIdentity(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubNamespace_standardWithIdentity(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventHubNamespace_standardUpdateIdentity(t *testing.T) { - data := 
acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubNamespace_standard(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMEventHubNamespace_standardWithIdentity(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventHubNamespace_networkrule_iprule(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubNamespace_networkrule_iprule(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventHubNamespace_networkrule_vnet(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubNamespace_networkrule_vnet(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventHubNamespace_networkruleVnetIpRule(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubNamespace_networkruleVnetIpRule(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "network_rulesets.0.virtual_network_rule.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "network_rulesets.0.ip_rule.#", "2"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventHubNamespace_readDefaultKeys(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubNamespace_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceExists(data.ResourceName), - resource.TestMatchResourceAttr(data.ResourceName, "default_primary_connection_string", regexp.MustCompile("Endpoint=.+")), - resource.TestMatchResourceAttr(data.ResourceName, "default_secondary_connection_string", regexp.MustCompile("Endpoint=.+")), - resource.TestMatchResourceAttr(data.ResourceName, "default_primary_key", 
regexp.MustCompile(".+")), - resource.TestMatchResourceAttr(data.ResourceName, "default_secondary_key", regexp.MustCompile(".+")), - ), - }, - }, - }) -} - -func TestAccAzureRMEventHubNamespace_withAliasConnectionString(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceDestroy, - Steps: []resource.TestStep{ - { - // `default_primary_connection_string_alias` and `default_secondary_connection_string_alias` are still `nil` in `azurerm_eventhub_namespace` after created `azurerm_eventhub_namespace` successfully since `azurerm_eventhub_namespace_disaster_recovery_config` hasn't been created. - // So these two properties should be checked in the second run. - Config: testAccAzureRMEventHubNamespace_withAliasConnectionString(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMEventHubNamespace_withAliasConnectionString(data), - Check: resource.ComposeTestCheckFunc( - resource.TestMatchResourceAttr(data.ResourceName, "default_primary_connection_string_alias", regexp.MustCompile("Endpoint=.+")), - resource.TestMatchResourceAttr(data.ResourceName, "default_secondary_connection_string_alias", regexp.MustCompile("Endpoint=.+")), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventHubNamespace_maximumThroughputUnits(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubNamespace_maximumThroughputUnits(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventHubNamespace_zoneRedundant(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubNamespace_zoneRedundant(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventHubNamespace_dedicatedClusterID(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubNamespace_dedicatedClusterID(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventHubNamespace_NonStandardCasing(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: 
acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubNamespaceNonStandardCasing(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceExists("azurerm_eventhub_namespace.test"), - ), - }, - { - Config: testAccAzureRMEventHubNamespaceNonStandardCasing(data), - PlanOnly: true, - ExpectNonEmptyPlan: false, - }, - }, - }) -} - -func TestAccAzureRMEventHubNamespace_BasicWithTagsUpdate(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubNamespace_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMEventHubNamespace_basicWithTagsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - }, - }) -} - -func TestAccAzureRMEventHubNamespace_BasicWithCapacity(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubNamespace_capacity(data, 20), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "capacity", "20"), - ), - }, - }, - }) -} - -func TestAccAzureRMEventHubNamespace_BasicWithCapacityUpdate(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubNamespace_capacity(data, 20), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "capacity", "20"), - ), - }, - { - Config: testAccAzureRMEventHubNamespace_capacity(data, 2), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "capacity", "2"), - ), - }, - }, - }) -} - -func TestAccAzureRMEventHubNamespace_BasicWithSkuUpdate(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubNamespace_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku", "Basic"), - ), - }, - { - Config: testAccAzureRMEventHubNamespace_standard(data), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMEventHubNamespaceExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku", "Standard"), - resource.TestCheckResourceAttr(data.ResourceName, "capacity", "2"), - ), - }, - }, - }) -} - -func TestAccAzureRMEventHubNamespace_maximumThroughputUnitsUpdate(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubNamespace_maximumThroughputUnits(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku", "Standard"), - resource.TestCheckResourceAttr(data.ResourceName, "capacity", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "maximum_throughput_units", "20"), - ), - }, - { - Config: testAccAzureRMEventHubNamespace_maximumThroughputUnitsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku", "Standard"), - resource.TestCheckResourceAttr(data.ResourceName, "capacity", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "maximum_throughput_units", "1"), - ), - }, - }, - }) -} - -func TestAccAzureRMEventHubNamespace_autoInfalteDisabledWithAutoInflateUnits(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub_namespace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubNamespaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHubNamespace_autoInfalteDisabledWithAutoInflateUnits(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubNamespaceExists(data.ResourceName), - ), - }, - }, - }) -} - -func testCheckAzureRMEventHubNamespaceDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Eventhub.NamespacesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_eventhub_namespace" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, name) - - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return err - } - } - } - - return nil -} - -func testCheckAzureRMEventHubNamespaceExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Eventhub.NamespacesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - namespaceName := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Event Hub Namespace: %s", namespaceName) - } - - resp, err := conn.Get(ctx, resourceGroup, namespaceName) - if err != nil { - if 
utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Event Hub Namespace %q (resource group: %q) does not exist", namespaceName, resourceGroup) - } - - return fmt.Errorf("Bad: Get on eventHubNamespacesClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMEventHubNamespace_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Basic" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMEventHubNamespace_basicWithIdentity(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Basic" - - identity { - type = "SystemAssigned" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMEventHubNamespace_withAliasConnectionString(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-ehn-%[1]d" - location = "%[2]s" -} - -resource "azurerm_resource_group" "test2" { - name = "acctestRG2-ehn-%[1]d" - location = "%[3]s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku = "Standard" -} - -resource "azurerm_eventhub_namespace" "test2" { - name = "acctesteventhubnamespace2-%[1]d" - location = azurerm_resource_group.test2.location - resource_group_name = azurerm_resource_group.test2.name - - sku = "Standard" -} - -resource "azurerm_eventhub_namespace_disaster_recovery_config" "test" { - name = "acctest-EHN-DRC-%[1]d" - resource_group_name = azurerm_resource_group.test.name - namespace_name = azurerm_eventhub_namespace.test.name - partner_namespace_id = azurerm_eventhub_namespace.test2.id -} -`, data.RandomInteger, data.Locations.Primary, data.Locations.Secondary) -} - -func testAccAzureRMEventHubNamespace_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMEventHubNamespace_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_eventhub_namespace" "import" { - name = azurerm_eventhub_namespace.test.name - location = azurerm_eventhub_namespace.test.location - resource_group_name = azurerm_eventhub_namespace.test.resource_group_name - sku = azurerm_eventhub_namespace.test.sku -} -`, template) -} - -func testAccAzureRMEventHubNamespace_standard(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - capacity = "2" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - 
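A quick aside on the fmt.Sprintf templating used by these config builders: verbs such as %[1]d and %[2]s are explicit argument indexes, so a single data.RandomInteger or location argument can be reused throughout the rendered HCL without being passed repeatedly, while the plain %d / %s builders pass each value once per use. A minimal standalone sketch of the indexed form (the resource names and literal values below are illustrative only, not taken from this change):

package main

import "fmt"

func main() {
	// %[1]d and %[2]s are explicit argument indexes: the same argument can be
	// referenced several times while being supplied only once, which is why the
	// indexed config builders hand fmt.Sprintf just the random integer and the
	// location(s).
	config := fmt.Sprintf(`
resource "azurerm_resource_group" "example" {
  name     = "acctestRG-%[1]d"
  location = "%[2]s"
}

resource "azurerm_eventhub_namespace" "example" {
  name                = "acctest-EHN-%[1]d"
  location            = "%[2]s"
  resource_group_name = "acctestRG-%[1]d"
  sku                 = "Standard"
}
`, 12345, "West Europe")

	fmt.Println(config)
}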
-func testAccAzureRMEventHubNamespace_standardWithIdentity(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - capacity = "2" - - identity { - type = "SystemAssigned" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMEventHubNamespace_networkrule_iprule(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - capacity = "2" - - network_rulesets { - default_action = "Deny" - ip_rule { - ip_mask = "10.0.0.0/16" - } - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMEventHubNamespace_networkrule_vnet(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%[1]d" - location = "%[2]s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctvn-%[1]d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctsub-%[1]d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.2.0/24" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - capacity = "2" - - network_rulesets { - default_action = "Deny" - virtual_network_rule { - subnet_id = azurerm_subnet.test.id - - ignore_missing_virtual_network_service_endpoint = true - } - } -} -`, data.RandomInteger, data.Locations.Primary) -} - -func testAccAzureRMEventHubNamespace_networkruleVnetIpRule(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%[1]d" - location = "%[2]s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctvn1-%[1]d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctsub1-%[1]d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.1.0/24" - service_endpoints = ["Microsoft.EventHub"] -} - -resource "azurerm_virtual_network" "test2" { - name = "acctvn2-%[1]d" - address_space = ["10.1.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test2" { - name = "acctsub2-%[1]d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test2.name - address_prefix = 
"10.1.1.0/24" - service_endpoints = ["Microsoft.EventHub"] -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - capacity = "2" - - network_rulesets { - default_action = "Deny" - - virtual_network_rule { - subnet_id = azurerm_subnet.test.id - } - - virtual_network_rule { - subnet_id = azurerm_subnet.test2.id - } - - ip_rule { - ip_mask = "10.0.1.0/24" - } - - ip_rule { - ip_mask = "10.1.1.0/24" - } - } -} -`, data.RandomInteger, data.Locations.Primary) -} - -func testAccAzureRMEventHubNamespaceNonStandardCasing(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "basic" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMEventHubNamespace_maximumThroughputUnits(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - capacity = "2" - auto_inflate_enabled = true - maximum_throughput_units = 20 -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMEventHubNamespace_zoneRedundant(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - capacity = "2" - zone_redundant = true -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMEventHubNamespace_dedicatedClusterID(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_eventhub_cluster" "test" { - name = "acctesteventhubcluster-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - sku_name = "Dedicated_1" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - capacity = "2" - dedicated_cluster_id = azurerm_eventhub_cluster.test.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMEventHubNamespace_basicWithTagsUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%d" - location = 
azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Basic" - - tags = { - environment = "Production" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMEventHubNamespace_capacity(data acceptance.TestData, capacity int) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Basic" - capacity = %d -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, capacity) -} - -func testAccAzureRMEventHubNamespace_maximumThroughputUnitsUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - capacity = 1 - auto_inflate_enabled = true - maximum_throughput_units = 1 -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMEventHubNamespace_autoInfalteDisabledWithAutoInflateUnits(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - capacity = 1 - auto_inflate_enabled = false - maximum_throughput_units = 0 -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/eventhub/tests/eventhub_resource_test.go b/azurerm/internal/services/eventhub/tests/eventhub_resource_test.go deleted file mode 100644 index 5a477fa65984..000000000000 --- a/azurerm/internal/services/eventhub/tests/eventhub_resource_test.go +++ /dev/null @@ -1,593 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "strconv" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventhub" -) - -func TestAccAzureRMEventHubPartitionCount_validation(t *testing.T) { - cases := []struct { - Value int - ErrCount int - }{ - { - Value: 0, - ErrCount: 1, - }, - { - Value: 1, - ErrCount: 0, - }, - { - Value: 2, - ErrCount: 0, - }, - { - Value: 3, - ErrCount: 0, - }, - { - Value: 21, - ErrCount: 0, - }, - { - Value: 1024, - ErrCount: 0, - }, - { - Value: 1025, - ErrCount: 1, - }, - } - - for _, tc := range cases { - _, errors := eventhub.ValidateEventHubPartitionCount(tc.Value, "azurerm_eventhub") - - if len(errors) != tc.ErrCount { - t.Fatalf("Expected the Azure RM EventHub Partition Count to trigger a validation error") - } - } -} - -func TestAccAzureRMEventHubMessageRetentionCount_validation(t *testing.T) { 
- cases := []struct { - Value int - ErrCount int - }{ - { - Value: 0, - ErrCount: 1, - }, { - Value: 1, - ErrCount: 0, - }, { - Value: 2, - ErrCount: 0, - }, { - Value: 3, - ErrCount: 0, - }, { - Value: 4, - ErrCount: 0, - }, { - Value: 5, - ErrCount: 0, - }, { - Value: 6, - ErrCount: 0, - }, { - Value: 90, - ErrCount: 0, - }, { - Value: 91, - ErrCount: 1, - }, - } - - for _, tc := range cases { - _, errors := eventhub.ValidateEventHubMessageRetentionCount(tc.Value, "azurerm_eventhub") - - if len(errors) != tc.ErrCount { - t.Fatalf("Expected the Azure RM EventHub Message Retention Count to trigger a validation error") - } - } -} - -func TestAccAzureRMEventHubArchiveNameFormat_validation(t *testing.T) { - cases := []struct { - Value string - ErrCount int - }{ - { - Value: "", - ErrCount: 9, - }, - { - Value: "Prod_{EventHub}/{Namespace}\\{PartitionId}_{Year}_{Month}/{Day}/{Hour}/{Minute}/{Second}", - ErrCount: 0, - }, - { - Value: "Prod_{Eventub}/{Namespace}\\{PartitionId}_{Year}_{Month}/{Day}/{Hour}/{Minute}/{Second}", - ErrCount: 1, - }, - { - Value: "{Namespace}\\{PartitionId}_{Year}_{Month}/{Day}/{Hour}/{Minute}/{Second}", - ErrCount: 1, - }, - { - Value: "{Namespace}\\{PartitionId}_{Year}_{Month}/{Day}/{Hour}/{Minute}/{Second}", - ErrCount: 1, - }, - { - Value: "Prod_{EventHub}/{PartitionId}_{Year}_{Month}/{Day}/{Hour}/{Minute}/{Second}", - ErrCount: 1, - }, - { - Value: "Prod_{EventHub}/{Namespace}\\{Year}_{Month}/{Day}/{Hour}/{Minute}/{Second}", - ErrCount: 1, - }, - { - Value: "Prod_{EventHub}/{Namespace}\\{PartitionId}_{Month}/{Day}/{Hour}/{Minute}/{Second}", - ErrCount: 1, - }, - { - Value: "Prod_{EventHub}/{Namespace}\\{PartitionId}_{Year}/{Day}/{Hour}/{Minute}/{Second}", - ErrCount: 1, - }, - { - Value: "Prod_{EventHub}/{Namespace}\\{PartitionId}_{Year}_{Month}/{Hour}/{Minute}/{Second}", - ErrCount: 1, - }, - { - Value: "Prod_{EventHub}/{Namespace}\\{PartitionId}_{Year}_{Month}/{Day}/{Minute}/{Second}", - ErrCount: 1, - }, - { - Value: "Prod_{EventHub}/{Namespace}\\{PartitionId}_{Year}_{Month}/{Day}/{Hour}/{Second}", - ErrCount: 1, - }, - { - Value: "Prod_{EventHub}/{Namespace}\\{PartitionId}_{Year}_{Month}/{Day}/{Hour}/{Minute}", - ErrCount: 1, - }, - } - - for _, tc := range cases { - _, errors := eventhub.ValidateEventHubArchiveNameFormat(tc.Value, "azurerm_eventhub") - - if len(errors) != tc.ErrCount { - t.Fatalf("Expected %q to trigger a validation error", tc.Value) - } - } -} - -func TestAccAzureRMEventHub_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHub_basic(data, 2), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventHub_basicOnePartition(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHub_basic(data, 1), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "partition_count", "1"), - ), - }, - 
data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventHub_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHub_basic(data, 2), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMEventHub_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_eventhub"), - }, - }, - }) -} - -func TestAccAzureRMEventHub_partitionCountUpdate(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHub_basic(data, 2), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "partition_count", "2"), - ), - }, - { - Config: testAccAzureRMEventHub_partitionCountUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "partition_count", "10"), - ), - }, - }, - }) -} - -func TestAccAzureRMEventHub_standard(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHub_standard(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventHub_captureDescription(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHub_captureDescription(data, true), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "capture_description.0.enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "capture_description.0.skip_empty_archives", "true"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMEventHub_captureDescriptionDisabled(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHub_captureDescription(data, true), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "capture_description.0.enabled", "true"), - ), - }, - { - Config: testAccAzureRMEventHub_captureDescription(data, false), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMEventHubExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "capture_description.0.enabled", "false"), - ), - }, - }, - }) -} - -func TestAccAzureRMEventHub_messageRetentionUpdate(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_eventhub", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMEventHubDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMEventHub_standard(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "message_retention", "7"), - ), - }, - { - Config: testAccAzureRMEventHub_messageRetentionUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMEventHubExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "message_retention", "5"), - ), - }, - }, - }) -} - -func testCheckAzureRMEventHubDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Eventhub.EventHubsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_eventhub" { - continue - } - - name := rs.Primary.Attributes["name"] - namespaceName := rs.Primary.Attributes["namespace_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, namespaceName, name) - - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("EventHub still exists:\n%#v", resp.Properties) - } - } - - return nil -} - -func testCheckAzureRMEventHubExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Eventhub.EventHubsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - namespaceName := rs.Primary.Attributes["namespace_name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Event Hub: %s", name) - } - - resp, err := conn.Get(ctx, resourceGroup, namespaceName, name) - if err != nil { - return fmt.Errorf("Bad: Get on eventHubClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Event Hub %q (namespace %q / resource group: %q) does not exist", name, namespaceName, resourceGroup) - } - - return nil - } -} - -func testAccAzureRMEventHub_basic(data acceptance.TestData, partitionCount int) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-eventhub-%d" - location = "%s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Basic" -} - -resource "azurerm_eventhub" "test" { - name = "acctesteventhub-%d" - namespace_name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - partition_count = %d - 
message_retention = 1 -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, partitionCount) -} - -func testAccAzureRMEventHub_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMEventHub_basic(data, 2) - return fmt.Sprintf(` -%s - -resource "azurerm_eventhub" "import" { - name = azurerm_eventhub.test.name - namespace_name = azurerm_eventhub.test.namespace_name - resource_group_name = azurerm_eventhub.test.resource_group_name - partition_count = azurerm_eventhub.test.partition_count - message_retention = azurerm_eventhub.test.message_retention -} -`, template) -} - -func testAccAzureRMEventHub_partitionCountUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-eventhub-%d" - location = "%s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Basic" -} - -resource "azurerm_eventhub" "test" { - name = "acctesteventhub-%d" - namespace_name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - partition_count = 10 - message_retention = 1 -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMEventHub_standard(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-eventhub-%d" - location = "%s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctest-EHN-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" -} - -resource "azurerm_eventhub" "test" { - name = "acctest-EH-%d" - namespace_name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - partition_count = 2 - message_retention = 7 -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMEventHub_captureDescription(data acceptance.TestData, enabled bool) string { - enabledString := strconv.FormatBool(enabled) - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-eventhub-%d" - location = "%s" -} - -resource "azurerm_storage_account" "test" { - name = "acctestsa%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_storage_container" "test" { - name = "acctest" - storage_account_name = azurerm_storage_account.test.name - container_access_type = "private" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctest-EHN%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" -} - -resource "azurerm_eventhub" "test" { - name = "acctest-EH%d" - namespace_name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - partition_count = 2 - message_retention = 7 - - capture_description { - enabled = %s - encoding = "Avro" - interval_in_seconds = 60 - size_limit_in_bytes = 10485760 - skip_empty_archives = true - - destination { - name = "EventHubArchive.AzureBlockBlob" - 
archive_name_format = "Prod_{EventHub}/{Namespace}\\{PartitionId}_{Year}_{Month}/{Day}/{Hour}/{Minute}/{Second}" - blob_container_name = azurerm_storage_container.test.name - storage_account_id = azurerm_storage_account.test.id - } - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, enabledString) -} - -func testAccAzureRMEventHub_messageRetentionUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-eventhub-%d" - location = "%s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctest-EHN-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" -} - -resource "azurerm_eventhub" "test" { - name = "acctest-EH-%d" - namespace_name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - partition_count = 2 - message_retention = 5 -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/eventhub/validate/cluster_id.go b/azurerm/internal/services/eventhub/validate/cluster_id.go new file mode 100644 index 000000000000..b26418d3b07e --- /dev/null +++ b/azurerm/internal/services/eventhub/validate/cluster_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventhub/parse" +) + +func ClusterID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ClusterID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/eventhub/validate/cluster_id_test.go b/azurerm/internal/services/eventhub/validate/cluster_id_test.go new file mode 100644 index 000000000000..07048bd785f4 --- /dev/null +++ b/azurerm/internal/services/eventhub/validate/cluster_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestClusterID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/clusters/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/clusters/cluster1", + Valid: true, + }, + + { + // upper-cased + Input: 
"/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.EVENTHUB/CLUSTERS/CLUSTER1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ClusterID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/eventhub/validate/event_hub_consumer_group_id.go b/azurerm/internal/services/eventhub/validate/event_hub_consumer_group_id.go new file mode 100644 index 000000000000..91259b84fe69 --- /dev/null +++ b/azurerm/internal/services/eventhub/validate/event_hub_consumer_group_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventhub/parse" +) + +func EventHubConsumerGroupID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.EventHubConsumerGroupID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/eventhub/validate/event_hub_consumer_group_id_test.go b/azurerm/internal/services/eventhub/validate/event_hub_consumer_group_id_test.go new file mode 100644 index 000000000000..80d4864905b5 --- /dev/null +++ b/azurerm/internal/services/eventhub/validate/event_hub_consumer_group_id_test.go @@ -0,0 +1,100 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestEventHubConsumerGroupID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing NamespaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/", + Valid: false, + }, + + { + // missing value for NamespaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/", + Valid: false, + }, + + { + // missing EventhubName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/namespace1/", + Valid: false, + }, + + { + // missing value for EventhubName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/namespace1/eventhubs/", + Valid: false, + }, + + { + // missing ConsumergroupName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/namespace1/eventhubs/eventhub1/", + Valid: false, + }, + + { + // missing value for ConsumergroupName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/namespace1/eventhubs/eventhub1/consumergroups/", + 
Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/namespace1/eventhubs/eventhub1/consumergroups/consumergroup1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.EVENTHUB/NAMESPACES/NAMESPACE1/EVENTHUBS/EVENTHUB1/CONSUMERGROUPS/CONSUMERGROUP1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := EventHubConsumerGroupID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/eventhub/validate/eventhub_archive.go b/azurerm/internal/services/eventhub/validate/eventhub_archive.go new file mode 100644 index 000000000000..00ebdb0eee27 --- /dev/null +++ b/azurerm/internal/services/eventhub/validate/eventhub_archive.go @@ -0,0 +1,30 @@ +package validate + +import ( + "fmt" + "strings" +) + +func ValidateEventHubArchiveNameFormat(v interface{}, k string) (warnings []string, errors []error) { + value := v.(string) + + requiredComponents := []string{ + "{Namespace}", + "{EventHub}", + "{PartitionId}", + "{Year}", + "{Month}", + "{Day}", + "{Hour}", + "{Minute}", + "{Second}", + } + + for _, component := range requiredComponents { + if !strings.Contains(value, component) { + errors = append(errors, fmt.Errorf("%s needs to contain %q", k, component)) + } + } + + return warnings, errors +} diff --git a/azurerm/internal/services/eventhub/validate/eventhub_dedicated_cluster_id.go b/azurerm/internal/services/eventhub/validate/eventhub_dedicated_cluster_id.go deleted file mode 100644 index d4b15a75691b..000000000000 --- a/azurerm/internal/services/eventhub/validate/eventhub_dedicated_cluster_id.go +++ /dev/null @@ -1,22 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventhub/parse" -) - -func ValidateEventHubDedicatedClusterID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.ClusterID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} diff --git a/azurerm/internal/services/eventhub/validate/eventhub_message_retention.go b/azurerm/internal/services/eventhub/validate/eventhub_message_retention.go new file mode 100644 index 000000000000..9dc6faf4dc67 --- /dev/null +++ b/azurerm/internal/services/eventhub/validate/eventhub_message_retention.go @@ -0,0 +1,13 @@ +package validate + +import "fmt" + +func ValidateEventHubMessageRetentionCount(v interface{}, _ string) (warnings []string, errors []error) { + value := v.(int) + + if !(90 >= value && value >= 1) { + errors = append(errors, fmt.Errorf("EventHub Retention Count has to be between 1 and 7 or between 1 and 90 if using a dedicated Event Hubs Cluster")) + } + + return warnings, errors +} diff --git a/azurerm/internal/services/eventhub/validate/eventhub_partition.go b/azurerm/internal/services/eventhub/validate/eventhub_partition.go new file mode 100644 index 000000000000..a1b9b1faf0a9 --- /dev/null +++ b/azurerm/internal/services/eventhub/validate/eventhub_partition.go @@ -0,0 +1,13 @@ +package validate + +import "fmt" + +func 
ValidateEventHubPartitionCount(v interface{}, _ string) (warnings []string, errors []error) { + value := v.(int) + + if !(1024 >= value && value >= 1) { + errors = append(errors, fmt.Errorf("EventHub Partition Count has to be between 1 and 32 or between 1 and 1024 if using a dedicated Event Hubs Cluster")) + } + + return warnings, errors +} diff --git a/azurerm/internal/services/eventhub/validate/namespace.go b/azurerm/internal/services/eventhub/validate/namespace.go deleted file mode 100644 index 8c96612d02e1..000000000000 --- a/azurerm/internal/services/eventhub/validate/namespace.go +++ /dev/null @@ -1,22 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventhub/parse" -) - -func NamespaceID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.NamespaceID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} diff --git a/azurerm/internal/services/eventhub/validate/namespace_authorization_rule_id.go b/azurerm/internal/services/eventhub/validate/namespace_authorization_rule_id.go new file mode 100644 index 000000000000..94c7eddf15f7 --- /dev/null +++ b/azurerm/internal/services/eventhub/validate/namespace_authorization_rule_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventhub/parse" +) + +func NamespaceAuthorizationRuleID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.NamespaceAuthorizationRuleID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/eventhub/validate/namespace_authorization_rule_id_test.go b/azurerm/internal/services/eventhub/validate/namespace_authorization_rule_id_test.go new file mode 100644 index 000000000000..23af8871caa6 --- /dev/null +++ b/azurerm/internal/services/eventhub/validate/namespace_authorization_rule_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestNamespaceAuthorizationRuleID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing NamespaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/", + Valid: false, + }, + + { + // missing value for NamespaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/", + Valid: false, + }, + + { + // missing 
AuthorizationRuleName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/namespace1/", + Valid: false, + }, + + { + // missing value for AuthorizationRuleName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/namespace1/authorizationRules/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/namespace1/authorizationRules/rule1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.EVENTHUB/NAMESPACES/NAMESPACE1/AUTHORIZATIONRULES/RULE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := NamespaceAuthorizationRuleID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/eventhub/validate/namespace_id.go b/azurerm/internal/services/eventhub/validate/namespace_id.go new file mode 100644 index 000000000000..9327d6e64aee --- /dev/null +++ b/azurerm/internal/services/eventhub/validate/namespace_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/eventhub/parse" +) + +func NamespaceID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.NamespaceID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/eventhub/validate/namespace_id_test.go b/azurerm/internal/services/eventhub/validate/namespace_id_test.go new file mode 100644 index 000000000000..d34a402c0be5 --- /dev/null +++ b/azurerm/internal/services/eventhub/validate/namespace_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestNamespaceID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.EventHub/namespaces/namespace1", + Valid: true, + }, + + { + // upper-cased + Input: 
"/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.EVENTHUB/NAMESPACES/NAMESPACE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := NamespaceID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/firewall/client/client.go b/azurerm/internal/services/firewall/client/client.go new file mode 100644 index 000000000000..88313394b00e --- /dev/null +++ b/azurerm/internal/services/firewall/client/client.go @@ -0,0 +1,29 @@ +package client + +import ( + "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/common" +) + +type Client struct { + AzureFirewallsClient *network.AzureFirewallsClient + FirewallPolicyClient *network.FirewallPoliciesClient + FirewallPolicyRuleGroupClient *network.FirewallPolicyRuleCollectionGroupsClient +} + +func NewClient(o *common.ClientOptions) *Client { + firewallsClient := network.NewAzureFirewallsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + o.ConfigureClient(&firewallsClient.Client, o.ResourceManagerAuthorizer) + + policyClient := network.NewFirewallPoliciesClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + o.ConfigureClient(&policyClient.Client, o.ResourceManagerAuthorizer) + + policyRuleGroupClient := network.NewFirewallPolicyRuleCollectionGroupsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + o.ConfigureClient(&policyRuleGroupClient.Client, o.ResourceManagerAuthorizer) + + return &Client{ + AzureFirewallsClient: &firewallsClient, + FirewallPolicyClient: &policyClient, + FirewallPolicyRuleGroupClient: &policyRuleGroupClient, + } +} diff --git a/azurerm/internal/services/network/firewall_application_rule_collection_resource.go b/azurerm/internal/services/firewall/firewall_application_rule_collection_resource.go similarity index 91% rename from azurerm/internal/services/network/firewall_application_rule_collection_resource.go rename to azurerm/internal/services/firewall/firewall_application_rule_collection_resource.go index 50f98de302ed..997b2c92c818 100644 --- a/azurerm/internal/services/network/firewall_application_rule_collection_resource.go +++ b/azurerm/internal/services/firewall/firewall_application_rule_collection_resource.go @@ -1,4 +1,4 @@ -package network +package firewall import ( "fmt" @@ -8,23 +8,23 @@ import ( "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" + firewallValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/firewall/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/set" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) 
-func resourceArmFirewallApplicationRuleCollection() *schema.Resource { +func resourceFirewallApplicationRuleCollection() *schema.Resource { return &schema.Resource{ - Create: resourceArmFirewallApplicationRuleCollectionCreateUpdate, - Read: resourceArmFirewallApplicationRuleCollectionRead, - Update: resourceArmFirewallApplicationRuleCollectionCreateUpdate, - Delete: resourceArmFirewallApplicationRuleCollectionDelete, + Create: resourceFirewallApplicationRuleCollectionCreateUpdate, + Read: resourceFirewallApplicationRuleCollectionRead, + Update: resourceFirewallApplicationRuleCollectionCreateUpdate, + Delete: resourceFirewallApplicationRuleCollectionDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -41,14 +41,14 @@ func resourceArmFirewallApplicationRuleCollection() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: ValidateAzureFirewallName, + ValidateFunc: firewallValidate.FirewallName, }, "azure_firewall_name": { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: ValidateAzureFirewallName, + ValidateFunc: firewallValidate.FirewallName, }, "resource_group_name": azure.SchemaResourceGroupName(), @@ -137,15 +137,15 @@ func resourceArmFirewallApplicationRuleCollection() *schema.Resource { } } -func resourceArmFirewallApplicationRuleCollectionCreateUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.AzureFirewallsClient +func resourceFirewallApplicationRuleCollectionCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Firewall.AzureFirewallsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() name := d.Get("name").(string) firewallName := d.Get("azure_firewall_name").(string) resourceGroup := d.Get("resource_group_name").(string) - applicationRules, err := expandArmFirewallApplicationRules(d.Get("rule").([]interface{})) + applicationRules, err := expandFirewallApplicationRules(d.Get("rule").([]interface{})) if err != nil { return fmt.Errorf("expanding Firewall Application Rules: %+v", err) } @@ -245,11 +245,11 @@ func resourceArmFirewallApplicationRuleCollectionCreateUpdate(d *schema.Resource } d.SetId(collectionID) - return resourceArmFirewallApplicationRuleCollectionRead(d, meta) + return resourceFirewallApplicationRuleCollectionRead(d, meta) } -func resourceArmFirewallApplicationRuleCollectionRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.AzureFirewallsClient +func resourceFirewallApplicationRuleCollectionRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Firewall.AzureFirewallsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -321,8 +321,8 @@ func resourceArmFirewallApplicationRuleCollectionRead(d *schema.ResourceData, me return nil } -func resourceArmFirewallApplicationRuleCollectionDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.AzureFirewallsClient +func resourceFirewallApplicationRuleCollectionDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Firewall.AzureFirewallsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() @@ -380,7 +380,7 @@ func resourceArmFirewallApplicationRuleCollectionDelete(d *schema.ResourceData, return nil } -func expandArmFirewallApplicationRules(inputs 
[]interface{}) (*[]network.AzureFirewallApplicationRule, error) { +func expandFirewallApplicationRules(inputs []interface{}) (*[]network.AzureFirewallApplicationRule, error) { outputs := make([]network.AzureFirewallApplicationRule, 0) for _, input := range inputs { diff --git a/azurerm/internal/services/firewall/firewall_application_rule_collection_resource_test.go b/azurerm/internal/services/firewall/firewall_application_rule_collection_resource_test.go new file mode 100644 index 000000000000..4f3be60772e9 --- /dev/null +++ b/azurerm/internal/services/firewall/firewall_application_rule_collection_resource_test.go @@ -0,0 +1,886 @@ +package firewall_test + +import ( + "context" + "fmt" + "regexp" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" + + "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" +) + +type FirewallApplicationRuleCollectionResource struct { +} + +func TestAccFirewallApplicationRuleCollection_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_application_rule_collection", "test") + r := FirewallApplicationRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestarc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + check.That(data.ResourceName).Key("rule.0.name").HasValue("rule1"), + check.That(data.ResourceName).Key("rule.0.source_addresses.#").HasValue("1"), + check.That(data.ResourceName).Key("rule.0.target_fqdns.#").HasValue("1"), + check.That(data.ResourceName).Key("rule.0.protocol.#").HasValue("1"), + check.That(data.ResourceName).Key("rule.0.protocol.0.port").HasValue("443"), + check.That(data.ResourceName).Key("rule.0.protocol.0.type").HasValue("Https"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFirewallApplicationRuleCollection_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_application_rule_collection", "test") + r := FirewallApplicationRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_firewall_application_rule_collection"), + }, + }) +} + +func TestAccFirewallApplicationRuleCollection_updatedName(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_application_rule_collection", "test") + r := FirewallApplicationRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + 
check.That(data.ResourceName).Key("name").HasValue("acctestarc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + check.That(data.ResourceName).Key("rule.0.name").HasValue("rule1"), + ), + }, + { + Config: r.updatedName(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestarc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + check.That(data.ResourceName).Key("rule.0.name").HasValue("rule2"), + ), + }, + }) +} + +func TestAccFirewallApplicationRuleCollection_multipleRuleCollections(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_application_rule_collection", "test") + data2 := acceptance.BuildTestData(t, "azurerm_firewall_application_rule_collection", "acctestarc_add") + r := FirewallApplicationRuleCollectionResource{} + + secondRule := "azurerm_firewall_application_rule_collection.test_add" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestarc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + ), + }, + { + Config: r.multiple(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestarc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + check.That(secondRule).ExistsInAzure(r), + resource.TestCheckResourceAttr(secondRule, "name", "acctestarc_add"), + resource.TestCheckResourceAttr(secondRule, "priority", "200"), + resource.TestCheckResourceAttr(secondRule, "action", "Deny"), + resource.TestCheckResourceAttr(secondRule, "rule.#", "1"), + ), + }, + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestarc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + data2.CheckWithClient(r.doesNotExist), + ), + }, + }) +} + +func TestAccFirewallApplicationRuleCollection_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_application_rule_collection", "test") + r := FirewallApplicationRuleCollectionResource{} + secondResourceName := "azurerm_firewall_application_rule_collection.test_add" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.multiple(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestarc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + 
check.That(secondResourceName).ExistsInAzure(r), + check.That(secondResourceName).Key("name").HasValue("acctestarc_add"), + check.That(secondResourceName).Key("priority").HasValue("200"), + check.That(secondResourceName).Key("action").HasValue("Deny"), + check.That(secondResourceName).Key("rule.#").HasValue("1"), + ), + }, + { + Config: r.multipleUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestarc"), + check.That(data.ResourceName).Key("priority").HasValue("300"), + check.That(data.ResourceName).Key("action").HasValue("Deny"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + check.That(secondResourceName).ExistsInAzure(r), + check.That(secondResourceName).Key("name").HasValue("acctestarc_add"), + check.That(secondResourceName).Key("priority").HasValue("400"), + check.That(secondResourceName).Key("action").HasValue("Allow"), + check.That(secondResourceName).Key("rule.#").HasValue("1"), + ), + }, + }) +} + +func TestAccFirewallApplicationRuleCollection_disappears(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_application_rule_collection", "test") + r := FirewallApplicationRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestarc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + data.CheckWithClient(r.disappears), + ), + ExpectNonEmptyPlan: true, + }, + }) +} + +func TestAccFirewallApplicationRuleCollection_multipleRules(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_application_rule_collection", "test") + r := FirewallApplicationRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestarc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + ), + }, + { + Config: r.multipleRules(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestarc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("2"), + ), + }, + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestarc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + ), + }, + }) +} + +func TestAccFirewallApplicationRuleCollection_multipleProtocols(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_application_rule_collection", "test") + r := FirewallApplicationRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.multipleProtocols(data), + 
Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestarc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + check.That(data.ResourceName).Key("rule.0.protocol.#").HasValue("2"), + check.That(data.ResourceName).Key("rule.0.protocol.0.port").HasValue("8000"), + check.That(data.ResourceName).Key("rule.0.protocol.0.type").HasValue("Http"), + check.That(data.ResourceName).Key("rule.0.protocol.1.port").HasValue("8001"), + check.That(data.ResourceName).Key("rule.0.protocol.1.type").HasValue("Https"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFirewallApplicationRuleCollection_updateProtocols(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_application_rule_collection", "test") + r := FirewallApplicationRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.multipleProtocols(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestarc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + check.That(data.ResourceName).Key("rule.0.protocol.#").HasValue("2"), + check.That(data.ResourceName).Key("rule.0.protocol.0.port").HasValue("8000"), + check.That(data.ResourceName).Key("rule.0.protocol.0.type").HasValue("Http"), + check.That(data.ResourceName).Key("rule.0.protocol.1.port").HasValue("8001"), + check.That(data.ResourceName).Key("rule.0.protocol.1.type").HasValue("Https"), + ), + }, + { + Config: r.multipleProtocolsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestarc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + check.That(data.ResourceName).Key("rule.0.protocol.#").HasValue("2"), + check.That(data.ResourceName).Key("rule.0.protocol.0.port").HasValue("9000"), + check.That(data.ResourceName).Key("rule.0.protocol.0.type").HasValue("Https"), + check.That(data.ResourceName).Key("rule.0.protocol.1.port").HasValue("9001"), + check.That(data.ResourceName).Key("rule.0.protocol.1.type").HasValue("Http"), + ), + }, + { + Config: r.multipleProtocols(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestarc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + check.That(data.ResourceName).Key("rule.0.protocol.#").HasValue("2"), + check.That(data.ResourceName).Key("rule.0.protocol.0.port").HasValue("8000"), + check.That(data.ResourceName).Key("rule.0.protocol.0.type").HasValue("Http"), + check.That(data.ResourceName).Key("rule.0.protocol.1.port").HasValue("8001"), + check.That(data.ResourceName).Key("rule.0.protocol.1.type").HasValue("Https"), + ), + }, + }) +} + +func TestAccFirewallApplicationRuleCollection_updateFirewallTags(t *testing.T) { + data := 
acceptance.BuildTestData(t, "azurerm_firewall_application_rule_collection", "test") + r := FirewallApplicationRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestarc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + check.That(data.ResourceName).Key("rule.0.name").HasValue("rule1"), + ), + }, + { + Config: r.updateFirewallTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestarc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + check.That(data.ResourceName).Key("rule.0.name").HasValue("rule1"), + ), + }, + }) +} + +func TestAccFirewallApplicationRuleCollection_ipGroups(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_application_rule_collection", "test") + r := FirewallApplicationRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.ipGroups(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFirewallApplicationRuleCollection_noSource(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_application_rule_collection", "test") + r := FirewallApplicationRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.noSource(data), + ExpectError: regexp.MustCompile(fmt.Sprintf("at least one of %q and %q must be specified", "source_addresses", "source_ip_groups")), + }, + }) +} + +func (FirewallApplicationRuleCollectionResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + var id, err = azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + firewallName := id.Path["azureFirewalls"] + name := id.Path["applicationRuleCollections"] + + resp, err := clients.Firewall.AzureFirewallsClient.Get(ctx, id.ResourceGroup, firewallName) + if err != nil { + return nil, fmt.Errorf("retrieving Firewall Application Rule Collection %q (Firewall %q / Resource Group %q): %v", name, firewallName, id.ResourceGroup, err) + } + + if resp.AzureFirewallPropertiesFormat == nil || resp.AzureFirewallPropertiesFormat.ApplicationRuleCollections == nil { + return nil, fmt.Errorf("retrieving Firewall Application Rule Collection %q (Firewall %q / Resource Group %q): properties or collections was nil", name, firewallName, id.ResourceGroup) + } + + for _, rule := range *resp.AzureFirewallPropertiesFormat.ApplicationRuleCollections { + if rule.Name == nil { + continue + } + + if *rule.Name == name { + return utils.Bool(true), nil + } + } + return utils.Bool(false), nil +} + +func (t FirewallApplicationRuleCollectionResource) doesNotExist(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) error { + var id, err = azure.ParseAzureResourceID(state.ID) + if err != nil { + return err + } + + firewallName := id.Path["azureFirewalls"] + name := 
id.Path["applicationRuleCollections"] + + exists, err := t.Exists(ctx, clients, state) + if err != nil { + return err + } + + if *exists { + return fmt.Errorf("Firewall Application Rule Collection %q (Firewall %q / Resource Group %q): still exists", name, firewallName, id.ResourceGroup) + } + + return nil +} + +func (t FirewallApplicationRuleCollectionResource) disappears(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) error { + client := clients.Firewall.AzureFirewallsClient + var id, err = azure.ParseAzureResourceID(state.ID) + if err != nil { + return err + } + + firewallName := id.Path["azureFirewalls"] + name := id.Path["applicationRuleCollections"] + + resp, err := client.Get(ctx, id.ResourceGroup, firewallName) + if err != nil { + return fmt.Errorf("retrieving Firewall Application Rule Collection %q (Firewall %q / Resource Group %q): %v", name, firewallName, id.ResourceGroup, err) + } + + if resp.AzureFirewallPropertiesFormat == nil || resp.AzureFirewallPropertiesFormat.NatRuleCollections == nil { + return fmt.Errorf("retrieving Firewall Application Rule Collection %q (Firewall %q / Resource Group %q): properties or collections was nil", name, firewallName, id.ResourceGroup) + } + + rules := make([]network.AzureFirewallApplicationRuleCollection, 0) + for _, collection := range *resp.AzureFirewallPropertiesFormat.ApplicationRuleCollections { + if *collection.Name != name { + rules = append(rules, collection) + } + } + + resp.AzureFirewallPropertiesFormat.ApplicationRuleCollections = &rules + + future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, firewallName, resp) + if err != nil { + return fmt.Errorf("removing Firewall Application Rule Collection %q (Firewall %q / Resource Group %q): %v", name, firewallName, id.ResourceGroup, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for the removal of Firewall Application Rule Collection %q (Firewall %q / Resource Group %q): %v", name, firewallName, id.ResourceGroup, err) + } + + return FirewallApplicationRuleCollectionResource{}.doesNotExist(ctx, clients, state) +} + +func (FirewallApplicationRuleCollectionResource) basic(data acceptance.TestData) string { + template := FirewallResource{}.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_application_rule_collection" "test" { + name = "acctestarc" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 100 + action = "Allow" + + rule { + name = "rule1" + + source_addresses = [ + "10.0.0.0/16", + ] + + target_fqdns = [ + "*.google.com", + ] + + protocol { + port = 443 + type = "Https" + } + } +} +`, template) +} + +func (FirewallApplicationRuleCollectionResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_application_rule_collection" "import" { + name = azurerm_firewall_application_rule_collection.test.name + azure_firewall_name = azurerm_firewall_application_rule_collection.test.azure_firewall_name + resource_group_name = azurerm_firewall_application_rule_collection.test.resource_group_name + priority = 100 + action = "Allow" + + rule { + name = "rule1" + + source_addresses = [ + "10.0.0.0/16", + ] + + target_fqdns = [ + "*.google.com", + ] + + protocol { + port = 443 + type = "Https" + } + } +} +`, FirewallResource{}.basic(data)) +} + +func (FirewallApplicationRuleCollectionResource) updatedName(data acceptance.TestData) string { + return 
fmt.Sprintf(` +%s + +resource "azurerm_firewall_application_rule_collection" "test" { + name = "acctestarc" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 100 + action = "Allow" + + rule { + name = "rule2" + + source_addresses = [ + "10.0.0.0/16", + ] + + target_fqdns = [ + "*.google.com", + ] + + protocol { + port = 443 + type = "Https" + } + } +} +`, FirewallResource{}.basic(data)) +} + +func (FirewallApplicationRuleCollectionResource) multiple(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_application_rule_collection" "test" { + name = "acctestarc" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 100 + action = "Allow" + + rule { + name = "rule1" + + source_addresses = [ + "10.0.0.0/16", + ] + + target_fqdns = [ + "*.google.com", + ] + + protocol { + port = 443 + type = "Https" + } + } +} + +resource "azurerm_firewall_application_rule_collection" "test_add" { + name = "acctestarc_add" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 200 + action = "Deny" + + rule { + name = "acctestruleadd" + + source_addresses = [ + "192.168.0.1", + ] + + target_fqdns = [ + "*.microsoft.com", + ] + + protocol { + port = 80 + type = "Http" + } + } +} +`, FirewallResource{}.basic(data)) +} + +func (FirewallApplicationRuleCollectionResource) multipleUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_application_rule_collection" "test" { + name = "acctestarc" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 300 + action = "Deny" + + rule { + name = "rule1" + + source_addresses = [ + "10.0.0.0/16", + ] + + target_fqdns = [ + "*.google.com", + ] + + protocol { + port = 443 + type = "Https" + } + } +} + +resource "azurerm_firewall_application_rule_collection" "test_add" { + name = "acctestarc_add" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 400 + action = "Allow" + + rule { + name = "acctestruleadd" + + source_addresses = [ + "192.168.0.1", + ] + + target_fqdns = [ + "*.microsoft.com", + ] + + protocol { + port = 80 + type = "Http" + } + } +} +`, FirewallResource{}.basic(data)) +} + +func (FirewallApplicationRuleCollectionResource) multipleRules(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_application_rule_collection" "test" { + name = "acctestarc" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 100 + action = "Allow" + + rule { + name = "rule1" + + source_addresses = [ + "10.0.0.0/16", + ] + + target_fqdns = [ + "*.google.com", + ] + + protocol { + port = 443 + type = "Https" + } + } + + rule { + name = "acctestruleadd" + + source_addresses = [ + "192.168.0.1", + ] + + target_fqdns = [ + "*.microsoft.com", + ] + + protocol { + port = 80 + type = "Http" + } + } +} +`, FirewallResource{}.basic(data)) +} + +func (FirewallApplicationRuleCollectionResource) multipleProtocols(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_application_rule_collection" "test" { + name = "acctestarc" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 
100 + action = "Allow" + + rule { + name = "rule1" + + source_addresses = [ + "10.0.0.0/16", + ] + + target_fqdns = [ + "*.google.com", + ] + + protocol { + port = 8000 + type = "Http" + } + + protocol { + port = 8001 + type = "Https" + } + } +} +`, FirewallResource{}.basic(data)) +} + +func (FirewallApplicationRuleCollectionResource) multipleProtocolsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_application_rule_collection" "test" { + name = "acctestarc" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 100 + action = "Allow" + + rule { + name = "rule1" + + source_addresses = [ + "10.0.0.0/16", + ] + + target_fqdns = [ + "*.google.com", + ] + + protocol { + port = 9000 + type = "Https" + } + + protocol { + port = 9001 + type = "Http" + } + } +} +`, FirewallResource{}.basic(data)) +} + +func (FirewallApplicationRuleCollectionResource) updateFirewallTags(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_application_rule_collection" "test" { + name = "acctestarc" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 100 + action = "Allow" + + rule { + name = "rule1" + + source_addresses = [ + "10.0.0.0/16", + ] + + target_fqdns = [ + "*.google.com", + ] + + protocol { + port = 443 + type = "Https" + } + } +} +`, FirewallResource{}.withTags(data)) +} + +func (FirewallApplicationRuleCollectionResource) ipGroups(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_ip_group" "test" { + name = "acctestIpGroupForFirewallAppRules" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + cidrs = ["192.168.0.0/25", "192.168.0.192/26"] +} + +resource "azurerm_firewall_application_rule_collection" "test" { + name = "acctestarc" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 100 + action = "Allow" + + rule { + name = "rule1" + + source_ip_groups = [ + azurerm_ip_group.test.id, + ] + + target_fqdns = [ + "*.google.com", + ] + + protocol { + port = 443 + type = "Https" + } + } +} +`, FirewallResource{}.basic(data)) +} + +func (FirewallApplicationRuleCollectionResource) noSource(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_application_rule_collection" "test" { + name = "acctestarc" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 100 + action = "Allow" + + rule { + name = "rule1" + + target_fqdns = [ + "*.google.com", + ] + + protocol { + port = 443 + type = "Https" + } + } +} +`, FirewallResource{}.basic(data)) +} diff --git a/azurerm/internal/services/firewall/firewall_data_source.go b/azurerm/internal/services/firewall/firewall_data_source.go new file mode 100644 index 000000000000..b8ba01fc200a --- /dev/null +++ b/azurerm/internal/services/firewall/firewall_data_source.go @@ -0,0 +1,209 @@ +package firewall + +import ( + "fmt" + "time" + + "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/firewall/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func FirewallDataSource() *schema.Resource { + return &schema.Resource{ + Read: FirewallDataSourceRead, + + Timeouts: &schema.ResourceTimeout{ + Read: schema.DefaultTimeout(5 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.FirewallName, + }, + + "location": azure.SchemaLocationForDataSource(), + + "resource_group_name": azure.SchemaResourceGroupNameForDataSource(), + + "sku_name": { + Type: schema.TypeString, + Computed: true, + }, + + "sku_tier": { + Type: schema.TypeString, + Computed: true, + }, + + "firewall_policy_id": { + Type: schema.TypeString, + Computed: true, + }, + + "ip_configuration": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Computed: true, + }, + "subnet_id": { + Type: schema.TypeString, + Computed: true, + }, + "public_ip_address_id": { + Type: schema.TypeString, + Computed: true, + }, + "private_ip_address": { + Type: schema.TypeString, + Computed: true, + }, + }, + }, + }, + + "management_ip_configuration": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Computed: true, + }, + "subnet_id": { + Type: schema.TypeString, + Computed: true, + }, + "public_ip_address_id": { + Type: schema.TypeString, + Computed: true, + }, + "private_ip_address": { + Type: schema.TypeString, + Computed: true, + }, + }, + }, + }, + + "threat_intel_mode": { + Type: schema.TypeString, + Computed: true, + }, + + "dns_servers": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + + "virtual_hub": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "virtual_hub_id": { + Type: schema.TypeString, + Computed: true, + }, + "public_ip_count": { + Type: schema.TypeInt, + Computed: true, + }, + "public_ip_addresses": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + "private_ip_address": { + Type: schema.TypeString, + Computed: true, + }, + }, + }, + }, + + "zones": azure.SchemaZonesComputed(), + + "tags": tags.SchemaDataSource(), + }, + } +} + +func FirewallDataSourceRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Firewall.AzureFirewallsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + + read, err := client.Get(ctx, resourceGroup, name) + if err != nil { + if utils.ResponseWasNotFound(read.Response) { + return fmt.Errorf("Firewall %q was not found in Resource Group %q", name, resourceGroup) + } + + return fmt.Errorf("Error making Read request on Azure Firewall %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + d.SetId(*read.ID) + d.Set("name", read.Name) + d.Set("resource_group_name", resourceGroup) + + if location := read.Location; location != nil { + d.Set("location", azure.NormalizeLocation(*location)) + } + 
+ if props := read.AzureFirewallPropertiesFormat; props != nil { + if err := d.Set("ip_configuration", flattenFirewallIPConfigurations(props.IPConfigurations)); err != nil { + return fmt.Errorf("Error setting `ip_configuration`: %+v", err) + } + managementIPConfigs := make([]interface{}, 0) + if props.ManagementIPConfiguration != nil { + managementIPConfigs = flattenFirewallIPConfigurations(&[]network.AzureFirewallIPConfiguration{ + *props.ManagementIPConfiguration, + }) + } + if err := d.Set("management_ip_configuration", managementIPConfigs); err != nil { + return fmt.Errorf("Error setting `management_ip_configuration`: %+v", err) + } + + d.Set("threat_intel_mode", string(props.ThreatIntelMode)) + + if err := d.Set("dns_servers", flattenFirewallDNSServers(props.AdditionalProperties)); err != nil { + return fmt.Errorf("Error setting `dns_servers`: %+v", err) + } + + if policy := props.FirewallPolicy; policy != nil { + d.Set("firewall_policy_id", policy.ID) + } + + if sku := props.Sku; sku != nil { + d.Set("sku_name", string(sku.Name)) + d.Set("sku_tier", string(sku.Tier)) + } + + if err := d.Set("virtual_hub", flattenFirewallVirtualHubSetting(props)); err != nil { + return fmt.Errorf("Error setting `virtual_hub`: %+v", err) + } + } + + if err := d.Set("zones", azure.FlattenZones(read.Zones)); err != nil { + return fmt.Errorf("Error setting `zones`: %+v", err) + } + + return tags.FlattenAndSet(d, read.Tags) +} diff --git a/azurerm/internal/services/firewall/firewall_data_source_test.go b/azurerm/internal/services/firewall/firewall_data_source_test.go new file mode 100644 index 000000000000..a7688f7e4806 --- /dev/null +++ b/azurerm/internal/services/firewall/firewall_data_source_test.go @@ -0,0 +1,187 @@ +package firewall_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type FirewallDataSource struct { +} + +func TestAccFirewallDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_firewall", "test") + r := FirewallDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("ip_configuration.0.name").HasValue("configuration"), + check.That(data.ResourceName).Key("ip_configuration.0.private_ip_address").Exists(), + ), + }, + }) +} + +func TestAccFirewallDataSource_enableDNS(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_firewall", "test") + r := FirewallDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.enableDNS(data, "1.1.1.1", "8.8.8.8"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("dns_servers.#").HasValue("2"), + check.That(data.ResourceName).Key("dns_servers.0").HasValue("1.1.1.1"), + check.That(data.ResourceName).Key("dns_servers.1").HasValue("8.8.8.8"), + ), + }, + }) +} + +func TestAccFirewallDataSource_withManagementIp(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_firewall", "test") + r := FirewallDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.withManagementIp(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("management_ip_configuration.0.name").HasValue("management_configuration"), + 
check.That(data.ResourceName).Key("management_ip_configuration.0.private_ip_address").Exists(), + check.That(data.ResourceName).Key("management_ip_configuration.0.subnet_id").Exists(), + check.That(data.ResourceName).Key("management_ip_configuration.0.public_ip_address_id").Exists(), + ), + }, + }) +} + +func TestAccFirewallDataSource_withFirewallPolicy(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_firewall", "test") + r := FirewallDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.withFirewallPolicy(data, "policy1"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("firewall_policy_id").Exists(), + ), + }, + }) +} + +func TestAccFirewallDataSource_inVirtualhub(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_firewall", "test") + r := FirewallDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.inVirtualHub(data, 2), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("virtual_hub.0.virtual_hub_id").Exists(), + check.That(data.ResourceName).Key("virtual_hub.0.public_ip_count").HasValue("2"), + check.That(data.ResourceName).Key("virtual_hub.0.public_ip_addresses.#").HasValue("2"), + ), + }, + }) +} + +func (FirewallDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "AzureFirewallSubnet" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.1.0/24" +} + +resource "azurerm_public_ip" "test" { + name = "acctestpip%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" + sku = "Standard" +} + +resource "azurerm_firewall" "test" { + name = "acctestfirewall%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + ip_configuration { + name = "configuration" + subnet_id = azurerm_subnet.test.id + public_ip_address_id = azurerm_public_ip.test.id + } +} + +data "azurerm_firewall" "test" { + name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (FirewallDataSource) enableDNS(data acceptance.TestData, dnsServers ...string) string { + return fmt.Sprintf(` +%s + +data "azurerm_firewall" "test" { + name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, FirewallResource{}.enableDNS(data, dnsServers...)) +} + +func (FirewallDataSource) withManagementIp(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_firewall" "test" { + name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, FirewallResource{}.withManagementIp(data)) +} + +func (FirewallDataSource) withFirewallPolicy(data acceptance.TestData, policyName string) string { + return fmt.Sprintf(` +%s + +data "azurerm_firewall" "test" { + name = azurerm_firewall.test.name + resource_group_name = 
azurerm_resource_group.test.name +} +`, FirewallResource{}.withFirewallPolicy(data, policyName)) +} + +func (FirewallDataSource) inVirtualHub(data acceptance.TestData, pipCount int) string { + return fmt.Sprintf(` +%s + +data "azurerm_firewall" "test" { + name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, FirewallResource{}.inVirtualHub(data, pipCount)) +} diff --git a/azurerm/internal/services/network/firewall_nat_rule_collection_resource.go b/azurerm/internal/services/firewall/firewall_nat_rule_collection_resource.go similarity index 92% rename from azurerm/internal/services/network/firewall_nat_rule_collection_resource.go rename to azurerm/internal/services/firewall/firewall_nat_rule_collection_resource.go index 0c2d929d1692..983874568984 100644 --- a/azurerm/internal/services/network/firewall_nat_rule_collection_resource.go +++ b/azurerm/internal/services/firewall/firewall_nat_rule_collection_resource.go @@ -1,4 +1,4 @@ -package network +package firewall import ( "fmt" @@ -8,22 +8,22 @@ import ( "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/firewall/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/set" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmFirewallNatRuleCollection() *schema.Resource { +func resourceFirewallNatRuleCollection() *schema.Resource { return &schema.Resource{ - Create: resourceArmFirewallNatRuleCollectionCreateUpdate, - Read: resourceArmFirewallNatRuleCollectionRead, - Update: resourceArmFirewallNatRuleCollectionCreateUpdate, - Delete: resourceArmFirewallNatRuleCollectionDelete, + Create: resourceFirewallNatRuleCollectionCreateUpdate, + Read: resourceFirewallNatRuleCollectionRead, + Update: resourceFirewallNatRuleCollectionCreateUpdate, + Delete: resourceFirewallNatRuleCollectionDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -40,14 +40,14 @@ func resourceArmFirewallNatRuleCollection() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: ValidateAzureFirewallName, + ValidateFunc: validate.FirewallName, }, "azure_firewall_name": { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: ValidateAzureFirewallName, + ValidateFunc: validate.FirewallName, }, "resource_group_name": azure.SchemaResourceGroupName(), @@ -135,8 +135,8 @@ func resourceArmFirewallNatRuleCollection() *schema.Resource { } } -func resourceArmFirewallNatRuleCollectionCreateUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.AzureFirewallsClient +func resourceFirewallNatRuleCollectionCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Firewall.AzureFirewallsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) 
defer cancel() @@ -162,7 +162,7 @@ func resourceArmFirewallNatRuleCollectionCreateUpdate(d *schema.ResourceData, me } ruleCollections := *props.NatRuleCollections - natRules, err := expandArmFirewallNatRules(d.Get("rule").(*schema.Set)) + natRules, err := expandFirewallNatRules(d.Get("rule").(*schema.Set)) if err != nil { return fmt.Errorf("expanding Firewall NAT Rules: %+v", err) } @@ -244,11 +244,11 @@ func resourceArmFirewallNatRuleCollectionCreateUpdate(d *schema.ResourceData, me } d.SetId(collectionID) - return resourceArmFirewallNatRuleCollectionRead(d, meta) + return resourceFirewallNatRuleCollectionRead(d, meta) } -func resourceArmFirewallNatRuleCollectionRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.AzureFirewallsClient +func resourceFirewallNatRuleCollectionRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Firewall.AzureFirewallsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -320,8 +320,8 @@ func resourceArmFirewallNatRuleCollectionRead(d *schema.ResourceData, meta inter return nil } -func resourceArmFirewallNatRuleCollectionDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.AzureFirewallsClient +func resourceFirewallNatRuleCollectionDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Firewall.AzureFirewallsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() @@ -379,7 +379,7 @@ func resourceArmFirewallNatRuleCollectionDelete(d *schema.ResourceData, meta int return nil } -func expandArmFirewallNatRules(input *schema.Set) (*[]network.AzureFirewallNatRule, error) { +func expandFirewallNatRules(input *schema.Set) (*[]network.AzureFirewallNatRule, error) { nwRules := input.List() rules := make([]network.AzureFirewallNatRule, 0) diff --git a/azurerm/internal/services/firewall/firewall_nat_rule_collection_resource_test.go b/azurerm/internal/services/firewall/firewall_nat_rule_collection_resource_test.go new file mode 100644 index 000000000000..f4cf28ee8b24 --- /dev/null +++ b/azurerm/internal/services/firewall/firewall_nat_rule_collection_resource_test.go @@ -0,0 +1,738 @@ +package firewall_test + +import ( + "context" + "fmt" + "regexp" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type FirewallNatRuleCollectionResource struct { +} + +func TestAccFirewallNatRuleCollection_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_nat_rule_collection", "test") + r := FirewallNatRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFirewallNatRuleCollection_requiresImport(t *testing.T) { + data := 
acceptance.BuildTestData(t, "azurerm_firewall_nat_rule_collection", "test") + r := FirewallNatRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_firewall_nat_rule_collection"), + }, + }) +} + +func TestAccFirewallNatRuleCollection_updatedName(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_nat_rule_collection", "test") + r := FirewallNatRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.updatedName(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFirewallNatRuleCollection_multipleRuleCollections(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_nat_rule_collection", "test") + data2 := acceptance.BuildTestData(t, "azurerm_firewall_nat_rule_collection", "test_add") + r := FirewallNatRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.multiple(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data2.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + data2.CheckWithClient(r.doesNotExist), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFirewallNatRuleCollection_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_nat_rule_collection", "test") + r := FirewallNatRuleCollectionResource{} + secondResourceName := "azurerm_firewall_nat_rule_collection.test_add" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.multiple(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(secondResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.multipleUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(secondResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFirewallNatRuleCollection_disappears(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_nat_rule_collection", "test") + r := FirewallNatRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + data.CheckWithClient(r.disappears), + ), + ExpectNonEmptyPlan: true, + }, + }) +} + +func TestAccFirewallNatRuleCollection_multipleRules(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_nat_rule_collection", "test") + r := FirewallNatRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.multipleRules(data), + Check: 
resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFirewallNatRuleCollection_updateFirewallTags(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_nat_rule_collection", "test") + r := FirewallNatRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.updateFirewallTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFirewallNatRuleCollection_ipGroup(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_nat_rule_collection", "test") + r := FirewallNatRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.ipGroup(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFirewallNatRuleCollection_noSource(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_nat_rule_collection", "test") + r := FirewallNatRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.noSource(data), + ExpectError: regexp.MustCompile(fmt.Sprintf("at least one of %q and %q must be specified", "source_addresses", "source_ip_groups")), + }, + }) +} + +func (FirewallNatRuleCollectionResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + var id, err = azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + firewallName := id.Path["azureFirewalls"] + name := id.Path["natRuleCollections"] + + resp, err := clients.Firewall.AzureFirewallsClient.Get(ctx, id.ResourceGroup, firewallName) + if err != nil { + return nil, fmt.Errorf("retrieving Firewall Nat Rule Collection %q (Firewall %q / Resource Group %q): %v", name, firewallName, id.ResourceGroup, err) + } + + if resp.AzureFirewallPropertiesFormat == nil || resp.AzureFirewallPropertiesFormat.NatRuleCollections == nil { + return nil, fmt.Errorf("retrieving Firewall Nat Rule Collection %q (Firewall %q / Resource Group %q): properties or collections was nil", name, firewallName, id.ResourceGroup) + } + + for _, rule := range *resp.AzureFirewallPropertiesFormat.NatRuleCollections { + if rule.Name == nil { + continue + } + + if *rule.Name == name { + return utils.Bool(true), nil + } + } + return utils.Bool(false), nil +} + +func (t FirewallNatRuleCollectionResource) doesNotExist(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) error { + var id, err = azure.ParseAzureResourceID(state.ID) + if err != nil { + return err + } + + firewallName := id.Path["azureFirewalls"] + name := id.Path["natRuleCollections"] + + exists, err := t.Exists(ctx, clients, state) + if err != nil { + return err + } + + if *exists { + return fmt.Errorf("Firewall Nat Rule Collection %q (Firewall %q / Resource Group %q): still exists", name, firewallName, id.ResourceGroup) + } + + return nil +} + +func (t FirewallNatRuleCollectionResource) disappears(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) error { + client := 
clients.Firewall.AzureFirewallsClient + var id, err = azure.ParseAzureResourceID(state.ID) + if err != nil { + return err + } + + firewallName := id.Path["azureFirewalls"] + name := id.Path["natRuleCollections"] + + resp, err := client.Get(ctx, id.ResourceGroup, firewallName) + if err != nil { + return fmt.Errorf("retrieving Firewall Nat Rule Collection %q (Firewall %q / Resource Group %q): %v", name, firewallName, id.ResourceGroup, err) + } + + if resp.AzureFirewallPropertiesFormat == nil || resp.AzureFirewallPropertiesFormat.NatRuleCollections == nil { + return fmt.Errorf("retrieving Firewall Nat Rule Collection %q (Firewall %q / Resource Group %q): properties or collections was nil", name, firewallName, id.ResourceGroup) + } + + rules := make([]network.AzureFirewallNatRuleCollection, 0) + for _, collection := range *resp.AzureFirewallPropertiesFormat.NatRuleCollections { + if *collection.Name != name { + rules = append(rules, collection) + } + } + + resp.AzureFirewallPropertiesFormat.NatRuleCollections = &rules + + future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, firewallName, resp) + if err != nil { + return fmt.Errorf("removing Firewall Nat Rule Collection %q (Firewall %q / Resource Group %q): %v", name, firewallName, id.ResourceGroup, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for the removal of Firewall Nat Rule Collection %q (Firewall %q / Resource Group %q): %v", name, firewallName, id.ResourceGroup, err) + } + + return FirewallNatRuleCollectionResource{}.doesNotExist(ctx, clients, state) +} + +func (FirewallNatRuleCollectionResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_nat_rule_collection" "test" { + name = "acctestnrc-%d" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 100 + action = "Dnat" + + rule { + name = "rule1" + + source_addresses = [ + "10.0.0.0/16", + ] + + destination_ports = [ + "53", + ] + + destination_addresses = [ + azurerm_public_ip.test.ip_address, + ] + + protocols = [ + "Any", + ] + + translated_port = 53 + translated_address = "8.8.8.8" + } +} +`, FirewallResource{}.basic(data), data.RandomInteger) +} + +func (r FirewallNatRuleCollectionResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_nat_rule_collection" "import" { + name = azurerm_firewall_nat_rule_collection.test.name + azure_firewall_name = azurerm_firewall_nat_rule_collection.test.azure_firewall_name + resource_group_name = azurerm_firewall_nat_rule_collection.test.resource_group_name + priority = 100 + action = "Dnat" + + rule { + name = "rule1" + + source_addresses = [ + "10.0.0.0/16", + ] + + destination_ports = [ + "53", + ] + + destination_addresses = [ + azurerm_public_ip.test.ip_address, + ] + + protocols = [ + "Any", + ] + + translated_port = 53 + translated_address = "8.8.8.8" + } +} +`, r.basic(data)) +} + +func (FirewallNatRuleCollectionResource) updatedName(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_nat_rule_collection" "test" { + name = "acctestnrc-%d" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 100 + action = "Dnat" + + rule { + name = "rule2" + + source_addresses = [ + "10.0.0.0/16", + ] + + destination_ports = [ + "53", + ] + + destination_addresses = [ + 
azurerm_public_ip.test.ip_address, + ] + + protocols = [ + "TCP", + ] + + translated_port = 53 + translated_address = "8.8.8.8" + } +} +`, FirewallResource{}.basic(data), data.RandomInteger) +} + +func (FirewallNatRuleCollectionResource) multiple(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_nat_rule_collection" "test" { + name = "acctestnrc-%d" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 100 + action = "Dnat" + + rule { + name = "acctestrule" + + source_addresses = [ + "10.0.0.0/16", + ] + + destination_ports = [ + "53", + ] + + destination_addresses = [ + azurerm_public_ip.test.ip_address, + ] + + protocols = [ + "TCP", + ] + + translated_port = 53 + translated_address = "8.8.8.8" + } +} + +resource "azurerm_firewall_nat_rule_collection" "test_add" { + name = "acctestnrc_add-%d" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 200 + action = "Dnat" + + rule { + name = "acctestruleadd" + + source_addresses = [ + "10.0.0.0/8", + ] + + destination_ports = [ + "8080", + ] + + destination_addresses = [ + azurerm_public_ip.test.ip_address, + ] + + protocols = [ + "TCP", + ] + + translated_port = 8080 + translated_address = "8.8.4.4" + } +} +`, FirewallResource{}.basic(data), data.RandomInteger, data.RandomInteger) +} + +func (FirewallNatRuleCollectionResource) multipleUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_nat_rule_collection" "test" { + name = "acctestnrc-%d" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 300 + action = "Dnat" + + rule { + name = "acctestrule" + + source_addresses = [ + "10.0.0.0/16", + ] + + destination_ports = [ + "53", + ] + + destination_addresses = [ + azurerm_public_ip.test.ip_address, + ] + + protocols = [ + "TCP", + ] + + translated_port = 53 + translated_address = "10.0.0.1" + } +} + +resource "azurerm_firewall_nat_rule_collection" "test_add" { + name = "acctestnrc_add-%d" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 400 + action = "Dnat" + + rule { + name = "acctestruleadd" + + source_addresses = [ + "10.0.0.0/8", + ] + + destination_ports = [ + "8080", + ] + + destination_addresses = [ + azurerm_public_ip.test.ip_address, + ] + + protocols = [ + "TCP", + ] + + translated_port = 8080 + translated_address = "10.0.0.1" + } +} +`, FirewallResource{}.basic(data), data.RandomInteger, data.RandomInteger) +} + +func (FirewallNatRuleCollectionResource) multipleRules(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_nat_rule_collection" "test" { + name = "acctestnrc-%d" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 100 + action = "Dnat" + + rule { + name = "acctestrule" + + source_addresses = [ + "10.0.0.0/16", + ] + + destination_ports = [ + "53", + ] + + destination_addresses = [ + azurerm_public_ip.test.ip_address, + ] + + protocols = [ + "TCP", + ] + + translated_port = 53 + translated_address = "10.0.0.1" + } + + rule { + name = "acctestrule_add" + + source_addresses = [ + "192.168.0.1", + ] + + destination_ports = [ + "8888", + ] + + destination_addresses = [ + azurerm_public_ip.test.ip_address, + ] + + protocols = [ + "TCP", + ] + + translated_port = 8888 
+ translated_address = "192.168.0.1" + } +} +`, FirewallResource{}.basic(data), data.RandomInteger) +} + +func (FirewallNatRuleCollectionResource) updateFirewallTags(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_nat_rule_collection" "test" { + name = "acctestnrc-%d" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 100 + action = "Dnat" + + rule { + name = "rule1" + + source_addresses = [ + "10.0.0.0/16", + ] + + destination_ports = [ + "53", + ] + + destination_addresses = [ + azurerm_public_ip.test.ip_address, + ] + + protocols = [ + "TCP", + ] + + translated_port = 53 + translated_address = "10.0.0.1" + } +} +`, FirewallResource{}.withTags(data), data.RandomInteger) +} + +func (FirewallNatRuleCollectionResource) ipGroup(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_ip_group" "test" { + name = "acctestIpGroupForFirewallNatRules" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + cidrs = ["192.168.0.0/25", "192.168.0.192/26"] +} + +resource "azurerm_firewall_nat_rule_collection" "test" { + name = "acctestnrc-%d" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 100 + action = "Dnat" + + rule { + name = "rule1" + + source_ip_groups = [ + azurerm_ip_group.test.id, + ] + + destination_ports = [ + "53", + ] + + destination_addresses = [ + azurerm_public_ip.test.ip_address, + ] + + protocols = [ + "Any", + ] + + translated_port = 53 + translated_address = "8.8.8.8" + } +} +`, FirewallResource{}.basic(data), data.RandomInteger) +} + +func (FirewallNatRuleCollectionResource) noSource(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_nat_rule_collection" "test" { + name = "acctestnrc-%d" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 100 + action = "Dnat" + + rule { + name = "rule1" + + destination_ports = [ + "53", + ] + + destination_addresses = [ + azurerm_public_ip.test.ip_address, + ] + + protocols = [ + "Any", + ] + + translated_port = 53 + translated_address = "8.8.8.8" + } +} +`, FirewallResource{}.basic(data), data.RandomInteger) +} diff --git a/azurerm/internal/services/network/firewall_network_rule_collection_resource.go b/azurerm/internal/services/firewall/firewall_network_rule_collection_resource.go similarity index 82% rename from azurerm/internal/services/network/firewall_network_rule_collection_resource.go rename to azurerm/internal/services/firewall/firewall_network_rule_collection_resource.go index 74e8d22cb6cd..1bdd253970a9 100644 --- a/azurerm/internal/services/network/firewall_network_rule_collection_resource.go +++ b/azurerm/internal/services/firewall/firewall_network_rule_collection_resource.go @@ -1,4 +1,4 @@ -package network +package firewall import ( "fmt" @@ -8,22 +8,22 @@ import ( "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/firewall/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/set" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmFirewallNetworkRuleCollection() *schema.Resource { +func resourceFirewallNetworkRuleCollection() *schema.Resource { return &schema.Resource{ - Create: resourceArmFirewallNetworkRuleCollectionCreateUpdate, - Read: resourceArmFirewallNetworkRuleCollectionRead, - Update: resourceArmFirewallNetworkRuleCollectionCreateUpdate, - Delete: resourceArmFirewallNetworkRuleCollectionDelete, + Create: resourceFirewallNetworkRuleCollectionCreateUpdate, + Read: resourceFirewallNetworkRuleCollectionRead, + Update: resourceFirewallNetworkRuleCollectionCreateUpdate, + Delete: resourceFirewallNetworkRuleCollectionDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -40,14 +40,14 @@ func resourceArmFirewallNetworkRuleCollection() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: ValidateAzureFirewallName, + ValidateFunc: validate.FirewallName, }, "azure_firewall_name": { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: ValidateAzureFirewallName, + ValidateFunc: validate.FirewallName, }, "resource_group_name": azure.SchemaResourceGroupName(), @@ -112,6 +112,12 @@ func resourceArmFirewallNetworkRuleCollection() *schema.Resource { Elem: &schema.Schema{Type: schema.TypeString}, Set: schema.HashString, }, + "destination_fqdns": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Set: schema.HashString, + }, "protocols": { Type: schema.TypeSet, Required: true, @@ -133,8 +139,8 @@ func resourceArmFirewallNetworkRuleCollection() *schema.Resource { } } -func resourceArmFirewallNetworkRuleCollectionCreateUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.AzureFirewallsClient +func resourceFirewallNetworkRuleCollectionCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Firewall.AzureFirewallsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -160,7 +166,7 @@ func resourceArmFirewallNetworkRuleCollectionCreateUpdate(d *schema.ResourceData } ruleCollections := *props.NetworkRuleCollections - networkRules, err := expandArmFirewallNetworkRules(d.Get("rule").(*schema.Set)) + networkRules, err := expandFirewallNetworkRules(d.Get("rule").(*schema.Set)) if err != nil { return fmt.Errorf("expanding Firewall Network Rules: %+v", err) } @@ -243,11 +249,11 @@ func resourceArmFirewallNetworkRuleCollectionCreateUpdate(d *schema.ResourceData } d.SetId(collectionID) - return resourceArmFirewallNetworkRuleCollectionRead(d, meta) + return resourceFirewallNetworkRuleCollectionRead(d, meta) } -func resourceArmFirewallNetworkRuleCollectionRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.AzureFirewallsClient +func resourceFirewallNetworkRuleCollectionRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Firewall.AzureFirewallsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -319,8 
+325,8 @@ func resourceArmFirewallNetworkRuleCollectionRead(d *schema.ResourceData, meta i return nil } -func resourceArmFirewallNetworkRuleCollectionDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.AzureFirewallsClient +func resourceFirewallNetworkRuleCollectionDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Firewall.AzureFirewallsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() @@ -378,7 +384,7 @@ func resourceArmFirewallNetworkRuleCollectionDelete(d *schema.ResourceData, meta return nil } -func expandArmFirewallNetworkRules(input *schema.Set) (*[]network.AzureFirewallNetworkRule, error) { +func expandFirewallNetworkRules(input *schema.Set) (*[]network.AzureFirewallNetworkRule, error) { nwRules := input.List() rules := make([]network.AzureFirewallNetworkRule, 0) @@ -412,8 +418,13 @@ func expandArmFirewallNetworkRules(input *schema.Set) (*[]network.AzureFirewallN destinationIpGroups = append(destinationIpGroups, v.(string)) } - if len(destinationAddresses) == 0 && len(destinationIpGroups) == 0 { - return nil, fmt.Errorf("at least one of %q and %q must be specified for each rule", "destination_addresses", "destination_ip_groups") + destinationFqdns := make([]string, 0) + for _, v := range rule["destination_fqdns"].(*schema.Set).List() { + destinationFqdns = append(destinationFqdns, v.(string)) + } + + if len(destinationAddresses) == 0 && len(destinationIpGroups) == 0 && len(destinationFqdns) == 0 { + return nil, fmt.Errorf("at least one of %q, %q and %q must be specified for each rule", "destination_addresses", "destination_ip_groups", "destination_fqdns") } destinationPorts := make([]string, 0) @@ -429,6 +440,7 @@ func expandArmFirewallNetworkRules(input *schema.Set) (*[]network.AzureFirewallN DestinationAddresses: &destinationAddresses, DestinationIPGroups: &destinationIpGroups, DestinationPorts: &destinationPorts, + DestinationFqdns: &destinationFqdns, } nrProtocols := make([]network.AzureFirewallNetworkRuleProtocol, 0) @@ -451,27 +463,40 @@ func flattenFirewallNetworkRuleCollectionRules(rules *[]network.AzureFirewallNet } for _, rule := range *rules { - output := make(map[string]interface{}) + var ( + name string + description string + sourceAddresses *schema.Set + sourceIPGroups *schema.Set + destAddresses *schema.Set + destIPGroups *schema.Set + destPorts *schema.Set + destFqdns *schema.Set + ) + if rule.Name != nil { - output["name"] = *rule.Name + name = *rule.Name } if rule.Description != nil { - output["description"] = *rule.Description + description = *rule.Description } if rule.SourceAddresses != nil { - output["source_addresses"] = set.FromStringSlice(*rule.SourceAddresses) + sourceAddresses = set.FromStringSlice(*rule.SourceAddresses) } if rule.SourceIPGroups != nil { - output["source_ip_groups"] = set.FromStringSlice(*rule.SourceIPGroups) + sourceIPGroups = set.FromStringSlice(*rule.SourceIPGroups) } if rule.DestinationAddresses != nil { - output["destination_addresses"] = set.FromStringSlice(*rule.DestinationAddresses) + destAddresses = set.FromStringSlice(*rule.DestinationAddresses) } if rule.DestinationIPGroups != nil { - output["destination_ip_groups"] = set.FromStringSlice(*rule.DestinationIPGroups) + destIPGroups = set.FromStringSlice(*rule.DestinationIPGroups) } if rule.DestinationPorts != nil { - output["destination_ports"] = set.FromStringSlice(*rule.DestinationPorts) + destPorts = set.FromStringSlice(*rule.DestinationPorts) + 
} + if rule.DestinationFqdns != nil { + destFqdns = set.FromStringSlice(*rule.DestinationFqdns) } protocols := make([]string, 0) if rule.Protocols != nil { @@ -479,8 +504,17 @@ func flattenFirewallNetworkRuleCollectionRules(rules *[]network.AzureFirewallNet protocols = append(protocols, string(protocol)) } } - output["protocols"] = set.FromStringSlice(protocols) - outputs = append(outputs, output) + outputs = append(outputs, map[string]interface{}{ + "name": name, + "description": description, + "source_addresses": sourceAddresses, + "source_ip_groups": sourceIPGroups, + "destination_addresses": destAddresses, + "destination_ip_groups": destIPGroups, + "destination_ports": destPorts, + "destination_fqdns": destFqdns, + "protocols": set.FromStringSlice(protocols), + }) } return outputs } diff --git a/azurerm/internal/services/firewall/firewall_network_rule_collection_resource_test.go b/azurerm/internal/services/firewall/firewall_network_rule_collection_resource_test.go new file mode 100644 index 000000000000..d5ae0ca66101 --- /dev/null +++ b/azurerm/internal/services/firewall/firewall_network_rule_collection_resource_test.go @@ -0,0 +1,923 @@ +package firewall_test + +import ( + "context" + "fmt" + "regexp" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type FirewallNetworkRuleCollectionResource struct { +} + +func TestAccFirewallNetworkRuleCollection_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_network_rule_collection", "test") + r := FirewallNetworkRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestnrc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFirewallNetworkRuleCollection_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_network_rule_collection", "test") + r := FirewallNetworkRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_firewall_network_rule_collection"), + }, + }) +} + +func TestAccFirewallNetworkRuleCollection_updatedName(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_network_rule_collection", "test") + r := FirewallNetworkRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + 
check.That(data.ResourceName).Key("name").HasValue("acctestnrc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + check.That(data.ResourceName).Key("rule.3765122797.name").HasValue("rule1"), + ), + }, + { + Config: r.updatedName(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestnrc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + check.That(data.ResourceName).Key("rule.1700340761.name").HasValue("rule2"), + ), + }, + }) +} + +func TestAccFirewallNetworkRuleCollection_multipleRuleCollections(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_network_rule_collection", "test") + r := FirewallNetworkRuleCollectionResource{} + secondRule := "azurerm_firewall_network_rule_collection.test_add" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestnrc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + ), + }, + { + Config: r.multiple(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestnrc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + check.That(secondRule).ExistsInAzure(r), + resource.TestCheckResourceAttr(secondRule, "name", "acctestnrc_add"), + resource.TestCheckResourceAttr(secondRule, "priority", "200"), + resource.TestCheckResourceAttr(secondRule, "action", "Deny"), + resource.TestCheckResourceAttr(secondRule, "rule.#", "1"), + ), + }, + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestnrc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + testCheckFirewallNetworkRuleCollectionDoesNotExist("azurerm_firewall.test", "acctestnrc_add"), + ), + }, + }) +} + +func TestAccFirewallNetworkRuleCollection_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_network_rule_collection", "test") + r := FirewallNetworkRuleCollectionResource{} + secondResourceName := "azurerm_firewall_network_rule_collection.test_add" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.multiple(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestnrc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + check.That(secondResourceName).ExistsInAzure(r), + 
resource.TestCheckResourceAttr(secondResourceName, "name", "acctestnrc_add"), + resource.TestCheckResourceAttr(secondResourceName, "priority", "200"), + resource.TestCheckResourceAttr(secondResourceName, "action", "Deny"), + resource.TestCheckResourceAttr(secondResourceName, "rule.#", "1"), + ), + }, + { + Config: r.multipleUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestnrc"), + check.That(data.ResourceName).Key("priority").HasValue("300"), + check.That(data.ResourceName).Key("action").HasValue("Deny"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + check.That(secondResourceName).ExistsInAzure(r), + resource.TestCheckResourceAttr(secondResourceName, "name", "acctestnrc_add"), + resource.TestCheckResourceAttr(secondResourceName, "priority", "400"), + resource.TestCheckResourceAttr(secondResourceName, "action", "Allow"), + resource.TestCheckResourceAttr(secondResourceName, "rule.#", "1"), + ), + }, + }) +} + +func TestAccFirewallNetworkRuleCollection_disappears(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_network_rule_collection", "test") + r := FirewallNetworkRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestnrc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + testCheckFirewallNetworkRuleCollectionDisappears(data.ResourceName), + ), + ExpectNonEmptyPlan: true, + }, + }) +} + +func TestAccFirewallNetworkRuleCollection_multipleRules(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_network_rule_collection", "test") + r := FirewallNetworkRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestnrc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + ), + }, + { + Config: r.multipleRules(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestnrc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("2"), + ), + }, + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestnrc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + ), + }, + }) +} + +func TestAccFirewallNetworkRuleCollection_updateFirewallTags(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_network_rule_collection", "test") + r := FirewallNetworkRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: 
resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestnrc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + ), + }, + { + Config: r.updateFirewallTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestnrc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + ), + }, + }) +} + +func TestAccFirewallNetworkRuleCollection_serviceTag(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_network_rule_collection", "test") + r := FirewallNetworkRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.serviceTag(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctestnrc"), + check.That(data.ResourceName).Key("priority").HasValue("100"), + check.That(data.ResourceName).Key("action").HasValue("Allow"), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFirewallNetworkRuleCollection_ipGroup(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_network_rule_collection", "test") + r := FirewallNetworkRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.ipGroup(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("rule.#").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFirewallNetworkRuleCollection_fqdns(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_network_rule_collection", "test") + r := FirewallNetworkRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.fqdns(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFirewallNetworkRuleCollection_noSource(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_network_rule_collection", "test") + r := FirewallNetworkRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.noSource(data), + ExpectError: regexp.MustCompile(fmt.Sprintf("at least one of %q and %q must be specified", "source_addresses", "source_ip_groups")), + }, + }) +} + +func TestAccFirewallNetworkRuleCollection_noDestination(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_network_rule_collection", "test") + r := FirewallNetworkRuleCollectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.noDestination(data), + ExpectError: regexp.MustCompile(fmt.Sprintf("at least one of %q, %q and %q must be specified", "destination_addresses", "destination_ip_groups", "destination_fqdns")), + }, + }) +} + +func (FirewallNetworkRuleCollectionResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + var id, err = azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + firewallName := id.Path["azureFirewalls"] + name := 
id.Path["networkRuleCollections"] + + resp, err := clients.Firewall.AzureFirewallsClient.Get(ctx, id.ResourceGroup, firewallName) + if err != nil { + return nil, fmt.Errorf("retrieving Firewall Network Rule Collection %q (Firewall %q / Resource Group %q): %v", name, firewallName, id.ResourceGroup, err) + } + + if resp.AzureFirewallPropertiesFormat == nil || resp.AzureFirewallPropertiesFormat.NetworkRuleCollections == nil { + return nil, fmt.Errorf("retrieving Firewall Network Rule Collection %q (Firewall %q / Resource Group %q): properties or collections was nil", name, firewallName, id.ResourceGroup) + } + + for _, rule := range *resp.AzureFirewallPropertiesFormat.NetworkRuleCollections { + if rule.Name == nil { + continue + } + + if *rule.Name == name { + return utils.Bool(true), nil + } + } + return utils.Bool(false), nil +} + +func testCheckFirewallNetworkRuleCollectionDoesNotExist(resourceName string, collectionName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).Firewall.AzureFirewallsClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + // Ensure we have enough information in state to look up in API + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + + firewallName := rs.Primary.Attributes["name"] + resourceGroup := rs.Primary.Attributes["resource_group_name"] + + read, err := client.Get(ctx, resourceGroup, firewallName) + if err != nil { + return err + } + + for _, collection := range *read.AzureFirewallPropertiesFormat.NetworkRuleCollections { + if *collection.Name == collectionName { + return fmt.Errorf("Network Rule Collection %q exists in Firewall %q: %+v", collectionName, firewallName, collection) + } + } + + return nil + } +} + +func testCheckFirewallNetworkRuleCollectionDisappears(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).Firewall.AzureFirewallsClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + // Ensure we have enough information in state to look up in API + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + + name := rs.Primary.Attributes["name"] + firewallName := rs.Primary.Attributes["azure_firewall_name"] + resourceGroup := rs.Primary.Attributes["resource_group_name"] + + read, err := client.Get(ctx, resourceGroup, firewallName) + if err != nil { + return err + } + + rules := make([]network.AzureFirewallNetworkRuleCollection, 0) + for _, collection := range *read.AzureFirewallPropertiesFormat.NetworkRuleCollections { + if *collection.Name != name { + rules = append(rules, collection) + } + } + + read.AzureFirewallPropertiesFormat.NetworkRuleCollections = &rules + + future, err := client.CreateOrUpdate(ctx, resourceGroup, firewallName, read) + if err != nil { + return fmt.Errorf("Error removing Network Rule Collection from Firewall: %+v", err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error waiting for the removal of Network Rule Collection from Firewall: %+v", err) + } + + _, err = client.Get(ctx, resourceGroup, firewallName) + return err + } +} + +func (FirewallNetworkRuleCollectionResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_network_rule_collection" "test" { + name = 
"acctestnrc" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 100 + action = "Allow" + + rule { + name = "rule1" + + source_addresses = [ + "10.0.0.0/16", + ] + + destination_ports = [ + "53", + ] + + destination_addresses = [ + "8.8.8.8", + ] + + protocols = [ + "Any", + ] + } +} +`, FirewallResource{}.basic(data)) +} + +func (r FirewallNetworkRuleCollectionResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_network_rule_collection" "import" { + name = azurerm_firewall_network_rule_collection.test.name + azure_firewall_name = azurerm_firewall_network_rule_collection.test.azure_firewall_name + resource_group_name = azurerm_firewall_network_rule_collection.test.resource_group_name + priority = 100 + action = "Allow" + + rule { + name = "rule1" + + source_addresses = [ + "10.0.0.0/16", + ] + + destination_ports = [ + "53", + ] + + destination_addresses = [ + "8.8.8.8", + ] + + protocols = [ + "Any", + ] + } +} +`, r.basic(data)) +} + +func (FirewallNetworkRuleCollectionResource) updatedName(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_network_rule_collection" "test" { + name = "acctestnrc" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 100 + action = "Allow" + + rule { + name = "rule2" + + source_addresses = [ + "10.0.0.0/16", + ] + + destination_ports = [ + "53", + ] + + destination_addresses = [ + "8.8.8.8", + ] + + protocols = [ + "Any", + ] + } +} +`, FirewallResource{}.basic(data)) +} + +func (FirewallNetworkRuleCollectionResource) multiple(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_network_rule_collection" "test" { + name = "acctestnrc" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 100 + action = "Allow" + + rule { + name = "acctestrule" + + source_addresses = [ + "10.0.0.0/16", + ] + + destination_ports = [ + "53", + ] + + destination_addresses = [ + "8.8.8.8", + ] + + protocols = [ + "Any", + ] + } +} + +resource "azurerm_firewall_network_rule_collection" "test_add" { + name = "acctestnrc_add" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 200 + action = "Deny" + + rule { + name = "acctestruleadd" + + source_addresses = [ + "10.0.0.0/8", + ] + + destination_ports = [ + "8080", + ] + + destination_addresses = [ + "8.8.4.4", + ] + + protocols = [ + "TCP", + ] + } +} +`, FirewallResource{}.basic(data)) +} + +func (FirewallNetworkRuleCollectionResource) multipleUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_network_rule_collection" "test" { + name = "acctestnrc" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 300 + action = "Deny" + + rule { + name = "acctestrule" + + source_addresses = [ + "10.0.0.0/16", + ] + + destination_ports = [ + "53", + ] + + destination_addresses = [ + "8.8.8.8", + ] + + protocols = [ + "Any", + ] + } +} + +resource "azurerm_firewall_network_rule_collection" "test_add" { + name = "acctestnrc_add" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 400 + action = "Allow" + + rule { + name = "acctestruleadd" + + 
source_addresses = [ + "10.0.0.0/8", + ] + + destination_ports = [ + "8080", + ] + + destination_addresses = [ + "8.8.4.4", + ] + + protocols = [ + "TCP", + ] + } +} +`, FirewallResource{}.basic(data)) +} + +func (FirewallNetworkRuleCollectionResource) multipleRules(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_network_rule_collection" "test" { + name = "acctestnrc" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 100 + action = "Allow" + + rule { + name = "acctestrule" + + source_addresses = [ + "10.0.0.0/16", + ] + + destination_ports = [ + "53", + ] + + destination_addresses = [ + "8.8.8.8", + ] + + protocols = [ + "Any", + ] + } + + rule { + name = "acctestrule_add" + + source_addresses = [ + "192.168.0.1", + ] + + destination_ports = [ + "8888", + ] + + destination_addresses = [ + "1.1.1.1", + ] + + protocols = [ + "TCP", + ] + } +} +`, FirewallResource{}.basic(data)) +} + +func (FirewallNetworkRuleCollectionResource) updateFirewallTags(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_network_rule_collection" "test" { + name = "acctestnrc" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 100 + action = "Allow" + + rule { + name = "rule1" + + source_addresses = [ + "10.0.0.0/16", + ] + + destination_ports = [ + "53", + ] + + destination_addresses = [ + "8.8.8.8", + ] + + protocols = [ + "Any", + ] + } +} +`, FirewallResource{}.withTags(data)) +} + +func (r FirewallNetworkRuleCollectionResource) serviceTag(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_network_rule_collection" "test" { + name = "acctestnrc" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 100 + action = "Allow" + + rule { + name = "rule1" + + source_addresses = [ + "10.0.0.0/16", + ] + + destination_ports = [ + "53", + ] + + destination_addresses = [ + "ApiManagement", + ] + + protocols = [ + "Any", + ] + } +} +`, FirewallResource{}.basic(data)) +} + +func (FirewallNetworkRuleCollectionResource) ipGroup(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_ip_group" "test_source" { + name = "acctestIpGroupForFirewallNetworkRulesSource" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + cidrs = ["1.2.3.4/32", "12.34.56.0/24"] +} + +resource "azurerm_ip_group" "test_destination" { + name = "acctestIpGroupForFirewallNetworkRulesDestination" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + cidrs = ["192.168.0.0/25", "192.168.0.192/26"] +} + +resource "azurerm_firewall_network_rule_collection" "test" { + name = "acctestnrc" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 100 + action = "Allow" + + rule { + name = "rule1" + + source_ip_groups = [ + azurerm_ip_group.test_source.id, + ] + + destination_ports = [ + "53", + ] + + destination_ip_groups = [ + azurerm_ip_group.test_destination.id, + ] + + protocols = [ + "Any", + ] + } +} +`, FirewallResource{}.basic(data)) +} + +func (FirewallNetworkRuleCollectionResource) fqdns(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_network_rule_collection" "test" { + name = 
"acctestnrc" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 100 + action = "Allow" + + rule { + name = "rule1" + + source_addresses = [ + "10.0.0.0/16", + ] + + destination_fqdns = [ + "time.windows.com" + ] + + destination_ports = [ + "8080", + ] + + protocols = [ + "Any", + ] + } +} +`, FirewallResource{}.enableDNS(data)) +} + +func (r FirewallNetworkRuleCollectionResource) noSource(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_network_rule_collection" "test" { + name = "acctestnrc" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 100 + action = "Allow" + + rule { + name = "rule1" + + destination_ports = [ + "53", + ] + + destination_addresses = [ + "8.8.8.8", + ] + + protocols = [ + "Any", + ] + } +} +`, FirewallResource{}.basic(data)) +} + +func (FirewallNetworkRuleCollectionResource) noDestination(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_network_rule_collection" "test" { + name = "acctestnrc" + azure_firewall_name = azurerm_firewall.test.name + resource_group_name = azurerm_resource_group.test.name + priority = 100 + action = "Allow" + + rule { + name = "rule1" + + source_addresses = [ + "10.0.0.0/16", + ] + + destination_ports = [ + "53", + ] + + protocols = [ + "Any", + ] + } +} +`, FirewallResource{}.basic(data)) +} diff --git a/azurerm/internal/services/network/firewall_policy_data_source.go b/azurerm/internal/services/firewall/firewall_policy_data_source.go similarity index 93% rename from azurerm/internal/services/network/firewall_policy_data_source.go rename to azurerm/internal/services/firewall/firewall_policy_data_source.go index e91dcde8e258..c7b31c44e120 100644 --- a/azurerm/internal/services/network/firewall_policy_data_source.go +++ b/azurerm/internal/services/firewall/firewall_policy_data_source.go @@ -1,23 +1,22 @@ -package network +package firewall import ( "fmt" "time" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/firewall/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmFirewallPolicy() *schema.Resource { +func FirewallDataSourcePolicy() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmFirewallPolicyRead, + Read: FirewallDataSourcePolicyRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -120,8 +119,8 @@ func dataSourceArmFirewallPolicy() *schema.Resource { } } -func dataSourceArmFirewallPolicyRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.FirewallPolicyClient +func FirewallDataSourcePolicyRead(d *schema.ResourceData, meta interface{}) error { + client := 
meta.(*clients.Client).Firewall.FirewallPolicyClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/firewall/firewall_policy_data_source_test.go b/azurerm/internal/services/firewall/firewall_policy_data_source_test.go new file mode 100644 index 000000000000..14c5eff2ffd2 --- /dev/null +++ b/azurerm/internal/services/firewall/firewall_policy_data_source_test.go @@ -0,0 +1,59 @@ +package firewall_test + +import ( + "fmt" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type FirewallPolicyDataSource struct { +} + +func TestAccFirewallPolicyDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_firewall_policy", "test") + r := FirewallPolicyDataSource{} + dataParent := acceptance.BuildTestData(t, "data.azurerm_firewall_policy", "test-parent") + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("name").Exists(), + check.That(data.ResourceName).Key("resource_group_name").Exists(), + check.That(data.ResourceName).Key("location").HasValue(location.Normalize(data.Locations.Primary)), + check.That(data.ResourceName).Key("base_policy_id").Exists(), + resource.TestCheckResourceAttr(dataParent.ResourceName, "child_policies.#", "1"), + check.That(data.ResourceName).Key("dns.0.proxy_enabled").HasValue("true"), + check.That(data.ResourceName).Key("dns.0.servers.#").HasValue("2"), + check.That(data.ResourceName).Key("threat_intelligence_mode").HasValue(string(network.AzureFirewallThreatIntelModeAlert)), + check.That(data.ResourceName).Key("threat_intelligence_allowlist.0.ip_addresses.#").HasValue("2"), + check.That(data.ResourceName).Key("threat_intelligence_allowlist.0.fqdns.#").HasValue("2"), + ), + }, + }) +} + +func (FirewallPolicyDataSource) basic(data acceptance.TestData) string { + // We deliberately add a dependency between "data.azurerm_firewall_policy.test-parent" + // and "azurerm_firewall_policy.test" so that we can test "data.azurerm_firewall_policy.test-parent.child_policies" + return fmt.Sprintf(` +%s + +data "azurerm_firewall_policy" "test-parent" { + name = azurerm_firewall_policy.test-parent.name + resource_group_name = azurerm_firewall_policy.test.resource_group_name +} + +data "azurerm_firewall_policy" "test" { + name = azurerm_firewall_policy.test.name + resource_group_name = azurerm_firewall_policy.test.resource_group_name +} +`, FirewallPolicyResource{}.inherit(data)) +} diff --git a/azurerm/internal/services/network/firewall_policy_resource.go b/azurerm/internal/services/firewall/firewall_policy_resource.go similarity index 84% rename from azurerm/internal/services/network/firewall_policy_resource.go rename to azurerm/internal/services/firewall/firewall_policy_resource.go index bc94ff75b01a..d5ba9e3d9078 100644 --- a/azurerm/internal/services/network/firewall_policy_resource.go +++ b/azurerm/internal/services/firewall/firewall_policy_resource.go @@ -1,34 +1,35 @@ -package network +package firewall import ( "fmt" "log" "time" - "github.com/hashicorp/go-azure-helpers/response" - -
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" - "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" + "github.com/hashicorp/go-azure-helpers/response" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/firewall/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/firewall/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmFirewallPolicy() *schema.Resource { +const azureFirewallPolicyResourceName = "azurerm_firewall_policy" + +func resourceFirewallPolicy() *schema.Resource { return &schema.Resource{ - Create: resourceArmFirewallPolicyCreateUpdate, - Read: resourceArmFirewallPolicyRead, - Update: resourceArmFirewallPolicyCreateUpdate, - Delete: resourceArmFirewallPolicyDelete, + Create: resourceFirewallPolicyCreateUpdate, + Read: resourceFirewallPolicyRead, + Update: resourceFirewallPolicyCreateUpdate, + Delete: resourceFirewallPolicyDelete, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { _, err := parse.FirewallPolicyID(id) @@ -80,10 +81,12 @@ func resourceArmFirewallPolicy() *schema.Resource { Optional: true, Default: false, }, + // TODO 3.0 - remove this property "network_rule_fqdn_enabled": { - Type: schema.TypeBool, - Optional: true, - Default: false, + Type: schema.TypeBool, + Optional: true, + Computed: true, + Deprecated: "This property has been deprecated as the service team has removed it from all API versions and is no longer supported by Azure. 
It will be removed in v3.0 of the provider.", }, }, }, @@ -158,8 +161,8 @@ func resourceArmFirewallPolicy() *schema.Resource { } } -func resourceArmFirewallPolicyCreateUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.FirewallPolicyClient +func resourceFirewallPolicyCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Firewall.FirewallPolicyClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -192,6 +195,9 @@ func resourceArmFirewallPolicyCreateUpdate(d *schema.ResourceData, meta interfac props.FirewallPolicyPropertiesFormat.BasePolicy = &network.SubResource{ID: utils.String(id.(string))} } + locks.ByName(name, azureFirewallPolicyResourceName) + defer locks.UnlockByName(name, azureFirewallPolicyResourceName) + if _, err := client.CreateOrUpdate(ctx, resourceGroup, name, props); err != nil { return fmt.Errorf("creating Firewall Policy %q (Resource Group %q): %+v", name, resourceGroup, err) } @@ -205,11 +211,11 @@ func resourceArmFirewallPolicyCreateUpdate(d *schema.ResourceData, meta interfac } d.SetId(*resp.ID) - return resourceArmFirewallPolicyRead(d, meta) + return resourceFirewallPolicyRead(d, meta) } -func resourceArmFirewallPolicyRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.FirewallPolicyClient +func resourceFirewallPolicyRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Firewall.FirewallPolicyClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -266,8 +272,8 @@ func resourceArmFirewallPolicyRead(d *schema.ResourceData, meta interface{}) err return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmFirewallPolicyDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.FirewallPolicyClient +func resourceFirewallPolicyDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Firewall.FirewallPolicyClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() @@ -276,6 +282,9 @@ func resourceArmFirewallPolicyDelete(d *schema.ResourceData, meta interface{}) e return err } + locks.ByName(id.Name, azureFirewallPolicyResourceName) + defer locks.UnlockByName(id.Name, azureFirewallPolicyResourceName) + future, err := client.Delete(ctx, id.ResourceGroup, id.Name) if err != nil { return fmt.Errorf("deleting Firewall Policy %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) @@ -310,9 +319,8 @@ func expandFirewallPolicyDNSSetting(input []interface{}) *network.DNSSettings { raw := input[0].(map[string]interface{}) output := &network.DNSSettings{ - Servers: utils.ExpandStringSlice(raw["servers"].(*schema.Set).List()), - EnableProxy: utils.Bool(raw["proxy_enabled"].(bool)), - RequireProxyForNetworkRules: utils.Bool(raw["network_rule_fqdn_enabled"].(bool)), + Servers: utils.ExpandStringSlice(raw["servers"].(*schema.Set).List()), + EnableProxy: utils.Bool(raw["proxy_enabled"].(bool)), } return output @@ -341,16 +349,12 @@ func flattenFirewallPolicyDNSSetting(input *network.DNSSettings) []interface{} { proxyEnabled = *input.EnableProxy } - networkRulesFqdnEnabled := false - if input.RequireProxyForNetworkRules != nil { - networkRulesFqdnEnabled = *input.RequireProxyForNetworkRules - } - return []interface{}{ map[string]interface{}{ - "servers": utils.FlattenStringSlice(input.Servers), - "proxy_enabled": 
proxyEnabled, - "network_rule_fqdn_enabled": networkRulesFqdnEnabled, + "servers": utils.FlattenStringSlice(input.Servers), + "proxy_enabled": proxyEnabled, + // TODO 3.0: remove the setting zero value for property below. + "network_rule_fqdn_enabled": false, }, } } diff --git a/azurerm/internal/services/firewall/firewall_policy_resource_test.go b/azurerm/internal/services/firewall/firewall_policy_resource_test.go new file mode 100644 index 000000000000..3f18bf13ac60 --- /dev/null +++ b/azurerm/internal/services/firewall/firewall_policy_resource_test.go @@ -0,0 +1,216 @@ +package firewall_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/firewall/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type FirewallPolicyResource struct { +} + +func TestAccFirewallPolicy_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_policy", "test") + r := FirewallPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFirewallPolicy_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_policy", "test") + r := FirewallPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFirewallPolicy_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_policy", "test") + r := FirewallPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFirewallPolicy_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_policy", "test") + r := FirewallPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccFirewallPolicy_inherit(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_policy", "test") + r := FirewallPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.inherit(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (FirewallPolicyResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) 
(*bool, error) { + var id, err = parse.FirewallPolicyID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Firewall.FirewallPolicyClient.Get(ctx, id.ResourceGroup, id.Name, "") + if err != nil { + return nil, fmt.Errorf("retrieving %s: %v", id.String(), err) + } + + return utils.Bool(resp.FirewallPolicyPropertiesFormat != nil), nil +} + +func (FirewallPolicyResource) basic(data acceptance.TestData) string { + template := FirewallPolicyResource{}.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_policy" "test" { + name = "acctest-networkfw-Policy-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location +} +`, template, data.RandomInteger) +} + +func (FirewallPolicyResource) complete(data acceptance.TestData) string { + template := FirewallPolicyResource{}.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_policy" "test" { + name = "acctest-networkfw-Policy-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + threat_intelligence_mode = "Off" + threat_intelligence_allowlist { + ip_addresses = ["1.1.1.1", "2.2.2.2"] + fqdns = ["foo.com", "bar.com"] + } + dns { + servers = ["1.1.1.1", "2.2.2.2"] + proxy_enabled = true + } + tags = { + env = "Test" + } +} +`, template, data.RandomInteger) +} + +func (FirewallPolicyResource) requiresImport(data acceptance.TestData) string { + template := FirewallPolicyResource{}.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_policy" "import" { + name = azurerm_firewall_policy.test.name + resource_group_name = azurerm_firewall_policy.test.resource_group_name + location = azurerm_firewall_policy.test.location +} +`, template) +} + +func (FirewallPolicyResource) inherit(data acceptance.TestData) string { + template := FirewallPolicyResource{}.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_policy" "test-parent" { + name = "acctest-networkfw-Policy-%d-parent" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location +} + +resource "azurerm_firewall_policy" "test" { + name = "acctest-networkfw-Policy-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + base_policy_id = azurerm_firewall_policy.test-parent.id + threat_intelligence_allowlist { + ip_addresses = ["1.1.1.1", "2.2.2.2"] + fqdns = ["foo.com", "bar.com"] + } + dns { + servers = ["1.1.1.1", "2.2.2.2"] + proxy_enabled = true + } + tags = { + env = "Test" + } +} +`, template, data.RandomInteger, data.RandomInteger) +} + +func (FirewallPolicyResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-networkfw-%d" + location = "%s" +} +`, data.RandomInteger, data.Locations.Primary) +} diff --git a/azurerm/internal/services/firewall/firewall_policy_rule_collection_group_resource.go b/azurerm/internal/services/firewall/firewall_policy_rule_collection_group_resource.go new file mode 100644 index 000000000000..02dee4d0ce70 --- /dev/null +++ b/azurerm/internal/services/firewall/firewall_policy_rule_collection_group_resource.go @@ -0,0 +1,852 @@ +package firewall + +import ( + "fmt" + "log" + "strconv" + "time" + + "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" + "github.com/hashicorp/go-azure-helpers/response" + 
"github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + azValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/firewall/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/firewall/validate" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceFirewallPolicyRuleCollectionGroup() *schema.Resource { + return &schema.Resource{ + Create: resourceFirewallPolicyRuleCollectionGroupCreateUpdate, + Read: resourceFirewallPolicyRuleCollectionGroupRead, + Update: resourceFirewallPolicyRuleCollectionGroupCreateUpdate, + Delete: resourceFirewallPolicyRuleCollectionGroupDelete, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.FirewallPolicyRuleCollectionGroupID(id) + return err + }), + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.FirewallPolicyRuleCollectionGroupName(), + }, + + "firewall_policy_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.FirewallPolicyID, + }, + + "priority": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validation.IntBetween(100, 65000), + }, + + "application_rule_collection": { + Type: schema.TypeSet, + Optional: true, + MinItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "priority": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validation.IntBetween(100, 65000), + }, + "action": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{ + string(network.FirewallPolicyFilterRuleCollectionActionTypeAllow), + string(network.FirewallPolicyFilterRuleCollectionActionTypeDeny), + }, false), + }, + "rule": { + Type: schema.TypeSet, + Required: true, + MinItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.FirewallPolicyRuleName(), + }, + "protocols": { + Type: schema.TypeSet, + Required: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "type": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{ + string(network.FirewallPolicyRuleApplicationProtocolTypeHTTP), + string(network.FirewallPolicyRuleApplicationProtocolTypeHTTPS), + }, false), + }, + "port": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validation.IntBetween(0, 64000), + }, + }, + }, + }, + "source_addresses": { + Type: 
schema.TypeSet, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.Any( + validation.IsIPAddress, + validation.IsCIDR, + validation.StringInSlice([]string{`*`}, false), + ), + }, + }, + "source_ip_groups": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + "destination_fqdns": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + "destination_fqdn_tags": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + }, + }, + }, + }, + }, + }, + + "network_rule_collection": { + Type: schema.TypeSet, + Optional: true, + MinItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "priority": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validation.IntBetween(100, 65000), + }, + "action": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{ + string(network.FirewallPolicyFilterRuleCollectionActionTypeAllow), + string(network.FirewallPolicyFilterRuleCollectionActionTypeDeny), + }, false), + }, + "rule": { + Type: schema.TypeSet, + Required: true, + MinItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.FirewallPolicyRuleName(), + }, + "protocols": { + Type: schema.TypeSet, + Required: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringInSlice([]string{ + string(network.FirewallPolicyRuleNetworkProtocolAny), + string(network.FirewallPolicyRuleNetworkProtocolTCP), + string(network.FirewallPolicyRuleNetworkProtocolUDP), + string(network.FirewallPolicyRuleNetworkProtocolICMP), + }, false), + }, + }, + "source_addresses": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.Any( + validation.IsIPAddress, + validation.IsCIDR, + validation.StringInSlice([]string{`*`}, false), + ), + }, + }, + "source_ip_groups": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + "destination_addresses": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + // Can be IP address, CIDR, "*", or service tag + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + "destination_ip_groups": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + "destination_fqdns": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + "destination_ports": { + Type: schema.TypeSet, + Required: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: azValidate.PortOrPortRangeWithin(1, 65535), + }, + }, + }, + }, + }, + }, + }, + }, + + "nat_rule_collection": { + Type: schema.TypeSet, + Optional: true, + MinItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + 
}, + "priority": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validation.IntBetween(100, 65000), + }, + "action": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{ + // Hardcode to using `Dnat` instead of the one defined in Swagger (i.e. network.DNAT) because of: https://github.com/Azure/azure-rest-api-specs/issues/9986 + // Setting `StateFunc: state.IgnoreCase` will cause other issues, as tracked by: https://github.com/hashicorp/terraform-plugin-sdk/issues/485 + // Another solution is to customize the hash function for the containing block, but as there are a couple of properties here, especially + // has property whose type is another nested block (Set), so the implementation is nontrivial and error-prone. + "Dnat", + }, false), + }, + "rule": { + Type: schema.TypeSet, + Required: true, + MinItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.FirewallPolicyRuleName(), + }, + "protocols": { + Type: schema.TypeSet, + Required: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringInSlice([]string{ + string(network.FirewallPolicyRuleNetworkProtocolTCP), + string(network.FirewallPolicyRuleNetworkProtocolUDP), + }, false), + }, + }, + "source_addresses": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.Any( + validation.IsIPAddress, + validation.IsCIDR, + validation.StringInSlice([]string{`*`}, false), + ), + }, + }, + "source_ip_groups": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + "destination_address": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.Any( + validation.IsIPAddress, + validation.IsCIDR, + ), + }, + "destination_ports": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: azValidate.PortOrPortRangeWithin(1, 64000), + }, + }, + "translated_address": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.IsIPAddress, + }, + "translated_port": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validation.IsPortNumber, + }, + }, + }, + }, + }, + }, + }, + }, + } +} + +func resourceFirewallPolicyRuleCollectionGroupCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Firewall.FirewallPolicyRuleGroupClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + policyId, err := parse.FirewallPolicyID(d.Get("firewall_policy_id").(string)) + if err != nil { + return err + } + + if d.IsNewResource() { + resp, err := client.Get(ctx, policyId.ResourceGroup, policyId.Name, name) + if err != nil { + if !utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("checking for existing Firewall Policy Rule Collection Group %q (Resource Group %q / Policy %q): %+v", name, policyId.ResourceGroup, policyId.Name, err) + } + } + + if resp.ID != nil && *resp.ID != "" { + return tf.ImportAsExistsError("azurerm_firewall_policy_rule_collection_group", *resp.ID) + } + } + + locks.ByName(policyId.Name, azureFirewallPolicyResourceName) + defer locks.UnlockByName(policyId.Name, azureFirewallPolicyResourceName) + + param := network.FirewallPolicyRuleCollectionGroup{ + 
FirewallPolicyRuleCollectionGroupProperties: &network.FirewallPolicyRuleCollectionGroupProperties{ + Priority: utils.Int32(int32(d.Get("priority").(int))), + }, + } + var rulesCollections []network.BasicFirewallPolicyRuleCollection + rulesCollections = append(rulesCollections, expandFirewallPolicyRuleCollectionApplication(d.Get("application_rule_collection").(*schema.Set).List())...) + rulesCollections = append(rulesCollections, expandFirewallPolicyRuleCollectionNetwork(d.Get("network_rule_collection").(*schema.Set).List())...) + rulesCollections = append(rulesCollections, expandFirewallPolicyRuleCollectionNat(d.Get("nat_rule_collection").(*schema.Set).List())...) + param.FirewallPolicyRuleCollectionGroupProperties.RuleCollections = &rulesCollections + + future, err := client.CreateOrUpdate(ctx, policyId.ResourceGroup, policyId.Name, name, param) + if err != nil { + return fmt.Errorf("creating Firewall Policy Rule Collection Group %q (Resource Group %q / Policy: %q): %+v", name, policyId.ResourceGroup, policyId.Name, err) + } + if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting Firewall Policy Rule Collection Group %q (Resource Group %q / Policy: %q): %+v", name, policyId.ResourceGroup, policyId.Name, err) + } + + resp, err := client.Get(ctx, policyId.ResourceGroup, policyId.Name, name) + if err != nil { + return fmt.Errorf("retrieving Firewall Policy Rule Collection Group %q (Resource Group %q / Policy: %q): %+v", name, policyId.ResourceGroup, policyId.Name, err) + } + if resp.ID == nil || *resp.ID == "" { + return fmt.Errorf("empty or nil ID returned for Firewall Policy Rule Collection Group %q (Resource Group %q / Policy: %q) ID", name, policyId.ResourceGroup, policyId.Name) + } + id, err := parse.FirewallPolicyRuleCollectionGroupID(*resp.ID) + if err != nil { + return err + } + d.SetId(id.ID()) + + return resourceFirewallPolicyRuleCollectionGroupRead(d, meta) +} + +func resourceFirewallPolicyRuleCollectionGroupRead(d *schema.ResourceData, meta interface{}) error { + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + client := meta.(*clients.Client).Firewall.FirewallPolicyRuleGroupClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.FirewallPolicyRuleCollectionGroupID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.FirewallPolicyName, id.RuleCollectionGroupName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[DEBUG] Firewall Policy Rule Collection Group %q was not found in Resource Group %q - removing from state!", id.RuleCollectionGroupName, id.ResourceGroup) + d.SetId("") + return nil + } + + return fmt.Errorf("retrieving Firewall Policy Rule Collection Group %q (Resource Group %q / Policy: %q): %+v", id.RuleCollectionGroupName, id.ResourceGroup, id.FirewallPolicyName, err) + } + + d.Set("name", resp.Name) + d.Set("priority", resp.Priority) + d.Set("firewall_policy_id", parse.NewFirewallPolicyID(subscriptionId, id.ResourceGroup, id.FirewallPolicyName).ID()) + + applicationRuleCollections, networkRuleCollections, natRuleCollections, err := flattenFirewallPolicyRuleCollection(resp.RuleCollections) + if err != nil { + return fmt.Errorf("flattening Firewall Policy Rule Collections: %+v", err) + } + + if err := d.Set("application_rule_collection", applicationRuleCollections); err != nil { + return fmt.Errorf("setting `application_rule_collection`: %+v", err) + } + if err := 
d.Set("network_rule_collection", networkRuleCollections); err != nil { + return fmt.Errorf("setting `network_rule_collection`: %+v", err) + } + if err := d.Set("nat_rule_collection", natRuleCollections); err != nil { + return fmt.Errorf("setting `nat_rule_collection`: %+v", err) + } + + return nil +} + +func resourceFirewallPolicyRuleCollectionGroupDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Firewall.FirewallPolicyRuleGroupClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.FirewallPolicyRuleCollectionGroupID(d.Id()) + if err != nil { + return err + } + + locks.ByName(id.FirewallPolicyName, azureFirewallPolicyResourceName) + defer locks.UnlockByName(id.FirewallPolicyName, azureFirewallPolicyResourceName) + + future, err := client.Delete(ctx, id.ResourceGroup, id.FirewallPolicyName, id.RuleCollectionGroupName) + if err != nil { + return fmt.Errorf("deleting Firewall Policy Rule Collection Group %q (Resource Group %q / Policy: %q): %+v", id.RuleCollectionGroupName, id.ResourceGroup, id.FirewallPolicyName, err) + } + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + if !response.WasNotFound(future.Response()) { + return fmt.Errorf("waiting for deleting %q (Resource Group %q / Policy: %q): %+v", id.RuleCollectionGroupName, id.ResourceGroup, id.FirewallPolicyName, err) + } + } + + return nil +} + +func expandFirewallPolicyRuleCollectionApplication(input []interface{}) []network.BasicFirewallPolicyRuleCollection { + return expandFirewallPolicyFilterRuleCollection(input, expandFirewallPolicyRuleApplication) +} + +func expandFirewallPolicyRuleCollectionNetwork(input []interface{}) []network.BasicFirewallPolicyRuleCollection { + return expandFirewallPolicyFilterRuleCollection(input, expandFirewallPolicyRuleNetwork) +} + +func expandFirewallPolicyRuleCollectionNat(input []interface{}) []network.BasicFirewallPolicyRuleCollection { + result := make([]network.BasicFirewallPolicyRuleCollection, 0) + for _, e := range input { + rule := e.(map[string]interface{}) + output := &network.FirewallPolicyNatRuleCollection{ + RuleCollectionType: network.RuleCollectionTypeFirewallPolicyNatRuleCollection, + Name: utils.String(rule["name"].(string)), + Priority: utils.Int32(int32(rule["priority"].(int))), + Action: &network.FirewallPolicyNatRuleCollectionAction{ + Type: network.FirewallPolicyNatRuleCollectionActionType(rule["action"].(string)), + }, + Rules: expandFirewallPolicyRuleNat(rule["rule"].(*schema.Set).List()), + } + result = append(result, output) + } + return result +} + +func expandFirewallPolicyFilterRuleCollection(input []interface{}, f func(input []interface{}) *[]network.BasicFirewallPolicyRule) []network.BasicFirewallPolicyRuleCollection { + result := make([]network.BasicFirewallPolicyRuleCollection, 0) + for _, e := range input { + rule := e.(map[string]interface{}) + output := &network.FirewallPolicyFilterRuleCollection{ + Action: &network.FirewallPolicyFilterRuleCollectionAction{ + Type: network.FirewallPolicyFilterRuleCollectionActionType(rule["action"].(string)), + }, + Name: utils.String(rule["name"].(string)), + Priority: utils.Int32(int32(rule["priority"].(int))), + RuleCollectionType: network.RuleCollectionTypeFirewallPolicyFilterRuleCollection, + Rules: f(rule["rule"].(*schema.Set).List()), + } + result = append(result, output) + } + return result +} + +func expandFirewallPolicyRuleApplication(input []interface{}) *[]network.BasicFirewallPolicyRule { + 
result := make([]network.BasicFirewallPolicyRule, 0) + for _, e := range input { + condition := e.(map[string]interface{}) + var protocols []network.FirewallPolicyRuleApplicationProtocol + for _, p := range condition["protocols"].(*schema.Set).List() { + proto := p.(map[string]interface{}) + protocols = append(protocols, network.FirewallPolicyRuleApplicationProtocol{ + ProtocolType: network.FirewallPolicyRuleApplicationProtocolType(proto["type"].(string)), + Port: utils.Int32(int32(proto["port"].(int))), + }) + } + output := &network.ApplicationRule{ + Name: utils.String(condition["name"].(string)), + RuleType: network.RuleTypeApplicationRule, + Protocols: &protocols, + SourceAddresses: utils.ExpandStringSlice(condition["source_addresses"].(*schema.Set).List()), + SourceIPGroups: utils.ExpandStringSlice(condition["source_ip_groups"].(*schema.Set).List()), + TargetFqdns: utils.ExpandStringSlice(condition["destination_fqdns"].(*schema.Set).List()), + FqdnTags: utils.ExpandStringSlice(condition["destination_fqdn_tags"].(*schema.Set).List()), + } + result = append(result, output) + } + return &result +} + +func expandFirewallPolicyRuleNetwork(input []interface{}) *[]network.BasicFirewallPolicyRule { + result := make([]network.BasicFirewallPolicyRule, 0) + for _, e := range input { + condition := e.(map[string]interface{}) + var protocols []network.FirewallPolicyRuleNetworkProtocol + for _, p := range condition["protocols"].(*schema.Set).List() { + protocols = append(protocols, network.FirewallPolicyRuleNetworkProtocol(p.(string))) + } + output := &network.Rule{ + Name: utils.String(condition["name"].(string)), + RuleType: network.RuleTypeNetworkRule, + IPProtocols: &protocols, + SourceAddresses: utils.ExpandStringSlice(condition["source_addresses"].(*schema.Set).List()), + SourceIPGroups: utils.ExpandStringSlice(condition["source_ip_groups"].(*schema.Set).List()), + DestinationAddresses: utils.ExpandStringSlice(condition["destination_addresses"].(*schema.Set).List()), + DestinationIPGroups: utils.ExpandStringSlice(condition["destination_ip_groups"].(*schema.Set).List()), + DestinationFqdns: utils.ExpandStringSlice(condition["destination_fqdns"].(*schema.Set).List()), + DestinationPorts: utils.ExpandStringSlice(condition["destination_ports"].(*schema.Set).List()), + } + result = append(result, output) + } + return &result +} + +func expandFirewallPolicyRuleNat(input []interface{}) *[]network.BasicFirewallPolicyRule { + result := make([]network.BasicFirewallPolicyRule, 0) + for _, e := range input { + condition := e.(map[string]interface{}) + var protocols []network.FirewallPolicyRuleNetworkProtocol + for _, p := range condition["protocols"].(*schema.Set).List() { + protocols = append(protocols, network.FirewallPolicyRuleNetworkProtocol(p.(string))) + } + destinationAddresses := []string{condition["destination_address"].(string)} + output := &network.NatRule{ + Name: utils.String(condition["name"].(string)), + RuleType: network.RuleTypeNatRule, + IPProtocols: &protocols, + SourceAddresses: utils.ExpandStringSlice(condition["source_addresses"].(*schema.Set).List()), + SourceIPGroups: utils.ExpandStringSlice(condition["source_ip_groups"].(*schema.Set).List()), + DestinationAddresses: &destinationAddresses, + DestinationPorts: utils.ExpandStringSlice(condition["destination_ports"].(*schema.Set).List()), + TranslatedAddress: utils.String(condition["translated_address"].(string)), + TranslatedPort: utils.String(strconv.Itoa(condition["translated_port"].(int))), + } + result = append(result, output) + 
} + return &result +} + +func flattenFirewallPolicyRuleCollection(input *[]network.BasicFirewallPolicyRuleCollection) ([]interface{}, []interface{}, []interface{}, error) { + var ( + applicationRuleCollection = []interface{}{} + networkRuleCollection = []interface{}{} + natRuleCollection = []interface{}{} + ) + if input == nil { + return applicationRuleCollection, networkRuleCollection, natRuleCollection, nil + } + + for _, e := range *input { + var result map[string]interface{} + + switch rule := e.(type) { + case network.FirewallPolicyFilterRuleCollection: + var name string + if rule.Name != nil { + name = *rule.Name + } + var priority int32 + if rule.Priority != nil { + priority = *rule.Priority + } + + var action string + if rule.Action != nil { + action = string(rule.Action.Type) + } + + result = map[string]interface{}{ + "name": name, + "priority": priority, + "action": action, + } + + if rule.Rules == nil || len(*rule.Rules) == 0 { + continue + } + + // Determine the rule type based on the first rule's type + switch (*rule.Rules)[0].(type) { + case network.ApplicationRule: + appRules, err := flattenFirewallPolicyRuleApplication(rule.Rules) + if err != nil { + return nil, nil, nil, err + } + result["rule"] = appRules + + applicationRuleCollection = append(applicationRuleCollection, result) + + case network.Rule: + networkRules, err := flattenFirewallPolicyRuleNetwork(rule.Rules) + if err != nil { + return nil, nil, nil, err + } + result["rule"] = networkRules + + networkRuleCollection = append(networkRuleCollection, result) + + default: + return nil, nil, nil, fmt.Errorf("unknown rule condition type %+v", (*rule.Rules)[0]) + } + case network.FirewallPolicyNatRuleCollection: + var name string + if rule.Name != nil { + name = *rule.Name + } + var priority int32 + if rule.Priority != nil { + priority = *rule.Priority + } + + var action string + if rule.Action != nil { + action = string(rule.Action.Type) + } + + rules, err := flattenFirewallPolicyRuleNat(rule.Rules) + if err != nil { + return nil, nil, nil, err + } + result = map[string]interface{}{ + "name": name, + "priority": priority, + "action": action, + "rule": rules, + } + + natRuleCollection = append(natRuleCollection, result) + + default: + return nil, nil, nil, fmt.Errorf("unknown rule type %+v", rule) + } + } + return applicationRuleCollection, networkRuleCollection, natRuleCollection, nil +} + +func flattenFirewallPolicyRuleApplication(input *[]network.BasicFirewallPolicyRule) ([]interface{}, error) { + if input == nil { + return []interface{}{}, nil + } + output := make([]interface{}, 0) + for _, e := range *input { + rule, ok := e.(network.ApplicationRule) + if !ok { + return nil, fmt.Errorf("unexpected non-application rule: %+v", e) + } + + var name string + if rule.Name != nil { + name = *rule.Name + } + + protocols := make([]interface{}, 0) + if rule.Protocols != nil { + for _, protocol := range *rule.Protocols { + var port int + if protocol.Port != nil { + port = int(*protocol.Port) + } + protocols = append(protocols, map[string]interface{}{ + "type": string(protocol.ProtocolType), + "port": port, + }) + } + } + + output = append(output, map[string]interface{}{ + "name": name, + "protocols": protocols, + "source_addresses": utils.FlattenStringSlice(rule.SourceAddresses), + "source_ip_groups": utils.FlattenStringSlice(rule.SourceIPGroups), + "destination_fqdns": utils.FlattenStringSlice(rule.TargetFqdns), + "destination_fqdn_tags": utils.FlattenStringSlice(rule.FqdnTags), + }) + } + + return output, nil +} + +func 
flattenFirewallPolicyRuleNetwork(input *[]network.BasicFirewallPolicyRule) ([]interface{}, error) { + if input == nil { + return []interface{}{}, nil + } + output := make([]interface{}, 0) + for _, e := range *input { + rule, ok := e.(network.Rule) + if !ok { + return nil, fmt.Errorf("unexpected non-network rule: %+v", e) + } + + var name string + if rule.Name != nil { + name = *rule.Name + } + + protocols := make([]interface{}, 0) + if rule.IPProtocols != nil { + for _, protocol := range *rule.IPProtocols { + protocols = append(protocols, string(protocol)) + } + } + + output = append(output, map[string]interface{}{ + "name": name, + "protocols": protocols, + "source_addresses": utils.FlattenStringSlice(rule.SourceAddresses), + "source_ip_groups": utils.FlattenStringSlice(rule.SourceIPGroups), + "destination_addresses": utils.FlattenStringSlice(rule.DestinationAddresses), + "destination_ip_groups": utils.FlattenStringSlice(rule.DestinationIPGroups), + "destination_fqdns": utils.FlattenStringSlice(rule.DestinationFqdns), + "destination_ports": utils.FlattenStringSlice(rule.DestinationPorts), + }) + } + return output, nil +} + +func flattenFirewallPolicyRuleNat(input *[]network.BasicFirewallPolicyRule) ([]interface{}, error) { + if input == nil { + return []interface{}{}, nil + } + output := make([]interface{}, 0) + for _, e := range *input { + rule, ok := e.(network.NatRule) + if !ok { + return nil, fmt.Errorf("unexpected non-nat rule: %+v", e) + } + + var name string + if rule.Name != nil { + name = *rule.Name + } + + protocols := make([]interface{}, 0) + if rule.IPProtocols != nil { + for _, protocol := range *rule.IPProtocols { + protocols = append(protocols, string(protocol)) + } + } + destinationAddr := "" + if rule.DestinationAddresses != nil && len(*rule.DestinationAddresses) != 0 { + destinationAddr = (*rule.DestinationAddresses)[0] + } + + translatedPort := 0 + if rule.TranslatedPort != nil { + port, err := strconv.Atoi(*rule.TranslatedPort) + if err != nil { + return nil, fmt.Errorf(`The "translatedPort" property is not a valid integer (%s)`, *rule.TranslatedPort) + } + translatedPort = port + } + + output = append(output, map[string]interface{}{ + "name": name, + "protocols": protocols, + "source_addresses": utils.FlattenStringSlice(rule.SourceAddresses), + "source_ip_groups": utils.FlattenStringSlice(rule.SourceIPGroups), + "destination_address": destinationAddr, + "destination_ports": utils.FlattenStringSlice(rule.DestinationPorts), + "translated_address": rule.TranslatedAddress, + "translated_port": &translatedPort, + }) + } + return output, nil +} diff --git a/azurerm/internal/services/firewall/firewall_policy_rule_collection_group_resource_test.go b/azurerm/internal/services/firewall/firewall_policy_rule_collection_group_resource_test.go new file mode 100644 index 000000000000..ad9e3f1c4702 --- /dev/null +++ b/azurerm/internal/services/firewall/firewall_policy_rule_collection_group_resource_test.go @@ -0,0 +1,407 @@ +package firewall_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/firewall/parse" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type FirewallPolicyRuleCollectionGroupResource struct { +} + +func TestAccFirewallPolicyRuleCollectionGroup_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_policy_rule_collection_group", "test") + r := FirewallPolicyRuleCollectionGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFirewallPolicyRuleCollectionGroup_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_policy_rule_collection_group", "test") + r := FirewallPolicyRuleCollectionGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFirewallPolicyRuleCollectionGroup_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_policy_rule_collection_group", "test") + r := FirewallPolicyRuleCollectionGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFirewallPolicyRuleCollectionGroup_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall_policy_rule_collection_group", "test") + r := FirewallPolicyRuleCollectionGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (FirewallPolicyRuleCollectionGroupResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + var id, err = parse.FirewallPolicyRuleCollectionGroupID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Firewall.FirewallPolicyRuleGroupClient.Get(ctx, id.ResourceGroup, id.FirewallPolicyName, id.RuleCollectionGroupName) + if err != nil { + return nil, fmt.Errorf("retrieving %s: %v", id.String(), err) + } + + return utils.Bool(resp.FirewallPolicyRuleCollectionGroupProperties != nil), nil +} + +func (FirewallPolicyRuleCollectionGroupResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-fwpolicy-RCG-%[1]d" + location = "%[2]s" +} + +resource "azurerm_firewall_policy" "test" { + name = "acctest-fwpolicy-RCG-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location +} + +resource "azurerm_firewall_policy_rule_collection_group" "test" { + name = "acctest-fwpolicy-RCG-%[1]d" + firewall_policy_id = azurerm_firewall_policy.test.id + priority = 500 +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (FirewallPolicyRuleCollectionGroupResource) complete(data acceptance.TestData) string { + 
return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-fwpolicy-RCG-%[1]d" + location = "%[2]s" +} + +resource "azurerm_firewall_policy" "test" { + name = "acctest-fwpolicy-RCG-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + dns { + network_rule_fqdn_enabled = false + proxy_enabled = true + } +} + +resource "azurerm_ip_group" "test_source" { + name = "acctestIpGroupForFirewallPolicySource" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + cidrs = ["1.2.3.4/32", "12.34.56.0/24"] +} + +resource "azurerm_ip_group" "test_destination" { + name = "acctestIpGroupForFirewallPolicyDest" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + cidrs = ["192.168.0.0/25", "192.168.0.192/26"] +} + +resource "azurerm_firewall_policy_rule_collection_group" "test" { + name = "acctest-fwpolicy-RCG-%[1]d" + firewall_policy_id = azurerm_firewall_policy.test.id + priority = 500 + application_rule_collection { + name = "app_rule_collection1" + priority = 500 + action = "Deny" + rule { + name = "app_rule_collection1_rule1" + protocols { + type = "Http" + port = 80 + } + protocols { + type = "Https" + port = 443 + } + source_addresses = ["10.0.0.1"] + destination_fqdns = ["terraform.io"] + } + rule { + name = "app_rule_collection1_rule2" + protocols { + type = "Http" + port = 80 + } + protocols { + type = "Https" + port = 443 + } + source_ip_groups = [azurerm_ip_group.test_source.id] + destination_fqdns = ["terraform.io"] + } + rule { + name = "app_rule_collection1_rule3" + protocols { + type = "Http" + port = 80 + } + protocols { + type = "Https" + port = 443 + } + source_addresses = ["10.0.0.1"] + destination_fqdn_tags = ["WindowsDiagnostics"] + } + } + + network_rule_collection { + name = "network_rule_collection1" + priority = 400 + action = "Deny" + rule { + name = "network_rule_collection1_rule1" + protocols = ["TCP", "UDP"] + source_addresses = ["10.0.0.1"] + destination_addresses = ["192.168.1.1", "ApiManagement"] + destination_ports = ["80", "1000-2000"] + } + rule { + name = "network_rule_collection1_rule2" + protocols = ["TCP", "UDP"] + source_addresses = ["10.0.0.1"] + destination_fqdns = ["time.windows.com"] + destination_ports = ["80", "1000-2000"] + } + rule { + name = "network_rule_collection1_rule3" + protocols = ["TCP", "UDP"] + source_ip_groups = [azurerm_ip_group.test_source.id] + destination_ip_groups = [azurerm_ip_group.test_destination.id] + destination_ports = ["80", "1000-2000"] + } + } + + nat_rule_collection { + name = "nat_rule_collection1" + priority = 300 + action = "Dnat" + rule { + name = "nat_rule_collection1_rule1" + protocols = ["TCP", "UDP"] + source_addresses = ["10.0.0.1", "10.0.0.2"] + destination_address = "192.168.1.1" + destination_ports = ["80", "1000-2000"] + translated_address = "192.168.0.1" + translated_port = "8080" + } + rule { + name = "nat_rule_collection1_rule2" + protocols = ["TCP", "UDP"] + source_ip_groups = [azurerm_ip_group.test_source.id] + destination_address = "192.168.1.1" + destination_ports = ["80", "1000-2000"] + translated_address = "192.168.0.1" + translated_port = "8080" + } + } +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (FirewallPolicyRuleCollectionGroupResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource 
"azurerm_resource_group" "test" { + name = "acctestRG-fwpolicy-RCG-%[1]d" + location = "%[2]s" +} + +resource "azurerm_firewall_policy" "test" { + name = "acctest-fwpolicy-RCG-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + dns { + network_rule_fqdn_enabled = false + proxy_enabled = true + } +} + +resource "azurerm_ip_group" "test_source" { + name = "acctestIpGroupForFirewallPolicySource" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + cidrs = ["1.2.3.4/32", "12.34.56.0/24"] +} + +resource "azurerm_ip_group" "test_destination" { + name = "acctestIpGroupForFirewallPolicyDest" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + cidrs = ["192.168.0.0/25", "192.168.0.192/26"] +} + +resource "azurerm_firewall_policy_rule_collection_group" "test" { + name = "acctest-fwpolicy-RCG-%[1]d" + firewall_policy_id = azurerm_firewall_policy.test.id + priority = 500 + application_rule_collection { + name = "app_rule_collection1" + priority = 500 + action = "Deny" + rule { + name = "app_rule_collection1_rule1" + protocols { + type = "Http" + port = 80 + } + protocols { + type = "Https" + port = 443 + } + source_addresses = ["10.0.0.1", "10.0.0.2"] + destination_fqdns = ["terraform.io"] + } + rule { + name = "app_rule_collection1_rule2" + protocols { + type = "Http" + port = 80 + } + source_ip_groups = [azurerm_ip_group.test_source.id] + destination_fqdns = ["terraform.io"] + } + rule { + name = "app_rule_collection1_rule3" + protocols { + type = "Http" + port = 80 + } + protocols { + type = "Https" + port = 443 + } + source_addresses = ["10.0.0.1", "10.0.0.2"] + destination_fqdn_tags = ["WindowsDiagnostics"] + } + } + + network_rule_collection { + name = "network_rule_collection1" + priority = 400 + action = "Deny" + rule { + name = "network_rule_collection1_rule1" + protocols = ["TCP", "UDP"] + source_addresses = ["10.0.0.1"] + destination_addresses = ["192.168.1.2", "ApiManagement"] + destination_ports = ["80", "1-65535"] + } + rule { + name = "network_rule_collection1_rule2" + protocols = ["TCP", "UDP"] + source_addresses = ["10.0.0.1", "10.0.0.2"] + destination_fqdns = ["time.windows.com"] + destination_ports = ["80", "1-65535"] + } + rule { + name = "network_rule_collection1_rule3" + protocols = ["TCP"] + source_ip_groups = [azurerm_ip_group.test_source.id] + destination_ip_groups = [azurerm_ip_group.test_destination.id] + destination_ports = ["80", "1-65535"] + } + } + + nat_rule_collection { + name = "nat_rule_collection1" + priority = 300 + action = "Dnat" + rule { + name = "nat_rule_collection1_rule1" + protocols = ["TCP", "UDP"] + source_addresses = ["10.0.0.1", "10.0.0.2"] + destination_address = "192.168.1.1" + destination_ports = ["80", "1000-2000"] + translated_address = "192.168.0.1" + translated_port = "8080" + } + } +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (FirewallPolicyRuleCollectionGroupResource) requiresImport(data acceptance.TestData) string { + template := FirewallPolicyRuleCollectionGroupResource{}.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_firewall_policy_rule_collection_group" "import" { + name = azurerm_firewall_policy_rule_collection_group.test.name + firewall_policy_id = azurerm_firewall_policy_rule_collection_group.test.firewall_policy_id + priority = azurerm_firewall_policy_rule_collection_group.test.priority +} +`, template) +} diff --git 
a/azurerm/internal/services/firewall/firewall_resource.go b/azurerm/internal/services/firewall/firewall_resource.go new file mode 100644 index 000000000000..7705b415fe82 --- /dev/null +++ b/azurerm/internal/services/firewall/firewall_resource.go @@ -0,0 +1,741 @@ +package firewall + +import ( + "fmt" + "log" + "strings" + "time" + + "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/firewall/validate" + networkValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +var azureFirewallResourceName = "azurerm_firewall" + +func resourceFirewall() *schema.Resource { + return &schema.Resource{ + Create: resourceFirewallCreateUpdate, + Read: resourceFirewallRead, + Update: resourceFirewallCreateUpdate, + Delete: resourceFirewallDelete, + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(90 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(90 * time.Minute), + Delete: schema.DefaultTimeout(90 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.FirewallName, + }, + + "location": azure.SchemaLocation(), + + "resource_group_name": azure.SchemaResourceGroupName(), + + // TODO 3.0: change this to required + "sku_name": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + ValidateFunc: validation.StringInSlice([]string{ + string(network.AZFWHub), + string(network.AZFWVNet), + }, false), + }, + + // TODO 3.0: change this to required + "sku_tier": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + ValidateFunc: validation.StringInSlice([]string{ + string(network.Premium), + string(network.Standard), + }, false), + }, + + "firewall_policy_id": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validate.FirewallPolicyID, + }, + + "ip_configuration": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "subnet_id": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validate.FirewallSubnetName, + }, + "public_ip_address_id": { + Type: schema.TypeString, + Required: true, + ValidateFunc: networkValidate.PublicIpAddressID, + }, + "private_ip_address": { + Type: schema.TypeString, + Computed: true, + }, + }, + }, + }, + + "management_ip_configuration": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + MaxItems: 1, 
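+				// Descriptive note (added): this block models the management IP configuration used for
+				// forced tunneling. The subnet referenced below is expected to be the dedicated
+				// AzureFirewallManagementSubnet, which is what the FirewallManagementSubnetName
+				// validator on `subnet_id` enforces.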
+ Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "subnet_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.FirewallManagementSubnetName, + }, + "public_ip_address_id": { + Type: schema.TypeString, + Required: true, + ValidateFunc: networkValidate.PublicIpAddressID, + }, + "private_ip_address": { + Type: schema.TypeString, + Computed: true, + }, + }, + }, + }, + + "threat_intel_mode": { + Type: schema.TypeString, + Optional: true, + Default: string(network.AzureFirewallThreatIntelModeAlert), + ValidateFunc: validation.StringInSlice([]string{ + // TODO 3.0: remove the default value and the `""` below. So if it is not specified + // in config, it will not be send in request, which is required in case of vhub. + "", + string(network.AzureFirewallThreatIntelModeOff), + string(network.AzureFirewallThreatIntelModeAlert), + string(network.AzureFirewallThreatIntelModeDeny), + }, false), + }, + + "dns_servers": { + Type: schema.TypeList, + Optional: true, + MinItems: 1, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.IsIPAddress, + }, + }, + + "virtual_hub": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "virtual_hub_id": { + Type: schema.TypeString, + Required: true, + ValidateFunc: networkValidate.VirtualHubID, + }, + "public_ip_count": { + Type: schema.TypeInt, + Optional: true, + ValidateFunc: validation.IntAtLeast(1), + Default: 1, + }, + "public_ip_addresses": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + "private_ip_address": { + Type: schema.TypeString, + Computed: true, + }, + }, + }, + }, + + "zones": azure.SchemaZones(), + + "tags": tags.Schema(), + }, + } +} + +func resourceFirewallCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Firewall.AzureFirewallsClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + log.Printf("[INFO] preparing arguments for AzureRM Azure Firewall creation") + + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + + existing, err := client.Get(ctx, resourceGroup, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("Error checking for presence of existing Firewall %q (Resource Group %q): %s", name, resourceGroup, err) + } + } + + if d.IsNewResource() { + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_firewall", *existing.ID) + } + } + + if err := validateFirewallIPConfigurationSettings(d.Get("ip_configuration").([]interface{})); err != nil { + return fmt.Errorf("Error validating Firewall %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + location := azure.NormalizeLocation(d.Get("location").(string)) + t := d.Get("tags").(map[string]interface{}) + i := d.Get("ip_configuration").([]interface{}) + ipConfigs, subnetToLock, vnetToLock, err := expandFirewallIPConfigurations(i) + if err != nil { + return fmt.Errorf("Error building list of Azure Firewall IP Configurations: %+v", err) + } + zones := azure.ExpandZones(d.Get("zones").([]interface{})) + + parameters := network.AzureFirewall{ + Location: &location, + Tags: tags.Expand(t), + AzureFirewallPropertiesFormat: &network.AzureFirewallPropertiesFormat{ + 
IPConfigurations: ipConfigs, + ThreatIntelMode: network.AzureFirewallThreatIntelMode(d.Get("threat_intel_mode").(string)), + AdditionalProperties: expandFirewallDNSServers(d.Get("dns_servers").([]interface{})), + }, + Zones: zones, + } + + m := d.Get("management_ip_configuration").([]interface{}) + if len(m) == 1 { + mgmtIPConfig, mgmtSubnetName, mgmtVirtualNetworkName, err := expandFirewallIPConfigurations(m) + if err != nil { + return fmt.Errorf("Error parsing Azure Firewall Management IP Configurations: %+v", err) + } + + if !utils.SliceContainsValue(*subnetToLock, (*mgmtSubnetName)[0]) { + *subnetToLock = append(*subnetToLock, (*mgmtSubnetName)[0]) + } + + if !utils.SliceContainsValue(*vnetToLock, (*mgmtVirtualNetworkName)[0]) { + *vnetToLock = append(*vnetToLock, (*mgmtVirtualNetworkName)[0]) + } + if *mgmtIPConfig != nil { + parameters.ManagementIPConfiguration = &(*mgmtIPConfig)[0] + } + } + + if threatIntelMode := d.Get("threat_intel_mode").(string); threatIntelMode != "" { + parameters.AzureFirewallPropertiesFormat.ThreatIntelMode = network.AzureFirewallThreatIntelMode(threatIntelMode) + } + + if policyId := d.Get("firewall_policy_id").(string); policyId != "" { + parameters.AzureFirewallPropertiesFormat.FirewallPolicy = &network.SubResource{ID: &policyId} + } + + vhub, hubIpAddresses, ok := expandFirewallVirtualHubSetting(existing, d.Get("virtual_hub").([]interface{})) + if ok { + parameters.AzureFirewallPropertiesFormat.VirtualHub = vhub + parameters.AzureFirewallPropertiesFormat.HubIPAddresses = hubIpAddresses + } + + // TODO 3.0: no need to test since sku_name is required + if skuName := d.Get("sku_name").(string); skuName != "" { + if parameters.Sku == nil { + parameters.Sku = &network.AzureFirewallSku{} + } + parameters.Sku.Name = network.AzureFirewallSkuName(skuName) + } + + // TODO 3.0: no need to test since sku_tier is required + if skuTier := d.Get("sku_tier").(string); skuTier != "" { + if parameters.Sku == nil { + parameters.Sku = &network.AzureFirewallSku{} + } + parameters.Sku.Tier = network.AzureFirewallSkuTier(skuTier) + } + + locks.ByName(name, azureFirewallResourceName) + defer locks.UnlockByName(name, azureFirewallResourceName) + + locks.MultipleByName(vnetToLock, VirtualNetworkResourceName) + defer locks.UnlockMultipleByName(vnetToLock, VirtualNetworkResourceName) + + locks.MultipleByName(subnetToLock, SubnetResourceName) + defer locks.UnlockMultipleByName(subnetToLock, SubnetResourceName) + + if !d.IsNewResource() { + exists, err2 := client.Get(ctx, resourceGroup, name) + if err2 != nil { + if utils.ResponseWasNotFound(exists.Response) { + return fmt.Errorf("Error retrieving existing Firewall %q (Resource Group %q): firewall not found in resource group", name, resourceGroup) + } + return fmt.Errorf("Error retrieving existing Firewall %q (Resource Group %q): %s", name, resourceGroup, err2) + } + if exists.AzureFirewallPropertiesFormat == nil { + return fmt.Errorf("Error retrieving existing rules (Firewall %q / Resource Group %q): `props` was nil", name, resourceGroup) + } + props := *exists.AzureFirewallPropertiesFormat + parameters.AzureFirewallPropertiesFormat.ApplicationRuleCollections = props.ApplicationRuleCollections + parameters.AzureFirewallPropertiesFormat.NetworkRuleCollections = props.NetworkRuleCollections + parameters.AzureFirewallPropertiesFormat.NatRuleCollections = props.NatRuleCollections + } + + future, err := client.CreateOrUpdate(ctx, resourceGroup, name, parameters) + if err != nil { + return fmt.Errorf("Error creating/updating Azure 
Firewall %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error waiting for creation/update of Azure Firewall %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + read, err := client.Get(ctx, resourceGroup, name) + if err != nil { + return fmt.Errorf("Error retrieving Azure Firewall %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + if read.ID == nil { + return fmt.Errorf("Cannot read Azure Firewall %q (Resource Group %q) ID", name, resourceGroup) + } + + d.SetId(*read.ID) + + return resourceFirewallRead(d, meta) +} + +func resourceFirewallRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Firewall.AzureFirewallsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + resourceGroup := id.ResourceGroup + name := id.Path["azureFirewalls"] + + read, err := client.Get(ctx, resourceGroup, name) + if err != nil { + if utils.ResponseWasNotFound(read.Response) { + log.Printf("[DEBUG] Firewall %q was not found in Resource Group %q - removing from state!", name, resourceGroup) + d.SetId("") + return nil + } + + return fmt.Errorf("Error making Read request on Azure Firewall %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + d.Set("name", read.Name) + d.Set("resource_group_name", resourceGroup) + if location := read.Location; location != nil { + d.Set("location", azure.NormalizeLocation(*location)) + } + + if props := read.AzureFirewallPropertiesFormat; props != nil { + if err := d.Set("ip_configuration", flattenFirewallIPConfigurations(props.IPConfigurations)); err != nil { + return fmt.Errorf("Error setting `ip_configuration`: %+v", err) + } + managementIPConfigs := make([]interface{}, 0) + if props.ManagementIPConfiguration != nil { + managementIPConfigs = flattenFirewallIPConfigurations(&[]network.AzureFirewallIPConfiguration{ + *props.ManagementIPConfiguration, + }) + } + if err := d.Set("management_ip_configuration", managementIPConfigs); err != nil { + return fmt.Errorf("Error setting `management_ip_configuration`: %+v", err) + } + + d.Set("threat_intel_mode", string(props.ThreatIntelMode)) + + if err := d.Set("dns_servers", flattenFirewallDNSServers(props.AdditionalProperties)); err != nil { + return fmt.Errorf("Error setting `dns_servers`: %+v", err) + } + + if policy := props.FirewallPolicy; policy != nil { + d.Set("firewall_policy_id", policy.ID) + } + + if sku := props.Sku; sku != nil { + d.Set("sku_name", string(sku.Name)) + d.Set("sku_tier", string(sku.Tier)) + } + + if err := d.Set("virtual_hub", flattenFirewallVirtualHubSetting(props)); err != nil { + return fmt.Errorf("Error setting `virtual_hub`: %+v", err) + } + } + + if err := d.Set("zones", azure.FlattenZones(read.Zones)); err != nil { + return fmt.Errorf("Error setting `zones`: %+v", err) + } + + return tags.FlattenAndSet(d, read.Tags) +} + +func resourceFirewallDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Firewall.AzureFirewallsClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + resourceGroup := id.ResourceGroup + name := id.Path["azureFirewalls"] + + read, err := client.Get(ctx, resourceGroup, name) + if err != nil { + if 
utils.ResponseWasNotFound(read.Response) { + // deleted outside of TF + log.Printf("[DEBUG] Firewall %q was not found in Resource Group %q - assuming removed!", name, resourceGroup) + return nil + } + + return fmt.Errorf("Error retrieving Firewall %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + subnetNamesToLock := make([]string, 0) + virtualNetworkNamesToLock := make([]string, 0) + if props := read.AzureFirewallPropertiesFormat; props != nil { + if configs := props.IPConfigurations; configs != nil { + for _, config := range *configs { + if config.Subnet == nil || config.Subnet.ID == nil { + continue + } + + parsedSubnetID, err2 := azure.ParseAzureResourceID(*config.Subnet.ID) + if err2 != nil { + return err2 + } + subnetName := parsedSubnetID.Path["subnets"] + + if !utils.SliceContainsValue(subnetNamesToLock, subnetName) { + subnetNamesToLock = append(subnetNamesToLock, subnetName) + } + + virtualNetworkName := parsedSubnetID.Path["virtualNetworks"] + if !utils.SliceContainsValue(virtualNetworkNamesToLock, virtualNetworkName) { + virtualNetworkNamesToLock = append(virtualNetworkNamesToLock, virtualNetworkName) + } + } + } + + if mconfig := props.ManagementIPConfiguration; mconfig != nil { + if mconfig.Subnet != nil && mconfig.Subnet.ID != nil { + parsedSubnetID, err2 := azure.ParseAzureResourceID(*mconfig.Subnet.ID) + if err2 != nil { + return err2 + } + subnetName := parsedSubnetID.Path["subnets"] + + if !utils.SliceContainsValue(subnetNamesToLock, subnetName) { + subnetNamesToLock = append(subnetNamesToLock, subnetName) + } + + virtualNetworkName := parsedSubnetID.Path["virtualNetworks"] + if !utils.SliceContainsValue(virtualNetworkNamesToLock, virtualNetworkName) { + virtualNetworkNamesToLock = append(virtualNetworkNamesToLock, virtualNetworkName) + } + } + } + } + + locks.ByName(name, azureFirewallResourceName) + defer locks.UnlockByName(name, azureFirewallResourceName) + + locks.MultipleByName(&virtualNetworkNamesToLock, VirtualNetworkResourceName) + defer locks.UnlockMultipleByName(&virtualNetworkNamesToLock, VirtualNetworkResourceName) + + locks.MultipleByName(&subnetNamesToLock, SubnetResourceName) + defer locks.UnlockMultipleByName(&subnetNamesToLock, SubnetResourceName) + + future, err := client.Delete(ctx, resourceGroup, name) + if err != nil { + return fmt.Errorf("Error deleting Azure Firewall %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error waiting for the deletion of Azure Firewall %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + return err +} + +func expandFirewallIPConfigurations(configs []interface{}) (*[]network.AzureFirewallIPConfiguration, *[]string, *[]string, error) { + ipConfigs := make([]network.AzureFirewallIPConfiguration, 0) + subnetNamesToLock := make([]string, 0) + virtualNetworkNamesToLock := make([]string, 0) + + for _, configRaw := range configs { + data := configRaw.(map[string]interface{}) + name := data["name"].(string) + subnetId := data["subnet_id"].(string) + pubID := data["public_ip_address_id"].(string) + + ipConfig := network.AzureFirewallIPConfiguration{ + Name: utils.String(name), + AzureFirewallIPConfigurationPropertiesFormat: &network.AzureFirewallIPConfigurationPropertiesFormat{ + PublicIPAddress: &network.SubResource{ + ID: utils.String(pubID), + }, + }, + } + + if subnetId != "" { + subnetID, err := azure.ParseAzureResourceID(subnetId) + if err != nil { + return nil, nil, nil, err + } + + subnetName 
:= subnetID.Path["subnets"] + virtualNetworkName := subnetID.Path["virtualNetworks"] + + if !utils.SliceContainsValue(subnetNamesToLock, subnetName) { + subnetNamesToLock = append(subnetNamesToLock, subnetName) + } + + if !utils.SliceContainsValue(virtualNetworkNamesToLock, virtualNetworkName) { + virtualNetworkNamesToLock = append(virtualNetworkNamesToLock, virtualNetworkName) + } + + ipConfig.AzureFirewallIPConfigurationPropertiesFormat.Subnet = &network.SubResource{ + ID: utils.String(subnetId), + } + } + ipConfigs = append(ipConfigs, ipConfig) + } + return &ipConfigs, &subnetNamesToLock, &virtualNetworkNamesToLock, nil +} + +func flattenFirewallIPConfigurations(input *[]network.AzureFirewallIPConfiguration) []interface{} { + result := make([]interface{}, 0) + if input == nil { + return result + } + + for _, v := range *input { + afIPConfig := make(map[string]interface{}) + props := v.AzureFirewallIPConfigurationPropertiesFormat + if props == nil { + continue + } + + if name := v.Name; name != nil { + afIPConfig["name"] = *name + } + + if subnet := props.Subnet; subnet != nil { + if id := subnet.ID; id != nil { + afIPConfig["subnet_id"] = *id + } + } + + if ipAddress := props.PrivateIPAddress; ipAddress != nil { + afIPConfig["private_ip_address"] = *ipAddress + } + + if pip := props.PublicIPAddress; pip != nil { + if id := pip.ID; id != nil { + afIPConfig["public_ip_address_id"] = *id + } + } + result = append(result, afIPConfig) + } + + return result +} + +func expandFirewallDNSServers(input []interface{}) map[string]*string { + if len(input) == 0 { + return nil + } + + var servers []string + for _, server := range input { + servers = append(servers, server.(string)) + } + + // Swagger issue asking finalize these properties: https://github.com/Azure/azure-rest-api-specs/issues/11278 + return map[string]*string{ + "Network.DNS.EnableProxy": utils.String("true"), + "Network.DNS.Servers": utils.String(strings.Join(servers, ",")), + } +} + +func flattenFirewallDNSServers(input map[string]*string) []interface{} { + if len(input) == 0 { + return nil + } + + enabled := false + if enabledPtr := input["Network.DNS.EnableProxy"]; enabledPtr != nil { + enabled = *enabledPtr == "true" + } + + if !enabled { + return nil + } + + servers := []string{} + if serversPtr := input["Network.DNS.Servers"]; serversPtr != nil { + servers = strings.Split(*serversPtr, ",") + } + return utils.FlattenStringSlice(&servers) +} + +func expandFirewallVirtualHubSetting(existing network.AzureFirewall, input []interface{}) (vhub *network.SubResource, ipAddresses *network.HubIPAddresses, ok bool) { + if len(input) == 0 { + return nil, nil, false + } + + b := input[0].(map[string]interface{}) + + // The API requires both "Count" and "Addresses" for the "PublicIPs" setting. + // The "Count" means how many PIP to provision. 
+ // The "Addresses" means differently in different cases: + // - Create: only "Count" is needed, "Addresses" is not necessary + // - Update: both "Count" and "Addresses" are needed: + // Scale up: "Addresses" should remain same as before scaling up + // Scale down: "Addresses" should indicate the addresses to be retained (in this case we retain the first new "Count" ones) + newCount := b["public_ip_count"].(int) + var addresses *[]network.AzureFirewallPublicIPAddress + if prop := existing.AzureFirewallPropertiesFormat; prop != nil { + if ipaddress := prop.HubIPAddresses; ipaddress != nil { + if pips := ipaddress.PublicIPs; pips != nil { + if count := pips.Count; count != nil { + oldCount := int(*count) + addresses = pips.Addresses + + // In case of scale down, keep the first new "Count" addresses. + if oldCount > newCount { + keptAddresses := make([]network.AzureFirewallPublicIPAddress, newCount) + for i := 0; i < newCount; i++ { + keptAddresses[i] = (*addresses)[i] + } + addresses = &keptAddresses + } + } + } + } + } + + vhub = &network.SubResource{ID: utils.String(b["virtual_hub_id"].(string))} + ipAddresses = &network.HubIPAddresses{ + PublicIPs: &network.HubPublicIPAddresses{ + Count: utils.Int32(int32(b["public_ip_count"].(int))), + Addresses: addresses, + }, + } + + return vhub, ipAddresses, true +} + +func flattenFirewallVirtualHubSetting(props *network.AzureFirewallPropertiesFormat) []interface{} { + if props.VirtualHub == nil { + return nil + } + + var vhubId string + if props.VirtualHub.ID != nil { + vhubId = *props.VirtualHub.ID + } + + var ( + publicIpCount int + publicIps []string + privateIp string + ) + if hubIP := props.HubIPAddresses; hubIP != nil { + if hubIP.PrivateIPAddress != nil { + privateIp = *hubIP.PrivateIPAddress + } + if pubIPs := hubIP.PublicIPs; pubIPs != nil { + if pubIPs.Count != nil { + publicIpCount = int(*pubIPs.Count) + } + if pubIPs.Addresses != nil { + for _, addr := range *pubIPs.Addresses { + if addr.Address != nil { + publicIps = append(publicIps, *addr.Address) + } + } + } + } + } + + return []interface{}{ + map[string]interface{}{ + "virtual_hub_id": vhubId, + "public_ip_count": publicIpCount, + "public_ip_addresses": publicIps, + "private_ip_address": privateIp, + }, + } +} + +func validateFirewallIPConfigurationSettings(configs []interface{}) error { + if len(configs) == 0 { + return nil + } + + subnetNumber := 0 + + for _, configRaw := range configs { + data := configRaw.(map[string]interface{}) + if subnet, exist := data["subnet_id"].(string); exist && subnet != "" { + subnetNumber++ + } + } + + if subnetNumber != 1 { + return fmt.Errorf(`The "ip_configuration" is invalid, %d "subnet_id" have been set, one "subnet_id" should be set among all "ip_configuration" blocks`, subnetNumber) + } + + return nil +} diff --git a/azurerm/internal/services/firewall/firewall_resource_test.go b/azurerm/internal/services/firewall/firewall_resource_test.go new file mode 100644 index 000000000000..1fe2d4f589f7 --- /dev/null +++ b/azurerm/internal/services/firewall/firewall_resource_test.go @@ -0,0 +1,877 @@ +package firewall_test + +import ( + "context" + "fmt" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type FirewallResource struct { +} + +func TestAccFirewall_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall", "test") + r := FirewallResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("ip_configuration.0.name").HasValue("configuration"), + check.That(data.ResourceName).Key("ip_configuration.0.private_ip_address").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFirewall_enableDNS(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall", "test") + r := FirewallResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.enableDNS(data, "1.1.1.1", "8.8.8.8"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.enableDNS(data, "1.1.1.1"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFirewall_withManagementIp(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall", "test") + r := FirewallResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withManagementIp(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("ip_configuration.0.name").HasValue("configuration"), + check.That(data.ResourceName).Key("ip_configuration.0.private_ip_address").Exists(), + check.That(data.ResourceName).Key("management_ip_configuration.0.name").HasValue("management_configuration"), + check.That(data.ResourceName).Key("management_ip_configuration.0.public_ip_address_id").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFirewall_withMultiplePublicIPs(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall", "test") + r := FirewallResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.multiplePublicIps(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("ip_configuration.0.name").HasValue("configuration"), + check.That(data.ResourceName).Key("ip_configuration.0.private_ip_address").Exists(), + check.That(data.ResourceName).Key("ip_configuration.1.name").HasValue("configuration_2"), + check.That(data.ResourceName).Key("ip_configuration.1.public_ip_address_id").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFirewall_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall", "test") + r := FirewallResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: 
acceptance.RequiresImportError("azurerm_firewall"), + }, + }) +} + +func TestAccFirewall_withTags(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall", "test") + r := FirewallResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("2"), + check.That(data.ResourceName).Key("tags.environment").HasValue("Production"), + check.That(data.ResourceName).Key("tags.cost_center").HasValue("MSFT"), + ), + }, + { + Config: r.withUpdatedTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.environment").HasValue("staging"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFirewall_withZones(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall", "test") + r := FirewallResource{} + zones := []string{"1"} + zonesUpdate := []string{"1", "3"} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withZones(data, zones), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("zones.#").HasValue("1"), + check.That(data.ResourceName).Key("zones.0").HasValue("1"), + ), + }, + { + Config: r.withZones(data, zonesUpdate), + Check: resource.ComposeTestCheckFunc( + + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("zones.#").HasValue("2"), + check.That(data.ResourceName).Key("zones.0").HasValue("1"), + check.That(data.ResourceName).Key("zones.1").HasValue("3"), + ), + }, + }) +} + +func TestAccFirewall_withoutZone(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall", "test") + r := FirewallResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withoutZone(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFirewall_disappears(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall", "test") + r := FirewallResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + testCheckFirewallDisappears(data.ResourceName), + ), + ExpectNonEmptyPlan: true, + }, + }) +} + +func TestAccFirewall_withFirewallPolicy(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall", "test") + r := FirewallResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withFirewallPolicy(data, "pol-01"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.withFirewallPolicy(data, "pol-02"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFirewall_inVirtualHub(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_firewall", "test") + r := FirewallResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.inVirtualHub(data, 1), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("virtual_hub.0.public_ip_addresses.#").HasValue("1"), + 
check.That(data.ResourceName).Key("virtual_hub.0.private_ip_address").Exists(), + ), + }, + data.ImportStep(), + { + Config: r.inVirtualHub(data, 2), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("virtual_hub.0.public_ip_addresses.#").HasValue("2"), + check.That(data.ResourceName).Key("virtual_hub.0.private_ip_address").Exists(), + ), + }, + data.ImportStep(), + { + Config: r.inVirtualHub(data, 1), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("virtual_hub.0.public_ip_addresses.#").HasValue("1"), + check.That(data.ResourceName).Key("virtual_hub.0.private_ip_address").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func (FirewallResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + var id, err = azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + name := id.Path["azureFirewalls"] + + resp, err := clients.Firewall.AzureFirewallsClient.Get(ctx, id.ResourceGroup, name) + if err != nil { + return nil, fmt.Errorf("retrieving Azure Firewall %q (Resource Group: %q): %v", name, id.ResourceGroup, err) + } + + return utils.Bool(resp.AzureFirewallPropertiesFormat != nil), nil +} + +func testCheckFirewallDisappears(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).Firewall.AzureFirewallsClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + // Ensure we have enough information in state to look up in API + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + + name := rs.Primary.Attributes["name"] + resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] + if !hasResourceGroup { + return fmt.Errorf("Bad: no resource group found in state for Azure Firewall: %q", name) + } + + future, err := client.Delete(ctx, resourceGroup, name) + if err != nil { + return fmt.Errorf("Bad: Delete on azureFirewallsClient: %+v", err) + } + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Bad: waiting for Deletion on azureFirewallsClient: %+v", err) + } + + return nil + } +} + +func (FirewallResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-fw-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "AzureFirewallSubnet" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefixes = ["10.0.1.0/24"] +} + +resource "azurerm_public_ip" "test" { + name = "acctestpip%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" + sku = "Standard" +} + +resource "azurerm_firewall" "test" { + name = "acctestfirewall%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + ip_configuration { + name = "configuration" + subnet_id = azurerm_subnet.test.id + 
public_ip_address_id = azurerm_public_ip.test.id + } + threat_intel_mode = "Deny" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (FirewallResource) enableDNS(data acceptance.TestData, dnsServers ...string) string { + servers := make([]string, len(dnsServers)) + for idx, server := range dnsServers { + servers[idx] = fmt.Sprintf(`"%s"`, server) + } + + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "AzureFirewallSubnet" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefixes = ["10.0.1.0/24"] +} + +resource "azurerm_public_ip" "test" { + name = "acctestpip%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" + sku = "Standard" +} + +resource "azurerm_firewall" "test" { + name = "acctestfirewall%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + ip_configuration { + name = "configuration" + subnet_id = azurerm_subnet.test.id + public_ip_address_id = azurerm_public_ip.test.id + } + threat_intel_mode = "Deny" + dns_servers = [%s] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, strings.Join(servers, ",")) +} + +func (FirewallResource) withManagementIp(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-fw-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "AzureFirewallSubnet" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefixes = ["10.0.1.0/24"] +} + +resource "azurerm_public_ip" "test" { + name = "acctestpip%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" + sku = "Standard" +} + +resource "azurerm_subnet" "test_mgmt" { + name = "AzureFirewallManagementSubnet" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefixes = ["10.0.2.0/24"] +} + +resource "azurerm_public_ip" "test_mgmt" { + name = "acctestmgmtpip%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" + sku = "Standard" +} + +resource "azurerm_firewall" "test" { + name = "acctestfirewall%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + ip_configuration { + name = "configuration" + subnet_id = azurerm_subnet.test.id + public_ip_address_id = azurerm_public_ip.test.id + } + + management_ip_configuration { + name = "management_configuration" + subnet_id = 
azurerm_subnet.test_mgmt.id + public_ip_address_id = azurerm_public_ip.test_mgmt.id + } + + threat_intel_mode = "Alert" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (FirewallResource) multiplePublicIps(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-fw-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "AzureFirewallSubnet" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefixes = ["10.0.1.0/24"] +} + +resource "azurerm_public_ip" "test" { + name = "acctestpip%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" + sku = "Standard" +} + +resource "azurerm_public_ip" "test_2" { + name = "acctestpip2%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" + sku = "Standard" +} + +resource "azurerm_firewall" "test" { + name = "acctestfirewall%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + ip_configuration { + name = "configuration" + subnet_id = azurerm_subnet.test.id + public_ip_address_id = azurerm_public_ip.test.id + } + + ip_configuration { + name = "configuration_2" + public_ip_address_id = azurerm_public_ip.test_2.id + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (FirewallResource) requiresImport(data acceptance.TestData) string { + template := FirewallResource{}.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_firewall" "import" { + name = azurerm_firewall.test.name + location = azurerm_firewall.test.location + resource_group_name = azurerm_firewall.test.resource_group_name + + ip_configuration { + name = "configuration" + subnet_id = azurerm_subnet.test.id + public_ip_address_id = azurerm_public_ip.test.id + } + threat_intel_mode = azurerm_firewall.test.threat_intel_mode +} +`, template) +} + +func (FirewallResource) withTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-fw-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "AzureFirewallSubnet" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefixes = ["10.0.1.0/24"] +} + +resource "azurerm_public_ip" "test" { + name = "acctestpip%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" + sku = "Standard" +} + +resource "azurerm_firewall" "test" { + name = "acctestfirewall%d" + location = azurerm_resource_group.test.location + resource_group_name = 
azurerm_resource_group.test.name + + ip_configuration { + name = "configuration" + subnet_id = azurerm_subnet.test.id + public_ip_address_id = azurerm_public_ip.test.id + } + + tags = { + environment = "Production" + cost_center = "MSFT" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (FirewallResource) withUpdatedTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-fw-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "AzureFirewallSubnet" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefixes = ["10.0.1.0/24"] +} + +resource "azurerm_public_ip" "test" { + name = "acctestpip%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" + sku = "Standard" +} + +resource "azurerm_firewall" "test" { + name = "acctestfirewall%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + ip_configuration { + name = "configuration" + subnet_id = azurerm_subnet.test.id + public_ip_address_id = azurerm_public_ip.test.id + } + + tags = { + environment = "staging" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (FirewallResource) withZones(data acceptance.TestData, zones []string) string { + zoneString := strings.Join(zones, ",") + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-fw-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "AzureFirewallSubnet" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefixes = ["10.0.1.0/24"] +} + +resource "azurerm_public_ip" "test" { + name = "acctestpip%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" + sku = "Standard" +} + +resource "azurerm_firewall" "test" { + name = "acctestfirewall%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + ip_configuration { + name = "configuration" + subnet_id = azurerm_subnet.test.id + public_ip_address_id = azurerm_public_ip.test.id + } + + zones = [%s] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, zoneString) +} + +func (FirewallResource) withoutZone(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-fw-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + 
resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "AzureFirewallSubnet" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefixes = ["10.0.1.0/24"] +} + +resource "azurerm_public_ip" "test" { + name = "acctestpip%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" + sku = "Standard" +} + +resource "azurerm_firewall" "test" { + name = "acctestfirewall%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + ip_configuration { + name = "configuration" + subnet_id = azurerm_subnet.test.id + public_ip_address_id = azurerm_public_ip.test.id + } + + zones = [] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (FirewallResource) withFirewallPolicy(data acceptance.TestData, policyName string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-fw-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "AzureFirewallSubnet" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefixes = ["10.0.1.0/24"] +} + +resource "azurerm_public_ip" "test" { + name = "acctestpip%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" + sku = "Standard" +} + +resource "azurerm_firewall_policy" "test" { + name = "acctestfirewall-%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location +} + +resource "azurerm_firewall" "test" { + name = "acctestfirewall%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + ip_configuration { + name = "configuration" + subnet_id = azurerm_subnet.test.id + public_ip_address_id = azurerm_public_ip.test.id + } + + firewall_policy_id = azurerm_firewall_policy.test.id + + lifecycle { + create_before_destroy = true + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, policyName, data.RandomInteger) +} + +func (FirewallResource) inVirtualHub(data acceptance.TestData, pipCount int) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-fw-%[1]d" + location = "%s" +} + +resource "azurerm_firewall_policy" "test" { + name = "acctest-firewallpolicy-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location +} + +resource "azurerm_virtual_wan" "test" { + name = "acctest-virtualwan-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location +} + +resource "azurerm_virtual_hub" "test" { + name = "acctest-virtualhub-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + virtual_wan_id = azurerm_virtual_wan.test.id + address_prefix = "10.0.1.0/24" +} + +resource "azurerm_firewall" 
"test" { + name = "acctest-firewall-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku_name = "AZFW_Hub" + + virtual_hub { + virtual_hub_id = azurerm_virtual_hub.test.id + public_ip_count = %[3]d + } + + firewall_policy_id = azurerm_firewall_policy.test.id + threat_intel_mode = "" +} +`, data.RandomInteger, data.Locations.Primary, pipCount) +} diff --git a/azurerm/internal/services/firewall/internal.go b/azurerm/internal/services/firewall/internal.go new file mode 100644 index 000000000000..db4c211ac604 --- /dev/null +++ b/azurerm/internal/services/firewall/internal.go @@ -0,0 +1,6 @@ +package firewall + +// TODO: determine what to do with these locking handlers, could we replace it with the ID? + +var SubnetResourceName = "azurerm_subnet" +var VirtualNetworkResourceName = "azurerm_virtual_network" diff --git a/azurerm/internal/services/firewall/parse/firewall_policy.go b/azurerm/internal/services/firewall/parse/firewall_policy.go new file mode 100644 index 000000000000..5431cf60f79e --- /dev/null +++ b/azurerm/internal/services/firewall/parse/firewall_policy.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type FirewallPolicyId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewFirewallPolicyID(subscriptionId, resourceGroup, name string) FirewallPolicyId { + return FirewallPolicyId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id FirewallPolicyId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Firewall Policy", segmentsStr) +} + +func (id FirewallPolicyId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/firewallPolicies/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// FirewallPolicyID parses a FirewallPolicy ID into an FirewallPolicyId struct +func FirewallPolicyID(input string) (*FirewallPolicyId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := FirewallPolicyId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("firewallPolicies"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/firewall/parse/firewall_policy_rule_collection_group.go b/azurerm/internal/services/firewall/parse/firewall_policy_rule_collection_group.go new file mode 100644 index 000000000000..2ad27f1006b0 --- /dev/null +++ b/azurerm/internal/services/firewall/parse/firewall_policy_rule_collection_group.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type FirewallPolicyRuleCollectionGroupId struct { + SubscriptionId string + ResourceGroup string + FirewallPolicyName string + RuleCollectionGroupName string +} + +func NewFirewallPolicyRuleCollectionGroupID(subscriptionId, resourceGroup, firewallPolicyName, ruleCollectionGroupName string) FirewallPolicyRuleCollectionGroupId { + return FirewallPolicyRuleCollectionGroupId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + FirewallPolicyName: firewallPolicyName, + RuleCollectionGroupName: ruleCollectionGroupName, + } +} + +func (id FirewallPolicyRuleCollectionGroupId) String() string { + segments := []string{ + fmt.Sprintf("Rule Collection Group Name %q", id.RuleCollectionGroupName), + fmt.Sprintf("Firewall Policy Name %q", id.FirewallPolicyName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Firewall Policy Rule Collection Group", segmentsStr) +} + +func (id FirewallPolicyRuleCollectionGroupId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/firewallPolicies/%s/ruleCollectionGroups/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.FirewallPolicyName, id.RuleCollectionGroupName) +} + +// FirewallPolicyRuleCollectionGroupID parses a FirewallPolicyRuleCollectionGroup ID into an FirewallPolicyRuleCollectionGroupId struct +func FirewallPolicyRuleCollectionGroupID(input string) (*FirewallPolicyRuleCollectionGroupId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := FirewallPolicyRuleCollectionGroupId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.FirewallPolicyName, err = id.PopSegment("firewallPolicies"); err != nil { + return nil, err + } + if resourceId.RuleCollectionGroupName, err = id.PopSegment("ruleCollectionGroups"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/firewall/parse/firewall_policy_rule_collection_group_test.go b/azurerm/internal/services/firewall/parse/firewall_policy_rule_collection_group_test.go new file mode 100644 index 000000000000..2d047cd3f584 --- /dev/null +++ b/azurerm/internal/services/firewall/parse/firewall_policy_rule_collection_group_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = FirewallPolicyRuleCollectionGroupId{} + +func TestFirewallPolicyRuleCollectionGroupIDFormatter(t *testing.T) { + actual := NewFirewallPolicyRuleCollectionGroupID("12345678-1234-9876-4563-123456789012", "resGroup1", "policy1", "ruleCollectionGroup1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/firewallPolicies/policy1/ruleCollectionGroups/ruleCollectionGroup1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, 
actual) + } +} + +func TestFirewallPolicyRuleCollectionGroupID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *FirewallPolicyRuleCollectionGroupId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing FirewallPolicyName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for FirewallPolicyName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/firewallPolicies/", + Error: true, + }, + + { + // missing RuleCollectionGroupName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/firewallPolicies/policy1/", + Error: true, + }, + + { + // missing value for RuleCollectionGroupName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/firewallPolicies/policy1/ruleCollectionGroups/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/firewallPolicies/policy1/ruleCollectionGroups/ruleCollectionGroup1", + Expected: &FirewallPolicyRuleCollectionGroupId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FirewallPolicyName: "policy1", + RuleCollectionGroupName: "ruleCollectionGroup1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/FIREWALLPOLICIES/POLICY1/RULECOLLECTIONGROUPS/RULECOLLECTIONGROUP1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := FirewallPolicyRuleCollectionGroupID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.FirewallPolicyName != v.Expected.FirewallPolicyName { + t.Fatalf("Expected %q but got %q for FirewallPolicyName", v.Expected.FirewallPolicyName, actual.FirewallPolicyName) + } + if actual.RuleCollectionGroupName != v.Expected.RuleCollectionGroupName { + t.Fatalf("Expected %q but got %q for RuleCollectionGroupName", v.Expected.RuleCollectionGroupName, actual.RuleCollectionGroupName) + } + } +} diff --git a/azurerm/internal/services/firewall/parse/firewall_policy_test.go b/azurerm/internal/services/firewall/parse/firewall_policy_test.go new file mode 100644 index 000000000000..36ca979f504d --- /dev/null +++ b/azurerm/internal/services/firewall/parse/firewall_policy_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 
'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = FirewallPolicyId{} + +func TestFirewallPolicyIDFormatter(t *testing.T) { + actual := NewFirewallPolicyID("12345678-1234-9876-4563-123456789012", "resGroup1", "policy1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/firewallPolicies/policy1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestFirewallPolicyID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *FirewallPolicyId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/firewallPolicies/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/firewallPolicies/policy1", + Expected: &FirewallPolicyId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "policy1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/FIREWALLPOLICIES/POLICY1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := FirewallPolicyID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/firewall/registration.go b/azurerm/internal/services/firewall/registration.go new file mode 100644 index 000000000000..8c8b1fd134c7 --- /dev/null +++ b/azurerm/internal/services/firewall/registration.go @@ -0,0 +1,39 @@ +package firewall + +import ( + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" +) + +type Registration struct{} + +// Name is the name of this Service +func (r Registration) Name() string { + return "Firewall" +} + +// WebsiteCategories returns a list of categories which can be used for the sidebar +func (r Registration) WebsiteCategories() []string { + return []string{ + "Network", + } +} + +// SupportedDataSources returns the supported Data Sources supported by this 
Service +func (r Registration) SupportedDataSources() map[string]*schema.Resource { + return map[string]*schema.Resource{ + "azurerm_firewall": FirewallDataSource(), + "azurerm_firewall_policy": FirewallDataSourcePolicy(), + } +} + +// SupportedResources returns the supported Resources supported by this Service +func (r Registration) SupportedResources() map[string]*schema.Resource { + return map[string]*schema.Resource{ + "azurerm_firewall_application_rule_collection": resourceFirewallApplicationRuleCollection(), + "azurerm_firewall_policy": resourceFirewallPolicy(), + "azurerm_firewall_policy_rule_collection_group": resourceFirewallPolicyRuleCollectionGroup(), + "azurerm_firewall_nat_rule_collection": resourceFirewallNatRuleCollection(), + "azurerm_firewall_network_rule_collection": resourceFirewallNetworkRuleCollection(), + "azurerm_firewall": resourceFirewall(), + } +} diff --git a/azurerm/internal/services/firewall/resourceids.go b/azurerm/internal/services/firewall/resourceids.go new file mode 100644 index 000000000000..a531d82c74c6 --- /dev/null +++ b/azurerm/internal/services/firewall/resourceids.go @@ -0,0 +1,5 @@ +package firewall + +// Firewall Policy +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=FirewallPolicy -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/firewallPolicies/policy1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=FirewallPolicyRuleCollectionGroup -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/firewallPolicies/policy1/ruleCollectionGroups/ruleCollectionGroup1 diff --git a/azurerm/internal/services/firewall/subresource.go b/azurerm/internal/services/firewall/subresource.go new file mode 100644 index 000000000000..d124f6cf1f60 --- /dev/null +++ b/azurerm/internal/services/firewall/subresource.go @@ -0,0 +1,20 @@ +package firewall + +import ( + "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" +) + +func flattenNetworkSubResourceID(input *[]network.SubResource) []interface{} { + results := make([]interface{}, 0) + if input == nil { + return results + } + + for _, item := range *input { + if item.ID != nil { + results = append(results, *item.ID) + } + } + + return results +} diff --git a/azurerm/internal/services/firewall/validate/firewall_management_subnet_name.go b/azurerm/internal/services/firewall/validate/firewall_management_subnet_name.go new file mode 100644 index 000000000000..4ee334c62a15 --- /dev/null +++ b/azurerm/internal/services/firewall/validate/firewall_management_subnet_name.go @@ -0,0 +1,21 @@ +package validate + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +func FirewallManagementSubnetName(v interface{}, k string) (warnings []string, errors []error) { + parsed, err := azure.ParseAzureResourceID(v.(string)) + if err != nil { + errors = append(errors, fmt.Errorf("Error parsing Azure Resource ID %q", v.(string))) + return warnings, errors + } + subnetName := parsed.Path["subnets"] + if subnetName != "AzureFirewallManagementSubnet" { + errors = append(errors, fmt.Errorf("The name of the management subnet for %q must be exactly 'AzureFirewallManagementSubnet' to be used for the Azure Firewall resource", k)) + } + + return warnings, errors +} diff --git a/azurerm/internal/services/firewall/validate/firewall_name.go b/azurerm/internal/services/firewall/validate/firewall_name.go new 
file mode 100644 index 000000000000..d5ae43f89978 --- /dev/null +++ b/azurerm/internal/services/firewall/validate/firewall_name.go @@ -0,0 +1,18 @@ +package validate + +import ( + "fmt" + "regexp" +) + +func FirewallName(v interface{}, k string) (warnings []string, errors []error) { + value := v.(string) + + // From the Portal: + // The name must begin with a letter or number, end with a letter, number or underscore, and may contain only letters, numbers, underscores, periods, or hyphens. + if matched := regexp.MustCompile(`^[0-9a-zA-Z]([0-9a-zA-Z._-]{0,}[0-9a-zA-Z_])?$`).Match([]byte(value)); !matched { + errors = append(errors, fmt.Errorf("%q must begin with a letter or number, end with a letter, number or underscore, and may contain only letters, numbers, underscores, periods, or hyphens.", k)) + } + + return warnings, errors +} diff --git a/azurerm/internal/services/firewall/validate/firewall_name_test.go b/azurerm/internal/services/firewall/validate/firewall_name_test.go new file mode 100644 index 000000000000..e0f9e8857cfc --- /dev/null +++ b/azurerm/internal/services/firewall/validate/firewall_name_test.go @@ -0,0 +1,42 @@ +package validate + +import ( + "strings" + "testing" +) + +func TestFirewallName(t *testing.T) { + // The name must begin with a letter or number, end with a letter, number or underscore, and may contain only letters, numbers, underscores, periods, or hyphens. + var validNames = []string{ + "a", + "abc123", + "a_b_c", + "hy-ph-en", + "valid_", + "v-a_l1.d_", + strings.Repeat("w", 65), + } + for _, v := range validNames { + _, errors := FirewallName(v, "name") + if len(errors) != 0 { + t.Fatalf("%q should be a valid Firewall Name: %q", v, errors) + } + } + + invalidNames := []string{ + "_invalid", + "-invalid", + ".invalid", + "!invalid", + "hel!!o", + "invalid.", + "invalid-", + "invalid!", + } + for _, v := range invalidNames { + _, errors := FirewallName(v, "name") + if len(errors) == 0 { + t.Fatalf("%q should be an invalid Firewall Name", v) + } + } +} diff --git a/azurerm/internal/services/firewall/validate/firewall_policy_id.go b/azurerm/internal/services/firewall/validate/firewall_policy_id.go new file mode 100644 index 000000000000..9e5d2b9ee709 --- /dev/null +++ b/azurerm/internal/services/firewall/validate/firewall_policy_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/firewall/parse" +) + +func FirewallPolicyID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.FirewallPolicyID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/firewall/validate/firewall_policy_id_test.go b/azurerm/internal/services/firewall/validate/firewall_policy_id_test.go new file mode 100644 index 000000000000..04b7e809793d --- /dev/null +++ b/azurerm/internal/services/firewall/validate/firewall_policy_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestFirewallPolicyID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + 
// missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/firewallPolicies/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/firewallPolicies/policy1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/FIREWALLPOLICIES/POLICY1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := FirewallPolicyID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/firewall/validate/firewall_policy_name.go b/azurerm/internal/services/firewall/validate/firewall_policy_name.go new file mode 100644 index 000000000000..df9573b3dc43 --- /dev/null +++ b/azurerm/internal/services/firewall/validate/firewall_policy_name.go @@ -0,0 +1,12 @@ +package validate + +import ( + "regexp" + + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" +) + +func FirewallPolicyName() func(i interface{}, k string) (warnings []string, errors []error) { + return validation.StringMatch(regexp.MustCompile(`^[^\W_][\w-.]*[\w]$`), + "The name must begin with a letter or number, end with a letter, number or underscore, and may contain only letters, numbers, underscores, periods, or hyphens.") +} diff --git a/azurerm/internal/services/firewall/validate/firewall_policy_rule_collection_group_id.go b/azurerm/internal/services/firewall/validate/firewall_policy_rule_collection_group_id.go new file mode 100644 index 000000000000..2459fab39fdc --- /dev/null +++ b/azurerm/internal/services/firewall/validate/firewall_policy_rule_collection_group_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/firewall/parse" +) + +func FirewallPolicyRuleCollectionGroupID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.FirewallPolicyRuleCollectionGroupID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/firewall/validate/firewall_policy_rule_collection_group_id_test.go b/azurerm/internal/services/firewall/validate/firewall_policy_rule_collection_group_id_test.go new file mode 100644 index 000000000000..069e37948bed --- /dev/null +++ b/azurerm/internal/services/firewall/validate/firewall_policy_rule_collection_group_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" 
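
Editor's aside (not part of the diff): the generated validate wrappers simply type-assert the value and delegate to the parse functions, so they reject anything that is not a canonical, fully segmented ID. A small sketch of calling one directly follows; only the function name and signature from this diff are assumed, and the key argument is an illustrative label.

package main

import (
	"fmt"

	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/firewall/validate"
)

func main() {
	good := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/firewallPolicies/policy1/ruleCollectionGroups/ruleCollectionGroup1"
	bad := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/firewallPolicies/policy1"

	_, errs := validate.FirewallPolicyRuleCollectionGroupID(good, "rule_collection_group_id")
	fmt.Println(len(errs)) // 0 (canonical ID parses cleanly)

	_, errs = validate.FirewallPolicyRuleCollectionGroupID(bad, "rule_collection_group_id")
	fmt.Println(len(errs)) // 1 (the ruleCollectionGroups segment is missing)
}
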
+ +func TestFirewallPolicyRuleCollectionGroupID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing FirewallPolicyName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for FirewallPolicyName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/firewallPolicies/", + Valid: false, + }, + + { + // missing RuleCollectionGroupName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/firewallPolicies/policy1/", + Valid: false, + }, + + { + // missing value for RuleCollectionGroupName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/firewallPolicies/policy1/ruleCollectionGroups/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/firewallPolicies/policy1/ruleCollectionGroups/ruleCollectionGroup1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/FIREWALLPOLICIES/POLICY1/RULECOLLECTIONGROUPS/RULECOLLECTIONGROUP1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := FirewallPolicyRuleCollectionGroupID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/firewall/validate/firewall_policy_rule_collection_group_name.go b/azurerm/internal/services/firewall/validate/firewall_policy_rule_collection_group_name.go new file mode 100644 index 000000000000..ed8abec66f12 --- /dev/null +++ b/azurerm/internal/services/firewall/validate/firewall_policy_rule_collection_group_name.go @@ -0,0 +1,12 @@ +package validate + +import ( + "regexp" + + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" +) + +func FirewallPolicyRuleCollectionGroupName() func(i interface{}, k string) (warnings []string, errors []error) { + return validation.StringMatch(regexp.MustCompile(`^[^\W_][\w-.]*[\w]$`), + "The name must begin with a letter or number, end with a letter, number or underscore, and may contain only letters, numbers, underscores, periods, or hyphens.") +} diff --git a/azurerm/internal/services/firewall/validate/firewall_policy_rule_name.go b/azurerm/internal/services/firewall/validate/firewall_policy_rule_name.go new file mode 100644 index 000000000000..12840cd0296e --- /dev/null +++ b/azurerm/internal/services/firewall/validate/firewall_policy_rule_name.go @@ -0,0 +1,12 @@ +package validate + +import ( + "regexp" + + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" +) + +func FirewallPolicyRuleName() func(i interface{}, k string) (warnings []string, errors []error) { + return 
validation.StringMatch(regexp.MustCompile(`^[^\W_][\w-.]*[\w]$`), + "The name must begin with a letter or number, end with a letter, number or underscore, and may contain only letters, numbers, underscores, periods, or hyphens.") +} diff --git a/azurerm/internal/services/firewall/validate/firewall_subnet_name.go b/azurerm/internal/services/firewall/validate/firewall_subnet_name.go new file mode 100644 index 000000000000..f003e46b5118 --- /dev/null +++ b/azurerm/internal/services/firewall/validate/firewall_subnet_name.go @@ -0,0 +1,21 @@ +package validate + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +func FirewallSubnetName(v interface{}, k string) (warnings []string, errors []error) { + parsed, err := azure.ParseAzureResourceID(v.(string)) + if err != nil { + errors = append(errors, fmt.Errorf("Error parsing Azure Resource ID %q", v.(string))) + return warnings, errors + } + subnetName := parsed.Path["subnets"] + if subnetName != "AzureFirewallSubnet" { + errors = append(errors, fmt.Errorf("The name of the Subnet for %q must be exactly 'AzureFirewallSubnet' to be used for the Azure Firewall resource", k)) + } + + return warnings, errors +} diff --git a/azurerm/internal/services/frontdoor/customizediff.go b/azurerm/internal/services/frontdoor/customizediff.go index a78181b6d8d2..0ee628676596 100644 --- a/azurerm/internal/services/frontdoor/customizediff.go +++ b/azurerm/internal/services/frontdoor/customizediff.go @@ -11,7 +11,7 @@ import ( func customizeHttpsConfigurationCustomizeDiff(d *schema.ResourceDiff, v interface{}) error { if v, ok := d.GetOk("frontend_endpoint_id"); ok && v.(string) != "" { - id, err := parse.FrontendEndpointIDForImport(v.(string)) + id, err := parse.FrontendEndpointID(v.(string)) if err != nil { return err } diff --git a/azurerm/internal/services/frontdoor/frontdoor_custom_https_configuration_resource.go b/azurerm/internal/services/frontdoor/frontdoor_custom_https_configuration_resource.go index 25108385d404..3a155bcd10bc 100644 --- a/azurerm/internal/services/frontdoor/frontdoor_custom_https_configuration_resource.go +++ b/azurerm/internal/services/frontdoor/frontdoor_custom_https_configuration_resource.go @@ -19,15 +19,15 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmFrontDoorCustomHttpsConfiguration() *schema.Resource { +func resourceFrontDoorCustomHttpsConfiguration() *schema.Resource { return &schema.Resource{ - Create: resourceArmFrontDoorCustomHttpsConfigurationCreateUpdate, - Read: resourceArmFrontDoorCustomHttpsConfigurationRead, - Update: resourceArmFrontDoorCustomHttpsConfigurationCreateUpdate, - Delete: resourceArmFrontDoorCustomHttpsConfigurationDelete, + Create: resourceFrontDoorCustomHttpsConfigurationCreateUpdate, + Read: resourceFrontDoorCustomHttpsConfigurationRead, + Update: resourceFrontDoorCustomHttpsConfigurationCreateUpdate, + Delete: resourceFrontDoorCustomHttpsConfigurationDelete, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.FrontendEndpointIDForImport(id) + _, err := parse.FrontendEndpointID(id) return err }), @@ -77,13 +77,12 @@ func resourceArmFrontDoorCustomHttpsConfiguration() *schema.Resource { } } -func resourceArmFrontDoorCustomHttpsConfigurationCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceFrontDoorCustomHttpsConfigurationCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := 
meta.(*clients.Client).Frontdoor.FrontDoorsFrontendClient - subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.FrontendEndpointID(d.Get("frontend_endpoint_id").(string)) + id, err := parse.FrontendEndpointIDInsensitively(d.Get("frontend_endpoint_id").(string)) if err != nil { return err } @@ -106,26 +105,24 @@ func resourceArmFrontDoorCustomHttpsConfigurationCreateUpdate(d *schema.Resource customHttpsProvisioningEnabled: d.Get("custom_https_provisioning_enabled").(bool), frontendEndpointId: *id, provisioningState: props.CustomHTTPSProvisioningState, - subscriptionId: subscriptionId, } if err := updateCustomHttpsConfiguration(ctx, client, input); err != nil { return fmt.Errorf("updating Custom HTTPS configuration for Frontend Endpoint %q (Front Door %q / Resource Group %q): %+v", id.Name, id.FrontDoorName, id.ResourceGroup, err) } if d.IsNewResource() { - d.SetId(id.ID(subscriptionId)) + d.SetId(id.ID()) } - return resourceArmFrontDoorCustomHttpsConfigurationRead(d, meta) + return resourceFrontDoorCustomHttpsConfigurationRead(d, meta) } -func resourceArmFrontDoorCustomHttpsConfigurationRead(d *schema.ResourceData, meta interface{}) error { +func resourceFrontDoorCustomHttpsConfigurationRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Frontdoor.FrontDoorsFrontendClient - subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.FrontendEndpointID(d.Id()) + id, err := parse.FrontendEndpointIDInsensitively(d.Id()) if err != nil { return err } @@ -141,7 +138,7 @@ func resourceArmFrontDoorCustomHttpsConfigurationRead(d *schema.ResourceData, me return fmt.Errorf("reading Front Door Endpoint %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } - d.Set("frontend_endpoint_id", id.ID(subscriptionId)) + d.Set("frontend_endpoint_id", id.ID()) d.Set("resource_group_name", id.ResourceGroup) flattenedHttpsConfig := flattenCustomHttpsConfiguration(resp.FrontendEndpointProperties) @@ -155,13 +152,12 @@ func resourceArmFrontDoorCustomHttpsConfigurationRead(d *schema.ResourceData, me return nil } -func resourceArmFrontDoorCustomHttpsConfigurationDelete(d *schema.ResourceData, meta interface{}) error { +func resourceFrontDoorCustomHttpsConfigurationDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Frontdoor.FrontDoorsFrontendClient - subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.FrontendEndpointID(d.Id()) + id, err := parse.FrontendEndpointIDInsensitively(d.Id()) if err != nil { return err } @@ -185,7 +181,6 @@ func resourceArmFrontDoorCustomHttpsConfigurationDelete(d *schema.ResourceData, customHttpsProvisioningEnabled: false, frontendEndpointId: *id, provisioningState: props.CustomHTTPSProvisioningState, - subscriptionId: subscriptionId, } if err := updateCustomHttpsConfiguration(ctx, client, input); err != nil { return fmt.Errorf("disabling Custom HTTPS configuration for Frontend Endpoint %q (Front Door %q / Resource Group %q): %+v", id.Name, id.FrontDoorName, id.ResourceGroup, err) @@ -200,12 +195,11 @@ type customHttpsConfigurationUpdateInput struct { customHttpsProvisioningEnabled bool frontendEndpointId parse.FrontendEndpointId provisioningState 
frontdoor.CustomHTTPSProvisioningState - subscriptionId string } func updateCustomHttpsConfiguration(ctx context.Context, client *frontdoor.FrontendEndpointsClient, input customHttpsConfigurationUpdateInput) error { // Locking to prevent parallel changes causing issues - frontendEndpointResourceId := input.frontendEndpointId.ID(input.subscriptionId) + frontendEndpointResourceId := input.frontendEndpointId.ID() locks.ByID(frontendEndpointResourceId) defer locks.UnlockByID(frontendEndpointResourceId) @@ -232,13 +226,13 @@ func updateCustomHttpsConfiguration(ctx context.Context, client *frontdoor.Front customHTTPSConfigurationUpdate := makeCustomHttpsConfiguration(customHTTPSConfiguration, minTLSVersion) if input.provisioningState == frontdoor.CustomHTTPSProvisioningStateDisabled || customHTTPSConfigurationUpdate != *input.customHttpsConfigurationCurrent { // Enable Custom Domain HTTPS for the Frontend Endpoint - if err := resourceArmFrontDoorFrontendEndpointEnableHttpsProvisioning(ctx, client, input.frontendEndpointId, true, customHTTPSConfigurationUpdate); err != nil { + if err := resourceFrontDoorFrontendEndpointEnableHttpsProvisioning(ctx, client, input.frontendEndpointId, true, customHTTPSConfigurationUpdate); err != nil { return fmt.Errorf("unable to enable/update Custom Domain HTTPS for Frontend Endpoint %q (Resource Group %q): %+v", input.frontendEndpointId.Name, input.frontendEndpointId.ResourceGroup, err) } } } else if !input.customHttpsProvisioningEnabled && input.provisioningState == frontdoor.CustomHTTPSProvisioningStateEnabled { // Disable Custom Domain HTTPS for the Frontend Endpoint - if err := resourceArmFrontDoorFrontendEndpointEnableHttpsProvisioning(ctx, client, input.frontendEndpointId, false, frontdoor.CustomHTTPSConfiguration{}); err != nil { + if err := resourceFrontDoorFrontendEndpointEnableHttpsProvisioning(ctx, client, input.frontendEndpointId, false, frontdoor.CustomHTTPSConfiguration{}); err != nil { return fmt.Errorf("unable to disable Custom Domain HTTPS for Frontend Endpoint %q (Resource Group %q): %+v", input.frontendEndpointId.Name, input.frontendEndpointId.ResourceGroup, err) } } @@ -246,7 +240,7 @@ func updateCustomHttpsConfiguration(ctx context.Context, client *frontdoor.Front return nil } -func resourceArmFrontDoorFrontendEndpointEnableHttpsProvisioning(ctx context.Context, client *frontdoor.FrontendEndpointsClient, id parse.FrontendEndpointId, enableCustomHttpsProvisioning bool, customHTTPSConfiguration frontdoor.CustomHTTPSConfiguration) error { +func resourceFrontDoorFrontendEndpointEnableHttpsProvisioning(ctx context.Context, client *frontdoor.FrontendEndpointsClient, id parse.FrontendEndpointId, enableCustomHttpsProvisioning bool, customHTTPSConfiguration frontdoor.CustomHTTPSConfiguration) error { if enableCustomHttpsProvisioning { future, err := client.EnableHTTPS(ctx, id.ResourceGroup, id.FrontDoorName, id.Name, customHTTPSConfiguration) if err != nil { @@ -260,7 +254,6 @@ func resourceArmFrontDoorFrontendEndpointEnableHttpsProvisioning(ctx context.Con } future, err := client.DisableHTTPS(ctx, id.ResourceGroup, id.FrontDoorName, id.Name) - if err != nil { return fmt.Errorf("disabling Custom Domain HTTPS for Frontend Endpoint: %+v", err) } diff --git a/azurerm/internal/services/frontdoor/frontdoor_custom_https_configuration_resource_test.go b/azurerm/internal/services/frontdoor/frontdoor_custom_https_configuration_resource_test.go new file mode 100644 index 000000000000..dcfae37ca338 --- /dev/null +++ 
b/azurerm/internal/services/frontdoor/frontdoor_custom_https_configuration_resource_test.go @@ -0,0 +1,146 @@ +package frontdoor_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/frontdoor/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type FrontDoorCustomHttpsConfigurationResource struct { +} + +func TestAccFrontDoorCustomHttpsConfiguration_CustomHttps(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_frontdoor_custom_https_configuration", "test") + r := FrontDoorCustomHttpsConfigurationResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.CustomHttpsEnabled(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("custom_https_provisioning_enabled").HasValue("true"), + check.That(data.ResourceName).Key("custom_https_configuration.0.certificate_source").HasValue("FrontDoor"), + ), + }, + { + Config: r.CustomHttpsDisabled(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("custom_https_provisioning_enabled").HasValue("false"), + ), + }, + }) +} + +func (FrontDoorCustomHttpsConfigurationResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.FrontendEndpointIDInsensitively(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Frontdoor.FrontDoorsFrontendClient.Get(ctx, id.ResourceGroup, id.FrontDoorName, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Frontend Endpoint %q (Front Door %q / Resource Group %q): %v", id.Name, id.FrontDoorName, id.ResourceGroup, err) + } + + return utils.Bool(resp.FrontendEndpointProperties != nil), nil +} + +func (r FrontDoorCustomHttpsConfigurationResource) CustomHttpsEnabled(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_frontdoor_custom_https_configuration" "test" { + frontend_endpoint_id = azurerm_frontdoor.test.frontend_endpoint[0].id + resource_group_name = azurerm_resource_group.test.name + custom_https_provisioning_enabled = true + + custom_https_configuration { + certificate_source = "FrontDoor" + } +} +`, r.template(data)) +} + +func (r FrontDoorCustomHttpsConfigurationResource) CustomHttpsDisabled(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_frontdoor_custom_https_configuration" "test" { + frontend_endpoint_id = azurerm_frontdoor.test.frontend_endpoint[0].id + resource_group_name = azurerm_resource_group.test.name + custom_https_provisioning_enabled = false +} +`, r.template(data)) +} + +func (FrontDoorCustomHttpsConfigurationResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-frontdoor-%d" + location = "%s" +} + +locals { + backend_name = "backend-bing-custom" + endpoint_name = "frontend-endpoint-custom" + health_probe_name = 
"health-probe-custom" + load_balancing_name = "load-balancing-setting-custom" +} + +resource "azurerm_frontdoor" "test" { + name = "acctest-FD-%d" + resource_group_name = azurerm_resource_group.test.name + enforce_backend_pools_certificate_name_check = false + + routing_rule { + name = "routing-rule" + accepted_protocols = ["Http", "Https"] + patterns_to_match = ["/*"] + frontend_endpoints = [local.endpoint_name] + + forwarding_configuration { + forwarding_protocol = "MatchRequest" + backend_pool_name = local.backend_name + } + } + + backend_pool_load_balancing { + name = local.load_balancing_name + } + + backend_pool_health_probe { + name = local.health_probe_name + } + + backend_pool { + name = local.backend_name + backend { + host_header = "www.bing.com" + address = "www.bing.com" + http_port = 80 + https_port = 443 + } + + load_balancing_name = local.load_balancing_name + health_probe_name = local.health_probe_name + } + + frontend_endpoint { + name = local.endpoint_name + host_name = "acctest-FD-%d.azurefd.net" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/frontdoor/frontdoor_firewall_policy_resource.go b/azurerm/internal/services/frontdoor/frontdoor_firewall_policy_resource.go index 33af01949828..ebbae524de22 100644 --- a/azurerm/internal/services/frontdoor/frontdoor_firewall_policy_resource.go +++ b/azurerm/internal/services/frontdoor/frontdoor_firewall_policy_resource.go @@ -22,15 +22,15 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmFrontDoorFirewallPolicy() *schema.Resource { +func resourceFrontDoorFirewallPolicy() *schema.Resource { return &schema.Resource{ - Create: resourceArmFrontDoorFirewallPolicyCreateUpdate, - Read: resourceArmFrontDoorFirewallPolicyRead, - Update: resourceArmFrontDoorFirewallPolicyCreateUpdate, - Delete: resourceArmFrontDoorFirewallPolicyDelete, + Create: resourceFrontDoorFirewallPolicyCreateUpdate, + Read: resourceFrontDoorFirewallPolicyRead, + Update: resourceFrontDoorFirewallPolicyCreateUpdate, + Delete: resourceFrontDoorFirewallPolicyDelete, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.WebApplicationFirewallPolicyID(id) + _, err := parse.WebApplicationFirewallPolicyIDInsensitively(id) return err }), @@ -90,7 +90,7 @@ func resourceArmFrontDoorFirewallPolicy() *schema.Resource { "custom_block_response_body": { Type: schema.TypeString, Optional: true, - ValidateFunc: validate.FrontdoorCustomBlockResponseBody, + ValidateFunc: validate.CustomBlockResponseBody, }, "custom_rule": { @@ -433,7 +433,7 @@ func resourceArmFrontDoorFirewallPolicy() *schema.Resource { } } -func resourceArmFrontDoorFirewallPolicyCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceFrontDoorFirewallPolicyCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Frontdoor.FrontDoorsPolicyClient subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) @@ -443,7 +443,7 @@ func resourceArmFrontDoorFirewallPolicyCreateUpdate(d *schema.ResourceData, meta name := d.Get("name").(string) resourceGroup := d.Get("resource_group_name").(string) - id := parse.NewWebApplicationFirewallPolicyID(resourceGroup, name).ID(subscriptionId) + id := parse.NewWebApplicationFirewallPolicyID(subscriptionId, resourceGroup, name).ID() if d.IsNewResource() { existing, err 
:= client.Get(ctx, resourceGroup, name) @@ -479,8 +479,8 @@ func resourceArmFrontDoorFirewallPolicyCreateUpdate(d *schema.ResourceData, meta EnabledState: enabled, Mode: frontdoor.PolicyMode(mode), }, - CustomRules: expandArmFrontDoorFirewallCustomRules(customRules), - ManagedRules: expandArmFrontDoorFirewallManagedRules(managedRules), + CustomRules: expandFrontDoorFirewallCustomRules(customRules), + ManagedRules: expandFrontDoorFirewallManagedRules(managedRules), }, Tags: tags.Expand(t), } @@ -504,30 +504,30 @@ func resourceArmFrontDoorFirewallPolicyCreateUpdate(d *schema.ResourceData, meta } d.SetId(id) - return resourceArmFrontDoorFirewallPolicyRead(d, meta) + return resourceFrontDoorFirewallPolicyRead(d, meta) } -func resourceArmFrontDoorFirewallPolicyRead(d *schema.ResourceData, meta interface{}) error { +func resourceFrontDoorFirewallPolicyRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Frontdoor.FrontDoorsPolicyClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.WebApplicationFirewallPolicyID(d.Id()) + id, err := parse.WebApplicationFirewallPolicyIDInsensitively(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.Name) + resp, err := client.Get(ctx, id.ResourceGroup, id.FrontDoorWebApplicationFirewallPolicyName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { log.Printf("[INFO] Front Door Firewall Policy %q does not exist - removing from state", d.Id()) d.SetId("") return nil } - return fmt.Errorf("retrieving Front Door Firewall Policy %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("retrieving Front Door Firewall Policy %q (Resource Group %q): %+v", id.FrontDoorWebApplicationFirewallPolicyName, id.ResourceGroup, err) } - d.Set("name", id.Name) + d.Set("name", id.FrontDoorWebApplicationFirewallPolicyName) d.Set("resource_group_name", id.ResourceGroup) if location := resp.Location; location != nil { @@ -543,7 +543,7 @@ func resourceArmFrontDoorFirewallPolicyRead(d *schema.ResourceData, meta interfa d.Set("custom_block_response_body", policy.CustomBlockResponseBody) } - if err := d.Set("custom_rule", flattenArmFrontDoorFirewallCustomRules(properties.CustomRules)); err != nil { + if err := d.Set("custom_rule", flattenFrontDoorFirewallCustomRules(properties.CustomRules)); err != nil { return fmt.Errorf("flattening `custom_rule`: %+v", err) } @@ -551,7 +551,7 @@ func resourceArmFrontDoorFirewallPolicyRead(d *schema.ResourceData, meta interfa return fmt.Errorf("flattening `frontend_endpoint_ids`: %+v", err) } - if err := d.Set("managed_rule", flattenArmFrontDoorFirewallManagedRules(properties.ManagedRules)); err != nil { + if err := d.Set("managed_rule", flattenFrontDoorFirewallManagedRules(properties.ManagedRules)); err != nil { return fmt.Errorf("flattening `managed_rule`: %+v", err) } } @@ -559,34 +559,34 @@ func resourceArmFrontDoorFirewallPolicyRead(d *schema.ResourceData, meta interfa return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmFrontDoorFirewallPolicyDelete(d *schema.ResourceData, meta interface{}) error { +func resourceFrontDoorFirewallPolicyDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Frontdoor.FrontDoorsPolicyClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.WebApplicationFirewallPolicyID(d.Id()) + id, err := parse.WebApplicationFirewallPolicyIDInsensitively(d.Id()) if err != 
nil { return err } - future, err := client.Delete(ctx, id.ResourceGroup, id.Name) + future, err := client.Delete(ctx, id.ResourceGroup, id.FrontDoorWebApplicationFirewallPolicyName) if err != nil { if response.WasNotFound(future.Response()) { return nil } - return fmt.Errorf("deleting Front Door Firewall %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("deleting Front Door Firewall %q (Resource Group %q): %+v", id.FrontDoorWebApplicationFirewallPolicyName, id.ResourceGroup, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { if !response.WasNotFound(future.Response()) { - return fmt.Errorf("waiting for deleting Front Door Firewall %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("waiting for deleting Front Door Firewall %q (Resource Group %q): %+v", id.FrontDoorWebApplicationFirewallPolicyName, id.ResourceGroup, err) } } return nil } -func expandArmFrontDoorFirewallCustomRules(input []interface{}) *frontdoor.CustomRuleList { +func expandFrontDoorFirewallCustomRules(input []interface{}) *frontdoor.CustomRuleList { if len(input) == 0 { return nil } @@ -616,7 +616,7 @@ func expandArmFrontDoorFirewallCustomRules(input []interface{}) *frontdoor.Custo RuleType: frontdoor.RuleType(ruleType), RateLimitDurationInMinutes: utils.Int32(rateLimitDurationInMinutes), RateLimitThreshold: utils.Int32(rateLimitThreshold), - MatchConditions: expandArmFrontDoorFirewallMatchConditions(matchConditions), + MatchConditions: expandFrontDoorFirewallMatchConditions(matchConditions), Action: frontdoor.ActionType(action), } output = append(output, customRule) @@ -627,7 +627,7 @@ func expandArmFrontDoorFirewallCustomRules(input []interface{}) *frontdoor.Custo } } -func expandArmFrontDoorFirewallMatchConditions(input []interface{}) *[]frontdoor.MatchCondition { +func expandFrontDoorFirewallMatchConditions(input []interface{}) *[]frontdoor.MatchCondition { if len(input) == 0 { return nil } @@ -648,7 +648,7 @@ func expandArmFrontDoorFirewallMatchConditions(input []interface{}) *[]frontdoor Operator: frontdoor.Operator(operator), NegateCondition: &negateCondition, MatchValue: utils.ExpandStringSlice(matchValues), - Transforms: expandArmFrontDoorFirewallTransforms(transforms), + Transforms: expandFrontDoorFirewallTransforms(transforms), } if matchVariable != "" { @@ -664,7 +664,7 @@ func expandArmFrontDoorFirewallMatchConditions(input []interface{}) *[]frontdoor return &result } -func expandArmFrontDoorFirewallTransforms(input []interface{}) *[]frontdoor.TransformType { +func expandFrontDoorFirewallTransforms(input []interface{}) *[]frontdoor.TransformType { if len(input) == 0 { return nil } @@ -677,7 +677,7 @@ func expandArmFrontDoorFirewallTransforms(input []interface{}) *[]frontdoor.Tran return &result } -func expandArmFrontDoorFirewallManagedRules(input []interface{}) *frontdoor.ManagedRuleSetList { +func expandFrontDoorFirewallManagedRules(input []interface{}) *frontdoor.ManagedRuleSetList { if len(input) == 0 { return nil } @@ -697,11 +697,11 @@ func expandArmFrontDoorFirewallManagedRules(input []interface{}) *frontdoor.Mana RuleSetVersion: utils.String(version), } - if exclusions := expandArmFrontDoorFirewallManagedRuleGroupExclusion(exclusions); exclusions != nil { + if exclusions := expandFrontDoorFirewallManagedRuleGroupExclusion(exclusions); exclusions != nil { managedRuleSet.Exclusions = exclusions } - if ruleGroupOverrides := expandArmFrontDoorFirewallManagedRuleGroupOverride(overrides); ruleGroupOverrides != nil { + if 
ruleGroupOverrides := expandFrontDoorFirewallManagedRuleGroupOverride(overrides); ruleGroupOverrides != nil { managedRuleSet.RuleGroupOverrides = ruleGroupOverrides } @@ -713,7 +713,7 @@ func expandArmFrontDoorFirewallManagedRules(input []interface{}) *frontdoor.Mana } } -func expandArmFrontDoorFirewallManagedRuleGroupExclusion(input []interface{}) *[]frontdoor.ManagedRuleExclusion { +func expandFrontDoorFirewallManagedRuleGroupExclusion(input []interface{}) *[]frontdoor.ManagedRuleExclusion { if len(input) == 0 { return nil } @@ -738,7 +738,7 @@ func expandArmFrontDoorFirewallManagedRuleGroupExclusion(input []interface{}) *[ return &managedRuleExclusions } -func expandArmFrontDoorFirewallManagedRuleGroupOverride(input []interface{}) *[]frontdoor.ManagedRuleGroupOverride { +func expandFrontDoorFirewallManagedRuleGroupOverride(input []interface{}) *[]frontdoor.ManagedRuleGroupOverride { if len(input) == 0 { return nil } @@ -755,11 +755,11 @@ func expandArmFrontDoorFirewallManagedRuleGroupOverride(input []interface{}) *[] RuleGroupName: utils.String(ruleGroupName), } - if exclusions := expandArmFrontDoorFirewallManagedRuleGroupExclusion(exclusions); exclusions != nil { + if exclusions := expandFrontDoorFirewallManagedRuleGroupExclusion(exclusions); exclusions != nil { managedRuleGroupOverride.Exclusions = exclusions } - if managedRuleOverride := expandArmFrontDoorFirewallRuleOverride(rules); managedRuleOverride != nil { + if managedRuleOverride := expandFrontDoorFirewallRuleOverride(rules); managedRuleOverride != nil { managedRuleGroupOverride.Rules = managedRuleOverride } @@ -769,7 +769,7 @@ func expandArmFrontDoorFirewallManagedRuleGroupOverride(input []interface{}) *[] return &managedRuleGroupOverrides } -func expandArmFrontDoorFirewallRuleOverride(input []interface{}) *[]frontdoor.ManagedRuleOverride { +func expandFrontDoorFirewallRuleOverride(input []interface{}) *[]frontdoor.ManagedRuleOverride { if len(input) == 0 { return nil } @@ -792,7 +792,7 @@ func expandArmFrontDoorFirewallRuleOverride(input []interface{}) *[]frontdoor.Ma Action: frontdoor.ActionType(action), } - if exclusions := expandArmFrontDoorFirewallManagedRuleGroupExclusion(exclusions); exclusions != nil { + if exclusions := expandFrontDoorFirewallManagedRuleGroupExclusion(exclusions); exclusions != nil { managedRuleOverride.Exclusions = exclusions } @@ -802,7 +802,7 @@ func expandArmFrontDoorFirewallRuleOverride(input []interface{}) *[]frontdoor.Ma return &managedRuleOverrides } -func flattenArmFrontDoorFirewallCustomRules(input *frontdoor.CustomRuleList) []interface{} { +func flattenFrontDoorFirewallCustomRules(input *frontdoor.CustomRuleList) []interface{} { if input == nil || input.Rules == nil { return make([]interface{}, 0) } @@ -815,7 +815,7 @@ func flattenArmFrontDoorFirewallCustomRules(input *frontdoor.CustomRuleList) []i output["type"] = string(r.RuleType) output["action"] = string(r.Action) output["enabled"] = r.EnabledState == frontdoor.CustomRuleEnabledStateEnabled - output["match_condition"] = flattenArmFrontDoorFirewallMatchConditions(r.MatchConditions) + output["match_condition"] = flattenFrontDoorFirewallMatchConditions(r.MatchConditions) if v := r.Priority; v != nil { output["priority"] = int(*v) @@ -835,7 +835,7 @@ func flattenArmFrontDoorFirewallCustomRules(input *frontdoor.CustomRuleList) []i return results } -func flattenArmFrontDoorFirewallMatchConditions(condition *[]frontdoor.MatchCondition) []interface{} { +func flattenFrontDoorFirewallMatchConditions(condition *[]frontdoor.MatchCondition) 
[]interface{} { if condition == nil { return make([]interface{}, 0) } @@ -863,7 +863,7 @@ func flattenArmFrontDoorFirewallMatchConditions(condition *[]frontdoor.MatchCond return results } -func flattenArmFrontDoorFirewallManagedRules(input *frontdoor.ManagedRuleSetList) []interface{} { +func flattenFrontDoorFirewallManagedRules(input *frontdoor.ManagedRuleSetList) []interface{} { if input == nil || input.ManagedRuleSets == nil { return make([]interface{}, 0) } @@ -881,11 +881,11 @@ func flattenArmFrontDoorFirewallManagedRules(input *frontdoor.ManagedRuleSetList } if v := r.RuleGroupOverrides; v != nil { - output["override"] = flattenArmFrontDoorFirewallOverrides(v) + output["override"] = flattenFrontDoorFirewallOverrides(v) } if v := r.Exclusions; v != nil { - output["exclusion"] = flattenArmFrontDoorFirewallExclusions(v) + output["exclusion"] = flattenFrontDoorFirewallExclusions(v) } results = append(results, output) @@ -894,7 +894,7 @@ func flattenArmFrontDoorFirewallManagedRules(input *frontdoor.ManagedRuleSetList return results } -func flattenArmFrontDoorFirewallExclusions(managedRuleExclusion *[]frontdoor.ManagedRuleExclusion) []interface{} { +func flattenFrontDoorFirewallExclusions(managedRuleExclusion *[]frontdoor.ManagedRuleExclusion) []interface{} { if managedRuleExclusion == nil { return make([]interface{}, 0) } @@ -913,7 +913,7 @@ func flattenArmFrontDoorFirewallExclusions(managedRuleExclusion *[]frontdoor.Man return results } -func flattenArmFrontDoorFirewallOverrides(groupOverride *[]frontdoor.ManagedRuleGroupOverride) []interface{} { +func flattenFrontDoorFirewallOverrides(groupOverride *[]frontdoor.ManagedRuleGroupOverride) []interface{} { if groupOverride == nil { return make([]interface{}, 0) } @@ -927,7 +927,7 @@ func flattenArmFrontDoorFirewallOverrides(groupOverride *[]frontdoor.ManagedRule } if v := o.Exclusions; v != nil { - output["exclusion"] = flattenArmFrontDoorFirewallExclusions(v) + output["exclusion"] = flattenFrontDoorFirewallExclusions(v) } if rules := o.Rules; rules != nil { @@ -957,7 +957,7 @@ func flattenArmFrontdoorFirewallRules(override *[]frontdoor.ManagedRuleOverride) } if v := o.Exclusions; v != nil { - output["exclusion"] = flattenArmFrontDoorFirewallExclusions(v) + output["exclusion"] = flattenFrontDoorFirewallExclusions(v) } results = append(results, output) diff --git a/azurerm/internal/services/frontdoor/frontdoor_firewall_policy_resource_test.go b/azurerm/internal/services/frontdoor/frontdoor_firewall_policy_resource_test.go new file mode 100644 index 000000000000..25165d049112 --- /dev/null +++ b/azurerm/internal/services/frontdoor/frontdoor_firewall_policy_resource_test.go @@ -0,0 +1,354 @@ +package frontdoor_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/frontdoor/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type FrontDoorFirewallPolicyResource struct { +} + +func TestAccFrontDoorFirewallPolicy_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_frontdoor_firewall_policy", "test") + r := 
FrontDoorFirewallPolicyResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue(fmt.Sprintf("testAccFrontDoorWAF%d", data.RandomInteger)), + check.That(data.ResourceName).Key("mode").HasValue("Prevention"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFrontDoorFirewallPolicy_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_frontdoor_firewall_policy", "test") + r := FrontDoorFirewallPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccFrontDoorFirewallPolicy_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_frontdoor_firewall_policy", "test") + r := FrontDoorFirewallPolicyResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.update(data, false), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue(fmt.Sprintf("testAccFrontDoorWAF%d", data.RandomInteger)), + check.That(data.ResourceName).Key("mode").HasValue("Prevention"), + ), + }, + { + Config: r.update(data, true), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue(fmt.Sprintf("testAccFrontDoorWAF%d", data.RandomInteger)), + check.That(data.ResourceName).Key("mode").HasValue("Prevention"), + check.That(data.ResourceName).Key("custom_rule.1.name").HasValue("Rule2"), + check.That(data.ResourceName).Key("custom_rule.2.name").HasValue("Rule3"), + ), + }, + { + Config: r.update(data, false), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("custom_rule.1.name").DoesNotExist(), + check.That(data.ResourceName).Key("name").HasValue(fmt.Sprintf("testAccFrontDoorWAF%d", data.RandomInteger)), + check.That(data.ResourceName).Key("mode").HasValue("Prevention"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFrontDoorFirewallPolicy_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_frontdoor_firewall_policy", "test") + r := FrontDoorFirewallPolicyResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.update(data, true), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue(fmt.Sprintf("testAccFrontDoorWAF%d", data.RandomInteger)), + check.That(data.ResourceName).Key("mode").HasValue("Prevention"), + check.That(data.ResourceName).Key("redirect_url").HasValue("https://www.contoso.com"), + check.That(data.ResourceName).Key("custom_block_response_status_code").HasValue("403"), + check.That(data.ResourceName).Key("custom_rule.0.name").HasValue("Rule1"), + check.That(data.ResourceName).Key("custom_rule.1.name").HasValue("Rule2"), + check.That(data.ResourceName).Key("managed_rule.0.type").HasValue("DefaultRuleSet"), + check.That(data.ResourceName).Key("managed_rule.0.exclusion.0.match_variable").HasValue("QueryStringArgNames"), + check.That(data.ResourceName).Key("managed_rule.0.override.1.exclusion.0.selector").HasValue("really_not_suspicious"), + 
check.That(data.ResourceName).Key("managed_rule.0.override.1.rule.0.exclusion.0.selector").HasValue("innocent"), + check.That(data.ResourceName).Key("managed_rule.1.type").HasValue("Microsoft_BotManagerRuleSet"), + ), + }, + data.ImportStep(), + }) +} + +func (FrontDoorFirewallPolicyResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.WebApplicationFirewallPolicyIDInsensitively(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Frontdoor.FrontDoorsPolicyClient.Get(ctx, id.ResourceGroup, id.FrontDoorWebApplicationFirewallPolicyName) + if err != nil { + return nil, fmt.Errorf("retrieving Front Door Firewall Policy %q (Resource Group %q): %v", id.FrontDoorWebApplicationFirewallPolicyName, id.ResourceGroup, err) + } + + return utils.Bool(resp.WebApplicationFirewallPolicyProperties != nil), nil +} + +func (FrontDoorFirewallPolicyResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "testaccRG-%d" + location = "%s" +} + +resource "azurerm_frontdoor_firewall_policy" "test" { + name = "testAccFrontDoorWAF%[1]d" + resource_group_name = azurerm_resource_group.test.name +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (r FrontDoorFirewallPolicyResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_frontdoor_firewall_policy" "import" { + name = azurerm_frontdoor_firewall_policy.test.name + resource_group_name = azurerm_frontdoor_firewall_policy.test.resource_group_name +} +`, r.basic(data)) +} + +func (r FrontDoorFirewallPolicyResource) update(data acceptance.TestData, update bool) string { + if update { + return r.updated(data) + } + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "testaccRG-%d" + location = "%s" +} + +resource "azurerm_frontdoor_firewall_policy" "test" { + name = "testAccFrontDoorWAF%[1]d" + resource_group_name = azurerm_resource_group.test.name + enabled = true + mode = "Prevention" + redirect_url = "https://www.contoso.com" + custom_block_response_status_code = 403 + custom_block_response_body = "PGh0bWw+CjxoZWFkZXI+PHRpdGxlPkhlbGxvPC90aXRsZT48L2hlYWRlcj4KPGJvZHk+CkhlbGxvIHdvcmxkCjwvYm9keT4KPC9odG1sPg==" + + custom_rule { + name = "Rule1" + enabled = true + priority = 1 + rate_limit_duration_in_minutes = 1 + rate_limit_threshold = 10 + type = "MatchRule" + action = "Block" + + match_condition { + match_variable = "RemoteAddr" + operator = "IPMatch" + negation_condition = false + match_values = ["192.168.1.0/24", "10.0.0.0/24"] + } + } + + managed_rule { + type = "DefaultRuleSet" + version = "preview-0.1" + + override { + rule_group_name = "PHP" + + rule { + rule_id = "933111" + enabled = false + action = "Block" + } + } + } + + managed_rule { + type = "BotProtection" + version = "preview-0.1" + } +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (FrontDoorFirewallPolicyResource) updated(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "testaccRG-%d" + location = "%[2]s" +} + +resource "azurerm_frontdoor_firewall_policy" "test" { + name = "testAccFrontDoorWAF%[1]d" + resource_group_name = azurerm_resource_group.test.name + enabled = true + mode = "Prevention" + redirect_url = "https://www.contoso.com" + 
custom_block_response_status_code = 403 + custom_block_response_body = "PGh0bWw+CjxoZWFkZXI+PHRpdGxlPkhlbGxvPC90aXRsZT48L2hlYWRlcj4KPGJvZHk+CkhlbGxvIHdvcmxkCjwvYm9keT4KPC9odG1sPg==" + + custom_rule { + name = "Rule1" + enabled = true + priority = 1 + rate_limit_duration_in_minutes = 1 + rate_limit_threshold = 10 + type = "MatchRule" + action = "Block" + + match_condition { + match_variable = "RemoteAddr" + operator = "IPMatch" + negation_condition = false + match_values = ["192.168.1.0/24", "10.0.0.0/24"] + } + } + + custom_rule { + name = "Rule2" + enabled = true + priority = 2 + rate_limit_duration_in_minutes = 1 + rate_limit_threshold = 10 + type = "MatchRule" + action = "Block" + + match_condition { + match_variable = "RemoteAddr" + operator = "IPMatch" + negation_condition = false + match_values = ["192.168.1.0/24"] + } + + match_condition { + match_variable = "RequestHeader" + selector = "UserAgent" + operator = "Contains" + negation_condition = false + match_values = ["windows"] + transforms = ["Lowercase", "Trim"] + } + } + + custom_rule { + name = "Rule3" + enabled = true + priority = 3 + rate_limit_duration_in_minutes = 1 + rate_limit_threshold = 10 + type = "MatchRule" + action = "Block" + + match_condition { + match_variable = "SocketAddr" + operator = "IPMatch" + negation_condition = false + match_values = ["192.168.1.0/24"] + } + + match_condition { + match_variable = "RequestHeader" + selector = "UserAgent" + operator = "Contains" + negation_condition = false + match_values = ["windows"] + transforms = ["Lowercase", "Trim"] + } + } + + managed_rule { + type = "DefaultRuleSet" + version = "1.0" + + exclusion { + match_variable = "QueryStringArgNames" + operator = "Equals" + selector = "not_suspicious" + } + + override { + rule_group_name = "PHP" + + rule { + rule_id = "933100" + enabled = false + action = "Block" + } + } + + override { + rule_group_name = "SQLI" + + exclusion { + match_variable = "QueryStringArgNames" + operator = "Equals" + selector = "really_not_suspicious" + } + + rule { + rule_id = "942200" + action = "Block" + + exclusion { + match_variable = "QueryStringArgNames" + operator = "Equals" + selector = "innocent" + } + } + } + } + + managed_rule { + type = "Microsoft_BotManagerRuleSet" + version = "1.0" + } +} +`, data.RandomInteger, data.Locations.Primary) +} diff --git a/azurerm/internal/services/frontdoor/frontdoor_resource.go b/azurerm/internal/services/frontdoor/frontdoor_resource.go index 89219f226a5b..8c5dcfdd5740 100644 --- a/azurerm/internal/services/frontdoor/frontdoor_resource.go +++ b/azurerm/internal/services/frontdoor/frontdoor_resource.go @@ -23,15 +23,15 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmFrontDoor() *schema.Resource { +func resourceFrontDoor() *schema.Resource { return &schema.Resource{ - Create: resourceArmFrontDoorCreateUpdate, - Read: resourceArmFrontDoorRead, - Update: resourceArmFrontDoorCreateUpdate, - Delete: resourceArmFrontDoorDelete, + Create: resourceFrontDoorCreateUpdate, + Read: resourceFrontDoorRead, + Update: resourceFrontDoorCreateUpdate, + Delete: resourceFrontDoorDelete, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.FrontDoorIDForImport(id) + _, err := parse.FrontDoorID(id) return err }), @@ -127,7 +127,7 @@ func resourceArmFrontDoor() *schema.Resource { "name": { Type: schema.TypeString, Required: true, - ValidateFunc: validate.FrontDoorBackendPoolRoutingRuleName, + ValidateFunc: 
validate.BackendPoolRoutingRuleName, }, "enabled": { Type: schema.TypeBool, @@ -215,7 +215,7 @@ func resourceArmFrontDoor() *schema.Resource { "backend_pool_name": { Type: schema.TypeString, Required: true, - ValidateFunc: validate.FrontDoorBackendPoolRoutingRuleName, + ValidateFunc: validate.BackendPoolRoutingRuleName, }, "cache_enabled": { Type: schema.TypeBool, @@ -270,7 +270,7 @@ func resourceArmFrontDoor() *schema.Resource { "name": { Type: schema.TypeString, Required: true, - ValidateFunc: validate.FrontDoorBackendPoolRoutingRuleName, + ValidateFunc: validate.BackendPoolRoutingRuleName, }, "sample_size": { Type: schema.TypeInt, @@ -304,7 +304,7 @@ func resourceArmFrontDoor() *schema.Resource { "name": { Type: schema.TypeString, Required: true, - ValidateFunc: validate.FrontDoorBackendPoolRoutingRuleName, + ValidateFunc: validate.BackendPoolRoutingRuleName, }, "enabled": { Type: schema.TypeBool, @@ -400,7 +400,7 @@ func resourceArmFrontDoor() *schema.Resource { "name": { Type: schema.TypeString, Required: true, - ValidateFunc: validate.FrontDoorBackendPoolRoutingRuleName, + ValidateFunc: validate.BackendPoolRoutingRuleName, }, "health_probe_name": { Type: schema.TypeString, @@ -427,7 +427,7 @@ func resourceArmFrontDoor() *schema.Resource { "name": { Type: schema.TypeString, Required: true, - ValidateFunc: validate.FrontDoorBackendPoolRoutingRuleName, + ValidateFunc: validate.BackendPoolRoutingRuleName, }, "host_name": { Type: schema.TypeString, @@ -475,7 +475,7 @@ func resourceArmFrontDoor() *schema.Resource { } } -func resourceArmFrontDoorCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceFrontDoorCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Frontdoor.FrontDoorsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -483,7 +483,7 @@ func resourceArmFrontDoorCreateUpdate(d *schema.ResourceData, meta interface{}) name := d.Get("name").(string) resourceGroup := d.Get("resource_group_name").(string) subscriptionId := meta.(*clients.Client).Account.SubscriptionId - frontDoorId := parse.NewFrontDoorID(resourceGroup, name) + frontDoorId := parse.NewFrontDoorID(subscriptionId, resourceGroup, name) if d.IsNewResource() { resp, err := client.Get(ctx, resourceGroup, name) @@ -493,7 +493,7 @@ func resourceArmFrontDoorCreateUpdate(d *schema.ResourceData, meta interface{}) } } if !utils.ResponseWasNotFound(resp.Response) { - return tf.ImportAsExistsError("azurerm_frontdoor", frontDoorId.ID(subscriptionId)) + return tf.ImportAsExistsError("azurerm_frontdoor", frontDoorId.ID()) } } @@ -535,13 +535,13 @@ func resourceArmFrontDoorCreateUpdate(d *schema.ResourceData, meta interface{}) Location: utils.String(location), Properties: &frontdoor.Properties{ FriendlyName: utils.String(friendlyName), - RoutingRules: expandArmFrontDoorRoutingRule(routingRules, frontDoorId, subscriptionId), - BackendPools: expandArmFrontDoorBackendPools(backendPools, frontDoorId, subscriptionId), - BackendPoolsSettings: expandArmFrontDoorBackendPoolsSettings(backendPoolsSettings, backendPoolsSendReceiveTimeoutSeconds), - FrontendEndpoints: expandArmFrontDoorFrontendEndpoint(frontendEndpoints, frontDoorId, subscriptionId), - HealthProbeSettings: expandArmFrontDoorHealthProbeSettingsModel(healthProbeSettings, frontDoorId, subscriptionId), - LoadBalancingSettings: expandArmFrontDoorLoadBalancingSettingsModel(loadBalancingSettings, frontDoorId, subscriptionId), - EnabledState: 
expandArmFrontDoorEnabledState(enabledState), + RoutingRules: expandFrontDoorRoutingRule(routingRules, frontDoorId), + BackendPools: expandFrontDoorBackendPools(backendPools, frontDoorId), + BackendPoolsSettings: expandFrontDoorBackendPoolsSettings(backendPoolsSettings, backendPoolsSendReceiveTimeoutSeconds), + FrontendEndpoints: expandFrontDoorFrontendEndpoint(frontendEndpoints, frontDoorId), + HealthProbeSettings: expandFrontDoorHealthProbeSettingsModel(healthProbeSettings, frontDoorId), + LoadBalancingSettings: expandFrontDoorLoadBalancingSettingsModel(loadBalancingSettings, frontDoorId), + EnabledState: expandFrontDoorEnabledState(enabledState), }, Tags: tags.Expand(t), } @@ -554,7 +554,7 @@ func resourceArmFrontDoorCreateUpdate(d *schema.ResourceData, meta interface{}) return fmt.Errorf("waiting for creation of Front Door %q (Resource Group %q): %+v", name, resourceGroup, err) } - d.SetId(frontDoorId.ID(subscriptionId)) + d.SetId(frontDoorId.ID()) // Now loop through the FrontendEndpoints and enable/disable Custom Domain HTTPS // on each individual Frontend Endpoint if required @@ -574,14 +574,13 @@ func resourceArmFrontDoorCreateUpdate(d *schema.ResourceData, meta interface{}) if properties := resp.FrontendEndpointProperties; properties != nil { frontendClient := meta.(*clients.Client).Frontdoor.FrontDoorsFrontendClient customHttpsConfigurationNew := frontendEndpoint["custom_https_configuration"].([]interface{}) - frontendInputId := parse.NewFrontendEndpointID(frontDoorId, endpointName) + frontendInputId := parse.NewFrontendEndpointID(frontDoorId.SubscriptionId, frontDoorId.ResourceGroup, frontDoorId.Name, endpointName) input := customHttpsConfigurationUpdateInput{ customHttpsConfigurationCurrent: properties.CustomHTTPSConfiguration, customHttpsConfigurationNew: customHttpsConfigurationNew, customHttpsProvisioningEnabled: customHttpsProvisioningEnabled, frontendEndpointId: frontendInputId, provisioningState: properties.CustomHTTPSProvisioningState, - subscriptionId: subscriptionId, } if err := updateCustomHttpsConfiguration(ctx, frontendClient, input); err != nil { return fmt.Errorf("updating Custom HTTPS configuration for Frontend Endpoint %q (Front Door %q / Resource Group %q): %+v", endpointName, name, resourceGroup, err) @@ -589,16 +588,15 @@ func resourceArmFrontDoorCreateUpdate(d *schema.ResourceData, meta interface{}) } } - return resourceArmFrontDoorRead(d, meta) + return resourceFrontDoorRead(d, meta) } -func resourceArmFrontDoorRead(d *schema.ResourceData, meta interface{}) error { +func resourceFrontDoorRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Frontdoor.FrontDoorsClient - subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.FrontDoorID(d.Id()) + id, err := parse.FrontDoorIDInsensitively(d.Id()) if err != nil { return err } @@ -618,7 +616,7 @@ func resourceArmFrontDoorRead(d *schema.ResourceData, meta interface{}) error { d.Set("location", azure.NormalizeLocation(*resp.Location)) if props := resp.Properties; props != nil { - flattenedBackendPools, err := flattenArmFrontDoorBackendPools(props.BackendPools, *id, subscriptionId) + flattenedBackendPools, err := flattenFrontDoorBackendPools(props.BackendPools, *id) if err != nil { return fmt.Errorf("flattening `backend_pool`: %+v", err) } @@ -626,7 +624,7 @@ func resourceArmFrontDoorRead(d *schema.ResourceData, meta interface{}) error { return fmt.Errorf("setting 
`backend_pool`: %+v", err) } - backendPoolSettings := flattenArmFrontDoorBackendPoolsSettings(props.BackendPoolsSettings) + backendPoolSettings := flattenFrontDoorBackendPoolsSettings(props.BackendPoolsSettings) d.Set("enforce_backend_pools_certificate_name_check", backendPoolSettings.enforceBackendPoolsCertificateNameCheck) d.Set("backend_pools_send_receive_timeout_seconds", backendPoolSettings.backendPoolsSendReceiveTimeoutSeconds) @@ -642,7 +640,7 @@ func resourceArmFrontDoorRead(d *schema.ResourceData, meta interface{}) error { if err != nil { return fmt.Errorf("retrieving FrontEnd Endpoint Custom HTTPS Information: %+v", err) } - frontDoorFrontendEndpoints, err := flattenFrontEndEndpoints(frontEndEndpointInfo, *id, subscriptionId) + frontDoorFrontendEndpoints, err := flattenFrontEndEndpoints(frontEndEndpointInfo, *id) if err != nil { return fmt.Errorf("flattening `frontend_endpoint`: %+v", err) } @@ -650,15 +648,15 @@ func resourceArmFrontDoorRead(d *schema.ResourceData, meta interface{}) error { return fmt.Errorf("setting `frontend_endpoint`: %+v", err) } - if err := d.Set("backend_pool_health_probe", flattenArmFrontDoorHealthProbeSettingsModel(props.HealthProbeSettings, *id, subscriptionId)); err != nil { + if err := d.Set("backend_pool_health_probe", flattenFrontDoorHealthProbeSettingsModel(props.HealthProbeSettings, *id)); err != nil { return fmt.Errorf("setting `backend_pool_health_probe`: %+v", err) } - if err := d.Set("backend_pool_load_balancing", flattenArmFrontDoorLoadBalancingSettingsModel(props.LoadBalancingSettings, *id, subscriptionId)); err != nil { + if err := d.Set("backend_pool_load_balancing", flattenFrontDoorLoadBalancingSettingsModel(props.LoadBalancingSettings, *id)); err != nil { return fmt.Errorf("setting `backend_pool_load_balancing`: %+v", err) } - flattenedRoutingRules, err := flattenArmFrontDoorRoutingRule(props.RoutingRules, d.Get("routing_rule"), *id, subscriptionId) + flattenedRoutingRules, err := flattenFrontDoorRoutingRule(props.RoutingRules, d.Get("routing_rule"), *id) if err != nil { return fmt.Errorf("flattening `routing_rules`: %+v", err) } @@ -670,12 +668,12 @@ func resourceArmFrontDoorRead(d *schema.ResourceData, meta interface{}) error { return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmFrontDoorDelete(d *schema.ResourceData, meta interface{}) error { +func resourceFrontDoorDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Frontdoor.FrontDoorsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.FrontDoorID(d.Id()) + id, err := parse.FrontDoorIDInsensitively(d.Id()) if err != nil { return err } @@ -697,7 +695,7 @@ func resourceArmFrontDoorDelete(d *schema.ResourceData, meta interface{}) error return nil } -func expandArmFrontDoorBackendPools(input []interface{}, frontDoorId parse.FrontDoorId, subscriptionId string) *[]frontdoor.BackendPool { +func expandFrontDoorBackendPools(input []interface{}, frontDoorId parse.FrontDoorId) *[]frontdoor.BackendPool { if len(input) == 0 { return &[]frontdoor.BackendPool{} } @@ -713,15 +711,15 @@ func expandArmFrontDoorBackendPools(input []interface{}, frontDoorId parse.Front backends := backendPool["backend"].([]interface{}) - backendPoolId := parse.NewBackendPoolID(frontDoorId, backendPoolName).ID(subscriptionId) - healthProbeId := parse.NewHealthProbeID(frontDoorId, backendPoolHealthProbeName).ID(subscriptionId) - loadBalancingId := parse.NewLoadBalancingID(frontDoorId, 
backendPoolLoadBalancingName).ID(subscriptionId) + backendPoolId := parse.NewBackendPoolID(frontDoorId.SubscriptionId, frontDoorId.ResourceGroup, frontDoorId.Name, backendPoolName).ID() + healthProbeId := parse.NewHealthProbeID(frontDoorId.SubscriptionId, frontDoorId.ResourceGroup, frontDoorId.Name, backendPoolHealthProbeName).ID() + loadBalancingId := parse.NewLoadBalancingID(frontDoorId.SubscriptionId, frontDoorId.ResourceGroup, frontDoorId.Name, backendPoolLoadBalancingName).ID() result := frontdoor.BackendPool{ ID: utils.String(backendPoolId), Name: utils.String(backendPoolName), BackendPoolProperties: &frontdoor.BackendPoolProperties{ - Backends: expandArmFrontDoorBackend(backends), + Backends: expandFrontDoorBackend(backends), HealthProbeSettings: &frontdoor.SubResource{ ID: utils.String(healthProbeId), }, @@ -737,7 +735,7 @@ func expandArmFrontDoorBackendPools(input []interface{}, frontDoorId parse.Front return &output } -func expandArmFrontDoorBackend(input []interface{}) *[]frontdoor.Backend { +func expandFrontDoorBackend(input []interface{}) *[]frontdoor.Backend { if len(input) == 0 { return &[]frontdoor.Backend{} } @@ -758,7 +756,7 @@ func expandArmFrontDoorBackend(input []interface{}) *[]frontdoor.Backend { result := frontdoor.Backend{ Address: utils.String(address), BackendHostHeader: utils.String(hostHeader), - EnabledState: expandArmFrontDoorBackendEnabledState(enabled), + EnabledState: expandFrontDoorBackendEnabledState(enabled), HTTPPort: utils.Int32(httpPort), HTTPSPort: utils.Int32(httpsPort), Priority: utils.Int32(priority), @@ -771,7 +769,7 @@ func expandArmFrontDoorBackend(input []interface{}) *[]frontdoor.Backend { return &output } -func expandArmFrontDoorBackendEnabledState(isEnabled bool) frontdoor.BackendEnabledState { +func expandFrontDoorBackendEnabledState(isEnabled bool) frontdoor.BackendEnabledState { if isEnabled { return frontdoor.Enabled } @@ -779,7 +777,7 @@ func expandArmFrontDoorBackendEnabledState(isEnabled bool) frontdoor.BackendEnab return frontdoor.Disabled } -func expandArmFrontDoorBackendPoolsSettings(enforceCertificateNameCheck bool, backendPoolsSendReceiveTimeoutSeconds int32) *frontdoor.BackendPoolsSettings { +func expandFrontDoorBackendPoolsSettings(enforceCertificateNameCheck bool, backendPoolsSendReceiveTimeoutSeconds int32) *frontdoor.BackendPoolsSettings { enforceCheck := frontdoor.EnforceCertificateNameCheckEnabledStateDisabled if enforceCertificateNameCheck { @@ -794,7 +792,7 @@ func expandArmFrontDoorBackendPoolsSettings(enforceCertificateNameCheck bool, ba return &result } -func expandArmFrontDoorFrontendEndpoint(input []interface{}, frontDoorId parse.FrontDoorId, subscriptionId string) *[]frontdoor.FrontendEndpoint { +func expandFrontDoorFrontendEndpoint(input []interface{}, frontDoorId parse.FrontDoorId) *[]frontdoor.FrontendEndpoint { if len(input) == 0 { return &[]frontdoor.FrontendEndpoint{} } @@ -809,7 +807,7 @@ func expandArmFrontDoorFrontendEndpoint(input []interface{}, frontDoorId parse.F sessionAffinityTtlSeconds := int32(frontendEndpoint["session_affinity_ttl_seconds"].(int)) waf := frontendEndpoint["web_application_firewall_policy_link_id"].(string) name := frontendEndpoint["name"].(string) - id := parse.NewFrontendEndpointID(frontDoorId, name).ID(subscriptionId) + id := parse.NewFrontendEndpointID(frontDoorId.SubscriptionId, frontDoorId.ResourceGroup, frontDoorId.Name, name).ID() sessionAffinityEnabled := frontdoor.SessionAffinityEnabledStateDisabled if isSessionAffinityEnabled { @@ -838,7 +836,7 @@ func 
expandArmFrontDoorFrontendEndpoint(input []interface{}, frontDoorId parse.F return &output } -func expandArmFrontDoorHealthProbeSettingsModel(input []interface{}, frontDoorId parse.FrontDoorId, subscriptionId string) *[]frontdoor.HealthProbeSettingsModel { +func expandFrontDoorHealthProbeSettingsModel(input []interface{}, frontDoorId parse.FrontDoorId) *[]frontdoor.HealthProbeSettingsModel { if len(input) == 0 { return &[]frontdoor.HealthProbeSettingsModel{} } @@ -859,7 +857,7 @@ func expandArmFrontDoorHealthProbeSettingsModel(input []interface{}, frontDoorId healthProbeEnabled = frontdoor.HealthProbeEnabledDisabled } - healthProbeId := parse.NewHealthProbeID(frontDoorId, name).ID(subscriptionId) + healthProbeId := parse.NewHealthProbeID(frontDoorId.SubscriptionId, frontDoorId.ResourceGroup, frontDoorId.Name, name).ID() result := frontdoor.HealthProbeSettingsModel{ ID: utils.String(healthProbeId), @@ -879,7 +877,7 @@ func expandArmFrontDoorHealthProbeSettingsModel(input []interface{}, frontDoorId return &output } -func expandArmFrontDoorLoadBalancingSettingsModel(input []interface{}, frontDoorId parse.FrontDoorId, subscriptionId string) *[]frontdoor.LoadBalancingSettingsModel { +func expandFrontDoorLoadBalancingSettingsModel(input []interface{}, frontDoorId parse.FrontDoorId) *[]frontdoor.LoadBalancingSettingsModel { if len(input) == 0 { return &[]frontdoor.LoadBalancingSettingsModel{} } @@ -893,7 +891,7 @@ func expandArmFrontDoorLoadBalancingSettingsModel(input []interface{}, frontDoor sampleSize := int32(loadBalanceSetting["sample_size"].(int)) successfulSamplesRequired := int32(loadBalanceSetting["successful_samples_required"].(int)) additionalLatencyMilliseconds := int32(loadBalanceSetting["additional_latency_milliseconds"].(int)) - loadBalancingId := parse.NewLoadBalancingID(frontDoorId, name).ID(subscriptionId) + loadBalancingId := parse.NewLoadBalancingID(frontDoorId.SubscriptionId, frontDoorId.ResourceGroup, frontDoorId.Name, name).ID() result := frontdoor.LoadBalancingSettingsModel{ ID: utils.String(loadBalancingId), @@ -911,7 +909,7 @@ func expandArmFrontDoorLoadBalancingSettingsModel(input []interface{}, frontDoor return &output } -func expandArmFrontDoorRoutingRule(input []interface{}, frontDoorId parse.FrontDoorId, subscriptionId string) *[]frontdoor.RoutingRule { +func expandFrontDoorRoutingRule(input []interface{}, frontDoorId parse.FrontDoorId) *[]frontdoor.RoutingRule { if len(input) == 0 { return nil } @@ -937,19 +935,19 @@ func expandArmFrontDoorRoutingRule(input []interface{}, frontDoorId parse.FrontD var routingConfiguration frontdoor.BasicRouteConfiguration if rc := routingRule["redirect_configuration"].([]interface{}); len(rc) != 0 { - routingConfiguration = expandArmFrontDoorRedirectConfiguration(rc) + routingConfiguration = expandFrontDoorRedirectConfiguration(rc) } else if fc := routingRule["forwarding_configuration"].([]interface{}); len(fc) != 0 { - routingConfiguration = expandArmFrontDoorForwardingConfiguration(fc, frontDoorId, subscriptionId) + routingConfiguration = expandFrontDoorForwardingConfiguration(fc, frontDoorId) } currentRoutingRule := frontdoor.RoutingRule{ ID: utils.String(id), Name: utils.String(name), RoutingRuleProperties: &frontdoor.RoutingRuleProperties{ - FrontendEndpoints: expandArmFrontDoorFrontEndEndpoints(frontendEndpoints, frontDoorId, subscriptionId), - AcceptedProtocols: expandArmFrontDoorAcceptedProtocols(acceptedProtocols), + FrontendEndpoints: expandFrontDoorFrontEndEndpoints(frontendEndpoints, frontDoorId), + AcceptedProtocols: 
expandFrontDoorAcceptedProtocols(acceptedProtocols), PatternsToMatch: &patternsToMatch, - EnabledState: frontdoor.RoutingRuleEnabledState(expandArmFrontDoorEnabledState(enabled)), + EnabledState: frontdoor.RoutingRuleEnabledState(expandFrontDoorEnabledState(enabled)), RouteConfiguration: routingConfiguration, }, } @@ -959,7 +957,7 @@ func expandArmFrontDoorRoutingRule(input []interface{}, frontDoorId parse.FrontD return &output } -func expandArmFrontDoorAcceptedProtocols(input []interface{}) *[]frontdoor.Protocol { +func expandFrontDoorAcceptedProtocols(input []interface{}) *[]frontdoor.Protocol { if len(input) == 0 { return &[]frontdoor.Protocol{} } @@ -979,14 +977,14 @@ func expandArmFrontDoorAcceptedProtocols(input []interface{}) *[]frontdoor.Proto return &output } -func expandArmFrontDoorFrontEndEndpoints(input []interface{}, frontDoorId parse.FrontDoorId, subscriptionId string) *[]frontdoor.SubResource { +func expandFrontDoorFrontEndEndpoints(input []interface{}, frontDoorId parse.FrontDoorId) *[]frontdoor.SubResource { if len(input) == 0 { return &[]frontdoor.SubResource{} } output := make([]frontdoor.SubResource, 0) for _, name := range input { - frontendEndpointId := parse.NewFrontendEndpointID(frontDoorId, name.(string)).ID(subscriptionId) + frontendEndpointId := parse.NewFrontendEndpointID(frontDoorId.SubscriptionId, frontDoorId.ResourceGroup, frontDoorId.Name, name.(string)).ID() result := frontdoor.SubResource{ ID: utils.String(frontendEndpointId), } @@ -996,7 +994,7 @@ func expandArmFrontDoorFrontEndEndpoints(input []interface{}, frontDoorId parse. return &output } -func expandArmFrontDoorEnabledState(enabled bool) frontdoor.EnabledState { +func expandFrontDoorEnabledState(enabled bool) frontdoor.EnabledState { if enabled { return frontdoor.EnabledStateEnabled } @@ -1004,7 +1002,7 @@ func expandArmFrontDoorEnabledState(enabled bool) frontdoor.EnabledState { return frontdoor.EnabledStateDisabled } -func expandArmFrontDoorRedirectConfiguration(input []interface{}) frontdoor.RedirectConfiguration { +func expandFrontDoorRedirectConfiguration(input []interface{}) frontdoor.RedirectConfiguration { if len(input) == 0 { return frontdoor.RedirectConfiguration{} } @@ -1042,7 +1040,7 @@ func expandArmFrontDoorRedirectConfiguration(input []interface{}) frontdoor.Redi return redirectConfiguration } -func expandArmFrontDoorForwardingConfiguration(input []interface{}, frontDoorId parse.FrontDoorId, subscriptionId string) frontdoor.ForwardingConfiguration { +func expandFrontDoorForwardingConfiguration(input []interface{}, frontDoorId parse.FrontDoorId) frontdoor.ForwardingConfiguration { if len(input) == 0 { return frontdoor.ForwardingConfiguration{} } @@ -1055,7 +1053,7 @@ func expandArmFrontDoorForwardingConfiguration(input []interface{}, frontDoorId cacheQueryParameterStripDirective := v["cache_query_parameter_strip_directive"].(string) cacheEnabled := v["cache_enabled"].(bool) - backendPoolId := parse.NewBackendPoolID(frontDoorId, backendPoolName).ID(subscriptionId) + backendPoolId := parse.NewBackendPoolID(frontDoorId.SubscriptionId, frontDoorId.ResourceGroup, frontDoorId.Name, backendPoolName).ID() backend := &frontdoor.SubResource{ ID: utils.String(backendPoolId), } @@ -1093,7 +1091,7 @@ func expandArmFrontDoorForwardingConfiguration(input []interface{}, frontDoorId return forwardingConfiguration } -func flattenArmFrontDoorBackendPools(input *[]frontdoor.BackendPool, frontDoorId parse.FrontDoorId, subscriptionId string) (*[]interface{}, error) { +func flattenFrontDoorBackendPools(input 
*[]frontdoor.BackendPool, frontDoorId parse.FrontDoorId) (*[]interface{}, error) { if input == nil { return &[]interface{}{}, nil } @@ -1104,7 +1102,7 @@ func flattenArmFrontDoorBackendPools(input *[]frontdoor.BackendPool, frontDoorId name := "" if v.Name != nil { // rewrite the ID to ensure it's consistent - id = parse.NewBackendPoolID(frontDoorId, *v.Name).ID(subscriptionId) + id = parse.NewBackendPoolID(frontDoorId.SubscriptionId, frontDoorId.ResourceGroup, frontDoorId.Name, *v.Name).ID() name = *v.Name } @@ -1113,24 +1111,24 @@ func flattenArmFrontDoorBackendPools(input *[]frontdoor.BackendPool, frontDoorId loadBalancingName := "" if props := v.BackendPoolProperties; props != nil { - backend = flattenArmFrontDoorBackend(props.Backends) + backend = flattenFrontDoorBackend(props.Backends) if props.HealthProbeSettings != nil && props.HealthProbeSettings.ID != nil { - name, err := parse.HealthProbeID(*props.HealthProbeSettings.ID) + name, err := parse.HealthProbeIDInsensitively(*props.HealthProbeSettings.ID) if err != nil { return nil, err } - healthProbeName = name.Name + healthProbeName = name.HealthProbeSettingName } if props.LoadBalancingSettings != nil && props.LoadBalancingSettings.ID != nil { - name, err := parse.LoadBalancingID(*props.LoadBalancingSettings.ID) + name, err := parse.LoadBalancingIDInsensitively(*props.LoadBalancingSettings.ID) if err != nil { return nil, err } - loadBalancingName = name.Name + loadBalancingName = name.LoadBalancingSettingName } } output = append(output, map[string]interface{}{ @@ -1150,7 +1148,7 @@ type flattenedBackendPoolSettings struct { backendPoolsSendReceiveTimeoutSeconds int } -func flattenArmFrontDoorBackendPoolsSettings(input *frontdoor.BackendPoolsSettings) flattenedBackendPoolSettings { +func flattenFrontDoorBackendPoolsSettings(input *frontdoor.BackendPoolsSettings) flattenedBackendPoolSettings { if input == nil { return flattenedBackendPoolSettings{ enforceBackendPoolsCertificateNameCheck: true, @@ -1174,7 +1172,7 @@ func flattenArmFrontDoorBackendPoolsSettings(input *frontdoor.BackendPoolsSettin } } -func flattenArmFrontDoorBackend(input *[]frontdoor.Backend) []interface{} { +func flattenFrontDoorBackend(input *[]frontdoor.Backend) []interface{} { if input == nil { return make([]interface{}, 0) } @@ -1238,7 +1236,7 @@ func retrieveFrontEndEndpointInformation(ctx context.Context, client *frontdoor. 
return &output, nil } -func flattenFrontEndEndpoints(input *[]frontdoor.FrontendEndpoint, frontDoorId parse.FrontDoorId, subscriptionId string) (*[]interface{}, error) { +func flattenFrontEndEndpoints(input *[]frontdoor.FrontendEndpoint, frontDoorId parse.FrontDoorId) (*[]interface{}, error) { results := make([]interface{}, 0) if input == nil { return &results, nil @@ -1249,7 +1247,7 @@ func flattenFrontEndEndpoints(input *[]frontdoor.FrontendEndpoint, frontDoorId p name := "" if item.Name != nil { // rewrite the ID to ensure it's consistent - id = parse.NewFrontendEndpointID(frontDoorId, *item.Name).ID(subscriptionId) + id = parse.NewFrontendEndpointID(frontDoorId.SubscriptionId, frontDoorId.ResourceGroup, frontDoorId.Name, *item.Name).ID() name = *item.Name } @@ -1274,12 +1272,12 @@ func flattenFrontEndEndpoints(input *[]frontdoor.FrontendEndpoint, frontDoorId p if waf := props.WebApplicationFirewallPolicyLink; waf != nil && waf.ID != nil { // rewrite the ID to ensure it's consistent - parsed, err := parse.WebApplicationFirewallPolicyID(*waf.ID) + parsed, err := parse.WebApplicationFirewallPolicyIDInsensitively(*waf.ID) if err != nil { return nil, err } - webApplicationFirewallPolicyLinkId = parsed.ID(subscriptionId) + webApplicationFirewallPolicyLinkId = parsed.ID() } flattenedHttpsConfig := flattenCustomHttpsConfiguration(props) @@ -1302,7 +1300,7 @@ func flattenFrontEndEndpoints(input *[]frontdoor.FrontendEndpoint, frontDoorId p return &results, nil } -func flattenArmFrontDoorHealthProbeSettingsModel(input *[]frontdoor.HealthProbeSettingsModel, frontDoorId parse.FrontDoorId, subscriptionId string) []interface{} { +func flattenFrontDoorHealthProbeSettingsModel(input *[]frontdoor.HealthProbeSettingsModel, frontDoorId parse.FrontDoorId) []interface{} { results := make([]interface{}, 0) if input == nil { return results @@ -1313,7 +1311,7 @@ func flattenArmFrontDoorHealthProbeSettingsModel(input *[]frontdoor.HealthProbeS name := "" if v.Name != nil { // rewrite the ID to ensure it's consistent - id = parse.NewHealthProbeID(frontDoorId, *v.Name).ID(subscriptionId) + id = parse.NewHealthProbeID(frontDoorId.SubscriptionId, frontDoorId.ResourceGroup, frontDoorId.Name, *v.Name).ID() name = *v.Name } @@ -1354,7 +1352,7 @@ func flattenArmFrontDoorHealthProbeSettingsModel(input *[]frontdoor.HealthProbeS return results } -func flattenArmFrontDoorLoadBalancingSettingsModel(input *[]frontdoor.LoadBalancingSettingsModel, frontDoorId parse.FrontDoorId, subscriptionId string) []interface{} { +func flattenFrontDoorLoadBalancingSettingsModel(input *[]frontdoor.LoadBalancingSettingsModel, frontDoorId parse.FrontDoorId) []interface{} { results := make([]interface{}, 0) if input == nil { return results @@ -1365,7 +1363,7 @@ func flattenArmFrontDoorLoadBalancingSettingsModel(input *[]frontdoor.LoadBalanc name := "" if v.Name != nil { // rewrite the ID to ensure it's consistent - id = parse.NewLoadBalancingID(frontDoorId, *v.Name).ID(subscriptionId) + id = parse.NewLoadBalancingID(frontDoorId.SubscriptionId, frontDoorId.ResourceGroup, frontDoorId.Name, *v.Name).ID() name = *v.Name } @@ -1396,7 +1394,7 @@ func flattenArmFrontDoorLoadBalancingSettingsModel(input *[]frontdoor.LoadBalanc return results } -func flattenArmFrontDoorRoutingRule(input *[]frontdoor.RoutingRule, oldBlocks interface{}, frontDoorId parse.FrontDoorId, subscriptionId string) (*[]interface{}, error) { +func flattenFrontDoorRoutingRule(input *[]frontdoor.RoutingRule, oldBlocks interface{}, frontDoorId parse.FrontDoorId) (*[]interface{}, error) 
{ if input == nil { return &[]interface{}{}, nil } @@ -1407,7 +1405,7 @@ func flattenArmFrontDoorRoutingRule(input *[]frontdoor.RoutingRule, oldBlocks in name := "" if v.Name != nil { // rewrite the ID to ensure it's consistent - id = parse.NewRoutingRuleID(frontDoorId, *v.Name).ID(subscriptionId) + id = parse.NewRoutingRuleID(frontDoorId.SubscriptionId, frontDoorId.ResourceGroup, frontDoorId.Name, *v.Name).ID() name = *v.Name } @@ -1418,7 +1416,7 @@ func flattenArmFrontDoorRoutingRule(input *[]frontdoor.RoutingRule, oldBlocks in patternsToMatch := make([]string, 0) redirectConfiguration := make([]interface{}, 0) if props := v.RoutingRuleProperties; props != nil { - acceptedProtocols = flattenArmFrontDoorAcceptedProtocol(props.AcceptedProtocols) + acceptedProtocols = flattenFrontDoorAcceptedProtocol(props.AcceptedProtocols) enabled = props.EnabledState == frontdoor.RoutingRuleEnabledStateEnabled forwardConfiguration, err := flattenRoutingRuleForwardingConfiguration(props.RouteConfiguration, oldBlocks) @@ -1427,7 +1425,7 @@ func flattenArmFrontDoorRoutingRule(input *[]frontdoor.RoutingRule, oldBlocks in } forwardingConfiguration = *forwardConfiguration - frontendEndpoints, err := flattenArmFrontDoorFrontendEndpointsSubResources(props.FrontendEndpoints) + frontendEndpoints, err := flattenFrontDoorFrontendEndpointsSubResources(props.FrontendEndpoints) if err != nil { return nil, fmt.Errorf("flattening `frontend_endpoints`: %+v", err) } @@ -1462,7 +1460,7 @@ func flattenRoutingRuleForwardingConfiguration(config frontdoor.BasicRouteConfig name := "" if v.BackendPool != nil && v.BackendPool.ID != nil { - backendPoolId, err := parse.BackendPoolID(*v.BackendPool.ID) + backendPoolId, err := parse.BackendPoolIDInsensitively(*v.BackendPool.ID) if err != nil { return nil, err } @@ -1559,7 +1557,7 @@ func flattenRoutingRuleRedirectConfiguration(config frontdoor.BasicRouteConfigur } } -func flattenArmFrontDoorAcceptedProtocol(input *[]frontdoor.Protocol) []string { +func flattenFrontDoorAcceptedProtocol(input *[]frontdoor.Protocol) []string { if input == nil { return make([]string, 0) } @@ -1572,7 +1570,7 @@ func flattenArmFrontDoorAcceptedProtocol(input *[]frontdoor.Protocol) []string { return output } -func flattenArmFrontDoorFrontendEndpointsSubResources(input *[]frontdoor.SubResource) (*[]string, error) { +func flattenFrontDoorFrontendEndpointsSubResources(input *[]frontdoor.SubResource) (*[]string, error) { output := make([]string, 0) if input == nil { @@ -1584,7 +1582,7 @@ func flattenArmFrontDoorFrontendEndpointsSubResources(input *[]frontdoor.SubReso continue } - id, err := parse.FrontendEndpointID(*v.ID) + id, err := parse.FrontendEndpointIDInsensitively(*v.ID) if err != nil { return nil, err } diff --git a/azurerm/internal/services/frontdoor/frontdoor_resource_test.go b/azurerm/internal/services/frontdoor/frontdoor_resource_test.go new file mode 100644 index 000000000000..6e27cb7225ac --- /dev/null +++ b/azurerm/internal/services/frontdoor/frontdoor_resource_test.go @@ -0,0 +1,979 @@ +package frontdoor_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/frontdoor/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type FrontDoorResource struct { +} + +func TestAccFrontDoor_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_frontdoor", "test") + r := FrontDoorResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("backend_pool_health_probe.0.enabled").HasValue("true"), + check.That(data.ResourceName).Key("backend_pool_health_probe.0.probe_method").HasValue("GET"), + ), + }, + { + Config: r.basicDisabled(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("backend_pool_health_probe.0.enabled").HasValue("false"), + check.That(data.ResourceName).Key("backend_pool_health_probe.0.probe_method").HasValue("HEAD"), + ), + }, + data.ImportStep(), + }) +} + +// remove in 3.0 +func TestAccFrontDoor_global(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_frontdoor", "test") + r := FrontDoorResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.global(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("location").HasValue("global"), + ), + ExpectNonEmptyPlan: true, + }, + data.ImportStep(), + }) +} + +func TestAccFrontDoor_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_frontdoor", "test") + r := FrontDoorResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccFrontDoor_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_frontdoor", "test") + r := FrontDoorResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFrontDoor_multiplePools(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_frontdoor", "test") + r := FrontDoorResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.multiplePools(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("backend_pool.#").HasValue("2"), + check.That(data.ResourceName).Key("backend_pool_health_probe.#").HasValue("2"), + check.That(data.ResourceName).Key("backend_pool_load_balancing.#").HasValue("2"), + check.That(data.ResourceName).Key("routing_rule.#").HasValue("2"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFrontDoor_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_frontdoor", "test") + r := FrontDoorResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + 
check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFrontDoor_waf(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_frontdoor", "test") + r := FrontDoorResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.waf(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFrontDoor_EnableDisableCache(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_frontdoor", "test") + r := FrontDoorResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.EnableCache(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("routing_rule.0.forwarding_configuration.0.cache_enabled").HasValue("true"), + check.That(data.ResourceName).Key("routing_rule.0.forwarding_configuration.0.cache_use_dynamic_compression").HasValue("false"), + check.That(data.ResourceName).Key("routing_rule.0.forwarding_configuration.0.cache_query_parameter_strip_directive").HasValue("StripAll"), + ), + }, + { + Config: r.DisableCache(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("routing_rule.0.forwarding_configuration.0.cache_enabled").HasValue("false"), + check.That(data.ResourceName).Key("routing_rule.0.forwarding_configuration.0.cache_use_dynamic_compression").HasValue("false"), + check.That(data.ResourceName).Key("routing_rule.0.forwarding_configuration.0.cache_query_parameter_strip_directive").HasValue("StripAll"), + ), + }, + { + Config: r.EnableCache(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("routing_rule.0.forwarding_configuration.0.cache_enabled").HasValue("true"), + check.That(data.ResourceName).Key("routing_rule.0.forwarding_configuration.0.cache_use_dynamic_compression").HasValue("false"), + check.That(data.ResourceName).Key("routing_rule.0.forwarding_configuration.0.cache_query_parameter_strip_directive").HasValue("StripAll"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccFrontDoor_CustomHttps(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_frontdoor", "test") + r := FrontDoorResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.CustomHttpsEnabled(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("frontend_endpoint.0.custom_https_provisioning_enabled").HasValue("true"), + check.That(data.ResourceName).Key("frontend_endpoint.0.custom_https_configuration.0.certificate_source").HasValue("FrontDoor"), + check.That(data.ResourceName).Key("frontend_endpoint.0.custom_https_configuration.0.minimum_tls_version").HasValue("1.2"), + check.That(data.ResourceName).Key("frontend_endpoint.0.custom_https_configuration.0.provisioning_state").HasValue("Enabled"), + check.That(data.ResourceName).Key("frontend_endpoint.0.custom_https_configuration.0.provisioning_substate").HasValue("CertificateDeployed"), + ), + }, + { + Config: r.CustomHttpsDisabled(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("frontend_endpoint.0.custom_https_provisioning_enabled").HasValue("false"), + ), + }, + data.ImportStep(), + }) +} + +func (FrontDoorResource) Exists(ctx context.Context, clients 
*clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.FrontDoorIDInsensitively(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Frontdoor.FrontDoorsClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Front Door %q (Resource Group %q): %v", id.Name, id.ResourceGroup, err) + } + + return utils.Bool(resp.Properties != nil), nil +} + +func (FrontDoorResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-frontdoor-%d" + location = "%s" +} + +locals { + backend_name = "backend-bing" + endpoint_name = "frontend-endpoint" + health_probe_name = "health-probe" + load_balancing_name = "load-balancing-setting" +} + +resource "azurerm_frontdoor" "test" { + name = "acctest-FD-%d" + resource_group_name = azurerm_resource_group.test.name + enforce_backend_pools_certificate_name_check = false + + routing_rule { + name = "routing-rule" + accepted_protocols = ["Http", "Https"] + patterns_to_match = ["/*"] + frontend_endpoints = [local.endpoint_name] + forwarding_configuration { + forwarding_protocol = "MatchRequest" + backend_pool_name = local.backend_name + } + } + + backend_pool_load_balancing { + name = local.load_balancing_name + } + + backend_pool_health_probe { + name = local.health_probe_name + } + + backend_pool { + name = local.backend_name + backend { + host_header = "www.bing.com" + address = "www.bing.com" + http_port = 80 + https_port = 443 + } + + load_balancing_name = local.load_balancing_name + health_probe_name = local.health_probe_name + } + + frontend_endpoint { + name = local.endpoint_name + host_name = "acctest-FD-%d.azurefd.net" + custom_https_provisioning_enabled = false + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (FrontDoorResource) basicDisabled(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-frontdoor-%d" + location = "%s" +} + +locals { + backend_name = "backend-bing" + endpoint_name = "frontend-endpoint" + health_probe_name = "health-probe" + load_balancing_name = "load-balancing-setting" +} + +resource "azurerm_frontdoor" "test" { + name = "acctest-FD-%d" + resource_group_name = azurerm_resource_group.test.name + enforce_backend_pools_certificate_name_check = false + + routing_rule { + name = "routing-rule" + accepted_protocols = ["Http", "Https"] + patterns_to_match = ["/*"] + frontend_endpoints = [local.endpoint_name] + forwarding_configuration { + forwarding_protocol = "MatchRequest" + backend_pool_name = local.backend_name + } + } + + backend_pool_load_balancing { + name = local.load_balancing_name + } + + backend_pool_health_probe { + name = local.health_probe_name + enabled = false + probe_method = "HEAD" + } + + backend_pool { + name = local.backend_name + backend { + host_header = "www.bing.com" + address = "www.bing.com" + http_port = 80 + https_port = 443 + } + + load_balancing_name = local.load_balancing_name + health_probe_name = local.health_probe_name + } + + frontend_endpoint { + name = local.endpoint_name + host_name = "acctest-FD-%d.azurefd.net" + custom_https_provisioning_enabled = false + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +// remove in 3.0 +func (FrontDoorResource) global(data acceptance.TestData) 
string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-frontdoor-%d" + location = "%s" +} + +locals { + backend_name = "backend-bing" + endpoint_name = "frontend-endpoint" + health_probe_name = "health-probe" + load_balancing_name = "load-balancing-setting" +} + +resource "azurerm_frontdoor" "test" { + name = "acctest-FD-%d" + resource_group_name = azurerm_resource_group.test.name + location = "%s" + enforce_backend_pools_certificate_name_check = false + + routing_rule { + name = "routing-rule" + accepted_protocols = ["Http", "Https"] + patterns_to_match = ["/*"] + frontend_endpoints = [local.endpoint_name] + forwarding_configuration { + forwarding_protocol = "MatchRequest" + backend_pool_name = local.backend_name + } + } + + backend_pool_load_balancing { + name = local.load_balancing_name + } + + backend_pool_health_probe { + name = local.health_probe_name + } + + backend_pool { + name = local.backend_name + backend { + host_header = "www.bing.com" + address = "www.bing.com" + http_port = 80 + https_port = 443 + } + + load_balancing_name = local.load_balancing_name + health_probe_name = local.health_probe_name + } + + frontend_endpoint { + name = local.endpoint_name + host_name = "acctest-FD-%d.azurefd.net" + custom_https_provisioning_enabled = false + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r FrontDoorResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_frontdoor" "import" { + name = azurerm_frontdoor.test.name + resource_group_name = azurerm_frontdoor.test.resource_group_name + enforce_backend_pools_certificate_name_check = azurerm_frontdoor.test.enforce_backend_pools_certificate_name_check + + routing_rule { + name = "routing-rule" + accepted_protocols = ["Http", "Https"] + patterns_to_match = ["/*"] + frontend_endpoints = [local.endpoint_name] + forwarding_configuration { + forwarding_protocol = "MatchRequest" + backend_pool_name = local.backend_name + } + } + + backend_pool_load_balancing { + name = local.load_balancing_name + } + + backend_pool_health_probe { + name = local.health_probe_name + } + + backend_pool { + name = local.backend_name + backend { + host_header = "www.bing.com" + address = "www.bing.com" + http_port = 80 + https_port = 443 + } + + load_balancing_name = local.load_balancing_name + health_probe_name = local.health_probe_name + } + + frontend_endpoint { + name = local.endpoint_name + host_name = "acctest-FD-%d.azurefd.net" + custom_https_provisioning_enabled = false + } +} +`, r.basic(data), data.RandomInteger) +} + +func (FrontDoorResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-frontdoor-%d" + location = "%s" +} + +locals { + backend_name = "backend-bing" + endpoint_name = "frontend-endpoint" + health_probe_name = "health-probe" + load_balancing_name = "load-balancing-setting" +} + +resource "azurerm_frontdoor" "test" { + name = "acctest-FD-%d" + resource_group_name = azurerm_resource_group.test.name + enforce_backend_pools_certificate_name_check = false + backend_pools_send_receive_timeout_seconds = 45 + + routing_rule { + name = "routing-rule" + accepted_protocols = ["Http", "Https"] + patterns_to_match = ["/*"] + frontend_endpoints = [local.endpoint_name] + forwarding_configuration { + forwarding_protocol 
= "MatchRequest" + backend_pool_name = local.backend_name + } + } + + backend_pool_load_balancing { + name = local.load_balancing_name + } + + backend_pool_health_probe { + name = local.health_probe_name + } + + backend_pool { + name = local.backend_name + backend { + host_header = "www.bing.com" + address = "www.bing.com" + http_port = 80 + https_port = 443 + } + + load_balancing_name = local.load_balancing_name + health_probe_name = local.health_probe_name + } + + frontend_endpoint { + name = local.endpoint_name + host_name = "acctest-FD-%d.azurefd.net" + custom_https_provisioning_enabled = false + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (FrontDoorResource) waf(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-frontdoor-%d" + location = "%s" +} + +locals { + backend_name = "backend-bing" + endpoint_name = "frontend-endpoint" + health_probe_name = "health-probe" + load_balancing_name = "load-balancing-setting" +} + +resource "azurerm_frontdoor_firewall_policy" "test" { + name = "acctestwafp%d" + resource_group_name = azurerm_resource_group.test.name + mode = "Prevention" +} + +resource "azurerm_frontdoor" "test" { + name = "acctest-FD-%d" + resource_group_name = azurerm_resource_group.test.name + enforce_backend_pools_certificate_name_check = false + + routing_rule { + name = "routing-rule" + accepted_protocols = ["Http", "Https"] + patterns_to_match = ["/*"] + frontend_endpoints = [local.endpoint_name] + forwarding_configuration { + forwarding_protocol = "MatchRequest" + backend_pool_name = local.backend_name + } + } + + backend_pool_load_balancing { + name = local.load_balancing_name + } + + backend_pool_health_probe { + name = local.health_probe_name + } + + backend_pool { + name = local.backend_name + backend { + host_header = "www.bing.com" + address = "www.bing.com" + http_port = 80 + https_port = 443 + } + + load_balancing_name = local.load_balancing_name + health_probe_name = local.health_probe_name + } + + frontend_endpoint { + name = local.endpoint_name + host_name = "acctest-FD-%d.azurefd.net" + custom_https_provisioning_enabled = false + web_application_firewall_policy_link_id = azurerm_frontdoor_firewall_policy.test.id + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (FrontDoorResource) DisableCache(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-frontdoor-%d" + location = "%s" +} + +locals { + backend_name = "backend-bing" + endpoint_name = "frontend-endpoint" + health_probe_name = "health-probe" + load_balancing_name = "load-balancing-setting" +} + +resource "azurerm_frontdoor" "test" { + name = "acctest-FD-%d" + resource_group_name = azurerm_resource_group.test.name + enforce_backend_pools_certificate_name_check = false + + routing_rule { + name = "routing-rule" + accepted_protocols = ["Http", "Https"] + patterns_to_match = ["/*"] + frontend_endpoints = [local.endpoint_name] + forwarding_configuration { + forwarding_protocol = "MatchRequest" + backend_pool_name = local.backend_name + } + } + + backend_pool_load_balancing { + name = local.load_balancing_name + } + + backend_pool_health_probe { + name = local.health_probe_name + } + + backend_pool { + name = local.backend_name + backend { + host_header = 
"www.bing.com" + address = "www.bing.com" + http_port = 80 + https_port = 443 + } + + load_balancing_name = local.load_balancing_name + health_probe_name = local.health_probe_name + } + + frontend_endpoint { + name = local.endpoint_name + host_name = "acctest-FD-%d.azurefd.net" + custom_https_provisioning_enabled = false + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (FrontDoorResource) EnableCache(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-frontdoor-%d" + location = "%s" +} + +locals { + backend_name = "backend-bing" + endpoint_name = "frontend-endpoint" + health_probe_name = "health-probe" + load_balancing_name = "load-balancing-setting" +} + +resource "azurerm_frontdoor" "test" { + name = "acctest-FD-%d" + resource_group_name = azurerm_resource_group.test.name + enforce_backend_pools_certificate_name_check = false + + routing_rule { + name = "routing-rule" + accepted_protocols = ["Http", "Https"] + patterns_to_match = ["/*"] + frontend_endpoints = [local.endpoint_name] + + forwarding_configuration { + forwarding_protocol = "MatchRequest" + backend_pool_name = local.backend_name + cache_enabled = true + } + } + + backend_pool_load_balancing { + name = local.load_balancing_name + } + + backend_pool_health_probe { + name = local.health_probe_name + } + + backend_pool { + name = local.backend_name + backend { + host_header = "www.bing.com" + address = "www.bing.com" + http_port = 80 + https_port = 443 + } + + load_balancing_name = local.load_balancing_name + health_probe_name = local.health_probe_name + } + + frontend_endpoint { + name = local.endpoint_name + host_name = "acctest-FD-%d.azurefd.net" + custom_https_provisioning_enabled = false + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (FrontDoorResource) CustomHttpsEnabled(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-frontdoor-%d" + location = "%s" +} + +locals { + backend_name = "backend-bing" + endpoint_name = "frontend-endpoint" + health_probe_name = "health-probe" + load_balancing_name = "load-balancing-setting" +} + +resource "azurerm_frontdoor" "test" { + name = "acctest-FD-%d" + resource_group_name = azurerm_resource_group.test.name + enforce_backend_pools_certificate_name_check = false + + routing_rule { + name = "routing-rule" + accepted_protocols = ["Http", "Https"] + patterns_to_match = ["/*"] + frontend_endpoints = [local.endpoint_name] + + forwarding_configuration { + forwarding_protocol = "MatchRequest" + backend_pool_name = local.backend_name + } + } + + backend_pool_load_balancing { + name = local.load_balancing_name + } + + backend_pool_health_probe { + name = local.health_probe_name + } + + backend_pool { + name = local.backend_name + backend { + host_header = "www.bing.com" + address = "www.bing.com" + http_port = 80 + https_port = 443 + } + + load_balancing_name = local.load_balancing_name + health_probe_name = local.health_probe_name + } + + frontend_endpoint { + name = local.endpoint_name + host_name = "acctest-FD-%d.azurefd.net" + custom_https_provisioning_enabled = true + custom_https_configuration { + certificate_source = "FrontDoor" + } + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (FrontDoorResource) 
CustomHttpsDisabled(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-frontdoor-%d" + location = "%s" +} + +locals { + backend_name = "backend-bing" + endpoint_name = "frontend-endpoint" + health_probe_name = "health-probe" + load_balancing_name = "load-balancing-setting" +} + +resource "azurerm_frontdoor" "test" { + name = "acctest-FD-%d" + resource_group_name = azurerm_resource_group.test.name + enforce_backend_pools_certificate_name_check = false + + routing_rule { + name = "routing-rule" + accepted_protocols = ["Http", "Https"] + patterns_to_match = ["/*"] + frontend_endpoints = [local.endpoint_name] + + forwarding_configuration { + forwarding_protocol = "MatchRequest" + backend_pool_name = local.backend_name + } + } + + backend_pool_load_balancing { + name = local.load_balancing_name + } + + backend_pool_health_probe { + name = local.health_probe_name + } + + backend_pool { + name = local.backend_name + backend { + host_header = "www.bing.com" + address = "www.bing.com" + http_port = 80 + https_port = 443 + } + + load_balancing_name = local.load_balancing_name + health_probe_name = local.health_probe_name + } + + frontend_endpoint { + name = local.endpoint_name + host_name = "acctest-FD-%d.azurefd.net" + custom_https_provisioning_enabled = false + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (FrontDoorResource) multiplePools(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-frontdoor-%[1]d" + location = "%s" +} + +resource "azurerm_frontdoor" "test" { + name = "acctest-FD-%[1]d" + resource_group_name = azurerm_resource_group.test.name + enforce_backend_pools_certificate_name_check = false + + frontend_endpoint { + name = "acctest-FD-%[1]d-default-FE" + host_name = "acctest-FD-%[1]d.azurefd.net" + custom_https_provisioning_enabled = false + } + + # --- Pool 1 + + routing_rule { + name = "acctest-FD-%[1]d-bing-RR" + accepted_protocols = ["Https"] + patterns_to_match = ["/poolBing/*"] + frontend_endpoints = ["acctest-FD-%[1]d-default-FE"] + + forwarding_configuration { + forwarding_protocol = "MatchRequest" + backend_pool_name = "acctest-FD-%[1]d-pool-bing" + cache_enabled = true + } + } + + backend_pool_load_balancing { + name = "acctest-FD-%[1]d-bing-LB" + additional_latency_milliseconds = 0 + sample_size = 4 + successful_samples_required = 2 + } + + backend_pool_health_probe { + name = "acctest-FD-%[1]d-bing-HP" + protocol = "Https" + enabled = true + probe_method = "HEAD" + } + + backend_pool { + name = "acctest-FD-%[1]d-pool-bing" + load_balancing_name = "acctest-FD-%[1]d-bing-LB" + health_probe_name = "acctest-FD-%[1]d-bing-HP" + + backend { + host_header = "bing.com" + address = "bing.com" + http_port = 80 + https_port = 443 + weight = 75 + enabled = true + } + } + + # --- Pool 2 + + routing_rule { + name = "acctest-FD-%[1]d-google-RR" + accepted_protocols = ["Https"] + patterns_to_match = ["/poolGoogle/*"] + frontend_endpoints = ["acctest-FD-%[1]d-default-FE"] + + forwarding_configuration { + forwarding_protocol = "MatchRequest" + backend_pool_name = "acctest-FD-%[1]d-pool-google" + cache_enabled = true + } + } + + backend_pool_load_balancing { + name = "acctest-FD-%[1]d-google-LB" + additional_latency_milliseconds = 0 + sample_size = 4 + successful_samples_required = 2 + } + + 
backend_pool_health_probe { + name = "acctest-FD-%[1]d-google-HP" + protocol = "Https" + } + + backend_pool { + name = "acctest-FD-%[1]d-pool-google" + load_balancing_name = "acctest-FD-%[1]d-google-LB" + health_probe_name = "acctest-FD-%[1]d-google-HP" + + backend { + host_header = "google.com" + address = "google.com" + http_port = 80 + https_port = 443 + weight = 75 + enabled = true + } + } +} +`, data.RandomInteger, data.Locations.Primary) +} diff --git a/azurerm/internal/services/frontdoor/migration/custom_https_configuration.go b/azurerm/internal/services/frontdoor/migration/custom_https_configuration.go index 615d45e6710b..b0cab99222a3 100644 --- a/azurerm/internal/services/frontdoor/migration/custom_https_configuration.go +++ b/azurerm/internal/services/frontdoor/migration/custom_https_configuration.go @@ -101,8 +101,8 @@ func CustomHttpsConfigurationV0ToV1(rawState map[string]interface{}, _ interface return rawState, fmt.Errorf("couldn't find the `frontendEndpoints` segment in the old resource id %q", oldId) } - newId := parse.NewFrontendEndpointID(parse.NewFrontDoorID(resourceGroup, frontdoorName), frontendEndpointName) - newIdStr := newId.ID(oldParsedId.SubscriptionID) + newId := parse.NewFrontendEndpointID(oldParsedId.SubscriptionID, resourceGroup, frontdoorName, frontendEndpointName) + newIdStr := newId.ID() log.Printf("[DEBUG] Updating ID from %q to %q", oldId, newIdStr) diff --git a/azurerm/internal/services/frontdoor/migration/frontdoor.go b/azurerm/internal/services/frontdoor/migration/frontdoor.go index 03c3da93f5cc..140646157556 100644 --- a/azurerm/internal/services/frontdoor/migration/frontdoor.go +++ b/azurerm/internal/services/frontdoor/migration/frontdoor.go @@ -411,8 +411,8 @@ func FrontDoorV1ToV2(rawState map[string]interface{}, _ interface{}) (map[string return rawState, fmt.Errorf("couldn't find the `frontDoors` segment in the old resource id %q", oldId) } - newId := parse.NewFrontDoorID(resourceGroup, frontDoorName) - newIdStr := newId.ID(oldParsedId.SubscriptionID) + newId := parse.NewFrontDoorID(oldParsedId.SubscriptionID, resourceGroup, frontDoorName) + newIdStr := newId.ID() log.Printf("[DEBUG] Updating ID from %q to %q", oldId, newIdStr) diff --git a/azurerm/internal/services/frontdoor/migration/web_application_firewall_policy.go b/azurerm/internal/services/frontdoor/migration/web_application_firewall_policy.go index 2d0c765e8d27..9e4dfb8de534 100644 --- a/azurerm/internal/services/frontdoor/migration/web_application_firewall_policy.go +++ b/azurerm/internal/services/frontdoor/migration/web_application_firewall_policy.go @@ -111,6 +111,9 @@ func WebApplicationFirewallPolicyV0Schema() *schema.Resource { "match_values": { Type: schema.TypeList, Required: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, }, "operator": { @@ -305,8 +308,8 @@ func WebApplicationFirewallPolicyV0ToV1(rawState map[string]interface{}, _ inter return rawState, fmt.Errorf("couldn't find the `frontDoorWebApplicationFirewallPolicies` segment in the old resource id %q", oldId) } - newId := parse.NewWebApplicationFirewallPolicyID(resourceGroup, policyName) - newIdStr := newId.ID(oldParsedId.SubscriptionID) + newId := parse.NewWebApplicationFirewallPolicyID(oldParsedId.SubscriptionID, resourceGroup, policyName) + newIdStr := newId.ID() log.Printf("[DEBUG] Updating ID from %q to %q", oldId, newIdStr) diff --git a/azurerm/internal/services/frontdoor/parse/backend_pool.go b/azurerm/internal/services/frontdoor/parse/backend_pool.go index 7d333ef569c7..5852ae7406f5 100644 --- 
a/azurerm/internal/services/frontdoor/parse/backend_pool.go +++ b/azurerm/internal/services/frontdoor/parse/backend_pool.go @@ -1,50 +1,131 @@ package parse -import "fmt" +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) type BackendPoolId struct { - ResourceGroup string - FrontDoorName string - Name string + SubscriptionId string + ResourceGroup string + FrontDoorName string + Name string } -func NewBackendPoolID(id FrontDoorId, name string) BackendPoolId { +func NewBackendPoolID(subscriptionId, resourceGroup, frontDoorName, name string) BackendPoolId { return BackendPoolId{ - ResourceGroup: id.ResourceGroup, - FrontDoorName: id.Name, - Name: name, + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + FrontDoorName: frontDoorName, + Name: name, + } +} + +func (id BackendPoolId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Front Door Name %q", id.FrontDoorName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Backend Pool", segmentsStr) } -func (id BackendPoolId) ID(subscriptionId string) string { - base := NewFrontDoorID(id.ResourceGroup, id.FrontDoorName).ID(subscriptionId) - return fmt.Sprintf("%s/backendPools/%s", base, id.Name) +func (id BackendPoolId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/frontDoors/%s/backendPools/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.FrontDoorName, id.Name) } +// BackendPoolID parses a BackendPool ID into an BackendPoolId struct func BackendPoolID(input string) (*BackendPoolId, error) { - frontDoorId, id, err := parseFrontDoorChildResourceId(input) + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := BackendPoolId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.FrontDoorName, err = id.PopSegment("frontDoors"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("backendPools"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} + +// BackendPoolIDInsensitively parses an BackendPool ID into an BackendPoolId struct, insensitively +// This should only be used to parse an ID for rewriting, the BackendPoolID +// method should be used instead for validation etc. +// +// Whilst this may seem strange, this enables Terraform have consistent casing +// which works around issues in Core, whilst handling broken API responses. 
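The two parsers serve different call sites: BackendPoolID (above) validates an ID against the canonical segment casing, for example when importing a resource, while BackendPoolIDInsensitively (below) only exists to rewrite IDs returned by the API, which can come back with inconsistent casing, into that canonical form. A minimal illustrative sketch of that rewrite, not part of this patch and assuming it sits inside the provider module so the internal parse package is importable:

package example

import (
	"log"

	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/frontdoor/parse"
)

// canonicaliseBackendPoolID takes an ID as returned by the API, which may use
// casing such as "frontdoors"/"backendpools", and re-emits it with the
// consistent "frontDoors"/"backendPools" casing so Terraform state stays stable.
func canonicaliseBackendPoolID(apiID string) string {
	id, err := parse.BackendPoolIDInsensitively(apiID)
	if err != nil {
		log.Fatalf("parsing %q: %+v", apiID, err)
	}
	return id.ID()
}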
+func BackendPoolIDInsensitively(input string) (*BackendPoolId, error) { + id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("parsing Backend Pool ID %q: %+v", input, err) + return nil, err + } + + resourceId := BackendPoolId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, } - poolId := BackendPoolId{ - ResourceGroup: frontDoorId.ResourceGroup, - FrontDoorName: frontDoorId.Name, + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - // API is broken - https://github.com/Azure/azure-sdk-for-go/issues/6762 - // note: the ordering is important since the defined case (we want to error with) is backendPools - if poolId.Name, err = id.PopSegment("backendpools"); err != nil { - if poolId.Name, err = id.PopSegment("BackendPools"); err != nil { - if poolId.Name, err = id.PopSegment("backendPools"); err != nil { - return nil, err - } + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + // find the correct casing for the 'frontDoors' segment + frontDoorsKey := "frontDoors" + for key := range id.Path { + if strings.EqualFold(key, frontDoorsKey) { + frontDoorsKey = key + break } } + if resourceId.FrontDoorName, err = id.PopSegment(frontDoorsKey); err != nil { + return nil, err + } + + // find the correct casing for the 'backendPools' segment + backendPoolsKey := "backendPools" + for key := range id.Path { + if strings.EqualFold(key, backendPoolsKey) { + backendPoolsKey = key + break + } + } + if resourceId.Name, err = id.PopSegment(backendPoolsKey); err != nil { + return nil, err + } if err := id.ValidateNoEmptySegments(input); err != nil { return nil, err } - return &poolId, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/frontdoor/parse/backend_pool_test.go b/azurerm/internal/services/frontdoor/parse/backend_pool_test.go index 6e4ee710d2d9..cf1d8b30b513 100644 --- a/azurerm/internal/services/frontdoor/parse/backend_pool_test.go +++ b/azurerm/internal/services/frontdoor/parse/backend_pool_test.go @@ -1,5 +1,7 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" @@ -9,76 +11,254 @@ import ( var _ resourceid.Formatter = BackendPoolId{} func TestBackendPoolIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - frontDoorId := NewFrontDoorID("group1", "frontdoor1") - actual := NewBackendPoolID(frontDoorId, "pool1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/frontDoors/frontdoor1/backendPools/pool1" + actual := NewBackendPoolID("12345678-1234-9876-4563-123456789012", "resGroup1", "frontdoor1", "pool1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/backendPools/pool1" if actual != expected { t.Fatalf("Expected %q but got %q", expected, actual) } } -func TestBackendPoolIDParser(t *testing.T) { +func TestBackendPoolID(t *testing.T) { testData := []struct { - input string - expected *BackendPoolId + Input string + Error bool + Expected *BackendPoolId }{ + { - // lower case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/frontdoors/frontDoor1/backendpools/pool1", - expected: &BackendPoolId{ - ResourceGroup: "group1", - FrontDoorName: "frontDoor1", - 
Name: "pool1", - }, + // empty + Input: "", + Error: true, }, + { - // camel case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/frontDoors/frontDoor1/backendPools/pool1", - expected: &BackendPoolId{ - ResourceGroup: "group1", - FrontDoorName: "frontDoor1", - Name: "pool1", - }, + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/", + Error: true, }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/backendPools/", + Error: true, + }, + { - // title case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/Frontdoors/frontDoor1/BackendPools/pool1", - expected: &BackendPoolId{ - ResourceGroup: "group1", - FrontDoorName: "frontDoor1", - Name: "pool1", + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/backendPools/pool1", + Expected: &BackendPoolId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FrontDoorName: "frontdoor1", + Name: "pool1", }, }, + { - // pascal case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/FrontDoors/frontDoor1/Backendpools/pool1", - expected: nil, + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/FRONTDOORS/FRONTDOOR1/BACKENDPOOLS/POOL1", + Error: true, }, } - for _, test := range testData { - t.Logf("Testing %q..", test.input) - actual, err := BackendPoolID(test.input) - if err != nil && test.expected == nil { - continue - } else { - if err == nil && test.expected == nil { - t.Fatalf("Expected an error but didn't get one") - } else if err != nil && test.expected != nil { - t.Fatalf("Expected no error but got: %+v", err) + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := BackendPoolID(v.Input) + if err != nil { + if v.Error { + continue } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.ResourceGroup != test.expected.ResourceGroup { - t.Fatalf("Expected ResourceGroup to be %q but was %q", test.expected.ResourceGroup, actual.ResourceGroup) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != 
v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.FrontDoorName != v.Expected.FrontDoorName { + t.Fatalf("Expected %q but got %q for FrontDoorName", v.Expected.FrontDoorName, actual.FrontDoorName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) } + } +} + +func TestBackendPoolIDInsensitively(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *BackendPoolId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, - if actual.FrontDoorName != test.expected.FrontDoorName { - t.Fatalf("Expected FrontDoorName to be %q but was %q", test.expected.FrontDoorName, actual.FrontDoorName) + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/backendPools/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/backendPools/pool1", + Expected: &BackendPoolId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FrontDoorName: "frontdoor1", + Name: "pool1", + }, + }, + + { + // lower-cased segment names + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontdoors/frontdoor1/backendpools/pool1", + Expected: &BackendPoolId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FrontDoorName: "frontdoor1", + Name: "pool1", + }, + }, + + { + // upper-cased segment names + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/FRONTDOORS/frontdoor1/BACKENDPOOLS/pool1", + Expected: &BackendPoolId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FrontDoorName: "frontdoor1", + Name: "pool1", + }, + }, + + { + // mixed-cased segment names + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/FrOnTdOoRs/frontdoor1/BaCkEnDpOoLs/pool1", + Expected: &BackendPoolId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FrontDoorName: "frontdoor1", + Name: "pool1", + }, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := BackendPoolIDInsensitively(v.Input) + if err != nil { 
+ if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.Name != test.expected.Name { - t.Fatalf("Expected name to be %q but was %q", test.expected.Name, actual.Name) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.FrontDoorName != v.Expected.FrontDoorName { + t.Fatalf("Expected %q but got %q for FrontDoorName", v.Expected.FrontDoorName, actual.FrontDoorName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) } } } diff --git a/azurerm/internal/services/frontdoor/parse/front_door.go b/azurerm/internal/services/frontdoor/parse/front_door.go new file mode 100644 index 000000000000..46ed38b38403 --- /dev/null +++ b/azurerm/internal/services/frontdoor/parse/front_door.go @@ -0,0 +1,113 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type FrontDoorId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewFrontDoorID(subscriptionId, resourceGroup, name string) FrontDoorId { + return FrontDoorId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id FrontDoorId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Front Door", segmentsStr) +} + +func (id FrontDoorId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/frontDoors/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// FrontDoorID parses a FrontDoor ID into an FrontDoorId struct +func FrontDoorID(input string) (*FrontDoorId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := FrontDoorId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("frontDoors"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} + +// FrontDoorIDInsensitively parses an FrontDoor ID into an FrontDoorId struct, insensitively +// This should only be used to parse an ID for rewriting, the FrontDoorID +// method should be used instead for validation etc. +// +// Whilst this may seem strange, this enables Terraform have consistent casing +// which works around issues in Core, whilst handling broken API responses. 
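The same split applies to the Front Door ID itself: the generated tests further down only feed FrontDoorID canonically cased IDs, whereas FrontDoorIDInsensitively (below) also accepts lower-, upper- and mixed-cased "frontDoors" segments and normalises them via ID(). A short behavioural sketch, illustrative only and under the same module-placement assumption as the earlier example:

package example

import (
	"fmt"

	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/frontdoor/parse"
)

func frontDoorParserBehaviour() {
	// A response-style ID with a lower-cased "frontdoors" segment.
	raw := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontdoors/frontdoor1"

	// The strict parser is expected to reject the non-canonical segment casing.
	if _, err := parse.FrontDoorID(raw); err != nil {
		fmt.Printf("strict parse failed: %+v\n", err)
	}

	// The insensitive parser tolerates it, and ID() rewrites the segment as "frontDoors".
	if id, err := parse.FrontDoorIDInsensitively(raw); err == nil {
		fmt.Println(id.ID())
	}
}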
+func FrontDoorIDInsensitively(input string) (*FrontDoorId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := FrontDoorId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + // find the correct casing for the 'frontDoors' segment + frontDoorsKey := "frontDoors" + for key := range id.Path { + if strings.EqualFold(key, frontDoorsKey) { + frontDoorsKey = key + break + } + } + if resourceId.Name, err = id.PopSegment(frontDoorsKey); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/frontdoor/parse/front_door_test.go b/azurerm/internal/services/frontdoor/parse/front_door_test.go new file mode 100644 index 000000000000..745e886ce6b6 --- /dev/null +++ b/azurerm/internal/services/frontdoor/parse/front_door_test.go @@ -0,0 +1,229 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = FrontDoorId{} + +func TestFrontDoorIDFormatter(t *testing.T) { + actual := NewFrontDoorID("12345678-1234-9876-4563-123456789012", "resGroup1", "frontdoor1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestFrontDoorID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *FrontDoorId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1", + Expected: &FrontDoorId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "frontdoor1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/FRONTDOORS/FRONTDOOR1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := FrontDoorID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get 
one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} + +func TestFrontDoorIDInsensitively(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *FrontDoorId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1", + Expected: &FrontDoorId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "frontdoor1", + }, + }, + + { + // lower-cased segment names + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontdoors/frontdoor1", + Expected: &FrontDoorId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "frontdoor1", + }, + }, + + { + // upper-cased segment names + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/FRONTDOORS/frontdoor1", + Expected: &FrontDoorId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "frontdoor1", + }, + }, + + { + // mixed-cased segment names + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/FrOnTdOoRs/frontdoor1", + Expected: &FrontDoorId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "frontdoor1", + }, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := FrontDoorIDInsensitively(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/frontdoor/parse/frontdoor.go 
b/azurerm/internal/services/frontdoor/parse/frontdoor.go deleted file mode 100644 index 9172078ff58c..000000000000 --- a/azurerm/internal/services/frontdoor/parse/frontdoor.go +++ /dev/null @@ -1,83 +0,0 @@ -package parse - -import ( - "fmt" - "strings" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type FrontDoorId struct { - Name string - ResourceGroup string -} - -func NewFrontDoorID(resourceGroup, name string) FrontDoorId { - return FrontDoorId{ - Name: name, - ResourceGroup: resourceGroup, - } -} - -func FrontDoorID(input string) (*FrontDoorId, error) { - frontDoorId, id, err := parseFrontDoorChildResourceId(input) - if err != nil { - return nil, fmt.Errorf("parsing FrontDoor ID %q: %+v", input, err) - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return frontDoorId, nil -} - -func FrontDoorIDForImport(input string) (*FrontDoorId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("parsing FrontDoor ID %q: %+v", input, err) - } - - frontDoorId := FrontDoorId{ - ResourceGroup: id.ResourceGroup, - } - - if frontDoorId.Name, err = id.PopSegment("frontDoors"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &frontDoorId, nil -} - -func (id FrontDoorId) ID(subscriptionId string) string { - return fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/frontDoors/%s", subscriptionId, id.ResourceGroup, id.Name) -} - -func parseFrontDoorChildResourceId(input string) (*FrontDoorId, *azure.ResourceID, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, nil, err - } - - frontdoor := FrontDoorId{ - ResourceGroup: id.ResourceGroup, - } - - for key, value := range id.Path { - // In Azure API's should follow Postel's Law - where URI's should be insensitive for requests, - // but case-sensitive when referencing URI's in responses. Unfortunately the Networking API's - // treat both as case-insensitive - so until these API's follow the spec we need to identify - // the correct casing here. 
- if strings.EqualFold(key, "frontDoors") { - frontdoor.Name = value - delete(id.Path, key) - break - } - } - - return &frontdoor, id, nil -} diff --git a/azurerm/internal/services/frontdoor/parse/frontdoor_test.go b/azurerm/internal/services/frontdoor/parse/frontdoor_test.go deleted file mode 100644 index 3cc2cadcbb4e..000000000000 --- a/azurerm/internal/services/frontdoor/parse/frontdoor_test.go +++ /dev/null @@ -1,131 +0,0 @@ -package parse - -import ( - "testing" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" -) - -var _ resourceid.Formatter = FrontDoorId{} - -func TestFrontDoorIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - actual := NewFrontDoorID("group1", "frontDoor1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/frontDoors/frontDoor1" - if actual != expected { - t.Fatalf("Expected %q but got %q", expected, actual) - } -} - -func TestFrontDoorIDParser(t *testing.T) { - testData := []struct { - input string - expected *FrontDoorId - }{ - { - // lower case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/frontdoors/frontDoor1", - expected: &FrontDoorId{ - ResourceGroup: "group1", - Name: "frontDoor1", - }, - }, - { - // camel case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/frontDoors/frontDoor1", - expected: &FrontDoorId{ - ResourceGroup: "group1", - Name: "frontDoor1", - }, - }, - { - // title case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/Frontdoors/frontDoor1", - expected: &FrontDoorId{ - ResourceGroup: "group1", - Name: "frontDoor1", - }, - }, - { - // pascal case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/FrontDoors/frontDoor1", - expected: &FrontDoorId{ - ResourceGroup: "group1", - Name: "frontDoor1", - }, - }, - } - for _, test := range testData { - t.Logf("Testing %q..", test.input) - actual, err := FrontDoorID(test.input) - if err != nil && test.expected == nil { - continue - } else { - if err == nil && test.expected == nil { - t.Fatalf("Expected an error but didn't get one") - } else if err != nil && test.expected != nil { - t.Fatalf("Expected no error but got: %+v", err) - } - } - - if actual.ResourceGroup != test.expected.ResourceGroup { - t.Fatalf("Expected ResourceGroup to be %q but was %q", test.expected.ResourceGroup, actual.ResourceGroup) - } - - if actual.Name != test.expected.Name { - t.Fatalf("Expected name to be %q but was %q", test.expected.Name, actual.Name) - } - } -} - -func TestFrontDoorIDForImportParser(t *testing.T) { - testData := []struct { - input string - expected *FrontDoorId - }{ - { - // lower case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/frontdoors/frontDoor1", - expected: nil, - }, - { - // camel case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/frontDoors/frontDoor1", - expected: &FrontDoorId{ - ResourceGroup: "group1", - Name: "frontDoor1", - }, - }, - { - // title case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/Frontdoors/frontDoor1", - expected: nil, - }, - { - // pascal case - input: 
"/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/FrontDoors/frontDoor1", - expected: nil, - }, - } - for _, test := range testData { - t.Logf("Testing %q..", test.input) - actual, err := FrontDoorIDForImport(test.input) - if err != nil && test.expected == nil { - continue - } else { - if err == nil && test.expected == nil { - t.Fatalf("Expected an error but didn't get one") - } else if err != nil && test.expected != nil { - t.Fatalf("Expected no error but got: %+v", err) - } - } - - if actual.ResourceGroup != test.expected.ResourceGroup { - t.Fatalf("Expected ResourceGroup to be %q but was %q", test.expected.ResourceGroup, actual.ResourceGroup) - } - - if actual.Name != test.expected.Name { - t.Fatalf("Expected name to be %q but was %q", test.expected.Name, actual.Name) - } - } -} diff --git a/azurerm/internal/services/frontdoor/parse/frontend_endpoint.go b/azurerm/internal/services/frontdoor/parse/frontend_endpoint.go index 74c04f61b9c1..f11df7113c4c 100644 --- a/azurerm/internal/services/frontdoor/parse/frontend_endpoint.go +++ b/azurerm/internal/services/frontdoor/parse/frontend_endpoint.go @@ -1,67 +1,131 @@ package parse -import "fmt" +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) type FrontendEndpointId struct { - ResourceGroup string - FrontDoorName string - Name string + SubscriptionId string + ResourceGroup string + FrontDoorName string + Name string } -func NewFrontendEndpointID(id FrontDoorId, name string) FrontendEndpointId { +func NewFrontendEndpointID(subscriptionId, resourceGroup, frontDoorName, name string) FrontendEndpointId { return FrontendEndpointId{ - ResourceGroup: id.ResourceGroup, - FrontDoorName: id.Name, - Name: name, + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + FrontDoorName: frontDoorName, + Name: name, } } -func (id FrontendEndpointId) ID(subscriptionId string) string { - base := NewFrontDoorID(id.ResourceGroup, id.FrontDoorName).ID(subscriptionId) - return fmt.Sprintf("%s/frontendEndpoints/%s", base, id.Name) +func (id FrontendEndpointId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Front Door Name %q", id.FrontDoorName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Frontend Endpoint", segmentsStr) } -func FrontendEndpointID(input string) (*FrontendEndpointId, error) { - return parseFrontendEndpointID(input, false) +func (id FrontendEndpointId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/frontDoors/%s/frontendEndpoints/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.FrontDoorName, id.Name) } -func FrontendEndpointIDForImport(input string) (*FrontendEndpointId, error) { - return parseFrontendEndpointID(input, true) +// FrontendEndpointID parses a FrontendEndpoint ID into an FrontendEndpointId struct +func FrontendEndpointID(input string) (*FrontendEndpointId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := FrontendEndpointId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup 
== "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.FrontDoorName, err = id.PopSegment("frontDoors"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("frontendEndpoints"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil } -func parseFrontendEndpointID(input string, caseSensitive bool) (*FrontendEndpointId, error) { - frontDoorId, id, err := parseFrontDoorChildResourceId(input) +// FrontendEndpointIDInsensitively parses an FrontendEndpoint ID into an FrontendEndpointId struct, insensitively +// This should only be used to parse an ID for rewriting, the FrontendEndpointID +// method should be used instead for validation etc. +// +// Whilst this may seem strange, this enables Terraform have consistent casing +// which works around issues in Core, whilst handling broken API responses. +func FrontendEndpointIDInsensitively(input string) (*FrontendEndpointId, error) { + id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("parsing Frontend Endpoint ID %q: %+v", input, err) + return nil, err + } + + resourceId := FrontendEndpointId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, } - endpointId := FrontendEndpointId{ - ResourceGroup: frontDoorId.ResourceGroup, - FrontDoorName: frontDoorId.Name, + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - // The Azure API (per the ARM Spec/chatting with the ARM Team) should be following Postel's Law; - // where ID's are insensitive for Requests but sensitive in responses - but it's not. - // - // For us this means ID's should be sensitive at import time - but we have to work around these - // API bugs for the moment. 
- if caseSensitive { - if endpointId.Name, err = id.PopSegment("frontendEndpoints"); err != nil { - return nil, err + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + // find the correct casing for the 'frontDoors' segment + frontDoorsKey := "frontDoors" + for key := range id.Path { + if strings.EqualFold(key, frontDoorsKey) { + frontDoorsKey = key + break } - } else { - // https://github.com/Azure/azure-sdk-for-go/issues/6762 - // note: the ordering is important since the defined case (we want to error with) is frontendEndpoints - if endpointId.Name, err = id.PopSegment("FrontendEndpoints"); err != nil { - if endpointId.Name, err = id.PopSegment("frontendEndpoints"); err != nil { - return nil, err - } + } + if resourceId.FrontDoorName, err = id.PopSegment(frontDoorsKey); err != nil { + return nil, err + } + + // find the correct casing for the 'frontendEndpoints' segment + frontendEndpointsKey := "frontendEndpoints" + for key := range id.Path { + if strings.EqualFold(key, frontendEndpointsKey) { + frontendEndpointsKey = key + break } } + if resourceId.Name, err = id.PopSegment(frontendEndpointsKey); err != nil { + return nil, err + } if err := id.ValidateNoEmptySegments(input); err != nil { return nil, err } - return &endpointId, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/frontdoor/parse/frontend_endpoint_test.go b/azurerm/internal/services/frontdoor/parse/frontend_endpoint_test.go index ea51d53f3233..3f0f102298cb 100644 --- a/azurerm/internal/services/frontdoor/parse/frontend_endpoint_test.go +++ b/azurerm/internal/services/frontdoor/parse/frontend_endpoint_test.go @@ -1,5 +1,7 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" @@ -9,129 +11,254 @@ import ( var _ resourceid.Formatter = FrontendEndpointId{} func TestFrontendEndpointIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - frontDoorId := NewFrontDoorID("group1", "frontdoor1") - actual := NewFrontendEndpointID(frontDoorId, "endpoint1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/frontDoors/frontdoor1/frontendEndpoints/endpoint1" + actual := NewFrontendEndpointID("12345678-1234-9876-4563-123456789012", "resGroup1", "frontdoor1", "endpoint1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/frontendEndpoints/endpoint1" if actual != expected { t.Fatalf("Expected %q but got %q", expected, actual) } } -func TestFrontendEndpointIDParser(t *testing.T) { +func TestFrontendEndpointID(t *testing.T) { testData := []struct { - input string - expected *FrontendEndpointId + Input string + Error bool + Expected *FrontendEndpointId }{ + { - // lower case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/frontdoors/frontDoor1/frontendendpoints/endpoint1", - expected: nil, + // empty + Input: "", + Error: true, }, + { - // camel case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/frontDoors/frontDoor1/frontendEndpoints/endpoint1", - expected: &FrontendEndpointId{ - ResourceGroup: "group1", - FrontDoorName: "frontDoor1", - Name: "endpoint1", - }, + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value 
for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/", + Error: true, }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/frontendEndpoints/", + Error: true, + }, + { - // title case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/Frontdoors/frontDoor1/FrontendEndpoints/endpoint1", - expected: &FrontendEndpointId{ - ResourceGroup: "group1", - FrontDoorName: "frontDoor1", - Name: "endpoint1", + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/frontendEndpoints/endpoint1", + Expected: &FrontendEndpointId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FrontDoorName: "frontdoor1", + Name: "endpoint1", }, }, + { - // pascal case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/FrontDoors/frontDoor1/Frontendendpoints/endpoint1", - expected: nil, + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/FRONTDOORS/FRONTDOOR1/FRONTENDENDPOINTS/ENDPOINT1", + Error: true, }, } - for _, test := range testData { - t.Logf("Testing %q..", test.input) - actual, err := FrontendEndpointID(test.input) - if err != nil && test.expected == nil { - continue - } else { - if err == nil && test.expected == nil { - t.Fatalf("Expected an error but didn't get one") - } else if err != nil && test.expected != nil { - t.Fatalf("Expected no error but got: %+v", err) + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := FrontendEndpointID(v.Input) + if err != nil { + if v.Error { + continue } - } - if actual.ResourceGroup != test.expected.ResourceGroup { - t.Fatalf("Expected ResourceGroup to be %q but was %q", test.expected.ResourceGroup, actual.ResourceGroup) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.FrontDoorName != test.expected.FrontDoorName { - t.Fatalf("Expected FrontDoorName to be %q but was %q", test.expected.FrontDoorName, actual.FrontDoorName) + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.Name != test.expected.Name { - t.Fatalf("Expected name to be %q but was %q", test.expected.Name, actual.Name) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", 
v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.FrontDoorName != v.Expected.FrontDoorName { + t.Fatalf("Expected %q but got %q for FrontDoorName", v.Expected.FrontDoorName, actual.FrontDoorName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) } } } -func TestFrontendEndpointIDForImportParser(t *testing.T) { +func TestFrontendEndpointIDInsensitively(t *testing.T) { testData := []struct { - input string - expected *FrontendEndpointId + Input string + Error bool + Expected *FrontendEndpointId }{ + { - // lower case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/frontdoors/frontDoor1/frontendendpoints/endpoint1", - expected: nil, + // empty + Input: "", + Error: true, }, + { - // camel case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/frontDoors/frontDoor1/frontendEndpoints/endpoint1", - expected: &FrontendEndpointId{ - ResourceGroup: "group1", - FrontDoorName: "frontDoor1", - Name: "endpoint1", + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/frontendEndpoints/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/frontendEndpoints/endpoint1", + Expected: &FrontendEndpointId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FrontDoorName: "frontdoor1", + Name: "endpoint1", }, }, + { - // title case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/Frontdoors/frontDoor1/FrontendEndpoints/endpoint1", - expected: nil, + // lower-cased segment names + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontdoors/frontdoor1/frontendendpoints/endpoint1", + Expected: &FrontendEndpointId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FrontDoorName: "frontdoor1", + Name: "endpoint1", + }, + }, + + { + // upper-cased segment names + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/FRONTDOORS/frontdoor1/FRONTENDENDPOINTS/endpoint1", + Expected: &FrontendEndpointId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: 
"resGroup1", + FrontDoorName: "frontdoor1", + Name: "endpoint1", + }, }, + { - // pascal case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/FrontDoors/frontDoor1/Frontendendpoints/endpoint1", - expected: nil, + // mixed-cased segment names + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/FrOnTdOoRs/frontdoor1/FrOnTeNdEnDpOiNtS/endpoint1", + Expected: &FrontendEndpointId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FrontDoorName: "frontdoor1", + Name: "endpoint1", + }, }, } - for _, test := range testData { - t.Logf("Testing %q..", test.input) - actual, err := FrontendEndpointIDForImport(test.input) - if err != nil && test.expected == nil { - continue - } else { - if err == nil && test.expected == nil { - t.Fatalf("Expected an error but didn't get one") - } else if err != nil && test.expected != nil { - t.Fatalf("Expected no error but got: %+v", err) + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := FrontendEndpointIDInsensitively(v.Input) + if err != nil { + if v.Error { + continue } - } - if actual.ResourceGroup != test.expected.ResourceGroup { - t.Fatalf("Expected ResourceGroup to be %q but was %q", test.expected.ResourceGroup, actual.ResourceGroup) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.FrontDoorName != test.expected.FrontDoorName { - t.Fatalf("Expected FrontDoorName to be %q but was %q", test.expected.FrontDoorName, actual.FrontDoorName) + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.Name != test.expected.Name { - t.Fatalf("Expected name to be %q but was %q", test.expected.Name, actual.Name) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.FrontDoorName != v.Expected.FrontDoorName { + t.Fatalf("Expected %q but got %q for FrontDoorName", v.Expected.FrontDoorName, actual.FrontDoorName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) } } } diff --git a/azurerm/internal/services/frontdoor/parse/health_probe.go b/azurerm/internal/services/frontdoor/parse/health_probe.go index 1cc56b6865cb..924979b4f7af 100644 --- a/azurerm/internal/services/frontdoor/parse/health_probe.go +++ b/azurerm/internal/services/frontdoor/parse/health_probe.go @@ -1,48 +1,131 @@ package parse -import "fmt" +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) type HealthProbeId struct { - ResourceGroup string - FrontDoorName string - Name string + SubscriptionId string + ResourceGroup string + FrontDoorName string + HealthProbeSettingName string } -func NewHealthProbeID(id FrontDoorId, name string) HealthProbeId { +func NewHealthProbeID(subscriptionId, resourceGroup, frontDoorName, healthProbeSettingName string) HealthProbeId { return HealthProbeId{ - ResourceGroup: id.ResourceGroup, - FrontDoorName: id.Name, - Name: name, + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + FrontDoorName: frontDoorName, + 
HealthProbeSettingName: healthProbeSettingName, + } +} + +func (id HealthProbeId) String() string { + segments := []string{ + fmt.Sprintf("Health Probe Setting Name %q", id.HealthProbeSettingName), + fmt.Sprintf("Front Door Name %q", id.FrontDoorName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Health Probe", segmentsStr) } -func (id HealthProbeId) ID(subscriptionId string) string { - base := NewFrontDoorID(id.ResourceGroup, id.FrontDoorName).ID(subscriptionId) - return fmt.Sprintf("%s/healthProbeSettings/%s", base, id.Name) +func (id HealthProbeId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/frontDoors/%s/healthProbeSettings/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.FrontDoorName, id.HealthProbeSettingName) } +// HealthProbeID parses a HealthProbe ID into an HealthProbeId struct func HealthProbeID(input string) (*HealthProbeId, error) { - frontDoorId, id, err := parseFrontDoorChildResourceId(input) + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := HealthProbeId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.FrontDoorName, err = id.PopSegment("frontDoors"); err != nil { + return nil, err + } + if resourceId.HealthProbeSettingName, err = id.PopSegment("healthProbeSettings"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} + +// HealthProbeIDInsensitively parses an HealthProbe ID into an HealthProbeId struct, insensitively +// This should only be used to parse an ID for rewriting, the HealthProbeID +// method should be used instead for validation etc. +// +// Whilst this may seem strange, this enables Terraform have consistent casing +// which works around issues in Core, whilst handling broken API responses. 
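The two health probe parsers differ only in how strictly they treat segment casing, as the test tables below exercise. A short usage sketch, assuming the provider module is importable and that PopSegment matches segment names exactly (which is what the insensitive variant exists to work around):

package main

import (
	"fmt"

	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/frontdoor/parse"
)

func main() {
	// Canonical casing parses with the strict parser.
	strict := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/healthProbeSettings/probe1"
	if id, err := parse.HealthProbeID(strict); err == nil {
		fmt.Println(id.HealthProbeSettingName) // probe1
	}

	// Non-canonical segment casing is rejected by HealthProbeID but accepted
	// by HealthProbeIDInsensitively, which re-emits the canonical form via ID().
	loose := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontdoors/frontdoor1/healthprobesettings/probe1"
	if _, err := parse.HealthProbeID(loose); err != nil {
		fmt.Println("strict parser rejected non-canonical casing")
	}
	if id, err := parse.HealthProbeIDInsensitively(loose); err == nil {
		fmt.Println(id.ID()) // canonical frontDoors/healthProbeSettings casing
	}
}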
+func HealthProbeIDInsensitively(input string) (*HealthProbeId, error) { + id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("parsing FrontDoor Health Probe ID %q: %+v", input, err) + return nil, err + } + + resourceId := HealthProbeId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, } - probeId := HealthProbeId{ - ResourceGroup: frontDoorId.ResourceGroup, - FrontDoorName: frontDoorId.Name, + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - // https://github.com/Azure/azure-sdk-for-go/issues/6762 - // note: the ordering is important since the defined case (we want to error with) is healthProbeSettings - if probeId.Name, err = id.PopSegment("HealthProbeSettings"); err != nil { - if probeId.Name, err = id.PopSegment("healthProbeSettings"); err != nil { - return nil, err + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + // find the correct casing for the 'frontDoors' segment + frontDoorsKey := "frontDoors" + for key := range id.Path { + if strings.EqualFold(key, frontDoorsKey) { + frontDoorsKey = key + break } } + if resourceId.FrontDoorName, err = id.PopSegment(frontDoorsKey); err != nil { + return nil, err + } + + // find the correct casing for the 'healthProbeSettings' segment + healthProbeSettingsKey := "healthProbeSettings" + for key := range id.Path { + if strings.EqualFold(key, healthProbeSettingsKey) { + healthProbeSettingsKey = key + break + } + } + if resourceId.HealthProbeSettingName, err = id.PopSegment(healthProbeSettingsKey); err != nil { + return nil, err + } if err := id.ValidateNoEmptySegments(input); err != nil { return nil, err } - return &probeId, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/frontdoor/parse/health_probe_test.go b/azurerm/internal/services/frontdoor/parse/health_probe_test.go index 52951a31165a..cc19b8be39b1 100644 --- a/azurerm/internal/services/frontdoor/parse/health_probe_test.go +++ b/azurerm/internal/services/frontdoor/parse/health_probe_test.go @@ -1,5 +1,7 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" @@ -9,72 +11,254 @@ import ( var _ resourceid.Formatter = HealthProbeId{} func TestHealthProbeIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - frontDoorId := NewFrontDoorID("group1", "frontdoor1") - actual := NewHealthProbeID(frontDoorId, "probe1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/frontDoors/frontdoor1/healthProbeSettings/probe1" + actual := NewHealthProbeID("12345678-1234-9876-4563-123456789012", "resGroup1", "frontdoor1", "probe1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/healthProbeSettings/probe1" if actual != expected { t.Fatalf("Expected %q but got %q", expected, actual) } } -func TestHealthProbeIDParser(t *testing.T) { +func TestHealthProbeID(t *testing.T) { testData := []struct { - input string - expected *HealthProbeId + Input string + Error bool + Expected *HealthProbeId }{ + { - // lower case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/frontdoors/frontDoor1/healthprobesettings/probe1", - expected: nil, + // empty + Input: "", + Error: true, 
}, + { - // camel case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/frontDoors/frontDoor1/healthProbeSettings/probe1", - expected: &HealthProbeId{ - ResourceGroup: "group1", - FrontDoorName: "frontDoor1", - Name: "probe1", - }, + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/", + Error: true, + }, + + { + // missing HealthProbeSettingName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/", + Error: true, + }, + { - // title case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/Frontdoors/frontDoor1/HealthProbeSettings/probe1", - expected: &HealthProbeId{ - ResourceGroup: "group1", - FrontDoorName: "frontDoor1", - Name: "probe1", + // missing value for HealthProbeSettingName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/healthProbeSettings/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/healthProbeSettings/probe1", + Expected: &HealthProbeId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FrontDoorName: "frontdoor1", + HealthProbeSettingName: "probe1", }, }, + { - // pascal case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/FrontDoors/frontDoor1/Healthprobesettings/probe1", - expected: nil, + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/FRONTDOORS/FRONTDOOR1/HEALTHPROBESETTINGS/PROBE1", + Error: true, }, } - for _, test := range testData { - t.Logf("Testing %q..", test.input) - actual, err := HealthProbeID(test.input) - if err != nil && test.expected == nil { - continue - } else { - if err == nil && test.expected == nil { - t.Fatalf("Expected an error but didn't get one") - } else if err != nil && test.expected != nil { - t.Fatalf("Expected no error but got: %+v", err) + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := HealthProbeID(v.Input) + if err != nil { + if v.Error { + continue } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.ResourceGroup != test.expected.ResourceGroup { - t.Fatalf("Expected ResourceGroup to be %q but was %q", test.expected.ResourceGroup, actual.ResourceGroup) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } 
+ if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.FrontDoorName != v.Expected.FrontDoorName { + t.Fatalf("Expected %q but got %q for FrontDoorName", v.Expected.FrontDoorName, actual.FrontDoorName) + } + if actual.HealthProbeSettingName != v.Expected.HealthProbeSettingName { + t.Fatalf("Expected %q but got %q for HealthProbeSettingName", v.Expected.HealthProbeSettingName, actual.HealthProbeSettingName) } + } +} + +func TestHealthProbeIDInsensitively(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *HealthProbeId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, - if actual.FrontDoorName != test.expected.FrontDoorName { - t.Fatalf("Expected FrontDoorName to be %q but was %q", test.expected.FrontDoorName, actual.FrontDoorName) + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/", + Error: true, + }, + + { + // missing HealthProbeSettingName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/", + Error: true, + }, + + { + // missing value for HealthProbeSettingName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/healthProbeSettings/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/healthProbeSettings/probe1", + Expected: &HealthProbeId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FrontDoorName: "frontdoor1", + HealthProbeSettingName: "probe1", + }, + }, + + { + // lower-cased segment names + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontdoors/frontdoor1/healthprobesettings/probe1", + Expected: &HealthProbeId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FrontDoorName: "frontdoor1", + HealthProbeSettingName: "probe1", + }, + }, + + { + // upper-cased segment names + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/FRONTDOORS/frontdoor1/HEALTHPROBESETTINGS/probe1", + Expected: &HealthProbeId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FrontDoorName: "frontdoor1", + HealthProbeSettingName: "probe1", + }, + }, + + { + // mixed-cased segment names + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/FrOnTdOoRs/frontdoor1/HeAlThPrObEsEtTiNgS/probe1", + Expected: &HealthProbeId{ + SubscriptionId: 
"12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FrontDoorName: "frontdoor1", + HealthProbeSettingName: "probe1", + }, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := HealthProbeIDInsensitively(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.Name != test.expected.Name { - t.Fatalf("Expected name to be %q but was %q", test.expected.Name, actual.Name) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.FrontDoorName != v.Expected.FrontDoorName { + t.Fatalf("Expected %q but got %q for FrontDoorName", v.Expected.FrontDoorName, actual.FrontDoorName) + } + if actual.HealthProbeSettingName != v.Expected.HealthProbeSettingName { + t.Fatalf("Expected %q but got %q for HealthProbeSettingName", v.Expected.HealthProbeSettingName, actual.HealthProbeSettingName) } } } diff --git a/azurerm/internal/services/frontdoor/parse/load_balancing.go b/azurerm/internal/services/frontdoor/parse/load_balancing.go index 6e54be67e5ee..2ac04daedc97 100644 --- a/azurerm/internal/services/frontdoor/parse/load_balancing.go +++ b/azurerm/internal/services/frontdoor/parse/load_balancing.go @@ -1,48 +1,131 @@ package parse -import "fmt" +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) type LoadBalancingId struct { - ResourceGroup string - FrontDoorName string - Name string + SubscriptionId string + ResourceGroup string + FrontDoorName string + LoadBalancingSettingName string } -func NewLoadBalancingID(id FrontDoorId, name string) LoadBalancingId { +func NewLoadBalancingID(subscriptionId, resourceGroup, frontDoorName, loadBalancingSettingName string) LoadBalancingId { return LoadBalancingId{ - ResourceGroup: id.ResourceGroup, - FrontDoorName: id.Name, - Name: name, + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + FrontDoorName: frontDoorName, + LoadBalancingSettingName: loadBalancingSettingName, + } +} + +func (id LoadBalancingId) String() string { + segments := []string{ + fmt.Sprintf("Load Balancing Setting Name %q", id.LoadBalancingSettingName), + fmt.Sprintf("Front Door Name %q", id.FrontDoorName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Load Balancing", segmentsStr) } -func (id LoadBalancingId) ID(subscriptionId string) string { - base := NewFrontDoorID(id.ResourceGroup, id.FrontDoorName).ID(subscriptionId) - return fmt.Sprintf("%s/loadBalancingSettings/%s", base, id.Name) +func (id LoadBalancingId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/frontDoors/%s/loadBalancingSettings/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.FrontDoorName, id.LoadBalancingSettingName) } +// LoadBalancingID parses a LoadBalancing ID into an LoadBalancingId struct func LoadBalancingID(input string) (*LoadBalancingId, error) { - frontDoorId, id, err := parseFrontDoorChildResourceId(input) 
+ id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := LoadBalancingId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.FrontDoorName, err = id.PopSegment("frontDoors"); err != nil { + return nil, err + } + if resourceId.LoadBalancingSettingName, err = id.PopSegment("loadBalancingSettings"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} + +// LoadBalancingIDInsensitively parses an LoadBalancing ID into an LoadBalancingId struct, insensitively +// This should only be used to parse an ID for rewriting, the LoadBalancingID +// method should be used instead for validation etc. +// +// Whilst this may seem strange, this enables Terraform have consistent casing +// which works around issues in Core, whilst handling broken API responses. +func LoadBalancingIDInsensitively(input string) (*LoadBalancingId, error) { + id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("parsing FrontDoor Load Balancing ID %q: %+v", input, err) + return nil, err + } + + resourceId := LoadBalancingId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, } - loadBalancingId := LoadBalancingId{ - ResourceGroup: frontDoorId.ResourceGroup, - FrontDoorName: frontDoorId.Name, + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - // https://github.com/Azure/azure-sdk-for-go/issues/6762 - // note: the ordering is important since the defined case (we want to error with) is loadBalancingSettings - if loadBalancingId.Name, err = id.PopSegment("LoadBalancingSettings"); err != nil { - if loadBalancingId.Name, err = id.PopSegment("loadBalancingSettings"); err != nil { - return nil, err + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + // find the correct casing for the 'frontDoors' segment + frontDoorsKey := "frontDoors" + for key := range id.Path { + if strings.EqualFold(key, frontDoorsKey) { + frontDoorsKey = key + break } } + if resourceId.FrontDoorName, err = id.PopSegment(frontDoorsKey); err != nil { + return nil, err + } + + // find the correct casing for the 'loadBalancingSettings' segment + loadBalancingSettingsKey := "loadBalancingSettings" + for key := range id.Path { + if strings.EqualFold(key, loadBalancingSettingsKey) { + loadBalancingSettingsKey = key + break + } + } + if resourceId.LoadBalancingSettingName, err = id.PopSegment(loadBalancingSettingsKey); err != nil { + return nil, err + } if err := id.ValidateNoEmptySegments(input); err != nil { return nil, err } - return &loadBalancingId, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/frontdoor/parse/load_balancing_test.go b/azurerm/internal/services/frontdoor/parse/load_balancing_test.go index 375e5cc37c36..bd2f3544a52e 100644 --- a/azurerm/internal/services/frontdoor/parse/load_balancing_test.go +++ b/azurerm/internal/services/frontdoor/parse/load_balancing_test.go @@ -1,5 +1,7 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" @@ -9,72 +11,254 @@ import ( var _ 
resourceid.Formatter = LoadBalancingId{} func TestLoadBalancingIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - frontDoorId := NewFrontDoorID("group1", "frontdoor1") - actual := NewLoadBalancingID(frontDoorId, "setting1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/frontDoors/frontdoor1/loadBalancingSettings/setting1" + actual := NewLoadBalancingID("12345678-1234-9876-4563-123456789012", "resGroup1", "frontdoor1", "setting1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/loadBalancingSettings/setting1" if actual != expected { t.Fatalf("Expected %q but got %q", expected, actual) } } -func TestLoadBalancingIDParser(t *testing.T) { +func TestLoadBalancingID(t *testing.T) { testData := []struct { - input string - expected *LoadBalancingId + Input string + Error bool + Expected *LoadBalancingId }{ + { - // lower case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/frontdoors/frontDoor1/loadbalancingsettings/setting1", - expected: nil, + // empty + Input: "", + Error: true, }, + { - // camel case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/frontDoors/frontDoor1/loadBalancingSettings/setting1", - expected: &LoadBalancingId{ - ResourceGroup: "group1", - FrontDoorName: "frontDoor1", - Name: "setting1", - }, + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/", + Error: true, + }, + + { + // missing LoadBalancingSettingName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/", + Error: true, + }, + { - // title case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/Frontdoors/frontDoor1/LoadBalancingSettings/setting1", - expected: &LoadBalancingId{ - ResourceGroup: "group1", - FrontDoorName: "frontDoor1", - Name: "setting1", + // missing value for LoadBalancingSettingName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/loadBalancingSettings/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/loadBalancingSettings/setting1", + Expected: &LoadBalancingId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FrontDoorName: "frontdoor1", + LoadBalancingSettingName: "setting1", }, }, + { - // pascal case - input: 
"/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/FrontDoors/frontDoor1/Loadbalancingsettings/setting1", - expected: nil, + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/FRONTDOORS/FRONTDOOR1/LOADBALANCINGSETTINGS/SETTING1", + Error: true, }, } - for _, test := range testData { - t.Logf("Testing %q..", test.input) - actual, err := LoadBalancingID(test.input) - if err != nil && test.expected == nil { - continue - } else { - if err == nil && test.expected == nil { - t.Fatalf("Expected an error but didn't get one") - } else if err != nil && test.expected != nil { - t.Fatalf("Expected no error but got: %+v", err) + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := LoadBalancingID(v.Input) + if err != nil { + if v.Error { + continue } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.ResourceGroup != test.expected.ResourceGroup { - t.Fatalf("Expected ResourceGroup to be %q but was %q", test.expected.ResourceGroup, actual.ResourceGroup) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.FrontDoorName != v.Expected.FrontDoorName { + t.Fatalf("Expected %q but got %q for FrontDoorName", v.Expected.FrontDoorName, actual.FrontDoorName) + } + if actual.LoadBalancingSettingName != v.Expected.LoadBalancingSettingName { + t.Fatalf("Expected %q but got %q for LoadBalancingSettingName", v.Expected.LoadBalancingSettingName, actual.LoadBalancingSettingName) } + } +} + +func TestLoadBalancingIDInsensitively(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *LoadBalancingId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, - if actual.FrontDoorName != test.expected.FrontDoorName { - t.Fatalf("Expected FrontDoorName to be %q but was %q", test.expected.FrontDoorName, actual.FrontDoorName) + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/", + Error: true, + }, + + { + // missing LoadBalancingSettingName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/", + Error: true, + }, + + { + // missing value for LoadBalancingSettingName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/loadBalancingSettings/", + Error: true, + }, + + { + // valid + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/loadBalancingSettings/setting1", + Expected: &LoadBalancingId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FrontDoorName: "frontdoor1", + LoadBalancingSettingName: "setting1", + }, + }, + + { + // lower-cased segment names + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontdoors/frontdoor1/loadbalancingsettings/setting1", + Expected: &LoadBalancingId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FrontDoorName: "frontdoor1", + LoadBalancingSettingName: "setting1", + }, + }, + + { + // upper-cased segment names + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/FRONTDOORS/frontdoor1/LOADBALANCINGSETTINGS/setting1", + Expected: &LoadBalancingId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FrontDoorName: "frontdoor1", + LoadBalancingSettingName: "setting1", + }, + }, + + { + // mixed-cased segment names + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/FrOnTdOoRs/frontdoor1/LoAdBaLaNcInGsEtTiNgS/setting1", + Expected: &LoadBalancingId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FrontDoorName: "frontdoor1", + LoadBalancingSettingName: "setting1", + }, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := LoadBalancingIDInsensitively(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.Name != test.expected.Name { - t.Fatalf("Expected name to be %q but was %q", test.expected.Name, actual.Name) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.FrontDoorName != v.Expected.FrontDoorName { + t.Fatalf("Expected %q but got %q for FrontDoorName", v.Expected.FrontDoorName, actual.FrontDoorName) + } + if actual.LoadBalancingSettingName != v.Expected.LoadBalancingSettingName { + t.Fatalf("Expected %q but got %q for LoadBalancingSettingName", v.Expected.LoadBalancingSettingName, actual.LoadBalancingSettingName) } } } diff --git a/azurerm/internal/services/frontdoor/parse/routing_rule.go b/azurerm/internal/services/frontdoor/parse/routing_rule.go index 688606a496e8..785a07381e26 100644 --- a/azurerm/internal/services/frontdoor/parse/routing_rule.go +++ b/azurerm/internal/services/frontdoor/parse/routing_rule.go @@ -1,48 +1,131 @@ package parse -import "fmt" +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) type RoutingRuleId struct { - ResourceGroup string - FrontDoorName string - Name string + SubscriptionId string + ResourceGroup string + FrontDoorName string + Name string } -func NewRoutingRuleID(id FrontDoorId, name string) RoutingRuleId { +func 
NewRoutingRuleID(subscriptionId, resourceGroup, frontDoorName, name string) RoutingRuleId { return RoutingRuleId{ - ResourceGroup: id.ResourceGroup, - FrontDoorName: id.Name, - Name: name, + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + FrontDoorName: frontDoorName, + Name: name, + } +} + +func (id RoutingRuleId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Front Door Name %q", id.FrontDoorName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Routing Rule", segmentsStr) } -func (id RoutingRuleId) ID(subscriptionId string) string { - base := NewFrontDoorID(id.ResourceGroup, id.FrontDoorName).ID(subscriptionId) - return fmt.Sprintf("%s/routingRules/%s", base, id.Name) +func (id RoutingRuleId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/frontDoors/%s/routingRules/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.FrontDoorName, id.Name) } +// RoutingRuleID parses a RoutingRule ID into an RoutingRuleId struct func RoutingRuleID(input string) (*RoutingRuleId, error) { - frontDoorId, id, err := parseFrontDoorChildResourceId(input) + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := RoutingRuleId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.FrontDoorName, err = id.PopSegment("frontDoors"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("routingRules"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} + +// RoutingRuleIDInsensitively parses an RoutingRule ID into an RoutingRuleId struct, insensitively +// This should only be used to parse an ID for rewriting, the RoutingRuleID +// method should be used instead for validation etc. +// +// Whilst this may seem strange, this enables Terraform have consistent casing +// which works around issues in Core, whilst handling broken API responses. 
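That comment describes the intended split between the two generated parsers: the case-sensitive RoutingRuleID is the one validation and the resource code should call, while RoutingRuleIDInsensitively exists only so an ID that comes back from the API with broken segment casing can be re-parsed and then rewritten into its canonical form via ID(). A minimal sketch of that flow, written as a hypothetical test inside the parse package and assuming only the generated functions shown in this diff (the test name and file are illustrative, not part of the generated output):

package parse

import "testing"

// Hypothetical usage sketch (not one of the generated tests): shows why both
// parsers exist. The strict parser rejects non-canonical segment casing; the
// insensitive parser accepts it so the ID can be rewritten via ID().
func TestRoutingRuleIDRewriteSketch(t *testing.T) {
	// An ID as a broken API response might return it, with lower-cased segment names.
	raw := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontdoors/frontdoor1/routingrules/rule1"

	// Validation path: the case-sensitive parser refuses the lower-cased segments.
	if _, err := RoutingRuleID(raw); err == nil {
		t.Fatal("expected the strict parser to reject non-canonical casing")
	}

	// Rewrite path: the insensitive parser accepts them ...
	id, err := RoutingRuleIDInsensitively(raw)
	if err != nil {
		t.Fatalf("insensitive parse failed: %s", err)
	}

	// ... and ID() re-emits the ID with the canonical frontDoors / routingRules casing.
	expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/routingRules/rule1"
	if actual := id.ID(); actual != expected {
		t.Fatalf("Expected %q but got %q", expected, actual)
	}
}
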
+func RoutingRuleIDInsensitively(input string) (*RoutingRuleId, error) { + id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("parsing Routing Rule ID %q: %+v", input, err) + return nil, err + } + + resourceId := RoutingRuleId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, } - poolId := RoutingRuleId{ - ResourceGroup: frontDoorId.ResourceGroup, - FrontDoorName: frontDoorId.Name, + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - // API is broken - https://github.com/Azure/azure-sdk-for-go/issues/6762 - // note: the ordering is important since the defined case (we want to error with) is routingRules - if poolId.Name, err = id.PopSegment("RoutingRules"); err != nil { - if poolId.Name, err = id.PopSegment("routingRules"); err != nil { - return nil, err + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + // find the correct casing for the 'frontDoors' segment + frontDoorsKey := "frontDoors" + for key := range id.Path { + if strings.EqualFold(key, frontDoorsKey) { + frontDoorsKey = key + break } } + if resourceId.FrontDoorName, err = id.PopSegment(frontDoorsKey); err != nil { + return nil, err + } + + // find the correct casing for the 'routingRules' segment + routingRulesKey := "routingRules" + for key := range id.Path { + if strings.EqualFold(key, routingRulesKey) { + routingRulesKey = key + break + } + } + if resourceId.Name, err = id.PopSegment(routingRulesKey); err != nil { + return nil, err + } if err := id.ValidateNoEmptySegments(input); err != nil { return nil, err } - return &poolId, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/frontdoor/parse/routing_rule_test.go b/azurerm/internal/services/frontdoor/parse/routing_rule_test.go index 9a3e5abf95fd..4b1d86c41fa7 100644 --- a/azurerm/internal/services/frontdoor/parse/routing_rule_test.go +++ b/azurerm/internal/services/frontdoor/parse/routing_rule_test.go @@ -1,5 +1,7 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" @@ -9,72 +11,254 @@ import ( var _ resourceid.Formatter = RoutingRuleId{} func TestRoutingRuleIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - frontDoorId := NewFrontDoorID("group1", "frontdoor1") - actual := NewRoutingRuleID(frontDoorId, "rule1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/frontDoors/frontdoor1/routingRules/rule1" + actual := NewRoutingRuleID("12345678-1234-9876-4563-123456789012", "resGroup1", "frontdoor1", "rule1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/routingRules/rule1" if actual != expected { t.Fatalf("Expected %q but got %q", expected, actual) } } -func TestRoutingRuleIDParser(t *testing.T) { +func TestRoutingRuleID(t *testing.T) { testData := []struct { - input string - expected *RoutingRuleId + Input string + Error bool + Expected *RoutingRuleId }{ + { - // lower case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/frontdoors/frontDoor1/routingrules/rule1", - expected: nil, + // empty + Input: "", + Error: true, }, + { - // camel case - input: 
"/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/frontDoors/frontDoor1/routingRules/rule1", - expected: &RoutingRuleId{ - ResourceGroup: "group1", - FrontDoorName: "frontDoor1", - Name: "rule1", - }, + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/", + Error: true, + }, + { - // title case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/Frontdoors/frontDoor1/RoutingRules/rule1", - expected: &RoutingRuleId{ - ResourceGroup: "group1", - FrontDoorName: "frontDoor1", - Name: "rule1", + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/routingRules/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/routingRules/rule1", + Expected: &RoutingRuleId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FrontDoorName: "frontdoor1", + Name: "rule1", }, }, + { - // pascal case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/FrontDoors/frontDoor1/Routingrules/rule1", - expected: nil, + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/FRONTDOORS/FRONTDOOR1/ROUTINGRULES/RULE1", + Error: true, }, } - for _, test := range testData { - t.Logf("Testing %q..", test.input) - actual, err := RoutingRuleID(test.input) - if err != nil && test.expected == nil { - continue - } else { - if err == nil && test.expected == nil { - t.Fatalf("Expected an error but didn't get one") - } else if err != nil && test.expected != nil { - t.Fatalf("Expected no error but got: %+v", err) + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := RoutingRuleID(v.Input) + if err != nil { + if v.Error { + continue } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.ResourceGroup != test.expected.ResourceGroup { - t.Fatalf("Expected ResourceGroup to be %q but was %q", test.expected.ResourceGroup, actual.ResourceGroup) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, 
actual.ResourceGroup) + } + if actual.FrontDoorName != v.Expected.FrontDoorName { + t.Fatalf("Expected %q but got %q for FrontDoorName", v.Expected.FrontDoorName, actual.FrontDoorName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) } + } +} + +func TestRoutingRuleIDInsensitively(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *RoutingRuleId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, - if actual.FrontDoorName != test.expected.FrontDoorName { - t.Fatalf("Expected FrontDoorName to be %q but was %q", test.expected.FrontDoorName, actual.FrontDoorName) + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/routingRules/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/routingRules/rule1", + Expected: &RoutingRuleId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FrontDoorName: "frontdoor1", + Name: "rule1", + }, + }, + + { + // lower-cased segment names + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontdoors/frontdoor1/routingrules/rule1", + Expected: &RoutingRuleId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FrontDoorName: "frontdoor1", + Name: "rule1", + }, + }, + + { + // upper-cased segment names + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/FRONTDOORS/frontdoor1/ROUTINGRULES/rule1", + Expected: &RoutingRuleId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FrontDoorName: "frontdoor1", + Name: "rule1", + }, + }, + + { + // mixed-cased segment names + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/FrOnTdOoRs/frontdoor1/RoUtInGrUlEs/rule1", + Expected: &RoutingRuleId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FrontDoorName: "frontdoor1", + Name: "rule1", + }, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := RoutingRuleIDInsensitively(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + 
t.Fatal("Expect an error but didn't get one") } - if actual.Name != test.expected.Name { - t.Fatalf("Expected name to be %q but was %q", test.expected.Name, actual.Name) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.FrontDoorName != v.Expected.FrontDoorName { + t.Fatalf("Expected %q but got %q for FrontDoorName", v.Expected.FrontDoorName, actual.FrontDoorName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) } } } diff --git a/azurerm/internal/services/frontdoor/parse/web_application_firewall_policy.go b/azurerm/internal/services/frontdoor/parse/web_application_firewall_policy.go index 5a320251078e..a114f2f05710 100644 --- a/azurerm/internal/services/frontdoor/parse/web_application_firewall_policy.go +++ b/azurerm/internal/services/frontdoor/parse/web_application_firewall_policy.go @@ -1,38 +1,107 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type WebApplicationFirewallPolicyId struct { - ResourceGroup string - Name string + SubscriptionId string + ResourceGroup string + FrontDoorWebApplicationFirewallPolicyName string } -func NewWebApplicationFirewallPolicyID(resourceGroup, name string) WebApplicationFirewallPolicyId { +func NewWebApplicationFirewallPolicyID(subscriptionId, resourceGroup, frontDoorWebApplicationFirewallPolicyName string) WebApplicationFirewallPolicyId { return WebApplicationFirewallPolicyId{ - ResourceGroup: resourceGroup, - Name: name, + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + FrontDoorWebApplicationFirewallPolicyName: frontDoorWebApplicationFirewallPolicyName, + } +} + +func (id WebApplicationFirewallPolicyId) String() string { + segments := []string{ + fmt.Sprintf("Front Door Web Application Firewall Policy Name %q", id.FrontDoorWebApplicationFirewallPolicyName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Web Application Firewall Policy", segmentsStr) } -func (id WebApplicationFirewallPolicyId) ID(subscriptionId string) string { - return fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/frontDoorWebApplicationFirewallPolicies/%s", subscriptionId, id.ResourceGroup, id.Name) +func (id WebApplicationFirewallPolicyId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/frontDoorWebApplicationFirewallPolicies/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.FrontDoorWebApplicationFirewallPolicyName) } +// WebApplicationFirewallPolicyID parses a WebApplicationFirewallPolicy ID into an WebApplicationFirewallPolicyId struct func WebApplicationFirewallPolicyID(input string) (*WebApplicationFirewallPolicyId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("parsing Web Application Firewall Policy ID %q: %+v", input, err) + return nil, err + } + + resourceId := WebApplicationFirewallPolicyId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if 
resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.FrontDoorWebApplicationFirewallPolicyName, err = id.PopSegment("frontDoorWebApplicationFirewallPolicies"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} + +// WebApplicationFirewallPolicyIDInsensitively parses an WebApplicationFirewallPolicy ID into an WebApplicationFirewallPolicyId struct, insensitively +// This should only be used to parse an ID for rewriting, the WebApplicationFirewallPolicyID +// method should be used instead for validation etc. +// +// Whilst this may seem strange, this enables Terraform have consistent casing +// which works around issues in Core, whilst handling broken API responses. +func WebApplicationFirewallPolicyIDInsensitively(input string) (*WebApplicationFirewallPolicyId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := WebApplicationFirewallPolicyId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, } - policy := WebApplicationFirewallPolicyId{ - ResourceGroup: id.ResourceGroup, + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - if policy.Name, err = id.PopSegment("frontDoorWebApplicationFirewallPolicies"); err != nil { + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + // find the correct casing for the 'frontDoorWebApplicationFirewallPolicies' segment + frontDoorWebApplicationFirewallPoliciesKey := "frontDoorWebApplicationFirewallPolicies" + for key := range id.Path { + if strings.EqualFold(key, frontDoorWebApplicationFirewallPoliciesKey) { + frontDoorWebApplicationFirewallPoliciesKey = key + break + } + } + if resourceId.FrontDoorWebApplicationFirewallPolicyName, err = id.PopSegment(frontDoorWebApplicationFirewallPoliciesKey); err != nil { return nil, err } @@ -40,5 +109,5 @@ func WebApplicationFirewallPolicyID(input string) (*WebApplicationFirewallPolicy return nil, err } - return &policy, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/frontdoor/parse/web_application_firewall_policy_test.go b/azurerm/internal/services/frontdoor/parse/web_application_firewall_policy_test.go index ff7a93df1595..8c14e38aaf3e 100644 --- a/azurerm/internal/services/frontdoor/parse/web_application_firewall_policy_test.go +++ b/azurerm/internal/services/frontdoor/parse/web_application_firewall_policy_test.go @@ -1,5 +1,7 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" @@ -9,62 +11,219 @@ import ( var _ resourceid.Formatter = WebApplicationFirewallPolicyId{} func TestWebApplicationFirewallPolicyIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - actual := NewWebApplicationFirewallPolicyID("group1", "policy1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/frontDoorWebApplicationFirewallPolicies/policy1" + actual := NewWebApplicationFirewallPolicyID("12345678-1234-9876-4563-123456789012", "resGroup1", "policy1").ID() + expected := 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoorWebApplicationFirewallPolicies/policy1" if actual != expected { t.Fatalf("Expected %q but got %q", expected, actual) } } -func TestWebApplicationFirewallPolicyIDParser(t *testing.T) { +func TestWebApplicationFirewallPolicyID(t *testing.T) { testData := []struct { - input string - expected *WebApplicationFirewallPolicyId + Input string + Error bool + Expected *WebApplicationFirewallPolicyId }{ + { - // lower case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/frontdoorwebapplicationfirewallpolicies/policy1", - expected: nil, + // empty + Input: "", + Error: true, }, + { - // camel case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/frontDoorWebApplicationFirewallPolicies/policy1", - expected: &WebApplicationFirewallPolicyId{ - ResourceGroup: "group1", - Name: "policy1", - }, + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing FrontDoorWebApplicationFirewallPolicyName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for FrontDoorWebApplicationFirewallPolicyName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoorWebApplicationFirewallPolicies/", + Error: true, + }, + { - // title case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/FrontDoorWebApplicationFirewallPolicies/policy1", - expected: nil, + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoorWebApplicationFirewallPolicies/policy1", + Expected: &WebApplicationFirewallPolicyId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FrontDoorWebApplicationFirewallPolicyName: "policy1", + }, }, + { - // pascal case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/Frontdoorwebapplicationfirewallpolicies/policy1", - expected: nil, + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/FRONTDOORWEBAPPLICATIONFIREWALLPOLICIES/POLICY1", + Error: true, }, } - for _, test := range testData { - t.Logf("Testing %q..", test.input) - actual, err := WebApplicationFirewallPolicyID(test.input) - if err != nil && test.expected == nil { - continue - } else { - if err == nil && test.expected == nil { - t.Fatalf("Expected an error but didn't get one") - } else if err != nil && test.expected != nil { - t.Fatalf("Expected no error but got: %+v", err) + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := WebApplicationFirewallPolicyID(v.Input) + if err != nil { + if v.Error { + continue } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") 
} - if actual.ResourceGroup != test.expected.ResourceGroup { - t.Fatalf("Expected ResourceGroup to be %q but was %q", test.expected.ResourceGroup, actual.ResourceGroup) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.FrontDoorWebApplicationFirewallPolicyName != v.Expected.FrontDoorWebApplicationFirewallPolicyName { + t.Fatalf("Expected %q but got %q for FrontDoorWebApplicationFirewallPolicyName", v.Expected.FrontDoorWebApplicationFirewallPolicyName, actual.FrontDoorWebApplicationFirewallPolicyName) } + } +} - if actual.Name != test.expected.Name { - t.Fatalf("Expected name to be %q but was %q", test.expected.Name, actual.Name) +func TestWebApplicationFirewallPolicyIDInsensitively(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *WebApplicationFirewallPolicyId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing FrontDoorWebApplicationFirewallPolicyName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for FrontDoorWebApplicationFirewallPolicyName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoorWebApplicationFirewallPolicies/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoorWebApplicationFirewallPolicies/policy1", + Expected: &WebApplicationFirewallPolicyId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FrontDoorWebApplicationFirewallPolicyName: "policy1", + }, + }, + + { + // lower-cased segment names + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontdoorwebapplicationfirewallpolicies/policy1", + Expected: &WebApplicationFirewallPolicyId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FrontDoorWebApplicationFirewallPolicyName: "policy1", + }, + }, + + { + // upper-cased segment names + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/FRONTDOORWEBAPPLICATIONFIREWALLPOLICIES/policy1", + Expected: &WebApplicationFirewallPolicyId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + FrontDoorWebApplicationFirewallPolicyName: "policy1", + }, + }, + + { + // mixed-cased segment names + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/FrOnTdOoRwEbApPlIcAtIoNfIrEwAlLpOlIcIeS/policy1", + Expected: &WebApplicationFirewallPolicyId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + 
FrontDoorWebApplicationFirewallPolicyName: "policy1", + }, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := WebApplicationFirewallPolicyIDInsensitively(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.FrontDoorWebApplicationFirewallPolicyName != v.Expected.FrontDoorWebApplicationFirewallPolicyName { + t.Fatalf("Expected %q but got %q for FrontDoorWebApplicationFirewallPolicyName", v.Expected.FrontDoorWebApplicationFirewallPolicyName, actual.FrontDoorWebApplicationFirewallPolicyName) } } } diff --git a/azurerm/internal/services/frontdoor/registration.go b/azurerm/internal/services/frontdoor/registration.go index e2a62bdc11a6..3b2f4096e375 100644 --- a/azurerm/internal/services/frontdoor/registration.go +++ b/azurerm/internal/services/frontdoor/registration.go @@ -28,7 +28,8 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource { // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_frontdoor": resourceArmFrontDoor(), - "azurerm_frontdoor_firewall_policy": resourceArmFrontDoorFirewallPolicy(), - "azurerm_frontdoor_custom_https_configuration": resourceArmFrontDoorCustomHttpsConfiguration()} + "azurerm_frontdoor": resourceFrontDoor(), + "azurerm_frontdoor_firewall_policy": resourceFrontDoorFirewallPolicy(), + "azurerm_frontdoor_custom_https_configuration": resourceFrontDoorCustomHttpsConfiguration(), + } } diff --git a/azurerm/internal/services/frontdoor/resourceids.go b/azurerm/internal/services/frontdoor/resourceids.go new file mode 100644 index 000000000000..6c7e75bcb749 --- /dev/null +++ b/azurerm/internal/services/frontdoor/resourceids.go @@ -0,0 +1,9 @@ +package frontdoor + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=BackendPool -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/backendPools/pool1 -rewrite=true +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=FrontDoor -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1 -rewrite=true +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=FrontendEndpoint -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/frontendEndpoints/endpoint1 -rewrite=true +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=HealthProbe -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/healthProbeSettings/probe1 -rewrite=true +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=LoadBalancing 
-id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/loadBalancingSettings/setting1 -rewrite=true +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=RoutingRule -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/routingRules/rule1 -rewrite=true +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=WebApplicationFirewallPolicy -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoorWebApplicationFirewallPolicies/policy1 -rewrite=true diff --git a/azurerm/internal/services/frontdoor/tests/frontdoor_custom_https_configuration_resource_test.go b/azurerm/internal/services/frontdoor/tests/frontdoor_custom_https_configuration_resource_test.go deleted file mode 100644 index d9f6a99ab324..000000000000 --- a/azurerm/internal/services/frontdoor/tests/frontdoor_custom_https_configuration_resource_test.go +++ /dev/null @@ -1,161 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/frontdoor/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMFrontDoorCustomHttpsConfiguration_CustomHttps(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_frontdoor_custom_https_configuration", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFrontDoorDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFrontDoorCustomHttpsConfiguration_CustomHttpsEnabled(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFrontDoorCustomHttpsConfigurationExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "custom_https_provisioning_enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "custom_https_configuration.0.certificate_source", "FrontDoor"), - ), - }, - { - Config: testAccAzureRMFrontDoorCustomHttpsConfiguration_CustomHttpsDisabled(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFrontDoorCustomHttpsConfigurationExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "custom_https_provisioning_enabled", "false"), - ), - }, - }, - }) -} - -func testCheckAzureRMFrontDoorCustomHttpsConfigurationExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Frontdoor.FrontDoorsFrontendClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Front Door Custom Https Configuration not found: %s", resourceName) - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - id, err := parse.FrontendEndpointID(rs.Primary.Attributes["frontend_endpoint_id"]) - if err != nil { - return fmt.Errorf("Bad: cannot parse frontend_endpoint_id for %q", resourceName) - } - - resp, err := 
client.Get(ctx, resourceGroup, id.FrontDoorName, id.Name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Frontend Endpoint %q (Front Door %q / Resource Group %q) does not exist", id.Name, id.FrontDoorName, resourceGroup) - } - return fmt.Errorf("Bad: Get on FrontDoorsFrontendClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMFrontDoorCustomHttpsConfiguration_CustomHttpsEnabled(data acceptance.TestData) string { - template := testAccAzureRMFrontDoorCustomHttpsConfiguration_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_frontdoor_custom_https_configuration" "test" { - frontend_endpoint_id = azurerm_frontdoor.test.frontend_endpoint[0].id - resource_group_name = azurerm_resource_group.test.name - custom_https_provisioning_enabled = true - - custom_https_configuration { - certificate_source = "FrontDoor" - } -} -`, template) -} - -func testAccAzureRMFrontDoorCustomHttpsConfiguration_CustomHttpsDisabled(data acceptance.TestData) string { - template := testAccAzureRMFrontDoorCustomHttpsConfiguration_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_frontdoor_custom_https_configuration" "test" { - frontend_endpoint_id = azurerm_frontdoor.test.frontend_endpoint[0].id - resource_group_name = azurerm_resource_group.test.name - custom_https_provisioning_enabled = false -} -`, template) -} - -func testAccAzureRMFrontDoorCustomHttpsConfiguration_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-frontdoor-%d" - location = "%s" -} - -locals { - backend_name = "backend-bing-custom" - endpoint_name = "frontend-endpoint-custom" - health_probe_name = "health-probe-custom" - load_balancing_name = "load-balancing-setting-custom" -} - -resource "azurerm_frontdoor" "test" { - name = "acctest-FD-%d" - resource_group_name = azurerm_resource_group.test.name - enforce_backend_pools_certificate_name_check = false - - routing_rule { - name = "routing-rule" - accepted_protocols = ["Http", "Https"] - patterns_to_match = ["/*"] - frontend_endpoints = [local.endpoint_name] - - forwarding_configuration { - forwarding_protocol = "MatchRequest" - backend_pool_name = local.backend_name - } - } - - backend_pool_load_balancing { - name = local.load_balancing_name - } - - backend_pool_health_probe { - name = local.health_probe_name - } - - backend_pool { - name = local.backend_name - backend { - host_header = "www.bing.com" - address = "www.bing.com" - http_port = 80 - https_port = 443 - } - - load_balancing_name = local.load_balancing_name - health_probe_name = local.health_probe_name - } - - frontend_endpoint { - name = local.endpoint_name - host_name = "acctest-FD-%d.azurefd.net" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/frontdoor/tests/frontdoor_firewall_policy_resource_test.go b/azurerm/internal/services/frontdoor/tests/frontdoor_firewall_policy_resource_test.go deleted file mode 100644 index bc35caa7973f..000000000000 --- a/azurerm/internal/services/frontdoor/tests/frontdoor_firewall_policy_resource_test.go +++ /dev/null @@ -1,414 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMFrontDoorFirewallPolicy_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_frontdoor_firewall_policy", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFrontDoorFirewallPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFrontDoorFirewallPolicy_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFrontDoorFirewallPolicyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", fmt.Sprintf("testAccFrontDoorWAF%d", data.RandomInteger)), - resource.TestCheckResourceAttr(data.ResourceName, "mode", "Prevention"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMFrontDoorFirewallPolicy_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_frontdoor_firewall_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFrontDoorFirewallPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFrontDoorFirewallPolicy_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFrontDoorFirewallPolicyExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMFrontDoorFirewallPolicy_requiresImport), - }, - }) -} - -func TestAccAzureRMFrontDoorFirewallPolicy_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_frontdoor_firewall_policy", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFrontDoorFirewallPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFrontDoorFirewallPolicy_update(data, false), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFrontDoorFirewallPolicyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", fmt.Sprintf("testAccFrontDoorWAF%d", data.RandomInteger)), - resource.TestCheckResourceAttr(data.ResourceName, "mode", "Prevention"), - ), - }, - { - Config: testAccAzureRMFrontDoorFirewallPolicy_update(data, true), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFrontDoorFirewallPolicyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", fmt.Sprintf("testAccFrontDoorWAF%d", data.RandomInteger)), - resource.TestCheckResourceAttr(data.ResourceName, "mode", "Prevention"), - resource.TestCheckResourceAttr(data.ResourceName, "custom_rule.1.name", "Rule2"), - resource.TestCheckResourceAttr(data.ResourceName, "custom_rule.2.name", "Rule3"), - ), - }, - { - Config: testAccAzureRMFrontDoorFirewallPolicy_update(data, false), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFrontDoorFirewallPolicyExists(data.ResourceName), - testCheckAzureRMFrontDoorFirewallPolicyAttrNotExists(data.ResourceName, "custom_rule.1.name"), - resource.TestCheckResourceAttr(data.ResourceName, "name", fmt.Sprintf("testAccFrontDoorWAF%d", data.RandomInteger)), - resource.TestCheckResourceAttr(data.ResourceName, "mode", "Prevention"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMFrontDoorFirewallPolicy_complete(t *testing.T) { - data := 
acceptance.BuildTestData(t, "azurerm_frontdoor_firewall_policy", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFrontDoorFirewallPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFrontDoorFirewallPolicy_update(data, true), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFrontDoorFirewallPolicyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", fmt.Sprintf("testAccFrontDoorWAF%d", data.RandomInteger)), - resource.TestCheckResourceAttr(data.ResourceName, "mode", "Prevention"), - resource.TestCheckResourceAttr(data.ResourceName, "redirect_url", "https://www.contoso.com"), - resource.TestCheckResourceAttr(data.ResourceName, "custom_block_response_status_code", "403"), - resource.TestCheckResourceAttr(data.ResourceName, "custom_rule.0.name", "Rule1"), - resource.TestCheckResourceAttr(data.ResourceName, "custom_rule.1.name", "Rule2"), - resource.TestCheckResourceAttr(data.ResourceName, "managed_rule.0.type", "DefaultRuleSet"), - resource.TestCheckResourceAttr(data.ResourceName, "managed_rule.0.exclusion.0.match_variable", "QueryStringArgNames"), - resource.TestCheckResourceAttr(data.ResourceName, "managed_rule.0.override.1.exclusion.0.selector", "really_not_suspicious"), - resource.TestCheckResourceAttr(data.ResourceName, "managed_rule.0.override.1.rule.0.exclusion.0.selector", "innocent"), - resource.TestCheckResourceAttr(data.ResourceName, "managed_rule.1.type", "Microsoft_BotManagerRuleSet"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMFrontDoorFirewallPolicyExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Frontdoor.FrontDoorsPolicyClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Front Door Firewall Policy not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - if resp, err := client.Get(ctx, resourceGroup, name); err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Front Door Firewall Policy %q (Resource Group %q) does not exist", name, resourceGroup) - } - return fmt.Errorf("Bad: Get on FrontDoorsPolicyClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMFrontDoorFirewallPolicyDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Frontdoor.FrontDoorsPolicyClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_frontdoor_firewall_policy" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - if resp, err := client.Get(ctx, resourceGroup, name); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Get on FrontDoorsPolicyClient: %+v", err) - } - } - - return nil - } - - return nil -} - -func testCheckAzureRMFrontDoorFirewallPolicyAttrNotExists(name string, attribute string) resource.TestCheckFunc { - return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[name] - if !ok { - return fmt.Errorf("Not found: %s", name) - } - - if testAttr := 
rs.Primary.Attributes[attribute]; testAttr != "" { - return fmt.Errorf("Attribute still exists: %s", attribute) - } - - return nil - } -} - -func testAccAzureRMFrontDoorFirewallPolicy_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "testaccRG-%d" - location = "%s" -} - -resource "azurerm_frontdoor_firewall_policy" "test" { - name = "testAccFrontDoorWAF%[1]d" - resource_group_name = azurerm_resource_group.test.name -} -`, data.RandomInteger, data.Locations.Primary) -} - -func testAccAzureRMFrontDoorFirewallPolicy_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMFrontDoorFirewallPolicy_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_frontdoor_firewall_policy" "import" { - name = azurerm_frontdoor_firewall_policy.test.name - resource_group_name = azurerm_frontdoor_firewall_policy.test.resource_group_name -} -`, template) -} - -func testAccAzureRMFrontDoorFirewallPolicy_update(data acceptance.TestData, update bool) string { - if update { - return testAccAzureRMFrontDoorFirewallPolicy_updated(data) - } - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "testaccRG-%d" - location = "%s" -} - -resource "azurerm_frontdoor_firewall_policy" "test" { - name = "testAccFrontDoorWAF%[1]d" - resource_group_name = azurerm_resource_group.test.name - enabled = true - mode = "Prevention" - redirect_url = "https://www.contoso.com" - custom_block_response_status_code = 403 - custom_block_response_body = "PGh0bWw+CjxoZWFkZXI+PHRpdGxlPkhlbGxvPC90aXRsZT48L2hlYWRlcj4KPGJvZHk+CkhlbGxvIHdvcmxkCjwvYm9keT4KPC9odG1sPg==" - - custom_rule { - name = "Rule1" - enabled = true - priority = 1 - rate_limit_duration_in_minutes = 1 - rate_limit_threshold = 10 - type = "MatchRule" - action = "Block" - - match_condition { - match_variable = "RemoteAddr" - operator = "IPMatch" - negation_condition = false - match_values = ["192.168.1.0/24", "10.0.0.0/24"] - } - } - - managed_rule { - type = "DefaultRuleSet" - version = "preview-0.1" - - override { - rule_group_name = "PHP" - - rule { - rule_id = "933111" - enabled = false - action = "Block" - } - } - } - - managed_rule { - type = "BotProtection" - version = "preview-0.1" - } -} -`, data.RandomInteger, data.Locations.Primary) -} - -func testAccAzureRMFrontDoorFirewallPolicy_updated(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "testaccRG-%d" - location = "%[2]s" -} - -resource "azurerm_frontdoor_firewall_policy" "test" { - name = "testAccFrontDoorWAF%[1]d" - resource_group_name = azurerm_resource_group.test.name - enabled = true - mode = "Prevention" - redirect_url = "https://www.contoso.com" - custom_block_response_status_code = 403 - custom_block_response_body = "PGh0bWw+CjxoZWFkZXI+PHRpdGxlPkhlbGxvPC90aXRsZT48L2hlYWRlcj4KPGJvZHk+CkhlbGxvIHdvcmxkCjwvYm9keT4KPC9odG1sPg==" - - custom_rule { - name = "Rule1" - enabled = true - priority = 1 - rate_limit_duration_in_minutes = 1 - rate_limit_threshold = 10 - type = "MatchRule" - action = "Block" - - match_condition { - match_variable = "RemoteAddr" - operator = "IPMatch" - negation_condition = false - match_values = ["192.168.1.0/24", "10.0.0.0/24"] - } - } - - custom_rule { - name = "Rule2" - enabled = true - priority = 2 - rate_limit_duration_in_minutes = 1 - rate_limit_threshold = 10 - type = "MatchRule" - action = 
"Block" - - match_condition { - match_variable = "RemoteAddr" - operator = "IPMatch" - negation_condition = false - match_values = ["192.168.1.0/24"] - } - - match_condition { - match_variable = "RequestHeader" - selector = "UserAgent" - operator = "Contains" - negation_condition = false - match_values = ["windows"] - transforms = ["Lowercase", "Trim"] - } - } - - custom_rule { - name = "Rule3" - enabled = true - priority = 3 - rate_limit_duration_in_minutes = 1 - rate_limit_threshold = 10 - type = "MatchRule" - action = "Block" - - match_condition { - match_variable = "SocketAddr" - operator = "IPMatch" - negation_condition = false - match_values = ["192.168.1.0/24"] - } - - match_condition { - match_variable = "RequestHeader" - selector = "UserAgent" - operator = "Contains" - negation_condition = false - match_values = ["windows"] - transforms = ["Lowercase", "Trim"] - } - } - - managed_rule { - type = "DefaultRuleSet" - version = "1.0" - - exclusion { - match_variable = "QueryStringArgNames" - operator = "Equals" - selector = "not_suspicious" - } - - override { - rule_group_name = "PHP" - - rule { - rule_id = "933100" - enabled = false - action = "Block" - } - } - - override { - rule_group_name = "SQLI" - - exclusion { - match_variable = "QueryStringArgNames" - operator = "Equals" - selector = "really_not_suspicious" - } - - rule { - rule_id = "942200" - action = "Block" - - exclusion { - match_variable = "QueryStringArgNames" - operator = "Equals" - selector = "innocent" - } - } - } - } - - managed_rule { - type = "Microsoft_BotManagerRuleSet" - version = "1.0" - } -} -`, data.RandomInteger, data.Locations.Primary) -} diff --git a/azurerm/internal/services/frontdoor/tests/frontdoor_resource_test.go b/azurerm/internal/services/frontdoor/tests/frontdoor_resource_test.go deleted file mode 100644 index 669e1ffa8c3c..000000000000 --- a/azurerm/internal/services/frontdoor/tests/frontdoor_resource_test.go +++ /dev/null @@ -1,1044 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMFrontDoor_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_frontdoor", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFrontDoorDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFrontDoor_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFrontDoorExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "backend_pool_health_probe.0.enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "backend_pool_health_probe.0.probe_method", "GET"), - ), - }, - { - Config: testAccAzureRMFrontDoor_basicDisabled(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFrontDoorExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "backend_pool_health_probe.0.enabled", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "backend_pool_health_probe.0.probe_method", "HEAD"), - ), - }, - data.ImportStep(), - }, - }) -} - -// remove in 3.0 -func TestAccAzureRMFrontDoor_global(t 
*testing.T) { - data := acceptance.BuildTestData(t, "azurerm_frontdoor", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFrontDoorDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFrontDoor_global(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFrontDoorExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "location", "global"), - ), - ExpectNonEmptyPlan: true, - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMFrontDoor_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_frontdoor", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFrontDoorDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFrontDoor_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFrontDoorExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMFrontDoor_requiresImport), - }, - }) -} - -func TestAccAzureRMFrontDoor_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_frontdoor", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFrontDoorDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFrontDoor_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFrontDoorExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMFrontDoor_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFrontDoorExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMFrontDoor_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFrontDoorExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMFrontDoor_multiplePools(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_frontdoor", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFrontDoorDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFrontDoor_multiplePools(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFrontDoorExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "backend_pool.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "backend_pool_health_probe.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "backend_pool_load_balancing.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "routing_rule.#", "2"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMFrontDoor_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_frontdoor", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFrontDoorDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFrontDoor_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFrontDoorExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMFrontDoor_waf(t *testing.T) { - data := 
acceptance.BuildTestData(t, "azurerm_frontdoor", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFrontDoorDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFrontDoor_waf(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFrontDoorExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMFrontDoor_EnableDisableCache(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_frontdoor", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFrontDoorDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFrontDoor_EnableCache(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFrontDoorExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "routing_rule.0.forwarding_configuration.0.cache_enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "routing_rule.0.forwarding_configuration.0.cache_use_dynamic_compression", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "routing_rule.0.forwarding_configuration.0.cache_query_parameter_strip_directive", "StripAll"), - ), - }, - { - Config: testAccAzureRMFrontDoor_DisableCache(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFrontDoorExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "routing_rule.0.forwarding_configuration.0.cache_enabled", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "routing_rule.0.forwarding_configuration.0.cache_use_dynamic_compression", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "routing_rule.0.forwarding_configuration.0.cache_query_parameter_strip_directive", "StripAll"), - ), - }, - { - Config: testAccAzureRMFrontDoor_EnableCache(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFrontDoorExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "routing_rule.0.forwarding_configuration.0.cache_enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "routing_rule.0.forwarding_configuration.0.cache_use_dynamic_compression", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "routing_rule.0.forwarding_configuration.0.cache_query_parameter_strip_directive", "StripAll"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMFrontDoor_CustomHttps(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_frontdoor", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFrontDoorDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFrontDoor_CustomHttpsEnabled(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFrontDoorExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "frontend_endpoint.0.custom_https_provisioning_enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "frontend_endpoint.0.custom_https_configuration.0.certificate_source", "FrontDoor"), - resource.TestCheckResourceAttr(data.ResourceName, "frontend_endpoint.0.custom_https_configuration.0.minimum_tls_version", "1.2"), - resource.TestCheckResourceAttr(data.ResourceName, 
"frontend_endpoint.0.custom_https_configuration.0.provisioning_state", "Enabled"), - resource.TestCheckResourceAttr(data.ResourceName, "frontend_endpoint.0.custom_https_configuration.0.provisioning_substate", "CertificateDeployed"), - ), - }, - { - Config: testAccAzureRMFrontDoor_CustomHttpsDisabled(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFrontDoorExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "frontend_endpoint.0.custom_https_provisioning_enabled", "false"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMFrontDoorExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Frontdoor.FrontDoorsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Front Door not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - if resp, err := client.Get(ctx, resourceGroup, name); err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Front Door %q (Resource Group %q) does not exist", name, resourceGroup) - } - return fmt.Errorf("Bad: Get on FrontDoorsClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMFrontDoorDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Frontdoor.FrontDoorsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_frontdoor" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - if resp, err := client.Get(ctx, resourceGroup, name); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Get on FrontDoorsClient: %+v", err) - } - } - - return nil - } - - return nil -} - -func testAccAzureRMFrontDoor_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-frontdoor-%d" - location = "%s" -} - -locals { - backend_name = "backend-bing" - endpoint_name = "frontend-endpoint" - health_probe_name = "health-probe" - load_balancing_name = "load-balancing-setting" -} - -resource "azurerm_frontdoor" "test" { - name = "acctest-FD-%d" - resource_group_name = azurerm_resource_group.test.name - enforce_backend_pools_certificate_name_check = false - - routing_rule { - name = "routing-rule" - accepted_protocols = ["Http", "Https"] - patterns_to_match = ["/*"] - frontend_endpoints = [local.endpoint_name] - forwarding_configuration { - forwarding_protocol = "MatchRequest" - backend_pool_name = local.backend_name - } - } - - backend_pool_load_balancing { - name = local.load_balancing_name - } - - backend_pool_health_probe { - name = local.health_probe_name - } - - backend_pool { - name = local.backend_name - backend { - host_header = "www.bing.com" - address = "www.bing.com" - http_port = 80 - https_port = 443 - } - - load_balancing_name = local.load_balancing_name - health_probe_name = local.health_probe_name - } - - frontend_endpoint { - name = local.endpoint_name - host_name = "acctest-FD-%d.azurefd.net" - custom_https_provisioning_enabled = false - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, 
data.RandomInteger) -} - -func testAccAzureRMFrontDoor_basicDisabled(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-frontdoor-%d" - location = "%s" -} - -locals { - backend_name = "backend-bing" - endpoint_name = "frontend-endpoint" - health_probe_name = "health-probe" - load_balancing_name = "load-balancing-setting" -} - -resource "azurerm_frontdoor" "test" { - name = "acctest-FD-%d" - resource_group_name = azurerm_resource_group.test.name - enforce_backend_pools_certificate_name_check = false - - routing_rule { - name = "routing-rule" - accepted_protocols = ["Http", "Https"] - patterns_to_match = ["/*"] - frontend_endpoints = [local.endpoint_name] - forwarding_configuration { - forwarding_protocol = "MatchRequest" - backend_pool_name = local.backend_name - } - } - - backend_pool_load_balancing { - name = local.load_balancing_name - } - - backend_pool_health_probe { - name = local.health_probe_name - enabled = false - probe_method = "HEAD" - } - - backend_pool { - name = local.backend_name - backend { - host_header = "www.bing.com" - address = "www.bing.com" - http_port = 80 - https_port = 443 - } - - load_balancing_name = local.load_balancing_name - health_probe_name = local.health_probe_name - } - - frontend_endpoint { - name = local.endpoint_name - host_name = "acctest-FD-%d.azurefd.net" - custom_https_provisioning_enabled = false - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -// remove in 3.0 -func testAccAzureRMFrontDoor_global(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-frontdoor-%d" - location = "%s" -} - -locals { - backend_name = "backend-bing" - endpoint_name = "frontend-endpoint" - health_probe_name = "health-probe" - load_balancing_name = "load-balancing-setting" -} - -resource "azurerm_frontdoor" "test" { - name = "acctest-FD-%d" - resource_group_name = azurerm_resource_group.test.name - location = "%s" - enforce_backend_pools_certificate_name_check = false - - routing_rule { - name = "routing-rule" - accepted_protocols = ["Http", "Https"] - patterns_to_match = ["/*"] - frontend_endpoints = [local.endpoint_name] - forwarding_configuration { - forwarding_protocol = "MatchRequest" - backend_pool_name = local.backend_name - } - } - - backend_pool_load_balancing { - name = local.load_balancing_name - } - - backend_pool_health_probe { - name = local.health_probe_name - } - - backend_pool { - name = local.backend_name - backend { - host_header = "www.bing.com" - address = "www.bing.com" - http_port = 80 - https_port = 443 - } - - load_balancing_name = local.load_balancing_name - health_probe_name = local.health_probe_name - } - - frontend_endpoint { - name = local.endpoint_name - host_name = "acctest-FD-%d.azurefd.net" - custom_https_provisioning_enabled = false - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMFrontDoor_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMFrontDoor_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_frontdoor" "import" { - name = azurerm_frontdoor.test.name - resource_group_name = azurerm_frontdoor.test.resource_group_name - enforce_backend_pools_certificate_name_check = azurerm_frontdoor.test.enforce_backend_pools_certificate_name_check - 
- routing_rule { - name = "routing-rule" - accepted_protocols = ["Http", "Https"] - patterns_to_match = ["/*"] - frontend_endpoints = [local.endpoint_name] - forwarding_configuration { - forwarding_protocol = "MatchRequest" - backend_pool_name = local.backend_name - } - } - - backend_pool_load_balancing { - name = local.load_balancing_name - } - - backend_pool_health_probe { - name = local.health_probe_name - } - - backend_pool { - name = local.backend_name - backend { - host_header = "www.bing.com" - address = "www.bing.com" - http_port = 80 - https_port = 443 - } - - load_balancing_name = local.load_balancing_name - health_probe_name = local.health_probe_name - } - - frontend_endpoint { - name = local.endpoint_name - host_name = "acctest-FD-%d.azurefd.net" - custom_https_provisioning_enabled = false - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMFrontDoor_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-frontdoor-%d" - location = "%s" -} - -locals { - backend_name = "backend-bing" - endpoint_name = "frontend-endpoint" - health_probe_name = "health-probe" - load_balancing_name = "load-balancing-setting" -} - -resource "azurerm_frontdoor" "test" { - name = "acctest-FD-%d" - resource_group_name = azurerm_resource_group.test.name - enforce_backend_pools_certificate_name_check = false - backend_pools_send_receive_timeout_seconds = 45 - - routing_rule { - name = "routing-rule" - accepted_protocols = ["Http", "Https"] - patterns_to_match = ["/*"] - frontend_endpoints = [local.endpoint_name] - forwarding_configuration { - forwarding_protocol = "MatchRequest" - backend_pool_name = local.backend_name - } - } - - backend_pool_load_balancing { - name = local.load_balancing_name - } - - backend_pool_health_probe { - name = local.health_probe_name - } - - backend_pool { - name = local.backend_name - backend { - host_header = "www.bing.com" - address = "www.bing.com" - http_port = 80 - https_port = 443 - } - - load_balancing_name = local.load_balancing_name - health_probe_name = local.health_probe_name - } - - frontend_endpoint { - name = local.endpoint_name - host_name = "acctest-FD-%d.azurefd.net" - custom_https_provisioning_enabled = false - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMFrontDoor_waf(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-frontdoor-%d" - location = "%s" -} - -locals { - backend_name = "backend-bing" - endpoint_name = "frontend-endpoint" - health_probe_name = "health-probe" - load_balancing_name = "load-balancing-setting" -} - -resource "azurerm_frontdoor_firewall_policy" "test" { - name = "acctestwafp%d" - resource_group_name = azurerm_resource_group.test.name - mode = "Prevention" -} - -resource "azurerm_frontdoor" "test" { - name = "acctest-FD-%d" - resource_group_name = azurerm_resource_group.test.name - enforce_backend_pools_certificate_name_check = false - - routing_rule { - name = "routing-rule" - accepted_protocols = ["Http", "Https"] - patterns_to_match = ["/*"] - frontend_endpoints = [local.endpoint_name] - forwarding_configuration { - forwarding_protocol = "MatchRequest" - backend_pool_name = local.backend_name - } - } - - backend_pool_load_balancing { - name = local.load_balancing_name - } - - backend_pool_health_probe { - name = 
local.health_probe_name - } - - backend_pool { - name = local.backend_name - backend { - host_header = "www.bing.com" - address = "www.bing.com" - http_port = 80 - https_port = 443 - } - - load_balancing_name = local.load_balancing_name - health_probe_name = local.health_probe_name - } - - frontend_endpoint { - name = local.endpoint_name - host_name = "acctest-FD-%d.azurefd.net" - custom_https_provisioning_enabled = false - web_application_firewall_policy_link_id = azurerm_frontdoor_firewall_policy.test.id - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMFrontDoor_DisableCache(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-frontdoor-%d" - location = "%s" -} - -locals { - backend_name = "backend-bing" - endpoint_name = "frontend-endpoint" - health_probe_name = "health-probe" - load_balancing_name = "load-balancing-setting" -} - -resource "azurerm_frontdoor" "test" { - name = "acctest-FD-%d" - resource_group_name = azurerm_resource_group.test.name - enforce_backend_pools_certificate_name_check = false - - routing_rule { - name = "routing-rule" - accepted_protocols = ["Http", "Https"] - patterns_to_match = ["/*"] - frontend_endpoints = [local.endpoint_name] - forwarding_configuration { - forwarding_protocol = "MatchRequest" - backend_pool_name = local.backend_name - } - } - - backend_pool_load_balancing { - name = local.load_balancing_name - } - - backend_pool_health_probe { - name = local.health_probe_name - } - - backend_pool { - name = local.backend_name - backend { - host_header = "www.bing.com" - address = "www.bing.com" - http_port = 80 - https_port = 443 - } - - load_balancing_name = local.load_balancing_name - health_probe_name = local.health_probe_name - } - - frontend_endpoint { - name = local.endpoint_name - host_name = "acctest-FD-%d.azurefd.net" - custom_https_provisioning_enabled = false - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMFrontDoor_EnableCache(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-frontdoor-%d" - location = "%s" -} - -locals { - backend_name = "backend-bing" - endpoint_name = "frontend-endpoint" - health_probe_name = "health-probe" - load_balancing_name = "load-balancing-setting" -} - -resource "azurerm_frontdoor" "test" { - name = "acctest-FD-%d" - resource_group_name = azurerm_resource_group.test.name - enforce_backend_pools_certificate_name_check = false - - routing_rule { - name = "routing-rule" - accepted_protocols = ["Http", "Https"] - patterns_to_match = ["/*"] - frontend_endpoints = [local.endpoint_name] - - forwarding_configuration { - forwarding_protocol = "MatchRequest" - backend_pool_name = local.backend_name - cache_enabled = true - } - } - - backend_pool_load_balancing { - name = local.load_balancing_name - } - - backend_pool_health_probe { - name = local.health_probe_name - } - - backend_pool { - name = local.backend_name - backend { - host_header = "www.bing.com" - address = "www.bing.com" - http_port = 80 - https_port = 443 - } - - load_balancing_name = local.load_balancing_name - health_probe_name = local.health_probe_name - } - - frontend_endpoint { - name = local.endpoint_name - host_name = "acctest-FD-%d.azurefd.net" - 
custom_https_provisioning_enabled = false - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMFrontDoor_CustomHttpsEnabled(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-frontdoor-%d" - location = "%s" -} - -locals { - backend_name = "backend-bing" - endpoint_name = "frontend-endpoint" - health_probe_name = "health-probe" - load_balancing_name = "load-balancing-setting" -} - -resource "azurerm_frontdoor" "test" { - name = "acctest-FD-%d" - resource_group_name = azurerm_resource_group.test.name - enforce_backend_pools_certificate_name_check = false - - routing_rule { - name = "routing-rule" - accepted_protocols = ["Http", "Https"] - patterns_to_match = ["/*"] - frontend_endpoints = [local.endpoint_name] - - forwarding_configuration { - forwarding_protocol = "MatchRequest" - backend_pool_name = local.backend_name - } - } - - backend_pool_load_balancing { - name = local.load_balancing_name - } - - backend_pool_health_probe { - name = local.health_probe_name - } - - backend_pool { - name = local.backend_name - backend { - host_header = "www.bing.com" - address = "www.bing.com" - http_port = 80 - https_port = 443 - } - - load_balancing_name = local.load_balancing_name - health_probe_name = local.health_probe_name - } - - frontend_endpoint { - name = local.endpoint_name - host_name = "acctest-FD-%d.azurefd.net" - custom_https_provisioning_enabled = true - custom_https_configuration { - certificate_source = "FrontDoor" - } - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMFrontDoor_CustomHttpsDisabled(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-frontdoor-%d" - location = "%s" -} - -locals { - backend_name = "backend-bing" - endpoint_name = "frontend-endpoint" - health_probe_name = "health-probe" - load_balancing_name = "load-balancing-setting" -} - -resource "azurerm_frontdoor" "test" { - name = "acctest-FD-%d" - resource_group_name = azurerm_resource_group.test.name - enforce_backend_pools_certificate_name_check = false - - routing_rule { - name = "routing-rule" - accepted_protocols = ["Http", "Https"] - patterns_to_match = ["/*"] - frontend_endpoints = [local.endpoint_name] - - forwarding_configuration { - forwarding_protocol = "MatchRequest" - backend_pool_name = local.backend_name - } - } - - backend_pool_load_balancing { - name = local.load_balancing_name - } - - backend_pool_health_probe { - name = local.health_probe_name - } - - backend_pool { - name = local.backend_name - backend { - host_header = "www.bing.com" - address = "www.bing.com" - http_port = 80 - https_port = 443 - } - - load_balancing_name = local.load_balancing_name - health_probe_name = local.health_probe_name - } - - frontend_endpoint { - name = local.endpoint_name - host_name = "acctest-FD-%d.azurefd.net" - custom_https_provisioning_enabled = false - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMFrontDoor_multiplePools(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-frontdoor-%[1]d" - location = "%s" -} - -resource "azurerm_frontdoor" "test" { - name = "acctest-FD-%[1]d" - 
resource_group_name = azurerm_resource_group.test.name - enforce_backend_pools_certificate_name_check = false - - frontend_endpoint { - name = "acctest-FD-%[1]d-default-FE" - host_name = "acctest-FD-%[1]d.azurefd.net" - custom_https_provisioning_enabled = false - } - - # --- Pool 1 - - routing_rule { - name = "acctest-FD-%[1]d-bing-RR" - accepted_protocols = ["Https"] - patterns_to_match = ["/poolBing/*"] - frontend_endpoints = ["acctest-FD-%[1]d-default-FE"] - - forwarding_configuration { - forwarding_protocol = "MatchRequest" - backend_pool_name = "acctest-FD-%[1]d-pool-bing" - cache_enabled = true - } - } - - backend_pool_load_balancing { - name = "acctest-FD-%[1]d-bing-LB" - additional_latency_milliseconds = 0 - sample_size = 4 - successful_samples_required = 2 - } - - backend_pool_health_probe { - name = "acctest-FD-%[1]d-bing-HP" - protocol = "Https" - enabled = true - probe_method = "HEAD" - } - - backend_pool { - name = "acctest-FD-%[1]d-pool-bing" - load_balancing_name = "acctest-FD-%[1]d-bing-LB" - health_probe_name = "acctest-FD-%[1]d-bing-HP" - - backend { - host_header = "bing.com" - address = "bing.com" - http_port = 80 - https_port = 443 - weight = 75 - enabled = true - } - } - - # --- Pool 2 - - routing_rule { - name = "acctest-FD-%[1]d-google-RR" - accepted_protocols = ["Https"] - patterns_to_match = ["/poolGoogle/*"] - frontend_endpoints = ["acctest-FD-%[1]d-default-FE"] - - forwarding_configuration { - forwarding_protocol = "MatchRequest" - backend_pool_name = "acctest-FD-%[1]d-pool-google" - cache_enabled = true - } - } - - backend_pool_load_balancing { - name = "acctest-FD-%[1]d-google-LB" - additional_latency_milliseconds = 0 - sample_size = 4 - successful_samples_required = 2 - } - - backend_pool_health_probe { - name = "acctest-FD-%[1]d-google-HP" - protocol = "Https" - } - - backend_pool { - name = "acctest-FD-%[1]d-pool-google" - load_balancing_name = "acctest-FD-%[1]d-google-LB" - health_probe_name = "acctest-FD-%[1]d-google-HP" - - backend { - host_header = "google.com" - address = "google.com" - http_port = 80 - https_port = 443 - weight = 75 - enabled = true - } - } -} -`, data.RandomInteger, data.Locations.Primary) -} diff --git a/azurerm/internal/services/frontdoor/validate/backend_pool_id.go b/azurerm/internal/services/frontdoor/validate/backend_pool_id.go new file mode 100644 index 000000000000..7b4163b60cd2 --- /dev/null +++ b/azurerm/internal/services/frontdoor/validate/backend_pool_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/frontdoor/parse" +) + +func BackendPoolID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.BackendPoolID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/frontdoor/validate/backend_pool_id_test.go b/azurerm/internal/services/frontdoor/validate/backend_pool_id_test.go new file mode 100644 index 000000000000..16744e722f2c --- /dev/null +++ b/azurerm/internal/services/frontdoor/validate/backend_pool_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestBackendPoolID(t *testing.T) { + cases := []struct { + Input 
string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/backendPools/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/backendPools/pool1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/FRONTDOORS/FRONTDOOR1/BACKENDPOOLS/POOL1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := BackendPoolID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/frontdoor/validate/backend_pool_routing_rule_name.go b/azurerm/internal/services/frontdoor/validate/backend_pool_routing_rule_name.go new file mode 100644 index 000000000000..3ebf53ccc222 --- /dev/null +++ b/azurerm/internal/services/frontdoor/validate/backend_pool_routing_rule_name.go @@ -0,0 +1,15 @@ +package validate + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" +) + +func BackendPoolRoutingRuleName(i interface{}, k string) (_ []string, errors []error) { + if m, regexErrs := validate.RegExHelper(i, k, `(^[\da-zA-Z])([-\da-zA-Z]{1,88})([\da-zA-Z]$)`); !m { + return nil, append(regexErrs, fmt.Errorf(`%q must be between 1 and 90 characters in length and begin with a letter or number, end with a letter or number and may contain only letters, numbers or hyphens.`, k)) + } + + return nil, nil +} diff --git a/azurerm/internal/services/frontdoor/validate/custom_block_response_body.go b/azurerm/internal/services/frontdoor/validate/custom_block_response_body.go new file mode 100644 index 000000000000..f6d83ae4d604 --- /dev/null +++ b/azurerm/internal/services/frontdoor/validate/custom_block_response_body.go @@ -0,0 +1,15 @@ +package validate + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" +) + +func CustomBlockResponseBody(i interface{}, k string) (_ []string, errors []error) { + if m, regexErrs := validate.RegExHelper(i, k, `^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=|[A-Za-z0-9+/]{4})$`); !m { + return nil, append(regexErrs, fmt.Errorf(`%q contains 
invalid characters, %q must contain only alphanumeric and equals sign characters.`, k, k)) + } + + return nil, nil +} diff --git a/azurerm/internal/services/frontdoor/validate/front_door_id.go b/azurerm/internal/services/frontdoor/validate/front_door_id.go new file mode 100644 index 000000000000..9be7c75fce5e --- /dev/null +++ b/azurerm/internal/services/frontdoor/validate/front_door_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/frontdoor/parse" +) + +func FrontDoorID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.FrontDoorID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/frontdoor/validate/front_door_id_test.go b/azurerm/internal/services/frontdoor/validate/front_door_id_test.go new file mode 100644 index 000000000000..8a82e0a139a6 --- /dev/null +++ b/azurerm/internal/services/frontdoor/validate/front_door_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestFrontDoorID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/FRONTDOORS/FRONTDOOR1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := FrontDoorID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/frontdoor/validate/front_door_name.go b/azurerm/internal/services/frontdoor/validate/front_door_name.go new file mode 100644 index 000000000000..357a002ea2ee --- /dev/null +++ b/azurerm/internal/services/frontdoor/validate/front_door_name.go @@ -0,0 +1,14 @@ +package validate + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" +) + +func FrontDoorName(i interface{}, k string) (_ []string, errors []error) { + if m, regexErrs := validate.RegExHelper(i, k, `(^[\da-zA-Z])([-\da-zA-Z]{3,61})([\da-zA-Z]$)`); !m { + return nil, 
append(regexErrs, fmt.Errorf(`%q must be between 5 and 63 characters in length and begin with a letter or number, end with a letter or number and may contain only letters, numbers or hyphens.`, k)) + } + return nil, nil +} diff --git a/azurerm/internal/services/frontdoor/validate/front_door_waf_name.go b/azurerm/internal/services/frontdoor/validate/front_door_waf_name.go new file mode 100644 index 000000000000..da41ae25e8ae --- /dev/null +++ b/azurerm/internal/services/frontdoor/validate/front_door_waf_name.go @@ -0,0 +1,15 @@ +package validate + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" +) + +func FrontDoorWAFName(i interface{}, k string) (_ []string, errors []error) { + if m, regexErrs := validate.RegExHelper(i, k, `(^[a-zA-Z])([\da-zA-Z]{0,127})$`); !m { + return nil, append(regexErrs, fmt.Errorf(`%q must be between 1 and 128 characters in length, must begin with a letter and may only contain letters and numbers.`, k)) + } + + return nil, nil +} diff --git a/azurerm/internal/services/frontdoor/validate/front_door_waf_name_test.go b/azurerm/internal/services/frontdoor/validate/front_door_waf_name_test.go new file mode 100644 index 000000000000..acf2b38716e8 --- /dev/null +++ b/azurerm/internal/services/frontdoor/validate/front_door_waf_name_test.go @@ -0,0 +1,89 @@ +package validate + +import ( + "testing" +) + +func TestAccFrontDoorFirewallPolicy_validateName(t *testing.T) { + cases := []struct { + Name string + Input string + ExpectError bool + }{ + { + Name: "Empty String", + Input: "", + ExpectError: true, + }, + { + Name: "Starts with Numeric", + Input: "1WellThisIsAllWrong", + ExpectError: true, + }, + { + Name: "Has Spaces", + Input: "What part of no spaces do you not understand", + ExpectError: true, + }, + { + Name: "Has Hyphens", + Input: "What-part-of-no-hyphens-do-you-not-understand", + ExpectError: true, + }, + { + Name: "Special Characters", + Input: "WellArn`tTheseSpecialCharacters?!", + ExpectError: true, + }, + { + Name: "Mixed Case Alpha and Numeric", + Input: "ThisNameIsAPerfectlyFine1", + ExpectError: false, + }, + { + Name: "Too Long", + Input: "OhMyLordyThisNameIsWayToLooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooog", + ExpectError: true, + }, + { + Name: "Max Length", + Input: "NowThisNameIsThePerfectLengthForAFrontdoorFireWallPolicyDontYouThinkAnyLongerWouldBeJustWayToLoooooooooooooooooooongDontYouThink", + ExpectError: false, + }, + { + Name: "Minimum Length Upper", + Input: "A", + ExpectError: false, + }, + { + Name: "Minimum Length Lower", + Input: "a", + ExpectError: false, + }, + { + Name: "Mixed Case Alpha no Numeric", + Input: "LookMomNoNumbers", + ExpectError: false, + }, + { + Name: "All Upper Alpha with Numeric", + Input: "OU812", + ExpectError: false, + }, + { + Name: "All lower no Numeric", + Input: "heythisisalllowercase", + ExpectError: false, + }, + } + + for _, tc := range cases { + _, errors := FrontDoorWAFName(tc.Input, tc.Name) + + hasError := len(errors) > 0 + + if tc.ExpectError && !hasError { + t.Fatalf("Expected the FrontDoor WAF Name to trigger a validation error for '%s'", tc.Name) + } + } +} diff --git a/azurerm/internal/services/frontdoor/validate/frontend_endpoint_id.go b/azurerm/internal/services/frontdoor/validate/frontend_endpoint_id.go index 819c8929877b..47ee9be373d9 100644 --- a/azurerm/internal/services/frontdoor/validate/frontend_endpoint_id.go +++ 
b/azurerm/internal/services/frontdoor/validate/frontend_endpoint_id.go @@ -1,22 +1,23 @@ package validate +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/frontdoor/parse" ) -func FrontendEndpointID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) +func FrontendEndpointID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) return } - if _, err := parse.FrontendEndpointIDForImport(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a resource id: %v", k, err)) - return + if _, err := parse.FrontendEndpointID(v); err != nil { + errors = append(errors, err) } - return warnings, errors + return } diff --git a/azurerm/internal/services/frontdoor/validate/frontend_endpoint_id_test.go b/azurerm/internal/services/frontdoor/validate/frontend_endpoint_id_test.go new file mode 100644 index 000000000000..0eeb456c9e48 --- /dev/null +++ b/azurerm/internal/services/frontdoor/validate/frontend_endpoint_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestFrontendEndpointID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/frontendEndpoints/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/frontendEndpoints/endpoint1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/FRONTDOORS/FRONTDOOR1/FRONTENDENDPOINTS/ENDPOINT1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := FrontendEndpointID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/frontdoor/validate/health_probe_id.go 
b/azurerm/internal/services/frontdoor/validate/health_probe_id.go new file mode 100644 index 000000000000..3aa863a667f7 --- /dev/null +++ b/azurerm/internal/services/frontdoor/validate/health_probe_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/frontdoor/parse" +) + +func HealthProbeID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.HealthProbeID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/frontdoor/validate/health_probe_id_test.go b/azurerm/internal/services/frontdoor/validate/health_probe_id_test.go new file mode 100644 index 000000000000..97a38fd28575 --- /dev/null +++ b/azurerm/internal/services/frontdoor/validate/health_probe_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestHealthProbeID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/", + Valid: false, + }, + + { + // missing HealthProbeSettingName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/", + Valid: false, + }, + + { + // missing value for HealthProbeSettingName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/healthProbeSettings/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/healthProbeSettings/probe1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/FRONTDOORS/FRONTDOOR1/HEALTHPROBESETTINGS/PROBE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := HealthProbeID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/frontdoor/validate/load_balancing_id.go b/azurerm/internal/services/frontdoor/validate/load_balancing_id.go new file mode 100644 index 000000000000..a85cb0a782f2 --- /dev/null +++ b/azurerm/internal/services/frontdoor/validate/load_balancing_id.go @@ -0,0 +1,23 @@ 
+package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/frontdoor/parse" +) + +func LoadBalancingID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.LoadBalancingID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/frontdoor/validate/load_balancing_id_test.go b/azurerm/internal/services/frontdoor/validate/load_balancing_id_test.go new file mode 100644 index 000000000000..393e0c0fcd0e --- /dev/null +++ b/azurerm/internal/services/frontdoor/validate/load_balancing_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestLoadBalancingID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/", + Valid: false, + }, + + { + // missing LoadBalancingSettingName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/", + Valid: false, + }, + + { + // missing value for LoadBalancingSettingName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/loadBalancingSettings/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/loadBalancingSettings/setting1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/FRONTDOORS/FRONTDOOR1/LOADBALANCINGSETTINGS/SETTING1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := LoadBalancingID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/frontdoor/validate/name.go b/azurerm/internal/services/frontdoor/validate/name.go deleted file mode 100644 index 32698767bf9f..000000000000 --- a/azurerm/internal/services/frontdoor/validate/name.go +++ /dev/null @@ -1,22 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" -) - -func FrontDoorName(i interface{}, k string) (_ []string, errors []error) { - if m, regexErrs := 
validate.RegExHelper(i, k, `(^[\da-zA-Z])([-\da-zA-Z]{3,61})([\da-zA-Z]$)`); !m { - return nil, append(regexErrs, fmt.Errorf(`%q must be between 5 and 63 characters in length and begin with a letter or number, end with a letter or number and may contain only letters, numbers or hyphens.`, k)) - } - return nil, nil -} - -func FrontDoorWAFName(i interface{}, k string) (_ []string, errors []error) { - if m, regexErrs := validate.RegExHelper(i, k, `(^[a-zA-Z])([\da-zA-Z]{0,127})$`); !m { - return nil, append(regexErrs, fmt.Errorf(`%q must be between 1 and 128 characters in length, must begin with a letter and may only contain letters and numbers.`, k)) - } - - return nil, nil -} diff --git a/azurerm/internal/services/frontdoor/validate/name_test.go b/azurerm/internal/services/frontdoor/validate/name_test.go deleted file mode 100644 index 481a747c86f6..000000000000 --- a/azurerm/internal/services/frontdoor/validate/name_test.go +++ /dev/null @@ -1,89 +0,0 @@ -package validate - -import ( - "testing" -) - -func TestAccAzureRMFrontDoorFirewallPolicy_validateName(t *testing.T) { - cases := []struct { - Name string - Input string - ExpectError bool - }{ - { - Name: "Empty String", - Input: "", - ExpectError: true, - }, - { - Name: "Starts with Numeric", - Input: "1WellThisIsAllWrong", - ExpectError: true, - }, - { - Name: "Has Spaces", - Input: "What part of no spaces do you not understand", - ExpectError: true, - }, - { - Name: "Has Hyphens", - Input: "What-part-of-no-hyphens-do-you-not-understand", - ExpectError: true, - }, - { - Name: "Special Characters", - Input: "WellArn`tTheseSpecialCharacters?!", - ExpectError: true, - }, - { - Name: "Mixed Case Alpha and Numeric", - Input: "ThisNameIsAPerfectlyFine1", - ExpectError: false, - }, - { - Name: "Too Long", - Input: "OhMyLordyThisNameIsWayToLooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooog", - ExpectError: true, - }, - { - Name: "Max Length", - Input: "NowThisNameIsThePerfectLengthForAFrontdoorFireWallPolicyDontYouThinkAnyLongerWouldBeJustWayToLoooooooooooooooooooongDontYouThink", - ExpectError: false, - }, - { - Name: "Minimum Length Upper", - Input: "A", - ExpectError: false, - }, - { - Name: "Minimum Length Lower", - Input: "a", - ExpectError: false, - }, - { - Name: "Mixed Case Alpha no Numeric", - Input: "LookMomNoNumbers", - ExpectError: false, - }, - { - Name: "All Upper Alpha with Numeric", - Input: "OU812", - ExpectError: false, - }, - { - Name: "All lower no Numeric", - Input: "heythisisalllowercase", - ExpectError: false, - }, - } - - for _, tc := range cases { - _, errors := FrontDoorWAFName(tc.Input, tc.Name) - - hasError := len(errors) > 0 - - if tc.ExpectError && !hasError { - t.Fatalf("Expected the FrontDoor WAF Name to trigger a validation error for '%s'", tc.Name) - } - } -} diff --git a/azurerm/internal/services/frontdoor/validate/routing_rule_id.go b/azurerm/internal/services/frontdoor/validate/routing_rule_id.go new file mode 100644 index 000000000000..2c65c2ac980e --- /dev/null +++ b/azurerm/internal/services/frontdoor/validate/routing_rule_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/frontdoor/parse" +) + +func RoutingRuleID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected 
%q to be a string", key)) + return + } + + if _, err := parse.RoutingRuleID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/frontdoor/validate/routing_rule_id_test.go b/azurerm/internal/services/frontdoor/validate/routing_rule_id_test.go new file mode 100644 index 000000000000..6a0b0ec4ca4a --- /dev/null +++ b/azurerm/internal/services/frontdoor/validate/routing_rule_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestRoutingRuleID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for FrontDoorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/routingRules/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoors/frontdoor1/routingRules/rule1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/FRONTDOORS/FRONTDOOR1/ROUTINGRULES/RULE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := RoutingRuleID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/frontdoor/validate/validate.go b/azurerm/internal/services/frontdoor/validate/validate.go deleted file mode 100644 index 4e0641441230..000000000000 --- a/azurerm/internal/services/frontdoor/validate/validate.go +++ /dev/null @@ -1,23 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" -) - -func FrontDoorBackendPoolRoutingRuleName(i interface{}, k string) (_ []string, errors []error) { - if m, regexErrs := validate.RegExHelper(i, k, `(^[\da-zA-Z])([-\da-zA-Z]{1,88})([\da-zA-Z]$)`); !m { - return nil, append(regexErrs, fmt.Errorf(`%q must be between 1 and 90 characters in length and begin with a letter or number, end with a letter or number and may contain only letters, numbers or hyphens.`, k)) - } - - return nil, nil -} - -func FrontdoorCustomBlockResponseBody(i interface{}, k string) (_ []string, errors []error) { - if m, regexErrs := validate.RegExHelper(i, 
k, `^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=|[A-Za-z0-9+/]{4})$`); !m { - return nil, append(regexErrs, fmt.Errorf(`%q contains invalid characters, %q must contain only alphanumeric and equals sign characters.`, k, k)) - } - - return nil, nil -} diff --git a/azurerm/internal/services/frontdoor/validate/web_application_firewall_policy_id.go b/azurerm/internal/services/frontdoor/validate/web_application_firewall_policy_id.go new file mode 100644 index 000000000000..5f533dd68d22 --- /dev/null +++ b/azurerm/internal/services/frontdoor/validate/web_application_firewall_policy_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/frontdoor/parse" +) + +func WebApplicationFirewallPolicyID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.WebApplicationFirewallPolicyID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/frontdoor/validate/web_application_firewall_policy_id_test.go b/azurerm/internal/services/frontdoor/validate/web_application_firewall_policy_id_test.go new file mode 100644 index 000000000000..076ec024088c --- /dev/null +++ b/azurerm/internal/services/frontdoor/validate/web_application_firewall_policy_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestWebApplicationFirewallPolicyID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing FrontDoorWebApplicationFirewallPolicyName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for FrontDoorWebApplicationFirewallPolicyName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoorWebApplicationFirewallPolicies/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/frontDoorWebApplicationFirewallPolicies/policy1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/FRONTDOORWEBAPPLICATIONFIREWALLPOLICIES/POLICY1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := WebApplicationFirewallPolicyID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/hdinsight/client/client.go b/azurerm/internal/services/hdinsight/client/client.go index 
afb902779c0c..2d81759d60db 100644 --- a/azurerm/internal/services/hdinsight/client/client.go +++ b/azurerm/internal/services/hdinsight/client/client.go @@ -1,7 +1,7 @@ package client import ( - "github.com/Azure/azure-sdk-for-go/services/preview/hdinsight/mgmt/2018-06-01-preview/hdinsight" + "github.com/Azure/azure-sdk-for-go/services/hdinsight/mgmt/2018-06-01/hdinsight" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/common" ) diff --git a/azurerm/internal/services/hdinsight/common_hdinsight.go b/azurerm/internal/services/hdinsight/common_hdinsight.go index 8505b36a7874..ade6c3322973 100644 --- a/azurerm/internal/services/hdinsight/common_hdinsight.go +++ b/azurerm/internal/services/hdinsight/common_hdinsight.go @@ -6,10 +6,11 @@ import ( "log" "time" - "github.com/Azure/azure-sdk-for-go/services/preview/hdinsight/mgmt/2018-06-01-preview/hdinsight" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hdinsight/parse" + + "github.com/Azure/azure-sdk-for-go/services/hdinsight/mgmt/2018-06-01/hdinsight" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" @@ -23,13 +24,13 @@ func hdinsightClusterUpdate(clusterKind string, readFunc schema.ReadFunc) schema ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.ClusterID(d.Id()) if err != nil { return err } resourceGroup := id.ResourceGroup - name := id.Path["clusters"] + name := id.Name if d.HasChange("tags") { t := d.Get("tags").(map[string]interface{}) @@ -131,7 +132,6 @@ func hdinsightClusterUpdate(clusterKind string, readFunc schema.ReadFunc) schema UserName: utils.String(username), Password: utils.String(password), }) - if err != nil { return err } @@ -151,13 +151,13 @@ func hdinsightClusterDelete(clusterKind string) schema.DeleteFunc { ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.ClusterID(d.Id()) if err != nil { return err } resourceGroup := id.ResourceGroup - name := id.Path["clusters"] + name := id.Name future, err := client.Delete(ctx, resourceGroup, name) if err != nil { @@ -173,10 +173,11 @@ func hdinsightClusterDelete(clusterKind string) schema.DeleteFunc { } type hdInsightRoleDefinition struct { - HeadNodeDef HDInsightNodeDefinition - WorkerNodeDef HDInsightNodeDefinition - ZookeeperNodeDef HDInsightNodeDefinition - EdgeNodeDef *HDInsightNodeDefinition + HeadNodeDef HDInsightNodeDefinition + WorkerNodeDef HDInsightNodeDefinition + ZookeeperNodeDef HDInsightNodeDefinition + KafkaManagementNodeDef *HDInsightNodeDefinition + EdgeNodeDef *HDInsightNodeDefinition } func expandHDInsightRoles(input []interface{}, definition hdInsightRoleDefinition) (*[]hdinsight.Role, error) { @@ -215,6 +216,18 @@ func expandHDInsightRoles(input []interface{}, definition hdInsightRoleDefinitio roles = append(roles, *edgeNode) } + if definition.KafkaManagementNodeDef != nil { + kafkaManagementNodeRaw := v["kafka_management_node"].([]interface{}) + // "kafka_management_node" is optional, we 
expand it only when user has specified it. + if len(kafkaManagementNodeRaw) != 0 { + kafkaManagementNode, err := ExpandHDInsightNodeDefinition("kafkamanagementnode", kafkaManagementNodeRaw, *definition.KafkaManagementNodeDef) + if err != nil { + return nil, fmt.Errorf("Error expanding `kafka_management_node`: %+v", err) + } + roles = append(roles, *kafkaManagementNode) + } + } + return &roles, nil } @@ -223,7 +236,7 @@ func flattenHDInsightRoles(d *schema.ResourceData, input *hdinsight.ComputeProfi return []interface{}{} } - var existingEdgeNodes, existingHeadNodes, existingWorkerNodes, existingZookeeperNodes []interface{} + var existingKafkaManagementNodes, existingEdgeNodes, existingHeadNodes, existingWorkerNodes, existingZookeeperNodes []interface{} existingVs := d.Get("roles").([]interface{}) if len(existingVs) > 0 { @@ -233,6 +246,10 @@ func flattenHDInsightRoles(d *schema.ResourceData, input *hdinsight.ComputeProfi existingEdgeNodes = existingV["edge_node"].([]interface{}) } + if definition.KafkaManagementNodeDef != nil { + existingKafkaManagementNodes = existingV["kafka_management_node"].([]interface{}) + } + existingHeadNodes = existingV["head_node"].([]interface{}) existingWorkerNodes = existingV["worker_node"].([]interface{}) existingZookeeperNodes = existingV["zookeeper_node"].([]interface{}) @@ -259,6 +276,12 @@ func flattenHDInsightRoles(d *schema.ResourceData, input *hdinsight.ComputeProfi result["edge_node"] = edgeNodes } + if definition.KafkaManagementNodeDef != nil { + kafkaManagementNode := FindHDInsightRole(input.Roles, "kafkamanagementnode") + kafkaManagementNodes := FlattenHDInsightNodeDefinition(kafkaManagementNode, existingKafkaManagementNodes, *definition.KafkaManagementNodeDef) + result["kafka_management_node"] = kafkaManagementNodes + } + return []interface{}{ result, } @@ -296,7 +319,6 @@ func createHDInsightEdgeNodes(ctx context.Context, client *hdinsight.Application func deleteHDInsightEdgeNodes(ctx context.Context, client *hdinsight.ApplicationsClient, resourceGroup string, name string) error { future, err := client.Delete(ctx, resourceGroup, name, name) - if err != nil { return fmt.Errorf("Error deleting edge nodes for HDInsight Hadoop Cluster %q (Resource Group %q): %+v", name, resourceGroup, err) } @@ -370,7 +392,8 @@ func flattenHDInsightMonitoring(monitor hdinsight.ClusterMonitoringResponse) []i map[string]string{ "log_analytics_workspace_id": *monitor.WorkspaceID, "primary_key": "*****", - }} + }, + } } return nil diff --git a/azurerm/internal/services/hdinsight/hdinsight.go b/azurerm/internal/services/hdinsight/hdinsight.go index 44267b438ee4..b6aa97d9a8e5 100644 --- a/azurerm/internal/services/hdinsight/hdinsight.go +++ b/azurerm/internal/services/hdinsight/hdinsight.go @@ -4,7 +4,7 @@ import ( "fmt" "strings" - "github.com/Azure/azure-sdk-for-go/services/preview/hdinsight/mgmt/2018-06-01-preview/hdinsight" + "github.com/Azure/azure-sdk-for-go/services/hdinsight/mgmt/2018-06-01/hdinsight" "github.com/hashicorp/go-getter/helper/url" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" @@ -525,7 +525,6 @@ func ExpandHDInsightsStorageAccounts(storageAccounts []interface{}, gen2storageA isDefault := v["is_default"].(bool) uri, err := url.Parse(storageContainerID) - if err != nil { return nil, nil, fmt.Errorf("Error parsing %q: %s", storageContainerID, err) } @@ -637,6 +636,7 @@ func ValidateSchemaHDInsightNodeDefinitionVMSize() schema.SchemaValidateFunc { "Standard_DS12_V2", 
"Standard_DS13_V2", "Standard_DS14_V2", + "Standard_D4a_V4", "Standard_E2_V3", "Standard_E4_V3", "Standard_E8_V3", @@ -674,7 +674,7 @@ func ValidateSchemaHDInsightNodeDefinitionVMSize() schema.SchemaValidateFunc { }, true) } -func SchemaHDInsightNodeDefinition(schemaLocation string, definition HDInsightNodeDefinition) *schema.Schema { +func SchemaHDInsightNodeDefinition(schemaLocation string, definition HDInsightNodeDefinition, required bool) *schema.Schema { result := map[string]*schema.Schema{ "vm_size": { Type: schema.TypeString, @@ -754,14 +754,17 @@ func SchemaHDInsightNodeDefinition(schemaLocation string, definition HDInsightNo } } - return &schema.Schema{ + s := &schema.Schema{ Type: schema.TypeList, - Required: true, MaxItems: 1, + Required: required, + Optional: !required, Elem: &schema.Resource{ Schema: result, }, } + + return s } func ExpandHDInsightNodeDefinition(name string, input []interface{}, definition HDInsightNodeDefinition) (*hdinsight.Role, error) { diff --git a/azurerm/internal/services/hdinsight/hdinsight_cluster_data_source.go b/azurerm/internal/services/hdinsight/hdinsight_cluster_data_source.go index d1a6c907ea16..39eb81b9e6ea 100644 --- a/azurerm/internal/services/hdinsight/hdinsight_cluster_data_source.go +++ b/azurerm/internal/services/hdinsight/hdinsight_cluster_data_source.go @@ -13,9 +13,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmHDInsightSparkCluster() *schema.Resource { +func dataSourceHDInsightSparkCluster() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmHDInsightClusterRead, + Read: dataSourceHDInsightClusterRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -90,6 +90,11 @@ func dataSourceArmHDInsightSparkCluster() *schema.Resource { Computed: true, }, + "kafka_rest_proxy_endpoint": { + Type: schema.TypeString, + Computed: true, + }, + "ssh_endpoint": { Type: schema.TypeString, Computed: true, @@ -98,7 +103,7 @@ func dataSourceArmHDInsightSparkCluster() *schema.Resource { } } -func dataSourceArmHDInsightClusterRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceHDInsightClusterRead(d *schema.ResourceData, meta interface{}) error { clustersClient := meta.(*clients.Client).HDInsight.ClustersClient configurationsClient := meta.(*clients.Client).HDInsight.ConfigurationsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) @@ -150,6 +155,8 @@ func dataSourceArmHDInsightClusterRead(d *schema.ResourceData, meta interface{}) d.Set("https_endpoint", httpEndpoint) sshEndpoint := FindHDInsightConnectivityEndpoint("SSH", props.ConnectivityEndpoints) d.Set("ssh_endpoint", sshEndpoint) + kafkaRestProxyEndpoint := FindHDInsightConnectivityEndpoint("KafkaRestProxyPublicEndpoint", props.ConnectivityEndpoints) + d.Set("kafka_rest_proxy_endpoint", kafkaRestProxyEndpoint) } return tags.FlattenAndSet(d, resp.Tags) diff --git a/azurerm/internal/services/hdinsight/hdinsight_cluster_data_source_test.go b/azurerm/internal/services/hdinsight/hdinsight_cluster_data_source_test.go new file mode 100644 index 000000000000..6d6d96697053 --- /dev/null +++ b/azurerm/internal/services/hdinsight/hdinsight_cluster_data_source_test.go @@ -0,0 +1,267 @@ +package hdinsight_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type HDInsightClusterDataSourceResource struct { +} + +func TestAccDataSourceHDInsightCluster_hadoop(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_hdinsight_cluster", "test") + r := HDInsightClusterDataSourceResource{} + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.hadoop(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("kind").HasValue("hadoop"), + check.That(data.ResourceName).Key("tier").HasValue("standard"), + check.That(data.ResourceName).Key("edge_ssh_endpoint").HasValue(""), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + }) +} + +func TestAccDataSourceHDInsightCluster_hbase(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_hdinsight_cluster", "test") + r := HDInsightClusterDataSourceResource{} + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.hbase(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("kind").HasValue("hbase"), + check.That(data.ResourceName).Key("tier").HasValue("standard"), + check.That(data.ResourceName).Key("edge_ssh_endpoint").HasValue(""), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + }) +} + +func TestAccDataSourceHDInsightCluster_interactiveQuery(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_hdinsight_cluster", "test") + r := HDInsightClusterDataSourceResource{} + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.interactiveQuery(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("kind").HasValue("interactivehive"), + check.That(data.ResourceName).Key("tier").HasValue("standard"), + check.That(data.ResourceName).Key("edge_ssh_endpoint").HasValue(""), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + }) +} + +func TestAccDataSourceHDInsightCluster_kafka(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_hdinsight_cluster", "test") + r := HDInsightClusterDataSourceResource{} + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.kafka(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("kind").HasValue("kafka"), + check.That(data.ResourceName).Key("tier").HasValue("standard"), + check.That(data.ResourceName).Key("edge_ssh_endpoint").HasValue(""), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + }) +} + +func TestAccDataSourceHDInsightCluster_kafkaWithRestProxy(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_hdinsight_cluster", "test") + r := HDInsightClusterDataSourceResource{} + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.kafkaWithRestProxy(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("kind").HasValue("kafka"), + check.That(data.ResourceName).Key("tier").HasValue("standard"), + check.That(data.ResourceName).Key("edge_ssh_endpoint").HasValue(""), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + check.That(data.ResourceName).Key("kafka_rest_proxy_endpoint").Exists(), + ), + }, + }) +} + +func 
TestAccDataSourceHDInsightCluster_mlServices(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_hdinsight_cluster", "test") + r := HDInsightClusterDataSourceResource{} + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.mlServices(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("kind").HasValue("mlservices"), + check.That(data.ResourceName).Key("tier").HasValue("standard"), + check.That(data.ResourceName).Key("edge_ssh_endpoint").Exists(), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + }) +} + +func TestAccDataSourceHDInsightCluster_rserver(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_hdinsight_cluster", "test") + r := HDInsightClusterDataSourceResource{} + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.rserver(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("kind").HasValue("rserver"), + check.That(data.ResourceName).Key("tier").HasValue("standard"), + check.That(data.ResourceName).Key("edge_ssh_endpoint").Exists(), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + }) +} + +func TestAccDataSourceHDInsightCluster_spark(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_hdinsight_cluster", "test") + r := HDInsightClusterDataSourceResource{} + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.spark(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("kind").HasValue("spark"), + check.That(data.ResourceName).Key("tier").HasValue("standard"), + check.That(data.ResourceName).Key("edge_ssh_endpoint").HasValue(""), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + }) +} + +func TestAccDataSourceHDInsightCluster_storm(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_hdinsight_cluster", "test") + r := HDInsightClusterDataSourceResource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.storm(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("kind").HasValue("storm"), + check.That(data.ResourceName).Key("tier").HasValue("standard"), + check.That(data.ResourceName).Key("edge_ssh_endpoint").HasValue(""), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + }) +} + +func (HDInsightClusterDataSourceResource) hadoop(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_hdinsight_cluster" "test" { + name = azurerm_hdinsight_hadoop_cluster.test.name + resource_group_name = azurerm_hdinsight_hadoop_cluster.test.resource_group_name +} +`, HDInsightHadoopClusterResource{}.basic(data)) +} + +func (HDInsightClusterDataSourceResource) hbase(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_hdinsight_cluster" "test" { + name = azurerm_hdinsight_hbase_cluster.test.name + resource_group_name = azurerm_hdinsight_hbase_cluster.test.resource_group_name +} +`, HDInsightHBaseClusterResource{}.basic(data)) +} + +func (HDInsightClusterDataSourceResource) interactiveQuery(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_hdinsight_cluster" "test" { + name = azurerm_hdinsight_interactive_query_cluster.test.name + resource_group_name = 
azurerm_hdinsight_interactive_query_cluster.test.resource_group_name +} +`, HDInsightInteractiveQueryClusterResource{}.basic(data)) +} + +func (HDInsightClusterDataSourceResource) kafka(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_hdinsight_cluster" "test" { + name = azurerm_hdinsight_kafka_cluster.test.name + resource_group_name = azurerm_hdinsight_kafka_cluster.test.resource_group_name +} +`, HDInsightKafkaClusterResource{}.basic(data)) +} + +func (HDInsightClusterDataSourceResource) kafkaWithRestProxy(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_hdinsight_cluster" "test" { + name = azurerm_hdinsight_kafka_cluster.test.name + resource_group_name = azurerm_hdinsight_kafka_cluster.test.resource_group_name +} +`, HDInsightKafkaClusterResource{}.restProxy(data)) +} + +func (HDInsightClusterDataSourceResource) mlServices(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_hdinsight_cluster" "test" { + name = azurerm_hdinsight_ml_services_cluster.test.name + resource_group_name = azurerm_hdinsight_ml_services_cluster.test.resource_group_name +} +`, HDInsightMLServicesClusterResource{}.basic(data)) +} + +func (HDInsightClusterDataSourceResource) rserver(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_hdinsight_cluster" "test" { + name = azurerm_hdinsight_rserver_cluster.test.name + resource_group_name = azurerm_hdinsight_rserver_cluster.test.resource_group_name +} +`, HDInsightRServerClusterResource{}.basic(data)) +} + +func (HDInsightClusterDataSourceResource) spark(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_hdinsight_cluster" "test" { + name = azurerm_hdinsight_spark_cluster.test.name + resource_group_name = azurerm_hdinsight_spark_cluster.test.resource_group_name +} +`, HDInsightSparkClusterResource{}.basic(data)) +} + +func (HDInsightClusterDataSourceResource) storm(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_hdinsight_cluster" "test" { + name = azurerm_hdinsight_storm_cluster.test.name + resource_group_name = azurerm_hdinsight_storm_cluster.test.resource_group_name +} +`, HDInsightStormClusterResource{}.basic(data)) +} diff --git a/azurerm/internal/services/hdinsight/hdinsight_hadoop_cluster_resource.go b/azurerm/internal/services/hdinsight/hdinsight_hadoop_cluster_resource.go index c5db654037fb..2ed556c48b49 100644 --- a/azurerm/internal/services/hdinsight/hdinsight_hadoop_cluster_resource.go +++ b/azurerm/internal/services/hdinsight/hdinsight_hadoop_cluster_resource.go @@ -6,7 +6,9 @@ import ( "log" "time" - "github.com/Azure/azure-sdk-for-go/services/preview/hdinsight/mgmt/2018-06-01-preview/hdinsight" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hdinsight/parse" + + "github.com/Azure/azure-sdk-for-go/services/hdinsight/mgmt/2018-06-01/hdinsight" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" @@ -45,11 +47,11 @@ var hdInsightHadoopClusterZookeeperNodeDefinition = HDInsightNodeDefinition{ FixedTargetInstanceCount: utils.Int32(int32(3)), } -func resourceArmHDInsightHadoopCluster() *schema.Resource { +func resourceHDInsightHadoopCluster() *schema.Resource { return &schema.Resource{ - Create: resourceArmHDInsightHadoopClusterCreate, - Read: resourceArmHDInsightHadoopClusterRead, - Update: hdinsightClusterUpdate("Hadoop", 
resourceArmHDInsightHadoopClusterRead), + Create: resourceHDInsightHadoopClusterCreate, + Read: resourceHDInsightHadoopClusterRead, + Update: hdinsightClusterUpdate("Hadoop", resourceHDInsightHadoopClusterRead), Delete: hdinsightClusterDelete("Hadoop"), Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -103,11 +105,11 @@ func resourceArmHDInsightHadoopCluster() *schema.Resource { MaxItems: 1, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ - "head_node": SchemaHDInsightNodeDefinition("roles.0.head_node", hdInsightHadoopClusterHeadNodeDefinition), + "head_node": SchemaHDInsightNodeDefinition("roles.0.head_node", hdInsightHadoopClusterHeadNodeDefinition, true), - "worker_node": SchemaHDInsightNodeDefinition("roles.0.worker_node", hdInsightHadoopClusterWorkerNodeDefinition), + "worker_node": SchemaHDInsightNodeDefinition("roles.0.worker_node", hdInsightHadoopClusterWorkerNodeDefinition, true), - "zookeeper_node": SchemaHDInsightNodeDefinition("roles.0.zookeeper_node", hdInsightHadoopClusterZookeeperNodeDefinition), + "zookeeper_node": SchemaHDInsightNodeDefinition("roles.0.zookeeper_node", hdInsightHadoopClusterZookeeperNodeDefinition, true), "edge_node": { Type: schema.TypeList, @@ -171,14 +173,16 @@ func resourceArmHDInsightHadoopCluster() *schema.Resource { } } -func resourceArmHDInsightHadoopClusterCreate(d *schema.ResourceData, meta interface{}) error { +func resourceHDInsightHadoopClusterCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).HDInsight.ClustersClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId extensionsClient := meta.(*clients.Client).HDInsight.ExtensionsClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() name := d.Get("name").(string) resourceGroup := d.Get("resource_group_name").(string) + id := parse.NewClusterID(subscriptionId, resourceGroup, name) location := azure.NormalizeLocation(d.Get("location").(string)) clusterVersion := d.Get("cluster_version").(string) t := d.Get("tags").(map[string]interface{}) @@ -266,7 +270,7 @@ func resourceArmHDInsightHadoopClusterCreate(d *schema.ResourceData, meta interf return fmt.Errorf("failure reading ID for HDInsight Hadoop Cluster %q (Resource Group %q)", name, resourceGroup) } - d.SetId(*read.ID) + d.SetId(id.ID()) // We can only add an edge node after creation if v, ok := d.GetOk("roles.0.edge_node"); ok { @@ -302,23 +306,23 @@ func resourceArmHDInsightHadoopClusterCreate(d *schema.ResourceData, meta interf } } - return resourceArmHDInsightHadoopClusterRead(d, meta) + return resourceHDInsightHadoopClusterRead(d, meta) } -func resourceArmHDInsightHadoopClusterRead(d *schema.ResourceData, meta interface{}) error { +func resourceHDInsightHadoopClusterRead(d *schema.ResourceData, meta interface{}) error { clustersClient := meta.(*clients.Client).HDInsight.ClustersClient configurationsClient := meta.(*clients.Client).HDInsight.ConfigurationsClient extensionsClient := meta.(*clients.Client).HDInsight.ExtensionsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.ClusterID(d.Id()) if err != nil { return err } resourceGroup := id.ResourceGroup - name := id.Path["clusters"] + name := id.Name resp, err := clustersClient.Get(ctx, resourceGroup, name) if err != nil { diff --git a/azurerm/internal/services/hdinsight/hdinsight_hadoop_cluster_resource_test.go 
b/azurerm/internal/services/hdinsight/hdinsight_hadoop_cluster_resource_test.go new file mode 100644 index 000000000000..7cf9e7db9e49 --- /dev/null +++ b/azurerm/internal/services/hdinsight/hdinsight_hadoop_cluster_resource_test.go @@ -0,0 +1,1513 @@ +package hdinsight_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hdinsight/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type HDInsightHadoopClusterResource struct { +} + +func TestAccHDInsightHadoopCluster_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") + r := HDInsightHadoopClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightHadoopCluster_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") + r := HDInsightHadoopClusterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccHDInsightHadoopCluster_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") + r := HDInsightHadoopClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + { + Config: r.updated(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightHadoopCluster_sshKeys(t 
*testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") + r := HDInsightHadoopClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.sshKeys(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("storage_account", + "roles.0.head_node.0.ssh_keys", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.ssh_keys", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.ssh_keys", + "roles.0.zookeeper_node.0.vm_size"), + }) +} + +func TestAccHDInsightHadoopCluster_virtualNetwork(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") + r := HDInsightHadoopClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.virtualNetwork(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightHadoopCluster_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") + r := HDInsightHadoopClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightHadoopCluster_edgeNodeBasic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") + r := HDInsightHadoopClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.edgeNodeBasic(data, 2, "Standard_D3_V2"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "roles.0.edge_node.0.password", + "roles.0.edge_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightHadoopCluster_addEdgeNodeBasic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") + r := HDInsightHadoopClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), 
+ }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + { + Config: r.edgeNodeBasic(data, 1, "Standard_D3_V2"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "roles.0.edge_node.0.password", + "roles.0.edge_node.0.vm_size", + "storage_account"), + { + Config: r.edgeNodeBasic(data, 3, "Standard_D4_V2"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "roles.0.edge_node.0.password", + "roles.0.edge_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightHadoopCluster_gen2storage(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") + r := HDInsightHadoopClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.gen2storage(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightHadoopCluster_gen2AndBlobStorage(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") + r := HDInsightHadoopClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.gen2AndBlobStorage(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightHadoopCluster_tls(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") + r := HDInsightHadoopClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.tls(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + 
"roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightHadoopCluster_allMetastores(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") + r := HDInsightHadoopClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.allMetastores(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account", + "metastores.0.hive.0.password", + "metastores.0.oozie.0.password", + "metastores.0.ambari.0.password"), + }) +} + +func TestAccHDInsightHadoopCluster_hiveMetastore(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") + r := HDInsightHadoopClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.hiveMetastore(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + }) +} + +func TestAccHDInsightHadoopCluster_updateMetastore(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") + r := HDInsightHadoopClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.hiveMetastore(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account", + "metastores.0.hive.0.password", + "metastores.0.oozie.0.password", + "metastores.0.ambari.0.password"), + { + Config: r.allMetastores(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account", + "metastores.0.hive.0.password", + "metastores.0.oozie.0.password", + "metastores.0.ambari.0.password"), + }) +} + +func TestAccHDInsightHadoopCluster_monitor(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") + r := HDInsightHadoopClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.monitor(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), 
+ }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightHadoopCluster_updateGateway(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") + r := HDInsightHadoopClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + { + Config: r.updateGateway(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightHadoopCluster_updateMonitor(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") + r := HDInsightHadoopClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + // No monitor + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + // Add monitor + { + Config: r.monitor(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + // Change Log Analytics Workspace for the monitor + { + PreConfig: func() { + data.RandomString += "new" + }, + Config: r.monitor(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + // Remove monitor + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + 
check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func (t HDInsightHadoopClusterResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ClusterID(state.ID) + if err != nil { + return nil, err + } + + resourceGroup := id.ResourceGroup + name := id.Name + + resp, err := clients.HDInsight.ClustersClient.Get(ctx, resourceGroup, name) + if err != nil { + return nil, fmt.Errorf("reading HDInsight Hadoop Cluster (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (r HDInsightHadoopClusterResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_hadoop_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + hadoop = "3.1" + } + + gateway { + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_D4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 2 + } + + zookeeper_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ } + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightHadoopClusterResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_hadoop_cluster" "import" { + name = azurerm_hdinsight_hadoop_cluster.test.name + resource_group_name = azurerm_hdinsight_hadoop_cluster.test.resource_group_name + location = azurerm_hdinsight_hadoop_cluster.test.location + cluster_version = azurerm_hdinsight_hadoop_cluster.test.cluster_version + tier = azurerm_hdinsight_hadoop_cluster.test.tier + dynamic "component_version" { + for_each = azurerm_hdinsight_hadoop_cluster.test.component_version + content { + hadoop = component_version.value.hadoop + } + } + dynamic "gateway" { + for_each = azurerm_hdinsight_hadoop_cluster.test.gateway + content { + enabled = gateway.value.enabled + password = gateway.value.password + username = gateway.value.username + } + } + dynamic "storage_account" { + for_each = azurerm_hdinsight_hadoop_cluster.test.storage_account + content { + is_default = storage_account.value.is_default + storage_account_key = storage_account.value.storage_account_key + storage_container_id = storage_account.value.storage_container_id + } + } + dynamic "roles" { + for_each = azurerm_hdinsight_hadoop_cluster.test.roles + content { + dynamic "edge_node" { + for_each = lookup(roles.value, "edge_node", []) + content { + target_instance_count = edge_node.value.target_instance_count + vm_size = edge_node.value.vm_size + + dynamic "install_script_action" { + for_each = lookup(edge_node.value, "install_script_action", []) + content { + name = install_script_action.value.name + uri = install_script_action.value.uri + } + } + } + } + + dynamic "head_node" { + for_each = lookup(roles.value, "head_node", []) + content { + password = lookup(head_node.value, "password", null) + subnet_id = lookup(head_node.value, "subnet_id", null) + username = head_node.value.username + virtual_network_id = lookup(head_node.value, "virtual_network_id", null) + vm_size = head_node.value.vm_size + } + } + + dynamic "worker_node" { + for_each = lookup(roles.value, "worker_node", []) + content { + password = lookup(worker_node.value, "password", null) + subnet_id = lookup(worker_node.value, "subnet_id", null) + target_instance_count = worker_node.value.target_instance_count + username = worker_node.value.username + virtual_network_id = lookup(worker_node.value, "virtual_network_id", null) + vm_size = worker_node.value.vm_size + } + } + + dynamic "zookeeper_node" { + for_each = lookup(roles.value, "zookeeper_node", []) + content { + password = lookup(zookeeper_node.value, "password", null) + subnet_id = lookup(zookeeper_node.value, "subnet_id", null) + username = zookeeper_node.value.username + virtual_network_id = lookup(zookeeper_node.value, "virtual_network_id", null) + vm_size = zookeeper_node.value.vm_size + } + } + } + } +} +`, r.basic(data)) +} + +func (r HDInsightHadoopClusterResource) sshKeys(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +variable "ssh_key" { + default = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCqaZoyiz1qbdOQ8xEf6uEu1cCwYowo5FHtsBhqLoDnnp7KUTEBN+L2NxRIfQ781rxV6Iq5jSav6b2Q8z5KiseOlvKA/RF2wqU0UPYqQviQhLmW6THTpmrv/YkUCuzxDpsH7DUDhZcwySLKVVe0Qm3+5N2Ta6UYH3lsDf9R9wTP2K/+vAnflKebuypNlmocIvakFWoZda18FOmsOoIVXQ8HWFNCuw9ZCunMSN62QGamCe3dL5cXlkgHYv7ekJE15IA9aOJcM7e90oeTqo+7HTcWfdu0qQqPWY5ujyMw/llas8tsXY85LFqRnr3gJ02bAscjc477+X+j/gkpFoN1QEmt terraform@demo.tld" +} + +resource "azurerm_hdinsight_hadoop_cluster" "test" { + name = 
"acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + hadoop = "3.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + ssh_keys = [var.ssh_key] + } + + worker_node { + vm_size = "Standard_D4_v2" + username = "acctestusrvm" + ssh_keys = [var.ssh_key] + target_instance_count = 3 + } + + zookeeper_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + ssh_keys = [var.ssh_key] + } + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightHadoopClusterResource) updated(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_hadoop_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + hadoop = "3.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_D4_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 5 + } + + zookeeper_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + } + + tags = { + Hello = "World" + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightHadoopClusterResource) virtualNetwork(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctestsubnet%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" +} + +resource "azurerm_hdinsight_hadoop_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + hadoop = "3.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + worker_node { + vm_size = "Standard_D4_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ target_instance_count = 3 + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + zookeeper_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r HDInsightHadoopClusterResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctestsubnet%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" +} + +resource "azurerm_hdinsight_hadoop_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + hadoop = "3.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + worker_node { + vm_size = "Standard_D4_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 3 + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + zookeeper_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + } + + tags = { + Hello = "World" + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r HDInsightHadoopClusterResource) edgeNodeBasic(data acceptance.TestData, numEdgeNodes int, instanceType string) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_hadoop_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + hadoop = "3.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_D4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 2 + } + + zookeeper_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ } + + edge_node { + target_instance_count = %d + vm_size = "%s" + install_script_action { + name = "script1" + uri = "https://raw.githubusercontent.com/Azure/azure-quickstart-templates/master/101-hdinsight-linux-with-edge-node/scripts/EmptyNodeSetup.sh" + } + } + } +} +`, r.template(data), data.RandomInteger, numEdgeNodes, instanceType) +} + +func (r HDInsightHadoopClusterResource) gen2storage(data acceptance.TestData) string { + return fmt.Sprintf(` +%s +resource "azurerm_hdinsight_hadoop_cluster" "test" { + depends_on = [azurerm_role_assignment.test] + + name = "acctesthdi-%d" + resource_group_name = "${azurerm_resource_group.test.name}" + location = "${azurerm_resource_group.test.location}" + cluster_version = "4.0" + tier = "Standard" + component_version { + hadoop = "3.1" + } + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + storage_account_gen2 { + storage_resource_id = azurerm_storage_account.gen2test.id + filesystem_id = azurerm_storage_data_lake_gen2_filesystem.gen2test.id + managed_identity_resource_id = azurerm_user_assigned_identity.test.id + is_default = true + } + roles { + head_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + worker_node { + vm_size = "Standard_D4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 2 + } + zookeeper_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + } +} +`, r.gen2template(data), data.RandomInteger) +} + +func (r HDInsightHadoopClusterResource) gen2AndBlobStorage(data acceptance.TestData) string { + return fmt.Sprintf(` +%s +resource "azurerm_storage_account" "test" { + name = "acctestsa%s" + resource_group_name = "${azurerm_resource_group.test.name}" + location = "${azurerm_resource_group.test.location}" + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_container" "test" { + name = "acctest" + storage_account_name = "${azurerm_storage_account.test.name}" + container_access_type = "private" +} + +resource "azurerm_hdinsight_hadoop_cluster" "test" { + depends_on = [azurerm_role_assignment.test] + + name = "acctesthdi-%d" + resource_group_name = "${azurerm_resource_group.test.name}" + location = "${azurerm_resource_group.test.location}" + cluster_version = "4.0" + tier = "Standard" + component_version { + hadoop = "3.1" + } + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + storage_account_gen2 { + storage_resource_id = azurerm_storage_account.gen2test.id + filesystem_id = azurerm_storage_data_lake_gen2_filesystem.gen2test.id + managed_identity_resource_id = azurerm_user_assigned_identity.test.id + is_default = true + } + storage_account { + storage_container_id = "${azurerm_storage_container.test.id}" + storage_account_key = "${azurerm_storage_account.test.primary_access_key}" + is_default = false + } + roles { + head_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + worker_node { + vm_size = "Standard_D4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 2 + } + zookeeper_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ } + } +} +`, r.gen2template(data), data.RandomString, data.RandomInteger) +} + +func (HDInsightHadoopClusterResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestsa%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_container" "test" { + name = "acctest" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (HDInsightHadoopClusterResource) gen2template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_storage_account" "gen2test" { + name = "accgen2test%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_kind = "StorageV2" + account_tier = "Standard" + account_replication_type = "LRS" + is_hns_enabled = true +} + +resource "azurerm_storage_data_lake_gen2_filesystem" "gen2test" { + name = "acctest" + storage_account_id = azurerm_storage_account.gen2test.id +} + +resource "azurerm_user_assigned_identity" "test" { + resource_group_name = "${azurerm_resource_group.test.name}" + location = "${azurerm_resource_group.test.location}" + + name = "test-identity" +} + +data "azurerm_subscription" "primary" {} + + +resource "azurerm_role_assignment" "test" { + scope = "${data.azurerm_subscription.primary.id}" + role_definition_name = "Storage Blob Data Owner" + principal_id = "${azurerm_user_assigned_identity.test.principal_id}" +} + +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (r HDInsightHadoopClusterResource) tls(data acceptance.TestData) string { + return fmt.Sprintf(` +%s +resource "azurerm_hdinsight_hadoop_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + tls_min_version = "1.2" + component_version { + hadoop = "3.1" + } + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + roles { + head_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + worker_node { + vm_size = "Standard_D4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 2 + } + zookeeper_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightHadoopClusterResource) allMetastores(data acceptance.TestData) string { + return fmt.Sprintf(` +%s +resource "azurerm_sql_server" "test" { + name = "acctestsql-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + administrator_login = "sql_admin" + administrator_login_password = "TerrAform123!" 
+ version = "12.0" +} +resource "azurerm_sql_database" "hive" { + name = "hive" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + collation = "SQL_Latin1_General_CP1_CI_AS" + create_mode = "Default" + requested_service_objective_name = "GP_Gen5_2" +} +resource "azurerm_sql_database" "oozie" { + name = "oozie" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + collation = "SQL_Latin1_General_CP1_CI_AS" + create_mode = "Default" + requested_service_objective_name = "GP_Gen5_2" +} +resource "azurerm_sql_database" "ambari" { + name = "ambari" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + collation = "SQL_Latin1_General_CP1_CI_AS" + create_mode = "Default" + requested_service_objective_name = "GP_Gen5_2" +} +resource "azurerm_sql_firewall_rule" "AzureServices" { + name = "allow-azure-services" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_sql_server.test.name + start_ip_address = "0.0.0.0" + end_ip_address = "0.0.0.0" +} +resource "azurerm_hdinsight_hadoop_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + component_version { + hadoop = "3.1" + } + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + roles { + head_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + worker_node { + vm_size = "Standard_D4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 2 + } + zookeeper_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + } + metastores { + hive { + server = azurerm_sql_server.test.fully_qualified_domain_name + database_name = azurerm_sql_database.hive.name + username = azurerm_sql_server.test.administrator_login + password = azurerm_sql_server.test.administrator_login_password + } + oozie { + server = azurerm_sql_server.test.fully_qualified_domain_name + database_name = azurerm_sql_database.oozie.name + username = azurerm_sql_server.test.administrator_login + password = azurerm_sql_server.test.administrator_login_password + } + ambari { + server = azurerm_sql_server.test.fully_qualified_domain_name + database_name = azurerm_sql_database.ambari.name + username = azurerm_sql_server.test.administrator_login + password = azurerm_sql_server.test.administrator_login_password + } + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger) +} + +func (r HDInsightHadoopClusterResource) hiveMetastore(data acceptance.TestData) string { + return fmt.Sprintf(` +%s +resource "azurerm_sql_server" "test" { + name = "acctestsql-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + administrator_login = "sql_admin" + administrator_login_password = "TerrAform123!" 
+ version = "12.0" +} +resource "azurerm_sql_database" "hive" { + name = "hive" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + collation = "SQL_Latin1_General_CP1_CI_AS" + create_mode = "Default" + requested_service_objective_name = "GP_Gen5_2" +} +resource "azurerm_sql_firewall_rule" "AzureServices" { + name = "allow-azure-services" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_sql_server.test.name + start_ip_address = "0.0.0.0" + end_ip_address = "0.0.0.0" +} +resource "azurerm_hdinsight_hadoop_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + component_version { + hadoop = "3.1" + } + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + roles { + head_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + worker_node { + vm_size = "Standard_D4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 2 + } + zookeeper_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + } + metastores { + hive { + server = azurerm_sql_server.test.fully_qualified_domain_name + database_name = azurerm_sql_database.hive.name + username = azurerm_sql_server.test.administrator_login + password = azurerm_sql_server.test.administrator_login_password + } + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger) +} + +func (r HDInsightHadoopClusterResource) monitor(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestLAW-%s-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "PerGB2018" +} + +resource "azurerm_hdinsight_hadoop_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + hadoop = "3.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_D4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 2 + } + + zookeeper_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ } + } + + monitor { + log_analytics_workspace_id = azurerm_log_analytics_workspace.test.workspace_id + primary_key = azurerm_log_analytics_workspace.test.primary_shared_key + } +} +`, r.template(data), data.RandomString, data.RandomInteger, data.RandomInteger) +} + +func (r HDInsightHadoopClusterResource) updateGateway(data acceptance.TestData) string { + return fmt.Sprintf(` +%s +resource "azurerm_hdinsight_hadoop_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + component_version { + hadoop = "3.1" + } + gateway { + username = "acctestusrgw" + password = "TerrAformne3!" + } + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + roles { + head_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + worker_node { + vm_size = "Standard_D4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 2 + } + zookeeper_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + } +} +`, r.template(data), data.RandomInteger) +} diff --git a/azurerm/internal/services/hdinsight/hdinsight_hbase_cluster_resource.go b/azurerm/internal/services/hdinsight/hdinsight_hbase_cluster_resource.go index fbbf223bdd92..4223b1b85207 100644 --- a/azurerm/internal/services/hdinsight/hdinsight_hbase_cluster_resource.go +++ b/azurerm/internal/services/hdinsight/hdinsight_hbase_cluster_resource.go @@ -5,7 +5,9 @@ import ( "log" "time" - "github.com/Azure/azure-sdk-for-go/services/preview/hdinsight/mgmt/2018-06-01-preview/hdinsight" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hdinsight/parse" + + "github.com/Azure/azure-sdk-for-go/services/hdinsight/mgmt/2018-06-01/hdinsight" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" @@ -39,11 +41,11 @@ var hdInsightHBaseClusterZookeeperNodeDefinition = HDInsightNodeDefinition{ FixedTargetInstanceCount: utils.Int32(int32(3)), } -func resourceArmHDInsightHBaseCluster() *schema.Resource { +func resourceHDInsightHBaseCluster() *schema.Resource { return &schema.Resource{ - Create: resourceArmHDInsightHBaseClusterCreate, - Read: resourceArmHDInsightHBaseClusterRead, - Update: hdinsightClusterUpdate("HBase", resourceArmHDInsightHBaseClusterRead), + Create: resourceHDInsightHBaseClusterCreate, + Read: resourceHDInsightHBaseClusterRead, + Update: hdinsightClusterUpdate("HBase", resourceHDInsightHBaseClusterRead), Delete: hdinsightClusterDelete("HBase"), Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -98,11 +100,11 @@ func resourceArmHDInsightHBaseCluster() *schema.Resource { MaxItems: 1, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ - "head_node": SchemaHDInsightNodeDefinition("roles.0.head_node", hdInsightHBaseClusterHeadNodeDefinition), + "head_node": SchemaHDInsightNodeDefinition("roles.0.head_node", hdInsightHBaseClusterHeadNodeDefinition, true), - "worker_node": SchemaHDInsightNodeDefinition("roles.0.worker_node", hdInsightHBaseClusterWorkerNodeDefinition), + "worker_node": SchemaHDInsightNodeDefinition("roles.0.worker_node", 
hdInsightHBaseClusterWorkerNodeDefinition, true), - "zookeeper_node": SchemaHDInsightNodeDefinition("roles.0.zookeeper_node", hdInsightHBaseClusterZookeeperNodeDefinition), + "zookeeper_node": SchemaHDInsightNodeDefinition("roles.0.zookeeper_node", hdInsightHBaseClusterZookeeperNodeDefinition, true), }, }, }, @@ -124,14 +126,16 @@ func resourceArmHDInsightHBaseCluster() *schema.Resource { } } -func resourceArmHDInsightHBaseClusterCreate(d *schema.ResourceData, meta interface{}) error { +func resourceHDInsightHBaseClusterCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).HDInsight.ClustersClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId extensionsClient := meta.(*clients.Client).HDInsight.ExtensionsClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() name := d.Get("name").(string) resourceGroup := d.Get("resource_group_name").(string) + id := parse.NewClusterID(subscriptionId, resourceGroup, name) location := azure.NormalizeLocation(d.Get("location").(string)) clusterVersion := d.Get("cluster_version").(string) t := d.Get("tags").(map[string]interface{}) @@ -219,7 +223,7 @@ func resourceArmHDInsightHBaseClusterCreate(d *schema.ResourceData, meta interfa return fmt.Errorf("failure reading ID for HDInsight HBase Cluster %q (Resource Group %q)", name, resourceGroup) } - d.SetId(*read.ID) + d.SetId(id.ID()) // We can only enable monitoring after creation if v, ok := d.GetOk("monitor"); ok { @@ -229,23 +233,23 @@ func resourceArmHDInsightHBaseClusterCreate(d *schema.ResourceData, meta interfa } } - return resourceArmHDInsightHBaseClusterRead(d, meta) + return resourceHDInsightHBaseClusterRead(d, meta) } -func resourceArmHDInsightHBaseClusterRead(d *schema.ResourceData, meta interface{}) error { +func resourceHDInsightHBaseClusterRead(d *schema.ResourceData, meta interface{}) error { clustersClient := meta.(*clients.Client).HDInsight.ClustersClient configurationsClient := meta.(*clients.Client).HDInsight.ConfigurationsClient extensionsClient := meta.(*clients.Client).HDInsight.ExtensionsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.ClusterID(d.Id()) if err != nil { return err } resourceGroup := id.ResourceGroup - name := id.Path["clusters"] + name := id.Name resp, err := clustersClient.Get(ctx, resourceGroup, name) if err != nil { diff --git a/azurerm/internal/services/hdinsight/hdinsight_hbase_cluster_resource_test.go b/azurerm/internal/services/hdinsight/hdinsight_hbase_cluster_resource_test.go new file mode 100644 index 000000000000..7c6495418c3d --- /dev/null +++ b/azurerm/internal/services/hdinsight/hdinsight_hbase_cluster_resource_test.go @@ -0,0 +1,1202 @@ +package hdinsight_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hdinsight/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type HDInsightHBaseClusterResource struct { +} + +func 
TestAccHDInsightHBaseCluster_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hbase_cluster", "test") + r := HDInsightHBaseClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightHBaseCluster_gen2basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hbase_cluster", "test") + r := HDInsightHBaseClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.gen2basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightHBaseCluster_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hbase_cluster", "test") + r := HDInsightHBaseClusterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccHDInsightHBaseCluster_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hbase_cluster", "test") + r := HDInsightHBaseClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + { + Config: r.updated(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightHBaseCluster_sshKeys(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hbase_cluster", "test") + r := HDInsightHBaseClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.sshKeys(data), + Check: 
resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("storage_account", + "roles.0.head_node.0.ssh_keys", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.ssh_keys", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.ssh_keys", + "roles.0.zookeeper_node.0.vm_size"), + }) +} + +func TestAccHDInsightHBaseCluster_virtualNetwork(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hbase_cluster", "test") + r := HDInsightHBaseClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.virtualNetwork(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightHBaseCluster_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hbase_cluster", "test") + r := HDInsightHBaseClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightHBaseCluster_tls(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hbase_cluster", "test") + r := HDInsightHBaseClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.tls(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightHBaseCluster_allMetastores(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hbase_cluster", "test") + r := HDInsightHBaseClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.allMetastores(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account", + "metastores.0.hive.0.password", + "metastores.0.oozie.0.password", + 
"metastores.0.ambari.0.password"), + }) +} + +func TestAccHDInsightHBaseCluster_hiveMetastore(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hbase_cluster", "test") + r := HDInsightHBaseClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.hiveMetastore(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + }) +} + +func TestAccHDInsightHBaseCluster_updateMetastore(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hbase_cluster", "test") + r := HDInsightHBaseClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.hiveMetastore(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account", + "metastores.0.hive.0.password", + "metastores.0.oozie.0.password", + "metastores.0.ambari.0.password"), + { + Config: r.allMetastores(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account", + "metastores.0.hive.0.password", + "metastores.0.oozie.0.password", + "metastores.0.ambari.0.password"), + }) +} + +func TestAccHDInsightHBaseCluster_monitor(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hbase_cluster", "test") + r := HDInsightHBaseClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.monitor(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightHBaseCluster_updateMonitor(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_hbase_cluster", "test") + r := HDInsightHBaseClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + // No monitor + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + // Add monitor 
+ { + Config: r.monitor(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + // Change Log Analytics Workspace for the monitor + { + PreConfig: func() { + data.RandomString += "new" + }, + Config: r.monitor(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + // Remove monitor + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func (t HDInsightHBaseClusterResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ClusterID(state.ID) + if err != nil { + return nil, err + } + + resourceGroup := id.ResourceGroup + name := id.Name + + resp, err := clients.HDInsight.ClustersClient.Get(ctx, resourceGroup, name) + if err != nil { + return nil, fmt.Errorf("reading HDInsight HBase Cluster (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (r HDInsightHBaseClusterResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_hbase_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + hbase = "2.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 2 + } + + zookeeper_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ } + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightHBaseClusterResource) gen2basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_hbase_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = "${azurerm_resource_group.test.name}" + location = "${azurerm_resource_group.test.location}" + cluster_version = "4.0" + tier = "Standard" + + component_version { + hbase = "2.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account_gen2 { + storage_resource_id = azurerm_storage_account.gen2test.id + filesystem_id = azurerm_storage_data_lake_gen2_filesystem.gen2test.id + managed_identity_resource_id = azurerm_user_assigned_identity.test.id + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 2 + } + + zookeeper_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + } +} +`, r.gen2template(data), data.RandomInteger) +} + +func (r HDInsightHBaseClusterResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_hbase_cluster" "import" { + name = azurerm_hdinsight_hbase_cluster.test.name + resource_group_name = azurerm_hdinsight_hbase_cluster.test.resource_group_name + location = azurerm_hdinsight_hbase_cluster.test.location + cluster_version = azurerm_hdinsight_hbase_cluster.test.cluster_version + tier = azurerm_hdinsight_hbase_cluster.test.tier + dynamic "component_version" { + for_each = azurerm_hdinsight_hbase_cluster.test.component_version + content { + hbase = component_version.value.hbase + } + } + dynamic "gateway" { + for_each = azurerm_hdinsight_hbase_cluster.test.gateway + content { + enabled = gateway.value.enabled + password = gateway.value.password + username = gateway.value.username + } + } + dynamic "storage_account" { + for_each = azurerm_hdinsight_hbase_cluster.test.storage_account + content { + is_default = storage_account.value.is_default + storage_account_key = storage_account.value.storage_account_key + storage_container_id = storage_account.value.storage_container_id + } + } + dynamic "roles" { + for_each = azurerm_hdinsight_hbase_cluster.test.roles + content { + dynamic "head_node" { + for_each = lookup(roles.value, "head_node", []) + content { + password = lookup(head_node.value, "password", null) + subnet_id = lookup(head_node.value, "subnet_id", null) + username = head_node.value.username + virtual_network_id = lookup(head_node.value, "virtual_network_id", null) + vm_size = head_node.value.vm_size + } + } + + dynamic "worker_node" { + for_each = lookup(roles.value, "worker_node", []) + content { + password = lookup(worker_node.value, "password", null) + subnet_id = lookup(worker_node.value, "subnet_id", null) + target_instance_count = worker_node.value.target_instance_count + username = worker_node.value.username + virtual_network_id = lookup(worker_node.value, "virtual_network_id", null) + vm_size = worker_node.value.vm_size + } + } + + dynamic "zookeeper_node" { + for_each = lookup(roles.value, "zookeeper_node", []) + content { + password = lookup(zookeeper_node.value, "password", null) + subnet_id = lookup(zookeeper_node.value, "subnet_id", null) + username = zookeeper_node.value.username + 
virtual_network_id = lookup(zookeeper_node.value, "virtual_network_id", null) + vm_size = zookeeper_node.value.vm_size + } + } + } + } +} +`, r.basic(data)) +} + +func (r HDInsightHBaseClusterResource) sshKeys(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +variable "ssh_key" { + default = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCqaZoyiz1qbdOQ8xEf6uEu1cCwYowo5FHtsBhqLoDnnp7KUTEBN+L2NxRIfQ781rxV6Iq5jSav6b2Q8z5KiseOlvKA/RF2wqU0UPYqQviQhLmW6THTpmrv/YkUCuzxDpsH7DUDhZcwySLKVVe0Qm3+5N2Ta6UYH3lsDf9R9wTP2K/+vAnflKebuypNlmocIvakFWoZda18FOmsOoIVXQ8HWFNCuw9ZCunMSN62QGamCe3dL5cXlkgHYv7ekJE15IA9aOJcM7e90oeTqo+7HTcWfdu0qQqPWY5ujyMw/llas8tsXY85LFqRnr3gJ02bAscjc477+X+j/gkpFoN1QEmt terraform@demo.tld" +} + +resource "azurerm_hdinsight_hbase_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + hbase = "2.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + ssh_keys = [var.ssh_key] + } + + worker_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + ssh_keys = [var.ssh_key] + target_instance_count = 3 + } + + zookeeper_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + ssh_keys = [var.ssh_key] + } + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightHBaseClusterResource) updated(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_hbase_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + hbase = "2.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 5 + } + + zookeeper_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ } + } + + tags = { + Hello = "World" + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightHBaseClusterResource) virtualNetwork(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctestsubnet%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" +} + +resource "azurerm_hdinsight_hbase_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + hbase = "2.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + worker_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 3 + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + zookeeper_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r HDInsightHBaseClusterResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctestsubnet%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" +} + +resource "azurerm_hdinsight_hbase_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + hbase = "2.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + worker_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ target_instance_count = 3 + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + zookeeper_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + } + + tags = { + Hello = "World" + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (HDInsightHBaseClusterResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestsa%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_container" "test" { + name = "acctest" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (HDInsightHBaseClusterResource) gen2template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_storage_account" "gen2test" { + depends_on = [azurerm_role_assignment.test] + + name = "accgen2test%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_kind = "StorageV2" + account_tier = "Standard" + account_replication_type = "LRS" + is_hns_enabled = true +} + +resource "azurerm_storage_data_lake_gen2_filesystem" "gen2test" { + name = "acctest" + storage_account_id = azurerm_storage_account.gen2test.id +} + +resource "azurerm_user_assigned_identity" "test" { + resource_group_name = "${azurerm_resource_group.test.name}" + location = "${azurerm_resource_group.test.location}" + + name = "test-identity" +} + +data "azurerm_subscription" "primary" {} + +resource "azurerm_role_assignment" "test" { + scope = "${data.azurerm_subscription.primary.id}" + role_definition_name = "Storage Blob Data Owner" + principal_id = "${azurerm_user_assigned_identity.test.principal_id}" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (r HDInsightHBaseClusterResource) tls(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_hbase_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + tls_min_version = "1.2" + + component_version { + hbase = "2.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 2 + } + + zookeeper_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ } + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightHBaseClusterResource) allMetastores(data acceptance.TestData) string { + return fmt.Sprintf(` +%s +resource "azurerm_sql_server" "test" { + name = "acctestsql-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + administrator_login = "sql_admin" + administrator_login_password = "TerrAform123!" + version = "12.0" +} +resource "azurerm_sql_database" "hive" { + name = "hive" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + collation = "SQL_Latin1_General_CP1_CI_AS" + create_mode = "Default" + requested_service_objective_name = "GP_Gen5_2" +} +resource "azurerm_sql_database" "oozie" { + name = "oozie" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + collation = "SQL_Latin1_General_CP1_CI_AS" + create_mode = "Default" + requested_service_objective_name = "GP_Gen5_2" +} +resource "azurerm_sql_database" "ambari" { + name = "ambari" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + collation = "SQL_Latin1_General_CP1_CI_AS" + create_mode = "Default" + requested_service_objective_name = "GP_Gen5_2" +} +resource "azurerm_sql_firewall_rule" "AzureServices" { + name = "allow-azure-services" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_sql_server.test.name + start_ip_address = "0.0.0.0" + end_ip_address = "0.0.0.0" +} +resource "azurerm_hdinsight_hbase_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + component_version { + hbase = "2.1" + } + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + roles { + head_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + worker_node { + vm_size = "Standard_D4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 2 + } + zookeeper_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ } + } + metastores { + hive { + server = azurerm_sql_server.test.fully_qualified_domain_name + database_name = azurerm_sql_database.hive.name + username = azurerm_sql_server.test.administrator_login + password = azurerm_sql_server.test.administrator_login_password + } + oozie { + server = azurerm_sql_server.test.fully_qualified_domain_name + database_name = azurerm_sql_database.oozie.name + username = azurerm_sql_server.test.administrator_login + password = azurerm_sql_server.test.administrator_login_password + } + ambari { + server = azurerm_sql_server.test.fully_qualified_domain_name + database_name = azurerm_sql_database.ambari.name + username = azurerm_sql_server.test.administrator_login + password = azurerm_sql_server.test.administrator_login_password + } + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger) +} + +func (r HDInsightHBaseClusterResource) hiveMetastore(data acceptance.TestData) string { + return fmt.Sprintf(` +%s +resource "azurerm_sql_server" "test" { + name = "acctestsql-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + administrator_login = "sql_admin" + administrator_login_password = "TerrAform123!" + version = "12.0" +} +resource "azurerm_sql_database" "hive" { + name = "hive" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + collation = "SQL_Latin1_General_CP1_CI_AS" + create_mode = "Default" + requested_service_objective_name = "GP_Gen5_2" +} +resource "azurerm_sql_firewall_rule" "AzureServices" { + name = "allow-azure-services" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_sql_server.test.name + start_ip_address = "0.0.0.0" + end_ip_address = "0.0.0.0" +} +resource "azurerm_hdinsight_hbase_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + component_version { + hbase = "2.1" + } + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + roles { + head_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + worker_node { + vm_size = "Standard_D4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 2 + } + zookeeper_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ } + } + metastores { + hive { + server = azurerm_sql_server.test.fully_qualified_domain_name + database_name = azurerm_sql_database.hive.name + username = azurerm_sql_server.test.administrator_login + password = azurerm_sql_server.test.administrator_login_password + } + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger) +} + +func (r HDInsightHBaseClusterResource) monitor(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestLAW-%s-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "PerGB2018" +} + +resource "azurerm_hdinsight_hbase_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + hbase = "2.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 2 + } + + zookeeper_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + } + + monitor { + log_analytics_workspace_id = azurerm_log_analytics_workspace.test.workspace_id + primary_key = azurerm_log_analytics_workspace.test.primary_shared_key + } +} +`, r.template(data), data.RandomString, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/hdinsight/hdinsight_interactive_query_cluster_resource.go b/azurerm/internal/services/hdinsight/hdinsight_interactive_query_cluster_resource.go index 9c326cd75128..d8d2098963f3 100644 --- a/azurerm/internal/services/hdinsight/hdinsight_interactive_query_cluster_resource.go +++ b/azurerm/internal/services/hdinsight/hdinsight_interactive_query_cluster_resource.go @@ -5,7 +5,9 @@ import ( "log" "time" - "github.com/Azure/azure-sdk-for-go/services/preview/hdinsight/mgmt/2018-06-01-preview/hdinsight" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hdinsight/parse" + + "github.com/Azure/azure-sdk-for-go/services/hdinsight/mgmt/2018-06-01/hdinsight" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" @@ -39,11 +41,11 @@ var hdInsightInteractiveQueryClusterZookeeperNodeDefinition = HDInsightNodeDefin FixedTargetInstanceCount: utils.Int32(int32(3)), } -func resourceArmHDInsightInteractiveQueryCluster() *schema.Resource { +func resourceHDInsightInteractiveQueryCluster() *schema.Resource { return &schema.Resource{ - Create: resourceArmHDInsightInteractiveQueryClusterCreate, - Read: resourceArmHDInsightInteractiveQueryClusterRead, - Update: hdinsightClusterUpdate("Interactive Query", resourceArmHDInsightInteractiveQueryClusterRead), + Create: resourceHDInsightInteractiveQueryClusterCreate, + Read: resourceHDInsightInteractiveQueryClusterRead, + Update: hdinsightClusterUpdate("Interactive Query", 
resourceHDInsightInteractiveQueryClusterRead), Delete: hdinsightClusterDelete("Interactive Query"), Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -98,11 +100,11 @@ func resourceArmHDInsightInteractiveQueryCluster() *schema.Resource { MaxItems: 1, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ - "head_node": SchemaHDInsightNodeDefinition("roles.0.head_node", hdInsightInteractiveQueryClusterHeadNodeDefinition), + "head_node": SchemaHDInsightNodeDefinition("roles.0.head_node", hdInsightInteractiveQueryClusterHeadNodeDefinition, true), - "worker_node": SchemaHDInsightNodeDefinition("roles.0.worker_node", hdInsightInteractiveQueryClusterWorkerNodeDefinition), + "worker_node": SchemaHDInsightNodeDefinition("roles.0.worker_node", hdInsightInteractiveQueryClusterWorkerNodeDefinition, true), - "zookeeper_node": SchemaHDInsightNodeDefinition("roles.0.zookeeper_node", hdInsightInteractiveQueryClusterZookeeperNodeDefinition), + "zookeeper_node": SchemaHDInsightNodeDefinition("roles.0.zookeeper_node", hdInsightInteractiveQueryClusterZookeeperNodeDefinition, true), }, }, }, @@ -124,14 +126,16 @@ func resourceArmHDInsightInteractiveQueryCluster() *schema.Resource { } } -func resourceArmHDInsightInteractiveQueryClusterCreate(d *schema.ResourceData, meta interface{}) error { +func resourceHDInsightInteractiveQueryClusterCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).HDInsight.ClustersClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId extensionsClient := meta.(*clients.Client).HDInsight.ExtensionsClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() name := d.Get("name").(string) resourceGroup := d.Get("resource_group_name").(string) + id := parse.NewClusterID(subscriptionId, resourceGroup, name) location := azure.NormalizeLocation(d.Get("location").(string)) clusterVersion := d.Get("cluster_version").(string) t := d.Get("tags").(map[string]interface{}) @@ -219,7 +223,7 @@ func resourceArmHDInsightInteractiveQueryClusterCreate(d *schema.ResourceData, m return fmt.Errorf("failure reading ID for HDInsight Interactive Query Cluster %q (Resource Group %q)", name, resourceGroup) } - d.SetId(*read.ID) + d.SetId(id.ID()) // We can only enable monitoring after creation if v, ok := d.GetOk("monitor"); ok { @@ -229,23 +233,23 @@ func resourceArmHDInsightInteractiveQueryClusterCreate(d *schema.ResourceData, m } } - return resourceArmHDInsightInteractiveQueryClusterRead(d, meta) + return resourceHDInsightInteractiveQueryClusterRead(d, meta) } -func resourceArmHDInsightInteractiveQueryClusterRead(d *schema.ResourceData, meta interface{}) error { +func resourceHDInsightInteractiveQueryClusterRead(d *schema.ResourceData, meta interface{}) error { clustersClient := meta.(*clients.Client).HDInsight.ClustersClient configurationsClient := meta.(*clients.Client).HDInsight.ConfigurationsClient extensionsClient := meta.(*clients.Client).HDInsight.ExtensionsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.ClusterID(d.Id()) if err != nil { return err } resourceGroup := id.ResourceGroup - name := id.Path["clusters"] + name := id.Name resp, err := clustersClient.Get(ctx, resourceGroup, name) if err != nil { diff --git a/azurerm/internal/services/hdinsight/hdinsight_interactive_query_cluster_resource_test.go 
b/azurerm/internal/services/hdinsight/hdinsight_interactive_query_cluster_resource_test.go new file mode 100644 index 000000000000..c602dc9a431a --- /dev/null +++ b/azurerm/internal/services/hdinsight/hdinsight_interactive_query_cluster_resource_test.go @@ -0,0 +1,1202 @@ +package hdinsight_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hdinsight/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type HDInsightInteractiveQueryClusterResource struct { +} + +func TestAccHDInsightInteractiveQueryCluster_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_interactive_query_cluster", "test") + r := HDInsightInteractiveQueryClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightInteractiveQueryCluster_gen2basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_interactive_query_cluster", "test") + r := HDInsightInteractiveQueryClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.gen2basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightInteractiveQueryCluster_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_interactive_query_cluster", "test") + r := HDInsightInteractiveQueryClusterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccHDInsightInteractiveQueryCluster_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_interactive_query_cluster", "test") + r := HDInsightInteractiveQueryClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + 
check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + { + Config: r.updated(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightInteractiveQueryCluster_sshKeys(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_interactive_query_cluster", "test") + r := HDInsightInteractiveQueryClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.sshKeys(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("storage_account", + "roles.0.head_node.0.ssh_keys", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.ssh_keys", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.ssh_keys", + "roles.0.zookeeper_node.0.vm_size"), + }) +} + +func TestAccHDInsightInteractiveQueryCluster_virtualNetwork(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_interactive_query_cluster", "test") + r := HDInsightInteractiveQueryClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.virtualNetwork(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightInteractiveQueryCluster_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_interactive_query_cluster", "test") + r := HDInsightInteractiveQueryClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightInteractiveQueryCluster_tls(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_interactive_query_cluster", "test") + r := HDInsightInteractiveQueryClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.tls(data), + Check: 
resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightInteractiveQueryCluster_allMetastores(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_interactive_query_cluster", "test") + r := HDInsightInteractiveQueryClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.allMetastores(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account", + "metastores.0.hive.0.password", + "metastores.0.oozie.0.password", + "metastores.0.ambari.0.password"), + }) +} + +func TestAccHDInsightInteractiveQueryCluster_hiveMetastore(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_interactive_query_cluster", "test") + r := HDInsightInteractiveQueryClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.hiveMetastore(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + }) +} + +func TestAccHDInsightInteractiveQueryCluster_updateMetastore(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_interactive_query_cluster", "test") + r := HDInsightInteractiveQueryClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.hiveMetastore(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account", + "metastores.0.hive.0.password", + "metastores.0.oozie.0.password", + "metastores.0.ambari.0.password"), + { + Config: r.allMetastores(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account", + "metastores.0.hive.0.password", + "metastores.0.oozie.0.password", + "metastores.0.ambari.0.password"), + }) +} + +func TestAccHDInsightInteractiveQueryCluster_monitor(t *testing.T) { + data := acceptance.BuildTestData(t, 
"azurerm_hdinsight_interactive_query_cluster", "test") + r := HDInsightInteractiveQueryClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.monitor(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightInteractiveQueryCluster_updateMonitor(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_interactive_query_cluster", "test") + r := HDInsightInteractiveQueryClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + // No monitor + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + // Add monitor + { + Config: r.monitor(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + // Change Log Analytics Workspace for the monitor + { + PreConfig: func() { + data.RandomString += "new" + }, + Config: r.monitor(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + // Remove monitor + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func (t HDInsightInteractiveQueryClusterResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ClusterID(state.ID) + if err != nil { + return nil, err + } + + resourceGroup := id.ResourceGroup + name := id.Name + + resp, err := clients.HDInsight.ClustersClient.Get(ctx, resourceGroup, name) + if err != nil { + return nil, 
fmt.Errorf("reading HDInsight Interactive Query Cluster (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (r HDInsightInteractiveQueryClusterResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_interactive_query_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + interactive_hive = "3.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D13_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_D14_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 2 + } + + zookeeper_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightInteractiveQueryClusterResource) gen2basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_interactive_query_cluster" "test" { + depends_on = [azurerm_role_assignment.test] + + name = "acctesthdi-%d" + resource_group_name = "${azurerm_resource_group.test.name}" + location = "${azurerm_resource_group.test.location}" + cluster_version = "4.0" + tier = "Standard" + + component_version { + interactive_hive = "3.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account_gen2 { + storage_resource_id = azurerm_storage_account.gen2test.id + filesystem_id = azurerm_storage_data_lake_gen2_filesystem.gen2test.id + managed_identity_resource_id = azurerm_user_assigned_identity.test.id + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D13_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_D14_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 2 + } + + zookeeper_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ } + } +} +`, r.gen2template(data), data.RandomInteger) +} + +func (r HDInsightInteractiveQueryClusterResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_interactive_query_cluster" "import" { + name = azurerm_hdinsight_interactive_query_cluster.test.name + resource_group_name = azurerm_hdinsight_interactive_query_cluster.test.resource_group_name + location = azurerm_hdinsight_interactive_query_cluster.test.location + cluster_version = azurerm_hdinsight_interactive_query_cluster.test.cluster_version + tier = azurerm_hdinsight_interactive_query_cluster.test.tier + dynamic "component_version" { + for_each = azurerm_hdinsight_interactive_query_cluster.test.component_version + content { + interactive_hive = component_version.value.interactive_hive + } + } + dynamic "gateway" { + for_each = azurerm_hdinsight_interactive_query_cluster.test.gateway + content { + enabled = gateway.value.enabled + password = gateway.value.password + username = gateway.value.username + } + } + dynamic "storage_account" { + for_each = azurerm_hdinsight_interactive_query_cluster.test.storage_account + content { + is_default = storage_account.value.is_default + storage_account_key = storage_account.value.storage_account_key + storage_container_id = storage_account.value.storage_container_id + } + } + dynamic "roles" { + for_each = azurerm_hdinsight_interactive_query_cluster.test.roles + content { + dynamic "head_node" { + for_each = lookup(roles.value, "head_node", []) + content { + password = lookup(head_node.value, "password", null) + subnet_id = lookup(head_node.value, "subnet_id", null) + username = head_node.value.username + virtual_network_id = lookup(head_node.value, "virtual_network_id", null) + vm_size = head_node.value.vm_size + } + } + + dynamic "worker_node" { + for_each = lookup(roles.value, "worker_node", []) + content { + password = lookup(worker_node.value, "password", null) + subnet_id = lookup(worker_node.value, "subnet_id", null) + target_instance_count = worker_node.value.target_instance_count + username = worker_node.value.username + virtual_network_id = lookup(worker_node.value, "virtual_network_id", null) + vm_size = worker_node.value.vm_size + } + } + + dynamic "zookeeper_node" { + for_each = lookup(roles.value, "zookeeper_node", []) + content { + password = lookup(zookeeper_node.value, "password", null) + subnet_id = lookup(zookeeper_node.value, "subnet_id", null) + username = zookeeper_node.value.username + virtual_network_id = lookup(zookeeper_node.value, "virtual_network_id", null) + vm_size = zookeeper_node.value.vm_size + } + } + } + } +} +`, r.basic(data)) +} + +func (r HDInsightInteractiveQueryClusterResource) sshKeys(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +variable "ssh_key" { + default = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCqaZoyiz1qbdOQ8xEf6uEu1cCwYowo5FHtsBhqLoDnnp7KUTEBN+L2NxRIfQ781rxV6Iq5jSav6b2Q8z5KiseOlvKA/RF2wqU0UPYqQviQhLmW6THTpmrv/YkUCuzxDpsH7DUDhZcwySLKVVe0Qm3+5N2Ta6UYH3lsDf9R9wTP2K/+vAnflKebuypNlmocIvakFWoZda18FOmsOoIVXQ8HWFNCuw9ZCunMSN62QGamCe3dL5cXlkgHYv7ekJE15IA9aOJcM7e90oeTqo+7HTcWfdu0qQqPWY5ujyMw/llas8tsXY85LFqRnr3gJ02bAscjc477+X+j/gkpFoN1QEmt terraform@demo.tld" +} + +resource "azurerm_hdinsight_interactive_query_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + interactive_hive = "3.1" + } + + gateway { + 
enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D13_V2" + username = "acctestusrvm" + ssh_keys = [var.ssh_key] + } + + worker_node { + vm_size = "Standard_D14_V2" + username = "acctestusrvm" + ssh_keys = [var.ssh_key] + target_instance_count = 3 + } + + zookeeper_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + ssh_keys = [var.ssh_key] + } + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightInteractiveQueryClusterResource) updated(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_interactive_query_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + interactive_hive = "3.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D13_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_D14_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 5 + } + + zookeeper_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + } + + tags = { + Hello = "World" + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightInteractiveQueryClusterResource) virtualNetwork(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctestsubnet%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" +} + +resource "azurerm_hdinsight_interactive_query_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + interactive_hive = "3.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D13_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + worker_node { + vm_size = "Standard_D14_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 3 + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + zookeeper_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r HDInsightInteractiveQueryClusterResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctestsubnet%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" +} + +resource "azurerm_hdinsight_interactive_query_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + interactive_hive = "3.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D13_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + worker_node { + vm_size = "Standard_D14_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 3 + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + zookeeper_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + } + + tags = { + Hello = "World" + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (HDInsightInteractiveQueryClusterResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestsa%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_container" "test" { + name = "acctest" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (HDInsightInteractiveQueryClusterResource) gen2template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_storage_account" "gen2test" { + name = "accgen2test%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_kind = "StorageV2" + account_tier = "Standard" + account_replication_type = "LRS" + is_hns_enabled = true +} + +resource "azurerm_storage_data_lake_gen2_filesystem" "gen2test" { + name = "acctest" + storage_account_id = azurerm_storage_account.gen2test.id +} + +resource "azurerm_user_assigned_identity" "test" { + resource_group_name = "${azurerm_resource_group.test.name}" + location = "${azurerm_resource_group.test.location}" + + name = "test-identity" +} + +data "azurerm_subscription" "primary" {} + +resource "azurerm_role_assignment" "test" { + scope = "${data.azurerm_subscription.primary.id}" + role_definition_name = "Storage Blob Data Owner" + principal_id = "${azurerm_user_assigned_identity.test.principal_id}" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (r HDInsightInteractiveQueryClusterResource) tls(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_interactive_query_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + tls_min_version = "1.2" + + component_version { + interactive_hive = "3.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D13_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_D14_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 2 + } + + zookeeper_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ } + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightInteractiveQueryClusterResource) allMetastores(data acceptance.TestData) string { + return fmt.Sprintf(` +%s +resource "azurerm_sql_server" "test" { + name = "acctestsql-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + administrator_login = "sql_admin" + administrator_login_password = "TerrAform123!" + version = "12.0" +} +resource "azurerm_sql_database" "hive" { + name = "hive" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + collation = "SQL_Latin1_General_CP1_CI_AS" + create_mode = "Default" + requested_service_objective_name = "GP_Gen5_2" +} +resource "azurerm_sql_database" "oozie" { + name = "oozie" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + collation = "SQL_Latin1_General_CP1_CI_AS" + create_mode = "Default" + requested_service_objective_name = "GP_Gen5_2" +} +resource "azurerm_sql_database" "ambari" { + name = "ambari" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + collation = "SQL_Latin1_General_CP1_CI_AS" + create_mode = "Default" + requested_service_objective_name = "GP_Gen5_2" +} +resource "azurerm_sql_firewall_rule" "AzureServices" { + name = "allow-azure-services" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_sql_server.test.name + start_ip_address = "0.0.0.0" + end_ip_address = "0.0.0.0" +} +resource "azurerm_hdinsight_interactive_query_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + component_version { + interactive_hive = "3.1" + } + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + roles { + head_node { + vm_size = "Standard_D13_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + worker_node { + vm_size = "Standard_D13_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 2 + } + zookeeper_node { + vm_size = "Standard_D13_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ } + } + metastores { + hive { + server = azurerm_sql_server.test.fully_qualified_domain_name + database_name = azurerm_sql_database.hive.name + username = azurerm_sql_server.test.administrator_login + password = azurerm_sql_server.test.administrator_login_password + } + oozie { + server = azurerm_sql_server.test.fully_qualified_domain_name + database_name = azurerm_sql_database.oozie.name + username = azurerm_sql_server.test.administrator_login + password = azurerm_sql_server.test.administrator_login_password + } + ambari { + server = azurerm_sql_server.test.fully_qualified_domain_name + database_name = azurerm_sql_database.ambari.name + username = azurerm_sql_server.test.administrator_login + password = azurerm_sql_server.test.administrator_login_password + } + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger) +} + +func (r HDInsightInteractiveQueryClusterResource) hiveMetastore(data acceptance.TestData) string { + return fmt.Sprintf(` +%s +resource "azurerm_sql_server" "test" { + name = "acctestsql-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + administrator_login = "sql_admin" + administrator_login_password = "TerrAform123!" + version = "12.0" +} +resource "azurerm_sql_database" "hive" { + name = "hive" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + collation = "SQL_Latin1_General_CP1_CI_AS" + create_mode = "Default" + requested_service_objective_name = "GP_Gen5_2" +} +resource "azurerm_sql_firewall_rule" "AzureServices" { + name = "allow-azure-services" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_sql_server.test.name + start_ip_address = "0.0.0.0" + end_ip_address = "0.0.0.0" +} +resource "azurerm_hdinsight_interactive_query_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + component_version { + interactive_hive = "3.1" + } + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + roles { + head_node { + vm_size = "Standard_D13_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + worker_node { + vm_size = "Standard_D13_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 2 + } + zookeeper_node { + vm_size = "Standard_D13_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ } + } + metastores { + hive { + server = azurerm_sql_server.test.fully_qualified_domain_name + database_name = azurerm_sql_database.hive.name + username = azurerm_sql_server.test.administrator_login + password = azurerm_sql_server.test.administrator_login_password + } + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger) +} + +func (r HDInsightInteractiveQueryClusterResource) monitor(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestLAW-%s-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "PerGB2018" +} + +resource "azurerm_hdinsight_interactive_query_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + interactive_hive = "3.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D13_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_D14_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 2 + } + + zookeeper_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + } + + monitor { + log_analytics_workspace_id = azurerm_log_analytics_workspace.test.workspace_id + primary_key = azurerm_log_analytics_workspace.test.primary_shared_key + } +} +`, r.template(data), data.RandomString, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/hdinsight/hdinsight_kafka_cluster_resource.go b/azurerm/internal/services/hdinsight/hdinsight_kafka_cluster_resource.go index da074f7980c0..652ccd3e4b8a 100644 --- a/azurerm/internal/services/hdinsight/hdinsight_kafka_cluster_resource.go +++ b/azurerm/internal/services/hdinsight/hdinsight_kafka_cluster_resource.go @@ -1,11 +1,18 @@ package hdinsight import ( + "context" "fmt" "log" "time" - "github.com/Azure/azure-sdk-for-go/services/preview/hdinsight/mgmt/2018-06-01-preview/hdinsight" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hdinsight/parse" + + "github.com/Azure/azure-sdk-for-go/services/graphrbac/1.6/graphrbac" + + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + + "github.com/Azure/azure-sdk-for-go/services/hdinsight/mgmt/2018-06-01/hdinsight" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" @@ -40,11 +47,18 @@ var hdInsightKafkaClusterZookeeperNodeDefinition = HDInsightNodeDefinition{ FixedTargetInstanceCount: utils.Int32(int32(3)), } -func resourceArmHDInsightKafkaCluster() *schema.Resource { +var hdInsightKafkaClusterKafkaManagementNodeDefinition = HDInsightNodeDefinition{ + CanSpecifyInstanceCount: false, + MinInstanceCount: 2, + CanSpecifyDisks: false, + FixedTargetInstanceCount: utils.Int32(int32(2)), +} + +func resourceHDInsightKafkaCluster() *schema.Resource { return &schema.Resource{ - Create: resourceArmHDInsightKafkaClusterCreate, - Read: 
resourceArmHDInsightKafkaClusterRead, - Update: hdinsightClusterUpdate("Kafka", resourceArmHDInsightKafkaClusterRead), + Create: resourceHDInsightKafkaClusterCreate, + Read: resourceHDInsightKafkaClusterRead, + Update: hdinsightClusterUpdate("Kafka", resourceHDInsightKafkaClusterRead), Delete: hdinsightClusterDelete("Kafka"), Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -99,13 +113,32 @@ func resourceArmHDInsightKafkaCluster() *schema.Resource { MaxItems: 1, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ - "head_node": SchemaHDInsightNodeDefinition("roles.0.head_node", hdInsightKafkaClusterHeadNodeDefinition), + "head_node": SchemaHDInsightNodeDefinition("roles.0.head_node", hdInsightKafkaClusterHeadNodeDefinition, true), - "worker_node": SchemaHDInsightNodeDefinition("roles.0.worker_node", hdInsightKafkaClusterWorkerNodeDefinition), + "worker_node": SchemaHDInsightNodeDefinition("roles.0.worker_node", hdInsightKafkaClusterWorkerNodeDefinition, true), + + "zookeeper_node": SchemaHDInsightNodeDefinition("roles.0.zookeeper_node", hdInsightKafkaClusterZookeeperNodeDefinition, true), + + "kafka_management_node": SchemaHDInsightNodeDefinition("roles.0.kafka_management_node", hdInsightKafkaClusterKafkaManagementNodeDefinition, false), + }, + }, + }, - "zookeeper_node": SchemaHDInsightNodeDefinition("roles.0.zookeeper_node", hdInsightKafkaClusterZookeeperNodeDefinition), + "rest_proxy": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "security_group_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.IsUUID, + }, }, }, + RequiredWith: []string{"roles.0.kafka_management_node"}, }, "tags": tags.Schema(), @@ -115,6 +148,11 @@ func resourceArmHDInsightKafkaCluster() *schema.Resource { Computed: true, }, + "kafka_rest_proxy_endpoint": { + Type: schema.TypeString, + Computed: true, + }, + "ssh_endpoint": { Type: schema.TypeString, Computed: true, @@ -125,14 +163,17 @@ func resourceArmHDInsightKafkaCluster() *schema.Resource { } } -func resourceArmHDInsightKafkaClusterCreate(d *schema.ResourceData, meta interface{}) error { +func resourceHDInsightKafkaClusterCreate(d *schema.ResourceData, meta interface{}) error { + groupClient := meta.(*clients.Client).AzureAD.GroupsClient client := meta.(*clients.Client).HDInsight.ClustersClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId extensionsClient := meta.(*clients.Client).HDInsight.ExtensionsClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() name := d.Get("name").(string) resourceGroup := d.Get("resource_group_name").(string) + id := parse.NewClusterID(subscriptionId, resourceGroup, name) location := azure.NormalizeLocation(d.Get("location").(string)) clusterVersion := d.Get("cluster_version").(string) t := d.Get("tags").(map[string]interface{}) @@ -159,9 +200,10 @@ func resourceArmHDInsightKafkaClusterCreate(d *schema.ResourceData, meta interfa } kafkaRoles := hdInsightRoleDefinition{ - HeadNodeDef: hdInsightKafkaClusterHeadNodeDefinition, - WorkerNodeDef: hdInsightKafkaClusterWorkerNodeDefinition, - ZookeeperNodeDef: hdInsightKafkaClusterZookeeperNodeDefinition, + HeadNodeDef: hdInsightKafkaClusterHeadNodeDefinition, + WorkerNodeDef: hdInsightKafkaClusterWorkerNodeDefinition, + ZookeeperNodeDef: hdInsightKafkaClusterZookeeperNodeDefinition, + KafkaManagementNodeDef: &hdInsightKafkaClusterKafkaManagementNodeDefinition, } rolesRaw 
:= d.Get("roles").([]interface{}) roles, err := expandHDInsightRoles(rolesRaw, kafkaRoles) @@ -180,6 +222,11 @@ func resourceArmHDInsightKafkaClusterCreate(d *schema.ResourceData, meta interfa return tf.ImportAsExistsError("azurerm_hdinsight_kafka_cluster", *existing.ID) } + kafkaRestProperty, err := expandKafkaRestProxyProperty(ctx, groupClient, d.Get("rest_proxy").([]interface{})) + if err != nil { + return fmt.Errorf("expanding kafka rest proxy property") + } + params := hdinsight.ClusterCreateParametersExtended{ Location: utils.String(location), Properties: &hdinsight.ClusterCreateProperties{ @@ -198,10 +245,12 @@ func resourceArmHDInsightKafkaClusterCreate(d *schema.ResourceData, meta interfa ComputeProfile: &hdinsight.ComputeProfile{ Roles: roles, }, + KafkaRestProperties: kafkaRestProperty, }, Tags: tags.Expand(t), Identity: identity, } + future, err := client.Create(ctx, resourceGroup, name, params) if err != nil { return fmt.Errorf("failure creating HDInsight Kafka Cluster %q (Resource Group %q): %+v", name, resourceGroup, err) @@ -220,7 +269,7 @@ func resourceArmHDInsightKafkaClusterCreate(d *schema.ResourceData, meta interfa return fmt.Errorf("failure reading ID for HDInsight Kafka Cluster %q (Resource Group %q)", name, resourceGroup) } - d.SetId(*read.ID) + d.SetId(id.ID()) // We can only enable monitoring after creation if v, ok := d.GetOk("monitor"); ok { @@ -230,23 +279,23 @@ func resourceArmHDInsightKafkaClusterCreate(d *schema.ResourceData, meta interfa } } - return resourceArmHDInsightKafkaClusterRead(d, meta) + return resourceHDInsightKafkaClusterRead(d, meta) } -func resourceArmHDInsightKafkaClusterRead(d *schema.ResourceData, meta interface{}) error { +func resourceHDInsightKafkaClusterRead(d *schema.ResourceData, meta interface{}) error { clustersClient := meta.(*clients.Client).HDInsight.ClustersClient configurationsClient := meta.(*clients.Client).HDInsight.ConfigurationsClient extensionsClient := meta.(*clients.Client).HDInsight.ExtensionsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.ClusterID(d.Id()) if err != nil { return err } resourceGroup := id.ResourceGroup - name := id.Path["clusters"] + name := id.Name resp, err := clustersClient.Get(ctx, resourceGroup, name) if err != nil { @@ -295,9 +344,10 @@ func resourceArmHDInsightKafkaClusterRead(d *schema.ResourceData, meta interface } kafkaRoles := hdInsightRoleDefinition{ - HeadNodeDef: hdInsightKafkaClusterHeadNodeDefinition, - WorkerNodeDef: hdInsightKafkaClusterWorkerNodeDefinition, - ZookeeperNodeDef: hdInsightKafkaClusterZookeeperNodeDefinition, + HeadNodeDef: hdInsightKafkaClusterHeadNodeDefinition, + WorkerNodeDef: hdInsightKafkaClusterWorkerNodeDefinition, + ZookeeperNodeDef: hdInsightKafkaClusterZookeeperNodeDefinition, + KafkaManagementNodeDef: &hdInsightKafkaClusterKafkaManagementNodeDefinition, } flattenedRoles := flattenHDInsightRoles(d, props.ComputeProfile, kafkaRoles) if err := d.Set("roles", flattenedRoles); err != nil { @@ -308,6 +358,8 @@ func resourceArmHDInsightKafkaClusterRead(d *schema.ResourceData, meta interface d.Set("https_endpoint", httpEndpoint) sshEndpoint := FindHDInsightConnectivityEndpoint("SSH", props.ConnectivityEndpoints) d.Set("ssh_endpoint", sshEndpoint) + kafkaRestProxyEndpoint := FindHDInsightConnectivityEndpoint("KafkaRestProxyPublicEndpoint", props.ConnectivityEndpoints) + d.Set("kafka_rest_proxy_endpoint", kafkaRestProxyEndpoint) monitor, err := 
extensionsClient.GetMonitoringStatus(ctx, resourceGroup, name) if err != nil { @@ -315,6 +367,9 @@ func resourceArmHDInsightKafkaClusterRead(d *schema.ResourceData, meta interface } d.Set("monitor", flattenHDInsightMonitoring(monitor)) + if err := d.Set("rest_proxy", flattenKafkaRestProxyProperty(props.KafkaRestProperties)); err != nil { + return fmt.Errorf(`failed setting "rest_proxy" for HDInsight Kafka Cluster %q (Resource Group %q): %+v`, name, resourceGroup, err) + } } return tags.FlattenAndSet(d, resp.Tags) @@ -340,3 +395,45 @@ func flattenHDInsightKafkaComponentVersion(input map[string]*string) []interface }, } } + +func expandKafkaRestProxyProperty(ctx context.Context, client *graphrbac.GroupsClient, input []interface{}) (*hdinsight.KafkaRestProperties, error) { + if len(input) == 0 || input[0] == nil { + return nil, nil + } + + raw := input[0].(map[string]interface{}) + groupId := raw["security_group_id"].(string) + + // The current API requires users to further specify the "security_group_name" in the client group info of the kafka rest property, + // which is unnecessary as the user has already specified the "security_group_id". + // https://github.com/Azure/azure-rest-api-specs/issues/10667 + res, err := client.Get(ctx, groupId) + if err != nil { + return nil, fmt.Errorf("retrieving AAD group %s: %v", groupId, err) + } + + return &hdinsight.KafkaRestProperties{ + ClientGroupInfo: &hdinsight.ClientGroupInfo{ + GroupID: &groupId, + GroupName: res.DisplayName, + }, + }, nil +} + +func flattenKafkaRestProxyProperty(input *hdinsight.KafkaRestProperties) []interface{} { + if input == nil || input.ClientGroupInfo == nil { + return []interface{}{} + } + + groupInfo := input.ClientGroupInfo + groupId := "" + if groupInfo.GroupID != nil { + groupId = *groupInfo.GroupID + } + + return []interface{}{ + map[string]interface{}{ + "security_group_id": groupId, + }, + } +} diff --git a/azurerm/internal/services/hdinsight/hdinsight_kafka_cluster_resource_test.go b/azurerm/internal/services/hdinsight/hdinsight_kafka_cluster_resource_test.go new file mode 100644 index 000000000000..abb15a05b9b0 --- /dev/null +++ b/azurerm/internal/services/hdinsight/hdinsight_kafka_cluster_resource_test.go @@ -0,0 +1,1305 @@ +package hdinsight_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hdinsight/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type HDInsightKafkaClusterResource struct { +} + +func TestAccHDInsightKafkaCluster_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_kafka_cluster", "test") + r := HDInsightKafkaClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", 
"roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightKafkaCluster_gen2storage(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_kafka_cluster", "test") + r := HDInsightKafkaClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.gen2storage(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightKafkaCluster_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_kafka_cluster", "test") + r := HDInsightKafkaClusterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_hdinsight_kafka_cluster"), + }, + }) +} + +func TestAccHDInsightKafkaCluster_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_kafka_cluster", "test") + r := HDInsightKafkaClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + { + Config: r.updated(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightKafkaCluster_sshKeys(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_kafka_cluster", "test") + r := HDInsightKafkaClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.sshKeys(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("storage_account", + "roles.0.head_node.0.ssh_keys", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.ssh_keys", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.ssh_keys", + "roles.0.zookeeper_node.0.vm_size"), + }) +} + +func TestAccHDInsightKafkaCluster_virtualNetwork(t *testing.T) { + 
data := acceptance.BuildTestData(t, "azurerm_hdinsight_kafka_cluster", "test") + r := HDInsightKafkaClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.virtualNetwork(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightKafkaCluster_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_kafka_cluster", "test") + r := HDInsightKafkaClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightKafkaCluster_tls(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_kafka_cluster", "test") + r := HDInsightKafkaClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.tls(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightKafkaCluster_allMetastores(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_kafka_cluster", "test") + r := HDInsightKafkaClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.allMetastores(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account", + "metastores.0.hive.0.password", + "metastores.0.oozie.0.password", + "metastores.0.ambari.0.password"), + }) +} + +func TestAccHDInsightKafkaCluster_hiveMetastore(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_kafka_cluster", "test") + r := HDInsightKafkaClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.hiveMetastore(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + }) +} + +func 
TestAccHDInsightKafkaCluster_updateMetastore(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_kafka_cluster", "test") + r := HDInsightKafkaClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.hiveMetastore(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account", + "metastores.0.hive.0.password", + "metastores.0.oozie.0.password", + "metastores.0.ambari.0.password"), + { + Config: r.allMetastores(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account", + "metastores.0.hive.0.password", + "metastores.0.oozie.0.password", + "metastores.0.ambari.0.password"), + }) +} + +func TestAccHDInsightKafkaCluster_monitor(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_kafka_cluster", "test") + r := HDInsightKafkaClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.monitor(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightKafkaCluster_updateMonitor(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_kafka_cluster", "test") + r := HDInsightKafkaClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + // No monitor + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + // Add monitor + { + Config: r.monitor(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + // Change Log Analytics Workspace for the 
monitor + { + PreConfig: func() { + data.RandomString += "new" + }, + Config: r.monitor(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + // Remove monitor + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightKafkaCluster_restProxy(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_kafka_cluster", "test") + r := HDInsightKafkaClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.restProxy(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("kafka_rest_proxy_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "roles.0.kafka_management_node.0.password", + "roles.0.kafka_management_node.0.vm_size", + "storage_account"), + }) +} + +func (t HDInsightKafkaClusterResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ClusterID(state.ID) + if err != nil { + return nil, err + } + + resourceGroup := id.ResourceGroup + name := id.Name + + resp, err := clients.HDInsight.ClustersClient.Get(ctx, resourceGroup, name) + if err != nil { + return nil, fmt.Errorf("reading HDInsight Kafka Cluster (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (r HDInsightKafkaClusterResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_kafka_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + kafka = "2.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 3 + number_of_disks_per_node = 2 + } + + zookeeper_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ } + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightKafkaClusterResource) gen2storage(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_kafka_cluster" "test" { + depends_on = [azurerm_role_assignment.test] + + name = "acctesthdi-%d" + resource_group_name = "${azurerm_resource_group.test.name}" + location = "${azurerm_resource_group.test.location}" + cluster_version = "4.0" + tier = "Standard" + + component_version { + kafka = "2.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account_gen2 { + storage_resource_id = azurerm_storage_account.gen2test.id + filesystem_id = azurerm_storage_data_lake_gen2_filesystem.gen2test.id + managed_identity_resource_id = azurerm_user_assigned_identity.test.id + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 3 + number_of_disks_per_node = 2 + } + + zookeeper_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + } +} +`, r.gen2template(data), data.RandomInteger) +} + +func (r HDInsightKafkaClusterResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_kafka_cluster" "import" { + name = azurerm_hdinsight_kafka_cluster.test.name + resource_group_name = azurerm_hdinsight_kafka_cluster.test.resource_group_name + location = azurerm_hdinsight_kafka_cluster.test.location + cluster_version = azurerm_hdinsight_kafka_cluster.test.cluster_version + tier = azurerm_hdinsight_kafka_cluster.test.tier + dynamic "component_version" { + for_each = azurerm_hdinsight_kafka_cluster.test.component_version + content { + kafka = component_version.value.kafka + } + } + dynamic "gateway" { + for_each = azurerm_hdinsight_kafka_cluster.test.gateway + content { + enabled = gateway.value.enabled + password = gateway.value.password + username = gateway.value.username + } + } + dynamic "storage_account" { + for_each = azurerm_hdinsight_kafka_cluster.test.storage_account + content { + is_default = storage_account.value.is_default + storage_account_key = storage_account.value.storage_account_key + storage_container_id = storage_account.value.storage_container_id + } + } + dynamic "roles" { + for_each = azurerm_hdinsight_kafka_cluster.test.roles + content { + dynamic "head_node" { + for_each = lookup(roles.value, "head_node", []) + content { + password = lookup(head_node.value, "password", null) + subnet_id = lookup(head_node.value, "subnet_id", null) + username = head_node.value.username + virtual_network_id = lookup(head_node.value, "virtual_network_id", null) + vm_size = head_node.value.vm_size + } + } + + dynamic "worker_node" { + for_each = lookup(roles.value, "worker_node", []) + content { + number_of_disks_per_node = worker_node.value.number_of_disks_per_node + password = lookup(worker_node.value, "password", null) + subnet_id = lookup(worker_node.value, "subnet_id", null) + target_instance_count = worker_node.value.target_instance_count + username = worker_node.value.username + virtual_network_id = lookup(worker_node.value, "virtual_network_id", null) + vm_size = worker_node.value.vm_size + } + } + + dynamic "zookeeper_node" { + for_each = lookup(roles.value, "zookeeper_node", []) + content { + password = 
lookup(zookeeper_node.value, "password", null) + subnet_id = lookup(zookeeper_node.value, "subnet_id", null) + username = zookeeper_node.value.username + virtual_network_id = lookup(zookeeper_node.value, "virtual_network_id", null) + vm_size = zookeeper_node.value.vm_size + } + } + } + } +} +`, r.basic(data)) +} + +func (r HDInsightKafkaClusterResource) sshKeys(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +variable "ssh_key" { + default = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCqaZoyiz1qbdOQ8xEf6uEu1cCwYowo5FHtsBhqLoDnnp7KUTEBN+L2NxRIfQ781rxV6Iq5jSav6b2Q8z5KiseOlvKA/RF2wqU0UPYqQviQhLmW6THTpmrv/YkUCuzxDpsH7DUDhZcwySLKVVe0Qm3+5N2Ta6UYH3lsDf9R9wTP2K/+vAnflKebuypNlmocIvakFWoZda18FOmsOoIVXQ8HWFNCuw9ZCunMSN62QGamCe3dL5cXlkgHYv7ekJE15IA9aOJcM7e90oeTqo+7HTcWfdu0qQqPWY5ujyMw/llas8tsXY85LFqRnr3gJ02bAscjc477+X+j/gkpFoN1QEmt terraform@demo.tld" +} + +resource "azurerm_hdinsight_kafka_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + kafka = "2.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + ssh_keys = [var.ssh_key] + } + + worker_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + ssh_keys = [var.ssh_key] + target_instance_count = 3 + number_of_disks_per_node = 2 + } + + zookeeper_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + ssh_keys = [var.ssh_key] + } + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightKafkaClusterResource) updated(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_kafka_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + kafka = "2.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 5 + number_of_disks_per_node = 2 + } + + zookeeper_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ } + } + + tags = { + Hello = "World" + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightKafkaClusterResource) virtualNetwork(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctestsubnet%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" +} + +resource "azurerm_hdinsight_kafka_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + kafka = "2.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + worker_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 3 + number_of_disks_per_node = 2 + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + zookeeper_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r HDInsightKafkaClusterResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctestsubnet%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" +} + +resource "azurerm_hdinsight_kafka_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + kafka = "2.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + worker_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ target_instance_count = 3 + number_of_disks_per_node = 2 + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + zookeeper_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + } + + tags = { + Hello = "World" + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (HDInsightKafkaClusterResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestsa%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_container" "test" { + name = "acctest" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (HDInsightKafkaClusterResource) gen2template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_storage_account" "gen2test" { + name = "accgen2test%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_kind = "StorageV2" + account_tier = "Standard" + account_replication_type = "LRS" + is_hns_enabled = true +} + +resource "azurerm_storage_data_lake_gen2_filesystem" "gen2test" { + name = "acctest" + storage_account_id = azurerm_storage_account.gen2test.id +} + +resource "azurerm_user_assigned_identity" "test" { + resource_group_name = "${azurerm_resource_group.test.name}" + location = "${azurerm_resource_group.test.location}" + + name = "test-identity" +} + +data "azurerm_subscription" "primary" {} + +resource "azurerm_role_assignment" "test" { + scope = "${data.azurerm_subscription.primary.id}" + role_definition_name = "Storage Blob Data Owner" + principal_id = "${azurerm_user_assigned_identity.test.principal_id}" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (r HDInsightKafkaClusterResource) tls(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_kafka_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + tls_min_version = "1.2" + + component_version { + kafka = "2.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 3 + number_of_disks_per_node = 2 + } + + zookeeper_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ } + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightKafkaClusterResource) allMetastores(data acceptance.TestData) string { + return fmt.Sprintf(` +%s +resource "azurerm_sql_server" "test" { + name = "acctestsql-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + administrator_login = "sql_admin" + administrator_login_password = "TerrAform123!" + version = "12.0" +} +resource "azurerm_sql_database" "hive" { + name = "hive" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + collation = "SQL_Latin1_General_CP1_CI_AS" + create_mode = "Default" + requested_service_objective_name = "GP_Gen5_2" +} +resource "azurerm_sql_database" "oozie" { + name = "oozie" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + collation = "SQL_Latin1_General_CP1_CI_AS" + create_mode = "Default" + requested_service_objective_name = "GP_Gen5_2" +} +resource "azurerm_sql_database" "ambari" { + name = "ambari" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + collation = "SQL_Latin1_General_CP1_CI_AS" + create_mode = "Default" + requested_service_objective_name = "GP_Gen5_2" +} +resource "azurerm_sql_firewall_rule" "AzureServices" { + name = "allow-azure-services" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_sql_server.test.name + start_ip_address = "0.0.0.0" + end_ip_address = "0.0.0.0" +} +resource "azurerm_hdinsight_kafka_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + component_version { + kafka = "2.1" + } + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + roles { + head_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + worker_node { + vm_size = "Standard_D4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 3 + number_of_disks_per_node = 2 + } + zookeeper_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ } + } + metastores { + hive { + server = azurerm_sql_server.test.fully_qualified_domain_name + database_name = azurerm_sql_database.hive.name + username = azurerm_sql_server.test.administrator_login + password = azurerm_sql_server.test.administrator_login_password + } + oozie { + server = azurerm_sql_server.test.fully_qualified_domain_name + database_name = azurerm_sql_database.oozie.name + username = azurerm_sql_server.test.administrator_login + password = azurerm_sql_server.test.administrator_login_password + } + ambari { + server = azurerm_sql_server.test.fully_qualified_domain_name + database_name = azurerm_sql_database.ambari.name + username = azurerm_sql_server.test.administrator_login + password = azurerm_sql_server.test.administrator_login_password + } + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger) +} + +func (r HDInsightKafkaClusterResource) hiveMetastore(data acceptance.TestData) string { + return fmt.Sprintf(` +%s +resource "azurerm_sql_server" "test" { + name = "acctestsql-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + administrator_login = "sql_admin" + administrator_login_password = "TerrAform123!" + version = "12.0" +} +resource "azurerm_sql_database" "hive" { + name = "hive" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + collation = "SQL_Latin1_General_CP1_CI_AS" + create_mode = "Default" + requested_service_objective_name = "GP_Gen5_2" +} +resource "azurerm_sql_firewall_rule" "AzureServices" { + name = "allow-azure-services" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_sql_server.test.name + start_ip_address = "0.0.0.0" + end_ip_address = "0.0.0.0" +} +resource "azurerm_hdinsight_kafka_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + component_version { + kafka = "2.1" + } + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + roles { + head_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + worker_node { + vm_size = "Standard_D4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 3 + number_of_disks_per_node = 2 + } + zookeeper_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ } + } + metastores { + hive { + server = azurerm_sql_server.test.fully_qualified_domain_name + database_name = azurerm_sql_database.hive.name + username = azurerm_sql_server.test.administrator_login + password = azurerm_sql_server.test.administrator_login_password + } + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger) +} + +func (r HDInsightKafkaClusterResource) monitor(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestLAW-%s-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "PerGB2018" +} + +resource "azurerm_hdinsight_kafka_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + kafka = "2.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 3 + number_of_disks_per_node = 2 + } + + zookeeper_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + } + + monitor { + log_analytics_workspace_id = azurerm_log_analytics_workspace.test.workspace_id + primary_key = azurerm_log_analytics_workspace.test.primary_shared_key + } +} +`, r.template(data), data.RandomString, data.RandomInteger, data.RandomInteger) +} + +func (r HDInsightKafkaClusterResource) restProxy(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azuread_group" "test" { + name = "acctesthdi-%d" +} + +resource "azurerm_hdinsight_kafka_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + kafka = "2.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 3 + number_of_disks_per_node = 2 + } + + zookeeper_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + kafka_management_node { + vm_size = "Standard_D4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
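+      # management node role used by the Kafka REST proxy configured in the rest_proxy block below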
+ } + } + + rest_proxy { + security_group_id = azuread_group.test.id + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/hdinsight/hdinsight_ml_services_cluster_resource.go b/azurerm/internal/services/hdinsight/hdinsight_ml_services_cluster_resource.go index 9b659c8bcc71..6dccce276d7c 100644 --- a/azurerm/internal/services/hdinsight/hdinsight_ml_services_cluster_resource.go +++ b/azurerm/internal/services/hdinsight/hdinsight_ml_services_cluster_resource.go @@ -6,7 +6,9 @@ import ( "strconv" "time" - "github.com/Azure/azure-sdk-for-go/services/preview/hdinsight/mgmt/2018-06-01-preview/hdinsight" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hdinsight/parse" + + "github.com/Azure/azure-sdk-for-go/services/hdinsight/mgmt/2018-06-01/hdinsight" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" @@ -50,16 +52,16 @@ var hdInsightMLServicesClusterEdgeNodeDefinition = HDInsightNodeDefinition{ FixedTargetInstanceCount: utils.Int32(int32(1)), } -func resourceArmHDInsightMLServicesCluster() *schema.Resource { +func resourceHDInsightMLServicesCluster() *schema.Resource { return &schema.Resource{ DeprecationMessage: `HDInsight 3.6 will be retired on 2020-12-31 - MLServices is not supported in HDInsight 4.0 and so this resource will be removed in the next major version of the AzureRM Terraform Provider. More information on the HDInsight 3.6 deprecation can be found at: https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-component-versioning#available-versions`, - Create: resourceArmHDInsightMLServicesClusterCreate, - Read: resourceArmHDInsightMLServicesClusterRead, - Update: hdinsightClusterUpdate("MLServices", resourceArmHDInsightMLServicesClusterRead), + Create: resourceHDInsightMLServicesClusterCreate, + Read: resourceHDInsightMLServicesClusterRead, + Update: hdinsightClusterUpdate("MLServices", resourceHDInsightMLServicesClusterRead), Delete: hdinsightClusterDelete("MLServices"), Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -101,13 +103,13 @@ https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-component-versioning# MaxItems: 1, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ - "head_node": SchemaHDInsightNodeDefinition("roles.0.head_node", hdInsightMLServicesClusterHeadNodeDefinition), + "head_node": SchemaHDInsightNodeDefinition("roles.0.head_node", hdInsightMLServicesClusterHeadNodeDefinition, true), - "worker_node": SchemaHDInsightNodeDefinition("roles.0.worker_node", hdInsightMLServicesClusterWorkerNodeDefinition), + "worker_node": SchemaHDInsightNodeDefinition("roles.0.worker_node", hdInsightMLServicesClusterWorkerNodeDefinition, true), - "zookeeper_node": SchemaHDInsightNodeDefinition("roles.0.zookeeper_node", hdInsightMLServicesClusterZookeeperNodeDefinition), + "zookeeper_node": SchemaHDInsightNodeDefinition("roles.0.zookeeper_node", hdInsightMLServicesClusterZookeeperNodeDefinition, true), - "edge_node": SchemaHDInsightNodeDefinition("roles.0.edge_node", hdInsightMLServicesClusterEdgeNodeDefinition), + "edge_node": SchemaHDInsightNodeDefinition("roles.0.edge_node", hdInsightMLServicesClusterEdgeNodeDefinition, true), }, }, }, @@ -142,13 +144,15 @@ func expandHDInsightsMLServicesConfigurations(gateway []interface{}, rStudio boo return config } -func 
resourceArmHDInsightMLServicesClusterCreate(d *schema.ResourceData, meta interface{}) error { +func resourceHDInsightMLServicesClusterCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).HDInsight.ClustersClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() name := d.Get("name").(string) resourceGroup := d.Get("resource_group_name").(string) + id := parse.NewClusterID(subscriptionId, resourceGroup, name) location := azure.NormalizeLocation(d.Get("location").(string)) clusterVersion := d.Get("cluster_version").(string) t := d.Get("tags").(map[string]interface{}) @@ -227,24 +231,24 @@ func resourceArmHDInsightMLServicesClusterCreate(d *schema.ResourceData, meta in return fmt.Errorf("Error reading ID for HDInsight MLServices Cluster %q (Resource Group %q)", name, resourceGroup) } - d.SetId(*read.ID) + d.SetId(id.ID()) - return resourceArmHDInsightMLServicesClusterRead(d, meta) + return resourceHDInsightMLServicesClusterRead(d, meta) } -func resourceArmHDInsightMLServicesClusterRead(d *schema.ResourceData, meta interface{}) error { +func resourceHDInsightMLServicesClusterRead(d *schema.ResourceData, meta interface{}) error { clustersClient := meta.(*clients.Client).HDInsight.ClustersClient configurationsClient := meta.(*clients.Client).HDInsight.ConfigurationsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.ClusterID(d.Id()) if err != nil { return err } resourceGroup := id.ResourceGroup - name := id.Path["clusters"] + name := id.Name resp, err := clustersClient.Get(ctx, resourceGroup, name) if err != nil { diff --git a/azurerm/internal/services/hdinsight/hdinsight_ml_services_cluster_resource_test.go b/azurerm/internal/services/hdinsight/hdinsight_ml_services_cluster_resource_test.go new file mode 100644 index 000000000000..d54c31a6026f --- /dev/null +++ b/azurerm/internal/services/hdinsight/hdinsight_ml_services_cluster_resource_test.go @@ -0,0 +1,712 @@ +package hdinsight_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hdinsight/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type HDInsightMLServicesClusterResource struct { +} + +func TestAccHDInsightMLServicesCluster_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_ml_services_cluster", "test") + r := HDInsightMLServicesClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("edge_ssh_endpoint").Exists(), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + 
"roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "roles.0.edge_node.0.password", + "roles.0.edge_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightMLServicesCluster_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_ml_services_cluster", "test") + r := HDInsightMLServicesClusterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("edge_ssh_endpoint").Exists(), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccHDInsightMLServicesCluster_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_ml_services_cluster", "test") + r := HDInsightMLServicesClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("edge_ssh_endpoint").Exists(), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "roles.0.edge_node.0.password", + "roles.0.edge_node.0.vm_size", + "storage_account"), + { + Config: r.updated(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("edge_ssh_endpoint").Exists(), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "roles.0.edge_node.0.password", + "roles.0.edge_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightMLServicesCluster_sshKeys(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_ml_services_cluster", "test") + r := HDInsightMLServicesClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.sshKeys(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("edge_ssh_endpoint").Exists(), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("storage_account", + "roles.0.head_node.0.ssh_keys", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.ssh_keys", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.ssh_keys", + "roles.0.zookeeper_node.0.vm_size", + "roles.0.edge_node.0.ssh_keys", + "roles.0.edge_node.0.vm_size"), + }) +} + +func TestAccHDInsightMLServicesCluster_virtualNetwork(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_ml_services_cluster", "test") + r := HDInsightMLServicesClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: 
r.virtualNetwork(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("edge_ssh_endpoint").Exists(), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "roles.0.edge_node.0.password", + "roles.0.edge_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightMLServicesCluster_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_ml_services_cluster", "test") + r := HDInsightMLServicesClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("edge_ssh_endpoint").Exists(), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "roles.0.edge_node.0.password", + "roles.0.edge_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightMLServicesCluster_tls(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_ml_services_cluster", "test") + r := HDInsightMLServicesClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.tls(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("edge_ssh_endpoint").Exists(), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "roles.0.edge_node.0.password", + "roles.0.edge_node.0.vm_size", + "storage_account"), + }) +} + +func (t HDInsightMLServicesClusterResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ClusterID(state.ID) + if err != nil { + return nil, err + } + + resourceGroup := id.ResourceGroup + name := id.Name + + resp, err := clients.HDInsight.ClustersClient.Get(ctx, resourceGroup, name) + if err != nil { + return nil, fmt.Errorf("reading HDInsight ML Services Cluster (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (r HDInsightMLServicesClusterResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_ml_services_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "3.6" + tier = "Standard" + rstudio = true + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" 
+ } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 2 + } + + zookeeper_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + edge_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightMLServicesClusterResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_ml_services_cluster" "import" { + name = azurerm_hdinsight_ml_services_cluster.test.name + resource_group_name = azurerm_hdinsight_ml_services_cluster.test.resource_group_name + location = azurerm_hdinsight_ml_services_cluster.test.location + cluster_version = azurerm_hdinsight_ml_services_cluster.test.cluster_version + tier = azurerm_hdinsight_ml_services_cluster.test.tier + dynamic "gateway" { + for_each = azurerm_hdinsight_ml_services_cluster.test.gateway + content { + enabled = gateway.value.enabled + password = gateway.value.password + username = gateway.value.username + } + } + dynamic "storage_account" { + for_each = azurerm_hdinsight_ml_services_cluster.test.storage_account + content { + is_default = storage_account.value.is_default + storage_account_key = storage_account.value.storage_account_key + storage_container_id = storage_account.value.storage_container_id + } + } + dynamic "roles" { + for_each = azurerm_hdinsight_ml_services_cluster.test.roles + content { + dynamic "edge_node" { + for_each = lookup(roles.value, "edge_node", []) + content { + password = lookup(edge_node.value, "password", null) + ssh_keys = lookup(edge_node.value, "ssh_keys", null) + subnet_id = lookup(edge_node.value, "subnet_id", null) + username = edge_node.value.username + virtual_network_id = lookup(edge_node.value, "virtual_network_id", null) + vm_size = edge_node.value.vm_size + } + } + + dynamic "head_node" { + for_each = lookup(roles.value, "head_node", []) + content { + password = lookup(head_node.value, "password", null) + ssh_keys = lookup(head_node.value, "ssh_keys", null) + subnet_id = lookup(head_node.value, "subnet_id", null) + username = head_node.value.username + virtual_network_id = lookup(head_node.value, "virtual_network_id", null) + vm_size = head_node.value.vm_size + } + } + + dynamic "worker_node" { + for_each = lookup(roles.value, "worker_node", []) + content { + password = lookup(worker_node.value, "password", null) + ssh_keys = lookup(worker_node.value, "ssh_keys", null) + subnet_id = lookup(worker_node.value, "subnet_id", null) + target_instance_count = worker_node.value.target_instance_count + username = worker_node.value.username + virtual_network_id = lookup(worker_node.value, "virtual_network_id", null) + vm_size = worker_node.value.vm_size + } + } + + dynamic "zookeeper_node" { + for_each = lookup(roles.value, "zookeeper_node", []) + content { + password = lookup(zookeeper_node.value, "password", null) + ssh_keys = lookup(zookeeper_node.value, "ssh_keys", null) + subnet_id = lookup(zookeeper_node.value, "subnet_id", null) + username = zookeeper_node.value.username + virtual_network_id = 
lookup(zookeeper_node.value, "virtual_network_id", null) + vm_size = zookeeper_node.value.vm_size + } + } + } + } +} +`, r.basic(data)) +} + +func (r HDInsightMLServicesClusterResource) sshKeys(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +variable "ssh_key" { + default = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCqaZoyiz1qbdOQ8xEf6uEu1cCwYowo5FHtsBhqLoDnnp7KUTEBN+L2NxRIfQ781rxV6Iq5jSav6b2Q8z5KiseOlvKA/RF2wqU0UPYqQviQhLmW6THTpmrv/YkUCuzxDpsH7DUDhZcwySLKVVe0Qm3+5N2Ta6UYH3lsDf9R9wTP2K/+vAnflKebuypNlmocIvakFWoZda18FOmsOoIVXQ8HWFNCuw9ZCunMSN62QGamCe3dL5cXlkgHYv7ekJE15IA9aOJcM7e90oeTqo+7HTcWfdu0qQqPWY5ujyMw/llas8tsXY85LFqRnr3gJ02bAscjc477+X+j/gkpFoN1QEmt terraform@demo.tld" +} + +resource "azurerm_hdinsight_ml_services_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "3.6" + tier = "Standard" + rstudio = true + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + ssh_keys = [var.ssh_key] + } + + worker_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + ssh_keys = [var.ssh_key] + target_instance_count = 3 + } + + zookeeper_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + ssh_keys = [var.ssh_key] + } + + edge_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + ssh_keys = [var.ssh_key] + } + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightMLServicesClusterResource) updated(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_ml_services_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "3.6" + tier = "Standard" + rstudio = true + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 5 + } + + zookeeper_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + edge_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ } + } + + tags = { + Hello = "World" + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightMLServicesClusterResource) virtualNetwork(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctestsubnet%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" +} + +resource "azurerm_hdinsight_ml_services_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "3.6" + tier = "Standard" + rstudio = true + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + worker_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 3 + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + zookeeper_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + edge_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r HDInsightMLServicesClusterResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctestsubnet%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" +} + +resource "azurerm_hdinsight_ml_services_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "3.6" + tier = "Standard" + rstudio = true + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + worker_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ target_instance_count = 3 + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + zookeeper_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + edge_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + } + + tags = { + Hello = "World" + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (HDInsightMLServicesClusterResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestsa%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_container" "test" { + name = "acctest" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (r HDInsightMLServicesClusterResource) tls(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_ml_services_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "3.6" + tier = "Standard" + rstudio = true + tls_min_version = "1.2" + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 2 + } + + zookeeper_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + edge_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ } + } +} +`, r.template(data), data.RandomInteger) +} diff --git a/azurerm/internal/services/hdinsight/hdinsight_rserver_cluster_resource.go b/azurerm/internal/services/hdinsight/hdinsight_rserver_cluster_resource.go index d130d0075a52..f1d36bd278fc 100644 --- a/azurerm/internal/services/hdinsight/hdinsight_rserver_cluster_resource.go +++ b/azurerm/internal/services/hdinsight/hdinsight_rserver_cluster_resource.go @@ -6,7 +6,9 @@ import ( "strconv" "time" - "github.com/Azure/azure-sdk-for-go/services/preview/hdinsight/mgmt/2018-06-01-preview/hdinsight" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hdinsight/parse" + + "github.com/Azure/azure-sdk-for-go/services/hdinsight/mgmt/2018-06-01/hdinsight" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" @@ -50,16 +52,16 @@ var hdInsightRServerClusterEdgeNodeDefinition = HDInsightNodeDefinition{ FixedTargetInstanceCount: utils.Int32(int32(1)), } -func resourceArmHDInsightRServerCluster() *schema.Resource { +func resourceHDInsightRServerCluster() *schema.Resource { return &schema.Resource{ DeprecationMessage: `HDInsight 3.6 will be retired on 2020-12-31 - R Server is not supported in HDInsight 4.0 and so this resource will be removed in the next major version of the AzureRM Terraform Provider. More information on the HDInsight 3.6 deprecation can be found at: https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-component-versioning#available-versions`, - Create: resourceArmHDInsightRServerClusterCreate, - Read: resourceArmHDInsightRServerClusterRead, - Update: hdinsightClusterUpdate("RServer", resourceArmHDInsightRServerClusterRead), + Create: resourceHDInsightRServerClusterCreate, + Read: resourceHDInsightRServerClusterRead, + Update: hdinsightClusterUpdate("RServer", resourceHDInsightRServerClusterRead), Delete: hdinsightClusterDelete("RServer"), Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -101,13 +103,13 @@ https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-component-versioning# MaxItems: 1, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ - "head_node": SchemaHDInsightNodeDefinition("roles.0.head_node", hdInsightRServerClusterHeadNodeDefinition), + "head_node": SchemaHDInsightNodeDefinition("roles.0.head_node", hdInsightRServerClusterHeadNodeDefinition, true), - "worker_node": SchemaHDInsightNodeDefinition("roles.0.worker_node", hdInsightRServerClusterWorkerNodeDefinition), + "worker_node": SchemaHDInsightNodeDefinition("roles.0.worker_node", hdInsightRServerClusterWorkerNodeDefinition, true), - "zookeeper_node": SchemaHDInsightNodeDefinition("roles.0.zookeeper_node", hdInsightRServerClusterZookeeperNodeDefinition), + "zookeeper_node": SchemaHDInsightNodeDefinition("roles.0.zookeeper_node", hdInsightRServerClusterZookeeperNodeDefinition, true), - "edge_node": SchemaHDInsightNodeDefinition("roles.0.edge_node", hdInsightRServerClusterEdgeNodeDefinition), + "edge_node": SchemaHDInsightNodeDefinition("roles.0.edge_node", hdInsightRServerClusterEdgeNodeDefinition, true), }, }, }, @@ -142,13 +144,15 @@ func expandHDInsightsRServerConfigurations(gateway []interface{}, rStudio bool) return config } -func resourceArmHDInsightRServerClusterCreate(d *schema.ResourceData, meta interface{}) error { +func resourceHDInsightRServerClusterCreate(d *schema.ResourceData, meta interface{}) error { 
client := meta.(*clients.Client).HDInsight.ClustersClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() name := d.Get("name").(string) resourceGroup := d.Get("resource_group_name").(string) + id := parse.NewClusterID(subscriptionId, resourceGroup, name) location := azure.NormalizeLocation(d.Get("location").(string)) clusterVersion := d.Get("cluster_version").(string) t := d.Get("tags").(map[string]interface{}) @@ -227,24 +231,24 @@ func resourceArmHDInsightRServerClusterCreate(d *schema.ResourceData, meta inter return fmt.Errorf("Error reading ID for HDInsight RServer Cluster %q (Resource Group %q)", name, resourceGroup) } - d.SetId(*read.ID) + d.SetId(id.ID()) - return resourceArmHDInsightRServerClusterRead(d, meta) + return resourceHDInsightRServerClusterRead(d, meta) } -func resourceArmHDInsightRServerClusterRead(d *schema.ResourceData, meta interface{}) error { +func resourceHDInsightRServerClusterRead(d *schema.ResourceData, meta interface{}) error { clustersClient := meta.(*clients.Client).HDInsight.ClustersClient configurationsClient := meta.(*clients.Client).HDInsight.ConfigurationsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.ClusterID(d.Id()) if err != nil { return err } resourceGroup := id.ResourceGroup - name := id.Path["clusters"] + name := id.Name resp, err := clustersClient.Get(ctx, resourceGroup, name) if err != nil { diff --git a/azurerm/internal/services/hdinsight/hdinsight_rserver_cluster_resource_test.go b/azurerm/internal/services/hdinsight/hdinsight_rserver_cluster_resource_test.go new file mode 100644 index 000000000000..26ce539d85cf --- /dev/null +++ b/azurerm/internal/services/hdinsight/hdinsight_rserver_cluster_resource_test.go @@ -0,0 +1,712 @@ +package hdinsight_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hdinsight/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type HDInsightRServerClusterResource struct { +} + +func TestAccHDInsightRServerCluster_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_rserver_cluster", "test") + r := HDInsightRServerClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("edge_ssh_endpoint").Exists(), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "roles.0.edge_node.0.password", + "roles.0.edge_node.0.vm_size", + "storage_account"), + }) +} + +func 
TestAccHDInsightRServerCluster_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_rserver_cluster", "test") + r := HDInsightRServerClusterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("edge_ssh_endpoint").Exists(), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccHDInsightRServerCluster_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_rserver_cluster", "test") + r := HDInsightRServerClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("edge_ssh_endpoint").Exists(), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "roles.0.edge_node.0.password", + "roles.0.edge_node.0.vm_size", + "storage_account"), + { + Config: r.updated(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("edge_ssh_endpoint").Exists(), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "roles.0.edge_node.0.password", + "roles.0.edge_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightRServerCluster_sshKeys(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_rserver_cluster", "test") + r := HDInsightRServerClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.sshKeys(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("edge_ssh_endpoint").Exists(), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("storage_account", + "roles.0.head_node.0.ssh_keys", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.ssh_keys", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.ssh_keys", + "roles.0.zookeeper_node.0.vm_size", + "roles.0.edge_node.0.ssh_keys", + "roles.0.edge_node.0.vm_size"), + }) +} + +func TestAccHDInsightRServerCluster_virtualNetwork(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_rserver_cluster", "test") + r := HDInsightRServerClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.virtualNetwork(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("edge_ssh_endpoint").Exists(), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + 
check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "roles.0.edge_node.0.password", + "roles.0.edge_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightRServerCluster_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_rserver_cluster", "test") + r := HDInsightRServerClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("edge_ssh_endpoint").Exists(), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "roles.0.edge_node.0.password", + "roles.0.edge_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightRServerCluster_tls(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_rserver_cluster", "test") + r := HDInsightRServerClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.tls(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("edge_ssh_endpoint").Exists(), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "roles.0.edge_node.0.password", + "roles.0.edge_node.0.vm_size", + "storage_account"), + }) +} + +func (t HDInsightRServerClusterResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ClusterID(state.ID) + if err != nil { + return nil, err + } + + resourceGroup := id.ResourceGroup + name := id.Name + + resp, err := clients.HDInsight.ClustersClient.Get(ctx, resourceGroup, name) + if err != nil { + return nil, fmt.Errorf("reading HDInsight R Server Cluster (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (r HDInsightRServerClusterResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_rserver_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "3.6" + tier = "Standard" + rstudio = true + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ target_instance_count = 2 + } + + zookeeper_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + edge_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightRServerClusterResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_rserver_cluster" "import" { + name = azurerm_hdinsight_rserver_cluster.test.name + resource_group_name = azurerm_hdinsight_rserver_cluster.test.resource_group_name + location = azurerm_hdinsight_rserver_cluster.test.location + cluster_version = azurerm_hdinsight_rserver_cluster.test.cluster_version + tier = azurerm_hdinsight_rserver_cluster.test.tier + dynamic "gateway" { + for_each = azurerm_hdinsight_rserver_cluster.test.gateway + content { + enabled = gateway.value.enabled + password = gateway.value.password + username = gateway.value.username + } + } + dynamic "storage_account" { + for_each = azurerm_hdinsight_rserver_cluster.test.storage_account + content { + is_default = storage_account.value.is_default + storage_account_key = storage_account.value.storage_account_key + storage_container_id = storage_account.value.storage_container_id + } + } + dynamic "roles" { + for_each = azurerm_hdinsight_rserver_cluster.test.roles + content { + dynamic "edge_node" { + for_each = lookup(roles.value, "edge_node", []) + content { + password = lookup(edge_node.value, "password", null) + ssh_keys = lookup(edge_node.value, "ssh_keys", null) + subnet_id = lookup(edge_node.value, "subnet_id", null) + username = edge_node.value.username + virtual_network_id = lookup(edge_node.value, "virtual_network_id", null) + vm_size = edge_node.value.vm_size + } + } + + dynamic "head_node" { + for_each = lookup(roles.value, "head_node", []) + content { + password = lookup(head_node.value, "password", null) + ssh_keys = lookup(head_node.value, "ssh_keys", null) + subnet_id = lookup(head_node.value, "subnet_id", null) + username = head_node.value.username + virtual_network_id = lookup(head_node.value, "virtual_network_id", null) + vm_size = head_node.value.vm_size + } + } + + dynamic "worker_node" { + for_each = lookup(roles.value, "worker_node", []) + content { + password = lookup(worker_node.value, "password", null) + ssh_keys = lookup(worker_node.value, "ssh_keys", null) + subnet_id = lookup(worker_node.value, "subnet_id", null) + target_instance_count = worker_node.value.target_instance_count + username = worker_node.value.username + virtual_network_id = lookup(worker_node.value, "virtual_network_id", null) + vm_size = worker_node.value.vm_size + } + } + + dynamic "zookeeper_node" { + for_each = lookup(roles.value, "zookeeper_node", []) + content { + password = lookup(zookeeper_node.value, "password", null) + ssh_keys = lookup(zookeeper_node.value, "ssh_keys", null) + subnet_id = lookup(zookeeper_node.value, "subnet_id", null) + username = zookeeper_node.value.username + virtual_network_id = lookup(zookeeper_node.value, "virtual_network_id", null) + vm_size = zookeeper_node.value.vm_size + } + } + } + } +} +`, r.basic(data)) +} + +func (r HDInsightRServerClusterResource) sshKeys(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +variable "ssh_key" { + default = "ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAABAQCqaZoyiz1qbdOQ8xEf6uEu1cCwYowo5FHtsBhqLoDnnp7KUTEBN+L2NxRIfQ781rxV6Iq5jSav6b2Q8z5KiseOlvKA/RF2wqU0UPYqQviQhLmW6THTpmrv/YkUCuzxDpsH7DUDhZcwySLKVVe0Qm3+5N2Ta6UYH3lsDf9R9wTP2K/+vAnflKebuypNlmocIvakFWoZda18FOmsOoIVXQ8HWFNCuw9ZCunMSN62QGamCe3dL5cXlkgHYv7ekJE15IA9aOJcM7e90oeTqo+7HTcWfdu0qQqPWY5ujyMw/llas8tsXY85LFqRnr3gJ02bAscjc477+X+j/gkpFoN1QEmt terraform@demo.tld" +} + +resource "azurerm_hdinsight_rserver_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "3.6" + tier = "Standard" + rstudio = true + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + ssh_keys = [var.ssh_key] + } + + worker_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + ssh_keys = [var.ssh_key] + target_instance_count = 3 + } + + zookeeper_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + ssh_keys = [var.ssh_key] + } + + edge_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + ssh_keys = [var.ssh_key] + } + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightRServerClusterResource) updated(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_rserver_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "3.6" + tier = "Standard" + rstudio = true + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 5 + } + + zookeeper_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + edge_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + } + + tags = { + Hello = "World" + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightRServerClusterResource) virtualNetwork(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctestsubnet%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" +} + +resource "azurerm_hdinsight_rserver_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "3.6" + tier = "Standard" + rstudio = true + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" 
+ } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + worker_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 3 + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + zookeeper_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + edge_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r HDInsightRServerClusterResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctestsubnet%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" +} + +resource "azurerm_hdinsight_rserver_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "3.6" + tier = "Standard" + rstudio = true + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + worker_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 3 + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + zookeeper_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + edge_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + } + + tags = { + Hello = "World" + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (HDInsightRServerClusterResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestsa%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_container" "test" { + name = "acctest" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (r HDInsightRServerClusterResource) tls(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_rserver_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "3.6" + tier = "Standard" + tls_min_version = "1.2" + rstudio = true + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 2 + } + + zookeeper_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + edge_node { + vm_size = "Standard_D3_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ } + } +} +`, r.template(data), data.RandomInteger) +} diff --git a/azurerm/internal/services/hdinsight/hdinsight_spark_cluster_resource.go b/azurerm/internal/services/hdinsight/hdinsight_spark_cluster_resource.go index 85d729e0f927..f48e3ac6fcfc 100644 --- a/azurerm/internal/services/hdinsight/hdinsight_spark_cluster_resource.go +++ b/azurerm/internal/services/hdinsight/hdinsight_spark_cluster_resource.go @@ -5,7 +5,9 @@ import ( "log" "time" - "github.com/Azure/azure-sdk-for-go/services/preview/hdinsight/mgmt/2018-06-01-preview/hdinsight" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hdinsight/parse" + + "github.com/Azure/azure-sdk-for-go/services/hdinsight/mgmt/2018-06-01/hdinsight" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" @@ -39,11 +41,11 @@ var hdInsightSparkClusterZookeeperNodeDefinition = HDInsightNodeDefinition{ CanSpecifyDisks: false, } -func resourceArmHDInsightSparkCluster() *schema.Resource { +func resourceHDInsightSparkCluster() *schema.Resource { return &schema.Resource{ - Create: resourceArmHDInsightSparkClusterCreate, - Read: resourceArmHDInsightSparkClusterRead, - Update: hdinsightClusterUpdate("Spark", resourceArmHDInsightSparkClusterRead), + Create: resourceHDInsightSparkClusterCreate, + Read: resourceHDInsightSparkClusterRead, + Update: hdinsightClusterUpdate("Spark", resourceHDInsightSparkClusterRead), Delete: hdinsightClusterDelete("Spark"), Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -98,11 +100,11 @@ func resourceArmHDInsightSparkCluster() *schema.Resource { MaxItems: 1, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ - "head_node": SchemaHDInsightNodeDefinition("roles.0.head_node", hdInsightSparkClusterHeadNodeDefinition), + "head_node": SchemaHDInsightNodeDefinition("roles.0.head_node", hdInsightSparkClusterHeadNodeDefinition, true), - "worker_node": SchemaHDInsightNodeDefinition("roles.0.worker_node", hdInsightSparkClusterWorkerNodeDefinition), + "worker_node": SchemaHDInsightNodeDefinition("roles.0.worker_node", hdInsightSparkClusterWorkerNodeDefinition, true), - "zookeeper_node": SchemaHDInsightNodeDefinition("roles.0.zookeeper_node", hdInsightSparkClusterZookeeperNodeDefinition), + "zookeeper_node": SchemaHDInsightNodeDefinition("roles.0.zookeeper_node", hdInsightSparkClusterZookeeperNodeDefinition, true), }, }, }, @@ -124,14 +126,16 @@ func resourceArmHDInsightSparkCluster() *schema.Resource { } } -func resourceArmHDInsightSparkClusterCreate(d *schema.ResourceData, meta interface{}) error { +func resourceHDInsightSparkClusterCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).HDInsight.ClustersClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId extensionsClient := meta.(*clients.Client).HDInsight.ExtensionsClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() name := d.Get("name").(string) resourceGroup := d.Get("resource_group_name").(string) + id := parse.NewClusterID(subscriptionId, resourceGroup, name) location := azure.NormalizeLocation(d.Get("location").(string)) clusterVersion := d.Get("cluster_version").(string) t := d.Get("tags").(map[string]interface{}) @@ -219,7 +223,7 @@ func resourceArmHDInsightSparkClusterCreate(d *schema.ResourceData, meta interfa return fmt.Errorf("failure reading ID 
for HDInsight Spark Cluster %q (Resource Group %q)", name, resourceGroup) } - d.SetId(*read.ID) + d.SetId(id.ID()) // We can only enable monitoring after creation if v, ok := d.GetOk("monitor"); ok { @@ -229,23 +233,23 @@ func resourceArmHDInsightSparkClusterCreate(d *schema.ResourceData, meta interfa } } - return resourceArmHDInsightSparkClusterRead(d, meta) + return resourceHDInsightSparkClusterRead(d, meta) } -func resourceArmHDInsightSparkClusterRead(d *schema.ResourceData, meta interface{}) error { +func resourceHDInsightSparkClusterRead(d *schema.ResourceData, meta interface{}) error { clustersClient := meta.(*clients.Client).HDInsight.ClustersClient configurationsClient := meta.(*clients.Client).HDInsight.ConfigurationsClient extensionsClient := meta.(*clients.Client).HDInsight.ExtensionsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.ClusterID(d.Id()) if err != nil { return err } resourceGroup := id.ResourceGroup - name := id.Path["clusters"] + name := id.Name resp, err := clustersClient.Get(ctx, resourceGroup, name) if err != nil { diff --git a/azurerm/internal/services/hdinsight/hdinsight_spark_cluster_resource_test.go b/azurerm/internal/services/hdinsight/hdinsight_spark_cluster_resource_test.go new file mode 100644 index 000000000000..d2ff3132f459 --- /dev/null +++ b/azurerm/internal/services/hdinsight/hdinsight_spark_cluster_resource_test.go @@ -0,0 +1,1202 @@ +package hdinsight_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hdinsight/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type HDInsightSparkClusterResource struct { +} + +func TestAccHDInsightSparkCluster_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_spark_cluster", "test") + r := HDInsightSparkClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightSparkCluster_gen2basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_spark_cluster", "test") + r := HDInsightSparkClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.gen2basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + 
"roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightSparkCluster_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_spark_cluster", "test") + r := HDInsightSparkClusterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccHDInsightSparkCluster_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_spark_cluster", "test") + r := HDInsightSparkClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + { + Config: r.updated(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightSparkCluster_sshKeys(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_spark_cluster", "test") + r := HDInsightSparkClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.sshKeys(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("storage_account", + "roles.0.head_node.0.ssh_keys", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.ssh_keys", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.ssh_keys", + "roles.0.zookeeper_node.0.vm_size"), + }) +} + +func TestAccHDInsightSparkCluster_virtualNetwork(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_spark_cluster", "test") + r := HDInsightSparkClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.virtualNetwork(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightSparkCluster_complete(t *testing.T) { + data := acceptance.BuildTestData(t, 
"azurerm_hdinsight_spark_cluster", "test") + r := HDInsightSparkClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightSparkCluster_tls(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_spark_cluster", "test") + r := HDInsightSparkClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.tls(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightSparkCluster_allMetastores(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_spark_cluster", "test") + r := HDInsightSparkClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.allMetastores(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account", + "metastores.0.hive.0.password", + "metastores.0.oozie.0.password", + "metastores.0.ambari.0.password"), + }) +} + +func TestAccHDInsightSparkCluster_hiveMetastore(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_spark_cluster", "test") + r := HDInsightSparkClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.hiveMetastore(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + }) +} + +func TestAccHDInsightSparkCluster_updateMetastore(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_spark_cluster", "test") + r := HDInsightSparkClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.hiveMetastore(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account", + "metastores.0.hive.0.password", + 
"metastores.0.oozie.0.password", + "metastores.0.ambari.0.password"), + { + Config: r.allMetastores(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account", + "metastores.0.hive.0.password", + "metastores.0.oozie.0.password", + "metastores.0.ambari.0.password"), + }) +} + +func TestAccHDInsightSparkCluster_monitor(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_spark_cluster", "test") + r := HDInsightSparkClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.monitor(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightSparkCluster_updateMonitor(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_spark_cluster", "test") + r := HDInsightSparkClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + // No monitor + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + // Add monitor + { + Config: r.monitor(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + // Change Log Analytics Workspace for the monitor + { + PreConfig: func() { + data.RandomString += "new" + }, + Config: r.monitor(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + // Remove monitor + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + 
check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func (t HDInsightSparkClusterResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ClusterID(state.ID) + if err != nil { + return nil, err + } + + resourceGroup := id.ResourceGroup + name := id.Name + + resp, err := clients.HDInsight.ClustersClient.Get(ctx, resourceGroup, name) + if err != nil { + return nil, fmt.Errorf("reading HDInsight Spark Cluster (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (r HDInsightSparkClusterResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_spark_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + spark = "2.4" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 3 + } + + zookeeper_node { + vm_size = "Medium" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightSparkClusterResource) gen2basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_spark_cluster" "test" { + depends_on = [azurerm_role_assignment.test] + + name = "acctesthdi-%d" + resource_group_name = "${azurerm_resource_group.test.name}" + location = "${azurerm_resource_group.test.location}" + cluster_version = "4.0" + tier = "Standard" + + component_version { + spark = "2.4" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account_gen2 { + storage_resource_id = azurerm_storage_account.gen2test.id + filesystem_id = azurerm_storage_data_lake_gen2_filesystem.gen2test.id + managed_identity_resource_id = azurerm_user_assigned_identity.test.id + is_default = true + } + + roles { + head_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 3 + } + + zookeeper_node { + vm_size = "Medium" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ } + } +} +`, r.gen2template(data), data.RandomInteger) +} + +func (r HDInsightSparkClusterResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_spark_cluster" "import" { + name = azurerm_hdinsight_spark_cluster.test.name + resource_group_name = azurerm_hdinsight_spark_cluster.test.resource_group_name + location = azurerm_hdinsight_spark_cluster.test.location + cluster_version = azurerm_hdinsight_spark_cluster.test.cluster_version + tier = azurerm_hdinsight_spark_cluster.test.tier + dynamic "component_version" { + for_each = azurerm_hdinsight_spark_cluster.test.component_version + content { + spark = component_version.value.spark + } + } + dynamic "gateway" { + for_each = azurerm_hdinsight_spark_cluster.test.gateway + content { + enabled = gateway.value.enabled + password = gateway.value.password + username = gateway.value.username + } + } + dynamic "storage_account" { + for_each = azurerm_hdinsight_spark_cluster.test.storage_account + content { + is_default = storage_account.value.is_default + storage_account_key = storage_account.value.storage_account_key + storage_container_id = storage_account.value.storage_container_id + } + } + dynamic "roles" { + for_each = azurerm_hdinsight_spark_cluster.test.roles + content { + dynamic "head_node" { + for_each = lookup(roles.value, "head_node", []) + content { + password = lookup(head_node.value, "password", null) + subnet_id = lookup(head_node.value, "subnet_id", null) + username = head_node.value.username + virtual_network_id = lookup(head_node.value, "virtual_network_id", null) + vm_size = head_node.value.vm_size + } + } + + dynamic "worker_node" { + for_each = lookup(roles.value, "worker_node", []) + content { + password = lookup(worker_node.value, "password", null) + subnet_id = lookup(worker_node.value, "subnet_id", null) + target_instance_count = worker_node.value.target_instance_count + username = worker_node.value.username + virtual_network_id = lookup(worker_node.value, "virtual_network_id", null) + vm_size = worker_node.value.vm_size + } + } + + dynamic "zookeeper_node" { + for_each = lookup(roles.value, "zookeeper_node", []) + content { + password = lookup(zookeeper_node.value, "password", null) + subnet_id = lookup(zookeeper_node.value, "subnet_id", null) + username = zookeeper_node.value.username + virtual_network_id = lookup(zookeeper_node.value, "virtual_network_id", null) + vm_size = zookeeper_node.value.vm_size + } + } + } + } +} +`, r.basic(data)) +} + +func (r HDInsightSparkClusterResource) sshKeys(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +variable "ssh_key" { + default = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCqaZoyiz1qbdOQ8xEf6uEu1cCwYowo5FHtsBhqLoDnnp7KUTEBN+L2NxRIfQ781rxV6Iq5jSav6b2Q8z5KiseOlvKA/RF2wqU0UPYqQviQhLmW6THTpmrv/YkUCuzxDpsH7DUDhZcwySLKVVe0Qm3+5N2Ta6UYH3lsDf9R9wTP2K/+vAnflKebuypNlmocIvakFWoZda18FOmsOoIVXQ8HWFNCuw9ZCunMSN62QGamCe3dL5cXlkgHYv7ekJE15IA9aOJcM7e90oeTqo+7HTcWfdu0qQqPWY5ujyMw/llas8tsXY85LFqRnr3gJ02bAscjc477+X+j/gkpFoN1QEmt terraform@demo.tld" +} + +resource "azurerm_hdinsight_spark_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + spark = "2.4" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" 
+ } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + ssh_keys = [var.ssh_key] + } + + worker_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + ssh_keys = [var.ssh_key] + target_instance_count = 3 + } + + zookeeper_node { + vm_size = "Medium" + username = "acctestusrvm" + ssh_keys = [var.ssh_key] + } + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightSparkClusterResource) updated(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_spark_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + spark = "2.4" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 5 + } + + zookeeper_node { + vm_size = "Medium" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + } + + tags = { + Hello = "World" + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightSparkClusterResource) virtualNetwork(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctestsubnet%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" +} + +resource "azurerm_hdinsight_spark_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + spark = "2.4" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + worker_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 3 + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + zookeeper_node { + vm_size = "Medium" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r HDInsightSparkClusterResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctestsubnet%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" +} + +resource "azurerm_hdinsight_spark_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + spark = "2.4" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + worker_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 3 + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + zookeeper_node { + vm_size = "Medium" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + } + + tags = { + Hello = "World" + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (HDInsightSparkClusterResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestsa%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_container" "test" { + name = "acctest" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (HDInsightSparkClusterResource) gen2template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_storage_account" "gen2test" { + name = "accgen2test%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_kind = "StorageV2" + account_tier = "Standard" + account_replication_type = "LRS" + is_hns_enabled = true +} + +resource "azurerm_storage_data_lake_gen2_filesystem" "gen2test" { + name = "acctest" + storage_account_id = azurerm_storage_account.gen2test.id +} + +resource "azurerm_user_assigned_identity" "test" { + resource_group_name = "${azurerm_resource_group.test.name}" + location = "${azurerm_resource_group.test.location}" + + name = "test-identity" +} + +data "azurerm_subscription" "primary" {} + +resource "azurerm_role_assignment" "test" { + scope = "${data.azurerm_subscription.primary.id}" + role_definition_name = "Storage Blob Data Owner" + principal_id = "${azurerm_user_assigned_identity.test.principal_id}" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (r HDInsightSparkClusterResource) tls(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_spark_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + tls_min_version = "1.2" + + component_version { + spark = "2.4" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 3 + } + + zookeeper_node { + vm_size = "Medium" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ } + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightSparkClusterResource) allMetastores(data acceptance.TestData) string { + return fmt.Sprintf(` +%s +resource "azurerm_sql_server" "test" { + name = "acctestsql-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + administrator_login = "sql_admin" + administrator_login_password = "TerrAform123!" + version = "12.0" +} +resource "azurerm_sql_database" "hive" { + name = "hive" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + collation = "SQL_Latin1_General_CP1_CI_AS" + create_mode = "Default" + requested_service_objective_name = "GP_Gen5_2" +} +resource "azurerm_sql_database" "oozie" { + name = "oozie" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + collation = "SQL_Latin1_General_CP1_CI_AS" + create_mode = "Default" + requested_service_objective_name = "GP_Gen5_2" +} +resource "azurerm_sql_database" "ambari" { + name = "ambari" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + collation = "SQL_Latin1_General_CP1_CI_AS" + create_mode = "Default" + requested_service_objective_name = "GP_Gen5_2" +} +resource "azurerm_sql_firewall_rule" "AzureServices" { + name = "allow-azure-services" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_sql_server.test.name + start_ip_address = "0.0.0.0" + end_ip_address = "0.0.0.0" +} +resource "azurerm_hdinsight_spark_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + component_version { + spark = "2.4" + } + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + roles { + head_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + worker_node { + vm_size = "Standard_D4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 2 + } + zookeeper_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ } + } + metastores { + hive { + server = azurerm_sql_server.test.fully_qualified_domain_name + database_name = azurerm_sql_database.hive.name + username = azurerm_sql_server.test.administrator_login + password = azurerm_sql_server.test.administrator_login_password + } + oozie { + server = azurerm_sql_server.test.fully_qualified_domain_name + database_name = azurerm_sql_database.oozie.name + username = azurerm_sql_server.test.administrator_login + password = azurerm_sql_server.test.administrator_login_password + } + ambari { + server = azurerm_sql_server.test.fully_qualified_domain_name + database_name = azurerm_sql_database.ambari.name + username = azurerm_sql_server.test.administrator_login + password = azurerm_sql_server.test.administrator_login_password + } + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger) +} + +func (r HDInsightSparkClusterResource) hiveMetastore(data acceptance.TestData) string { + return fmt.Sprintf(` +%s +resource "azurerm_sql_server" "test" { + name = "acctestsql-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + administrator_login = "sql_admin" + administrator_login_password = "TerrAform123!" + version = "12.0" +} +resource "azurerm_sql_database" "hive" { + name = "hive" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + collation = "SQL_Latin1_General_CP1_CI_AS" + create_mode = "Default" + requested_service_objective_name = "GP_Gen5_2" +} +resource "azurerm_sql_firewall_rule" "AzureServices" { + name = "allow-azure-services" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_sql_server.test.name + start_ip_address = "0.0.0.0" + end_ip_address = "0.0.0.0" +} +resource "azurerm_hdinsight_spark_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + component_version { + spark = "2.4" + } + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + roles { + head_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + worker_node { + vm_size = "Standard_D4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 2 + } + zookeeper_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ } + } + metastores { + hive { + server = azurerm_sql_server.test.fully_qualified_domain_name + database_name = azurerm_sql_database.hive.name + username = azurerm_sql_server.test.administrator_login + password = azurerm_sql_server.test.administrator_login_password + } + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger) +} + +func (r HDInsightSparkClusterResource) monitor(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestLAW-%s-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "PerGB2018" +} + +resource "azurerm_hdinsight_spark_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "4.0" + tier = "Standard" + + component_version { + spark = "2.4" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 3 + } + + zookeeper_node { + vm_size = "Medium" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + } + + monitor { + log_analytics_workspace_id = azurerm_log_analytics_workspace.test.workspace_id + primary_key = azurerm_log_analytics_workspace.test.primary_shared_key + } +} +`, r.template(data), data.RandomString, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/hdinsight/hdinsight_storm_cluster_resource.go b/azurerm/internal/services/hdinsight/hdinsight_storm_cluster_resource.go index 3b42bd5e6efc..1127ee49f92d 100644 --- a/azurerm/internal/services/hdinsight/hdinsight_storm_cluster_resource.go +++ b/azurerm/internal/services/hdinsight/hdinsight_storm_cluster_resource.go @@ -5,7 +5,9 @@ import ( "log" "time" - "github.com/Azure/azure-sdk-for-go/services/preview/hdinsight/mgmt/2018-06-01-preview/hdinsight" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hdinsight/parse" + + "github.com/Azure/azure-sdk-for-go/services/hdinsight/mgmt/2018-06-01/hdinsight" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" @@ -39,16 +41,16 @@ var hdInsightStormClusterZookeeperNodeDefinition = HDInsightNodeDefinition{ FixedTargetInstanceCount: utils.Int32(int32(3)), } -func resourceArmHDInsightStormCluster() *schema.Resource { +func resourceHDInsightStormCluster() *schema.Resource { return &schema.Resource{ DeprecationMessage: `HDInsight 3.6 will be retired on 2020-12-31 - Storm is not supported in HDInsight 4.0 and so this resource will be removed in the next major version of the AzureRM Terraform Provider. 
More information on the HDInsight 3.6 deprecation can be found at: https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-component-versioning#available-versions`, - Create: resourceArmHDInsightStormClusterCreate, - Read: resourceArmHDInsightStormClusterRead, - Update: hdinsightClusterUpdate("Storm", resourceArmHDInsightStormClusterRead), + Create: resourceHDInsightStormClusterCreate, + Read: resourceHDInsightStormClusterRead, + Update: hdinsightClusterUpdate("Storm", resourceHDInsightStormClusterRead), Delete: hdinsightClusterDelete("Storm"), Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -101,11 +103,11 @@ https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-component-versioning# MaxItems: 1, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ - "head_node": SchemaHDInsightNodeDefinition("roles.0.head_node", hdInsightStormClusterHeadNodeDefinition), + "head_node": SchemaHDInsightNodeDefinition("roles.0.head_node", hdInsightStormClusterHeadNodeDefinition, true), - "worker_node": SchemaHDInsightNodeDefinition("roles.0.worker_node", hdInsightStormClusterWorkerNodeDefinition), + "worker_node": SchemaHDInsightNodeDefinition("roles.0.worker_node", hdInsightStormClusterWorkerNodeDefinition, true), - "zookeeper_node": SchemaHDInsightNodeDefinition("roles.0.zookeeper_node", hdInsightStormClusterZookeeperNodeDefinition), + "zookeeper_node": SchemaHDInsightNodeDefinition("roles.0.zookeeper_node", hdInsightStormClusterZookeeperNodeDefinition, true), }, }, }, @@ -127,14 +129,16 @@ https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-component-versioning# } } -func resourceArmHDInsightStormClusterCreate(d *schema.ResourceData, meta interface{}) error { +func resourceHDInsightStormClusterCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).HDInsight.ClustersClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId extensionsClient := meta.(*clients.Client).HDInsight.ExtensionsClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() name := d.Get("name").(string) resourceGroup := d.Get("resource_group_name").(string) + id := parse.NewClusterID(subscriptionId, resourceGroup, name) location := azure.NormalizeLocation(d.Get("location").(string)) clusterVersion := d.Get("cluster_version").(string) t := d.Get("tags").(map[string]interface{}) @@ -221,7 +225,7 @@ func resourceArmHDInsightStormClusterCreate(d *schema.ResourceData, meta interfa return fmt.Errorf("failure reading ID for HDInsight Storm Cluster %q (Resource Group %q)", name, resourceGroup) } - d.SetId(*read.ID) + d.SetId(id.ID()) // We can only enable monitoring after creation if v, ok := d.GetOk("monitor"); ok { @@ -231,23 +235,23 @@ func resourceArmHDInsightStormClusterCreate(d *schema.ResourceData, meta interfa } } - return resourceArmHDInsightStormClusterRead(d, meta) + return resourceHDInsightStormClusterRead(d, meta) } -func resourceArmHDInsightStormClusterRead(d *schema.ResourceData, meta interface{}) error { +func resourceHDInsightStormClusterRead(d *schema.ResourceData, meta interface{}) error { clustersClient := meta.(*clients.Client).HDInsight.ClustersClient configurationsClient := meta.(*clients.Client).HDInsight.ConfigurationsClient extensionsClient := meta.(*clients.Client).HDInsight.ExtensionsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.ClusterID(d.Id()) if err != nil { return 
err } resourceGroup := id.ResourceGroup - name := id.Path["clusters"] + name := id.Name resp, err := clustersClient.Get(ctx, resourceGroup, name) if err != nil { diff --git a/azurerm/internal/services/hdinsight/hdinsight_storm_cluster_resource_test.go b/azurerm/internal/services/hdinsight/hdinsight_storm_cluster_resource_test.go new file mode 100644 index 000000000000..0cf500027821 --- /dev/null +++ b/azurerm/internal/services/hdinsight/hdinsight_storm_cluster_resource_test.go @@ -0,0 +1,1084 @@ +package hdinsight_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hdinsight/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type HDInsightStormClusterResource struct { +} + +func TestAccHDInsightStormCluster_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_storm_cluster", "test") + r := HDInsightStormClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightStormCluster_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_storm_cluster", "test") + r := HDInsightStormClusterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccHDInsightStormCluster_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_storm_cluster", "test") + r := HDInsightStormClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + { + Config: r.updated(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep( + "roles.0.head_node.0.password", + 
"roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightStormCluster_sshKeys(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_storm_cluster", "test") + r := HDInsightStormClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.sshKeys(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("storage_account", + "roles.0.head_node.0.ssh_keys", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.ssh_keys", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.ssh_keys", + "roles.0.zookeeper_node.0.vm_size"), + }) +} + +func TestAccHDInsightStormCluster_virtualNetwork(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_storm_cluster", "test") + r := HDInsightStormClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.virtualNetwork(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightStormCluster_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_storm_cluster", "test") + r := HDInsightStormClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightStormCluster_tls(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_storm_cluster", "test") + r := HDInsightStormClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.tls(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightStormCluster_allMetastores(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_storm_cluster", "test") + r := HDInsightStormClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.allMetastores(data), + Check: resource.ComposeTestCheckFunc( + 
check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account", + "metastores.0.hive.0.password", + "metastores.0.oozie.0.password", + "metastores.0.ambari.0.password"), + }) +} + +func TestAccHDInsightStormCluster_hiveMetastore(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_storm_cluster", "test") + r := HDInsightStormClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.hiveMetastore(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + }) +} + +func TestAccHDInsightStormCluster_updateMetastore(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_storm_cluster", "test") + r := HDInsightStormClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.hiveMetastore(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account", + "metastores.0.hive.0.password", + "metastores.0.oozie.0.password", + "metastores.0.ambari.0.password"), + { + Config: r.allMetastores(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account", + "metastores.0.hive.0.password", + "metastores.0.oozie.0.password", + "metastores.0.ambari.0.password"), + }) +} + +func TestAccHDInsightStormCluster_monitor(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_storm_cluster", "test") + r := HDInsightStormClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.monitor(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func TestAccHDInsightStormCluster_updateMonitor(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hdinsight_storm_cluster", "test") + r := HDInsightStormClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + // No monitor + { + Config: 
r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + // Add monitor + { + Config: r.monitor(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + // Change Log Analytics Workspace for the monitor + { + PreConfig: func() { + data.RandomString += "new" + }, + Config: r.monitor(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + // Remove monitor + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("https_endpoint").Exists(), + check.That(data.ResourceName).Key("ssh_endpoint").Exists(), + ), + }, + data.ImportStep("roles.0.head_node.0.password", + "roles.0.head_node.0.vm_size", + "roles.0.worker_node.0.password", + "roles.0.worker_node.0.vm_size", + "roles.0.zookeeper_node.0.password", + "roles.0.zookeeper_node.0.vm_size", + "storage_account"), + }) +} + +func (t HDInsightStormClusterResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ClusterID(state.ID) + if err != nil { + return nil, err + } + + resourceGroup := id.ResourceGroup + name := id.Name + + resp, err := clients.HDInsight.ClustersClient.Get(ctx, resourceGroup, name) + if err != nil { + return nil, fmt.Errorf("reading HDInsight Storm Cluster (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (r HDInsightStormClusterResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_storm_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "3.6" + tier = "Standard" + + component_version { + storm = "1.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ } + + worker_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 3 + } + + zookeeper_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightStormClusterResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_storm_cluster" "import" { + name = azurerm_hdinsight_storm_cluster.test.name + resource_group_name = azurerm_hdinsight_storm_cluster.test.resource_group_name + location = azurerm_hdinsight_storm_cluster.test.location + cluster_version = azurerm_hdinsight_storm_cluster.test.cluster_version + tier = azurerm_hdinsight_storm_cluster.test.tier + dynamic "component_version" { + for_each = azurerm_hdinsight_storm_cluster.test.component_version + content { + storm = component_version.value.storm + } + } + dynamic "gateway" { + for_each = azurerm_hdinsight_storm_cluster.test.gateway + content { + enabled = gateway.value.enabled + password = gateway.value.password + username = gateway.value.username + } + } + dynamic "storage_account" { + for_each = azurerm_hdinsight_storm_cluster.test.storage_account + content { + is_default = storage_account.value.is_default + storage_account_key = storage_account.value.storage_account_key + storage_container_id = storage_account.value.storage_container_id + } + } + dynamic "roles" { + for_each = azurerm_hdinsight_storm_cluster.test.roles + content { + dynamic "head_node" { + for_each = lookup(roles.value, "head_node", []) + content { + password = lookup(head_node.value, "password", null) + subnet_id = lookup(head_node.value, "subnet_id", null) + username = head_node.value.username + virtual_network_id = lookup(head_node.value, "virtual_network_id", null) + vm_size = head_node.value.vm_size + } + } + + dynamic "worker_node" { + for_each = lookup(roles.value, "worker_node", []) + content { + password = lookup(worker_node.value, "password", null) + subnet_id = lookup(worker_node.value, "subnet_id", null) + target_instance_count = worker_node.value.target_instance_count + username = worker_node.value.username + virtual_network_id = lookup(worker_node.value, "virtual_network_id", null) + vm_size = worker_node.value.vm_size + } + } + + dynamic "zookeeper_node" { + for_each = lookup(roles.value, "zookeeper_node", []) + content { + password = lookup(zookeeper_node.value, "password", null) + subnet_id = lookup(zookeeper_node.value, "subnet_id", null) + username = zookeeper_node.value.username + virtual_network_id = lookup(zookeeper_node.value, "virtual_network_id", null) + vm_size = zookeeper_node.value.vm_size + } + } + } + } +} +`, r.basic(data)) +} + +func (r HDInsightStormClusterResource) sshKeys(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +variable "ssh_key" { + default = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCqaZoyiz1qbdOQ8xEf6uEu1cCwYowo5FHtsBhqLoDnnp7KUTEBN+L2NxRIfQ781rxV6Iq5jSav6b2Q8z5KiseOlvKA/RF2wqU0UPYqQviQhLmW6THTpmrv/YkUCuzxDpsH7DUDhZcwySLKVVe0Qm3+5N2Ta6UYH3lsDf9R9wTP2K/+vAnflKebuypNlmocIvakFWoZda18FOmsOoIVXQ8HWFNCuw9ZCunMSN62QGamCe3dL5cXlkgHYv7ekJE15IA9aOJcM7e90oeTqo+7HTcWfdu0qQqPWY5ujyMw/llas8tsXY85LFqRnr3gJ02bAscjc477+X+j/gkpFoN1QEmt terraform@demo.tld" +} + +resource "azurerm_hdinsight_storm_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "3.6" + tier = 
"Standard" + + component_version { + storm = "1.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + ssh_keys = [var.ssh_key] + } + + worker_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + ssh_keys = [var.ssh_key] + target_instance_count = 3 + } + + zookeeper_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + ssh_keys = [var.ssh_key] + } + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightStormClusterResource) updated(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_storm_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "3.6" + tier = "Standard" + + component_version { + storm = "1.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 5 + } + + zookeeper_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + } + + tags = { + Hello = "World" + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightStormClusterResource) virtualNetwork(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctestsubnet%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" +} + +resource "azurerm_hdinsight_storm_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "3.6" + tier = "Standard" + + component_version { + storm = "1.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + worker_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 3 + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + zookeeper_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r HDInsightStormClusterResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctestsubnet%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" +} + +resource "azurerm_hdinsight_storm_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "3.6" + tier = "Standard" + + component_version { + storm = "1.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + worker_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 3 + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + + zookeeper_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + subnet_id = azurerm_subnet.test.id + virtual_network_id = azurerm_virtual_network.test.id + } + } + + tags = { + Hello = "World" + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (HDInsightStormClusterResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestsa%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_container" "test" { + name = "acctest" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (r HDInsightStormClusterResource) tls(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hdinsight_storm_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "3.6" + tier = "Standard" + tls_min_version = "1.2" + + component_version { + storm = "1.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" 
+ } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 3 + } + + zookeeper_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + } +} +`, r.template(data), data.RandomInteger) +} + +func (r HDInsightStormClusterResource) allMetastores(data acceptance.TestData) string { + return fmt.Sprintf(` +%s +resource "azurerm_sql_server" "test" { + name = "acctestsql-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + administrator_login = "sql_admin" + administrator_login_password = "TerrAform123!" + version = "12.0" +} +resource "azurerm_sql_database" "hive" { + name = "hive" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + collation = "SQL_Latin1_General_CP1_CI_AS" + create_mode = "Default" + requested_service_objective_name = "GP_Gen5_2" +} +resource "azurerm_sql_database" "oozie" { + name = "oozie" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + collation = "SQL_Latin1_General_CP1_CI_AS" + create_mode = "Default" + requested_service_objective_name = "GP_Gen5_2" +} +resource "azurerm_sql_database" "ambari" { + name = "ambari" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + collation = "SQL_Latin1_General_CP1_CI_AS" + create_mode = "Default" + requested_service_objective_name = "GP_Gen5_2" +} +resource "azurerm_sql_firewall_rule" "AzureServices" { + name = "allow-azure-services" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_sql_server.test.name + start_ip_address = "0.0.0.0" + end_ip_address = "0.0.0.0" +} +resource "azurerm_hdinsight_storm_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "3.6" + tier = "Standard" + component_version { + storm = "1.1" + } + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + roles { + head_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + worker_node { + vm_size = "Standard_D4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 2 + } + zookeeper_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ } + } + metastores { + hive { + server = azurerm_sql_server.test.fully_qualified_domain_name + database_name = azurerm_sql_database.hive.name + username = azurerm_sql_server.test.administrator_login + password = azurerm_sql_server.test.administrator_login_password + } + oozie { + server = azurerm_sql_server.test.fully_qualified_domain_name + database_name = azurerm_sql_database.oozie.name + username = azurerm_sql_server.test.administrator_login + password = azurerm_sql_server.test.administrator_login_password + } + ambari { + server = azurerm_sql_server.test.fully_qualified_domain_name + database_name = azurerm_sql_database.ambari.name + username = azurerm_sql_server.test.administrator_login + password = azurerm_sql_server.test.administrator_login_password + } + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger) +} + +func (r HDInsightStormClusterResource) hiveMetastore(data acceptance.TestData) string { + return fmt.Sprintf(` +%s +resource "azurerm_sql_server" "test" { + name = "acctestsql-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + administrator_login = "sql_admin" + administrator_login_password = "TerrAform123!" + version = "12.0" +} +resource "azurerm_sql_database" "hive" { + name = "hive" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + collation = "SQL_Latin1_General_CP1_CI_AS" + create_mode = "Default" + requested_service_objective_name = "GP_Gen5_2" +} +resource "azurerm_sql_firewall_rule" "AzureServices" { + name = "allow-azure-services" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_sql_server.test.name + start_ip_address = "0.0.0.0" + end_ip_address = "0.0.0.0" +} +resource "azurerm_hdinsight_storm_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "3.6" + tier = "Standard" + component_version { + storm = "1.1" + } + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + roles { + head_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + worker_node { + vm_size = "Standard_D4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 2 + } + zookeeper_node { + vm_size = "Standard_D3_v2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" 
+ } + } + metastores { + hive { + server = azurerm_sql_server.test.fully_qualified_domain_name + database_name = azurerm_sql_database.hive.name + username = azurerm_sql_server.test.administrator_login + password = azurerm_sql_server.test.administrator_login_password + } + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger) +} + +func (r HDInsightStormClusterResource) monitor(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestLAW-%s-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "PerGB2018" +} + +resource "azurerm_hdinsight_storm_cluster" "test" { + name = "acctesthdi-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_version = "3.6" + tier = "Standard" + + component_version { + storm = "1.1" + } + + gateway { + enabled = true + username = "acctestusrgw" + password = "TerrAform123!" + } + + storage_account { + storage_container_id = azurerm_storage_container.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key + is_default = true + } + + roles { + head_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + + worker_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + target_instance_count = 3 + } + + zookeeper_node { + vm_size = "Standard_A4_V2" + username = "acctestusrvm" + password = "AccTestvdSC4daf986!" + } + } + + monitor { + log_analytics_workspace_id = azurerm_log_analytics_workspace.test.workspace_id + primary_key = azurerm_log_analytics_workspace.test.primary_shared_key + } +} +`, r.template(data), data.RandomString, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/hdinsight/parse/cluster.go b/azurerm/internal/services/hdinsight/parse/cluster.go new file mode 100644 index 000000000000..f53dc1cae7d0 --- /dev/null +++ b/azurerm/internal/services/hdinsight/parse/cluster.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ClusterId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewClusterID(subscriptionId, resourceGroup, name string) ClusterId { + return ClusterId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id ClusterId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Cluster", segmentsStr) +} + +func (id ClusterId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.HDInsight/clusters/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// ClusterID parses a Cluster ID into an ClusterId struct +func ClusterID(input string) (*ClusterId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ClusterId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup 
== "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("clusters"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/hdinsight/parse/cluster_test.go b/azurerm/internal/services/hdinsight/parse/cluster_test.go new file mode 100644 index 000000000000..61a92a649d26 --- /dev/null +++ b/azurerm/internal/services/hdinsight/parse/cluster_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ClusterId{} + +func TestClusterIDFormatter(t *testing.T) { + actual := NewClusterID("12345678-1234-9876-4563-123456789012", "resGroup1", "cluster1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.HDInsight/clusters/cluster1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestClusterID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ClusterId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.HDInsight/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.HDInsight/clusters/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.HDInsight/clusters/cluster1", + Expected: &ClusterId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "cluster1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.HDINSIGHT/CLUSTERS/CLUSTER1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ClusterID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/hdinsight/registration.go b/azurerm/internal/services/hdinsight/registration.go index 91804ab3788c..788085b85f98 100644 --- 
a/azurerm/internal/services/hdinsight/registration.go +++ b/azurerm/internal/services/hdinsight/registration.go @@ -21,19 +21,20 @@ func (r Registration) WebsiteCategories() []string { // SupportedDataSources returns the supported Data Sources supported by this Service func (r Registration) SupportedDataSources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_hdinsight_cluster": dataSourceArmHDInsightSparkCluster(), + "azurerm_hdinsight_cluster": dataSourceHDInsightSparkCluster(), } } // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_hdinsight_hadoop_cluster": resourceArmHDInsightHadoopCluster(), - "azurerm_hdinsight_hbase_cluster": resourceArmHDInsightHBaseCluster(), - "azurerm_hdinsight_interactive_query_cluster": resourceArmHDInsightInteractiveQueryCluster(), - "azurerm_hdinsight_kafka_cluster": resourceArmHDInsightKafkaCluster(), - "azurerm_hdinsight_ml_services_cluster": resourceArmHDInsightMLServicesCluster(), - "azurerm_hdinsight_rserver_cluster": resourceArmHDInsightRServerCluster(), - "azurerm_hdinsight_spark_cluster": resourceArmHDInsightSparkCluster(), - "azurerm_hdinsight_storm_cluster": resourceArmHDInsightStormCluster()} + "azurerm_hdinsight_hadoop_cluster": resourceHDInsightHadoopCluster(), + "azurerm_hdinsight_hbase_cluster": resourceHDInsightHBaseCluster(), + "azurerm_hdinsight_interactive_query_cluster": resourceHDInsightInteractiveQueryCluster(), + "azurerm_hdinsight_kafka_cluster": resourceHDInsightKafkaCluster(), + "azurerm_hdinsight_ml_services_cluster": resourceHDInsightMLServicesCluster(), + "azurerm_hdinsight_rserver_cluster": resourceHDInsightRServerCluster(), + "azurerm_hdinsight_spark_cluster": resourceHDInsightSparkCluster(), + "azurerm_hdinsight_storm_cluster": resourceHDInsightStormCluster(), + } } diff --git a/azurerm/internal/services/hdinsight/resourceids.go b/azurerm/internal/services/hdinsight/resourceids.go new file mode 100644 index 000000000000..0bfdfaad5e62 --- /dev/null +++ b/azurerm/internal/services/hdinsight/resourceids.go @@ -0,0 +1,3 @@ +package hdinsight + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Cluster -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.HDInsight/clusters/cluster1 diff --git a/azurerm/internal/services/hdinsight/tests/common_hdinsight_test.go b/azurerm/internal/services/hdinsight/tests/common_hdinsight_test.go deleted file mode 100644 index 2fa5b13752f0..000000000000 --- a/azurerm/internal/services/hdinsight/tests/common_hdinsight_test.go +++ /dev/null @@ -1,63 +0,0 @@ -package tests - -import ( - "fmt" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func testCheckAzureRMHDInsightClusterDestroy(terraformResourceName string) func(s *terraform.State) error { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).HDInsight.ClustersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != terraformResourceName 
{ - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - resp, err := client.Get(ctx, resourceGroup, name) - - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return err - } - } - } - - return nil - } -} - -func testCheckAzureRMHDInsightClusterExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).HDInsight.ClustersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - clusterName := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, clusterName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: HDInsight Cluster %q (Resource Group: %q) does not exist", clusterName, resourceGroup) - } - - return fmt.Errorf("Bad: Get on hdinsightClustersClient: %+v", err) - } - - return nil - } -} diff --git a/azurerm/internal/services/hdinsight/tests/hdinsight_cluster_data_source_test.go b/azurerm/internal/services/hdinsight/tests/hdinsight_cluster_data_source_test.go deleted file mode 100644 index 76d2bb6cef8b..000000000000 --- a/azurerm/internal/services/hdinsight/tests/hdinsight_cluster_data_source_test.go +++ /dev/null @@ -1,266 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMHDInsightCluster_hadoop(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_hdinsight_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceHDInsightCluster_hadoop(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "kind", "hadoop"), - resource.TestCheckResourceAttr(data.ResourceName, "tier", "standard"), - resource.TestCheckResourceAttr(data.ResourceName, "edge_ssh_endpoint", ""), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMHDInsightCluster_hbase(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_hdinsight_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceHDInsightCluster_hbase(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "kind", "hbase"), - resource.TestCheckResourceAttr(data.ResourceName, "tier", "standard"), - resource.TestCheckResourceAttr(data.ResourceName, "edge_ssh_endpoint", ""), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMHDInsightCluster_interactiveQuery(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_hdinsight_cluster", 
"test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceHDInsightCluster_interactiveQuery(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "kind", "interactivehive"), - resource.TestCheckResourceAttr(data.ResourceName, "tier", "standard"), - resource.TestCheckResourceAttr(data.ResourceName, "edge_ssh_endpoint", ""), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMHDInsightCluster_kafka(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_hdinsight_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceHDInsightCluster_kafka(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "kind", "kafka"), - resource.TestCheckResourceAttr(data.ResourceName, "tier", "standard"), - resource.TestCheckResourceAttr(data.ResourceName, "edge_ssh_endpoint", ""), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMHDInsightCluster_mlServices(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_hdinsight_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceHDInsightCluster_mlServices(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "kind", "mlservices"), - resource.TestCheckResourceAttr(data.ResourceName, "tier", "standard"), - resource.TestCheckResourceAttrSet(data.ResourceName, "edge_ssh_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMHDInsightCluster_rserver(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_hdinsight_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceHDInsightCluster_rserver(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "kind", "rserver"), - resource.TestCheckResourceAttr(data.ResourceName, "tier", "standard"), - resource.TestCheckResourceAttrSet(data.ResourceName, "edge_ssh_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMHDInsightCluster_spark(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_hdinsight_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceHDInsightCluster_spark(data), - Check: resource.ComposeTestCheckFunc( - 
resource.TestCheckResourceAttr(data.ResourceName, "kind", "spark"), - resource.TestCheckResourceAttr(data.ResourceName, "tier", "standard"), - resource.TestCheckResourceAttr(data.ResourceName, "edge_ssh_endpoint", ""), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMHDInsightCluster_storm(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_hdinsight_cluster", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceHDInsightCluster_storm(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "kind", "storm"), - resource.TestCheckResourceAttr(data.ResourceName, "tier", "standard"), - resource.TestCheckResourceAttr(data.ResourceName, "edge_ssh_endpoint", ""), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - }, - }) -} - -func testAccDataSourceHDInsightCluster_hadoop(data acceptance.TestData) string { - template := testAccAzureRMHDInsightHadoopCluster_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_hdinsight_cluster" "test" { - name = azurerm_hdinsight_hadoop_cluster.test.name - resource_group_name = azurerm_hdinsight_hadoop_cluster.test.resource_group_name -} -`, template) -} - -func testAccDataSourceHDInsightCluster_hbase(data acceptance.TestData) string { - template := testAccAzureRMHDInsightHBaseCluster_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_hdinsight_cluster" "test" { - name = azurerm_hdinsight_hbase_cluster.test.name - resource_group_name = azurerm_hdinsight_hbase_cluster.test.resource_group_name -} -`, template) -} - -func testAccDataSourceHDInsightCluster_interactiveQuery(data acceptance.TestData) string { - template := testAccAzureRMHDInsightInteractiveQueryCluster_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_hdinsight_cluster" "test" { - name = azurerm_hdinsight_interactive_query_cluster.test.name - resource_group_name = azurerm_hdinsight_interactive_query_cluster.test.resource_group_name -} -`, template) -} - -func testAccDataSourceHDInsightCluster_kafka(data acceptance.TestData) string { - template := testAccAzureRMHDInsightKafkaCluster_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_hdinsight_cluster" "test" { - name = azurerm_hdinsight_kafka_cluster.test.name - resource_group_name = azurerm_hdinsight_kafka_cluster.test.resource_group_name -} -`, template) -} - -func testAccDataSourceHDInsightCluster_mlServices(data acceptance.TestData) string { - template := testAccAzureRMHDInsightMLServicesCluster_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_hdinsight_cluster" "test" { - name = azurerm_hdinsight_ml_services_cluster.test.name - resource_group_name = azurerm_hdinsight_ml_services_cluster.test.resource_group_name -} -`, template) -} - -func testAccDataSourceHDInsightCluster_rserver(data acceptance.TestData) string { - template := testAccAzureRMHDInsightRServerCluster_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_hdinsight_cluster" "test" { - name = azurerm_hdinsight_rserver_cluster.test.name - resource_group_name = azurerm_hdinsight_rserver_cluster.test.resource_group_name -} -`, template) -} - -func testAccDataSourceHDInsightCluster_spark(data 
acceptance.TestData) string { - template := testAccAzureRMHDInsightSparkCluster_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_hdinsight_cluster" "test" { - name = azurerm_hdinsight_spark_cluster.test.name - resource_group_name = azurerm_hdinsight_spark_cluster.test.resource_group_name -} -`, template) -} - -func testAccDataSourceHDInsightCluster_storm(data acceptance.TestData) string { - template := testAccAzureRMHDInsightStormCluster_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_hdinsight_cluster" "test" { - name = azurerm_hdinsight_storm_cluster.test.name - resource_group_name = azurerm_hdinsight_storm_cluster.test.resource_group_name -} -`, template) -} diff --git a/azurerm/internal/services/hdinsight/tests/hdinsight_hadoop_cluster_resource_test.go b/azurerm/internal/services/hdinsight/tests/hdinsight_hadoop_cluster_resource_test.go deleted file mode 100644 index 7e4a4b40cca6..000000000000 --- a/azurerm/internal/services/hdinsight/tests/hdinsight_hadoop_cluster_resource_test.go +++ /dev/null @@ -1,1570 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccAzureRMHDInsightHadoopCluster_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightHadoopCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightHadoopCluster_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightHadoopCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMHDInsightHadoopCluster_requiresImport), - }, - }) -} - -func TestAccAzureRMHDInsightHadoopCluster_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightHadoopCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - { - Config: testAccAzureRMHDInsightHadoopCluster_updated(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightHadoopCluster_sshKeys(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightHadoopCluster_sshKeys(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("storage_account", - "roles.0.head_node.0.ssh_keys", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.ssh_keys", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.ssh_keys", - "roles.0.zookeeper_node.0.vm_size"), - }, - }) -} - -func TestAccAzureRMHDInsightHadoopCluster_virtualNetwork(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightHadoopCluster_virtualNetwork(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightHadoopCluster_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightHadoopCluster_complete(data), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightHadoopCluster_edgeNodeBasic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightHadoopCluster_edgeNodeBasic(data, 2, "Standard_D3_V2"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "roles.0.edge_node.0.password", - "roles.0.edge_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightHadoopCluster_addEdgeNodeBasic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightHadoopCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - { - Config: testAccAzureRMHDInsightHadoopCluster_edgeNodeBasic(data, 1, "Standard_D3_V2"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "roles.0.edge_node.0.password", - "roles.0.edge_node.0.vm_size", - "storage_account"), - { - Config: testAccAzureRMHDInsightHadoopCluster_edgeNodeBasic(data, 3, "Standard_D4_V2"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - 
), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "roles.0.edge_node.0.password", - "roles.0.edge_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightHadoopCluster_gen2storage(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightHadoopCluster_gen2storage(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightHadoopCluster_gen2AndBlobStorage(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightHadoopCluster_gen2AndBlobStorage(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightHadoopCluster_tls(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightHadoopCluster_tls(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightHadoopCluster_allMetastores(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { 
acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightHadoopCluster_allMetastores(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account", - "metastores.0.hive.0.password", - "metastores.0.oozie.0.password", - "metastores.0.ambari.0.password"), - }, - }) -} - -func TestAccAzureRMHDInsightHadoopCluster_hiveMetastore(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightHadoopCluster_hiveMetastore(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - }, - }) -} - -func TestAccAzureRMHDInsightHadoopCluster_updateMetastore(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightHadoopCluster_hiveMetastore(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account", - "metastores.0.hive.0.password", - "metastores.0.oozie.0.password", - "metastores.0.ambari.0.password"), - { - Config: testAccAzureRMHDInsightHadoopCluster_allMetastores(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account", - "metastores.0.hive.0.password", - "metastores.0.oozie.0.password", - "metastores.0.ambari.0.password"), - }, - }) -} - -func TestAccAzureRMHDInsightHadoopCluster_monitor(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", 
"test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightHadoopCluster_monitor(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightHadoopCluster_updateGateway(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightHadoopCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - { - Config: testAccAzureRMHDInsightHadoopCluster_updateGateway(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightHadoopCluster_updateMonitor(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hadoop_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - // No monitor - { - Config: testAccAzureRMHDInsightHadoopCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - // Add monitor - { - Config: testAccAzureRMHDInsightHadoopCluster_monitor(data), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - // Change Log Analytics Workspace for the monitor - { - PreConfig: func() { - data.RandomString += "new" - }, - Config: testAccAzureRMHDInsightHadoopCluster_monitor(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - // Remove monitor - { - Config: testAccAzureRMHDInsightHadoopCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func testAccAzureRMHDInsightHadoopCluster_basic(data acceptance.TestData) string { - template := testAccAzureRMHDInsightHadoopCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_hadoop_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - hadoop = "3.1" - } - - gateway { - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_D4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 2 - } - - zookeeper_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- } - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightHadoopCluster_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMHDInsightHadoopCluster_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_hadoop_cluster" "import" { - name = azurerm_hdinsight_hadoop_cluster.test.name - resource_group_name = azurerm_hdinsight_hadoop_cluster.test.resource_group_name - location = azurerm_hdinsight_hadoop_cluster.test.location - cluster_version = azurerm_hdinsight_hadoop_cluster.test.cluster_version - tier = azurerm_hdinsight_hadoop_cluster.test.tier - dynamic "component_version" { - for_each = azurerm_hdinsight_hadoop_cluster.test.component_version - content { - hadoop = component_version.value.hadoop - } - } - dynamic "gateway" { - for_each = azurerm_hdinsight_hadoop_cluster.test.gateway - content { - enabled = gateway.value.enabled - password = gateway.value.password - username = gateway.value.username - } - } - dynamic "storage_account" { - for_each = azurerm_hdinsight_hadoop_cluster.test.storage_account - content { - is_default = storage_account.value.is_default - storage_account_key = storage_account.value.storage_account_key - storage_container_id = storage_account.value.storage_container_id - } - } - dynamic "roles" { - for_each = azurerm_hdinsight_hadoop_cluster.test.roles - content { - dynamic "edge_node" { - for_each = lookup(roles.value, "edge_node", []) - content { - target_instance_count = edge_node.value.target_instance_count - vm_size = edge_node.value.vm_size - - dynamic "install_script_action" { - for_each = lookup(edge_node.value, "install_script_action", []) - content { - name = install_script_action.value.name - uri = install_script_action.value.uri - } - } - } - } - - dynamic "head_node" { - for_each = lookup(roles.value, "head_node", []) - content { - password = lookup(head_node.value, "password", null) - subnet_id = lookup(head_node.value, "subnet_id", null) - username = head_node.value.username - virtual_network_id = lookup(head_node.value, "virtual_network_id", null) - vm_size = head_node.value.vm_size - } - } - - dynamic "worker_node" { - for_each = lookup(roles.value, "worker_node", []) - content { - password = lookup(worker_node.value, "password", null) - subnet_id = lookup(worker_node.value, "subnet_id", null) - target_instance_count = worker_node.value.target_instance_count - username = worker_node.value.username - virtual_network_id = lookup(worker_node.value, "virtual_network_id", null) - vm_size = worker_node.value.vm_size - } - } - - dynamic "zookeeper_node" { - for_each = lookup(roles.value, "zookeeper_node", []) - content { - password = lookup(zookeeper_node.value, "password", null) - subnet_id = lookup(zookeeper_node.value, "subnet_id", null) - username = zookeeper_node.value.username - virtual_network_id = lookup(zookeeper_node.value, "virtual_network_id", null) - vm_size = zookeeper_node.value.vm_size - } - } - } - } -} -`, template) -} - -func testAccAzureRMHDInsightHadoopCluster_sshKeys(data acceptance.TestData) string { - template := testAccAzureRMHDInsightHadoopCluster_template(data) - return fmt.Sprintf(` -%s - -variable "ssh_key" { - default = "ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAABAQCqaZoyiz1qbdOQ8xEf6uEu1cCwYowo5FHtsBhqLoDnnp7KUTEBN+L2NxRIfQ781rxV6Iq5jSav6b2Q8z5KiseOlvKA/RF2wqU0UPYqQviQhLmW6THTpmrv/YkUCuzxDpsH7DUDhZcwySLKVVe0Qm3+5N2Ta6UYH3lsDf9R9wTP2K/+vAnflKebuypNlmocIvakFWoZda18FOmsOoIVXQ8HWFNCuw9ZCunMSN62QGamCe3dL5cXlkgHYv7ekJE15IA9aOJcM7e90oeTqo+7HTcWfdu0qQqPWY5ujyMw/llas8tsXY85LFqRnr3gJ02bAscjc477+X+j/gkpFoN1QEmt terraform@demo.tld" -} - -resource "azurerm_hdinsight_hadoop_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - hadoop = "3.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - ssh_keys = [var.ssh_key] - } - - worker_node { - vm_size = "Standard_D4_v2" - username = "acctestusrvm" - ssh_keys = [var.ssh_key] - target_instance_count = 3 - } - - zookeeper_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - ssh_keys = [var.ssh_key] - } - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightHadoopCluster_updated(data acceptance.TestData) string { - template := testAccAzureRMHDInsightHadoopCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_hadoop_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - hadoop = "3.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_D4_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 5 - } - - zookeeper_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - } - - tags = { - Hello = "World" - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightHadoopCluster_virtualNetwork(data acceptance.TestData) string { - template := testAccAzureRMHDInsightHadoopCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_virtual_network" "test" { - name = "acctestvirtnet%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctestsubnet%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.2.0/24" -} - -resource "azurerm_hdinsight_hadoop_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - hadoop = "3.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" 
- } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - worker_node { - vm_size = "Standard_D4_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 3 - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - zookeeper_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - } -} -`, template, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMHDInsightHadoopCluster_complete(data acceptance.TestData) string { - template := testAccAzureRMHDInsightHadoopCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_virtual_network" "test" { - name = "acctestvirtnet%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctestsubnet%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.2.0/24" -} - -resource "azurerm_hdinsight_hadoop_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - hadoop = "3.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - worker_node { - vm_size = "Standard_D4_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 3 - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - zookeeper_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - } - - tags = { - Hello = "World" - } -} -`, template, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMHDInsightHadoopCluster_edgeNodeBasic(data acceptance.TestData, numEdgeNodes int, instanceType string) string { - template := testAccAzureRMHDInsightHadoopCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_hadoop_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - hadoop = "3.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" 
- } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_D4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 2 - } - - zookeeper_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - edge_node { - target_instance_count = %d - vm_size = "%s" - install_script_action { - name = "script1" - uri = "https://raw.githubusercontent.com/Azure/azure-quickstart-templates/master/101-hdinsight-linux-with-edge-node/scripts/EmptyNodeSetup.sh" - } - } - } -} -`, template, data.RandomInteger, numEdgeNodes, instanceType) -} - -func testAccAzureRMHDInsightHadoopCluster_gen2storage(data acceptance.TestData) string { - template := testAccAzureRMHDInsightHadoopCluster_gen2template(data) - return fmt.Sprintf(` -%s -resource "azurerm_hdinsight_hadoop_cluster" "test" { - depends_on = [azurerm_role_assignment.test] - - name = "acctesthdi-%d" - resource_group_name = "${azurerm_resource_group.test.name}" - location = "${azurerm_resource_group.test.location}" - cluster_version = "4.0" - tier = "Standard" - component_version { - hadoop = "3.1" - } - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - storage_account_gen2 { - storage_resource_id = azurerm_storage_account.gen2test.id - filesystem_id = azurerm_storage_data_lake_gen2_filesystem.gen2test.id - managed_identity_resource_id = azurerm_user_assigned_identity.test.id - is_default = true - } - roles { - head_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - worker_node { - vm_size = "Standard_D4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 2 - } - zookeeper_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightHadoopCluster_gen2AndBlobStorage(data acceptance.TestData) string { - template := testAccAzureRMHDInsightHadoopCluster_gen2template(data) - - return fmt.Sprintf(` -%s -resource "azurerm_storage_account" "test" { - name = "acctestsa%s" - resource_group_name = "${azurerm_resource_group.test.name}" - location = "${azurerm_resource_group.test.location}" - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_storage_container" "test" { - name = "acctest" - storage_account_name = "${azurerm_storage_account.test.name}" - container_access_type = "private" -} - -resource "azurerm_hdinsight_hadoop_cluster" "test" { - depends_on = [azurerm_role_assignment.test] - - name = "acctesthdi-%d" - resource_group_name = "${azurerm_resource_group.test.name}" - location = "${azurerm_resource_group.test.location}" - cluster_version = "4.0" - tier = "Standard" - component_version { - hadoop = "3.1" - } - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" 
- } - storage_account_gen2 { - storage_resource_id = azurerm_storage_account.gen2test.id - filesystem_id = azurerm_storage_data_lake_gen2_filesystem.gen2test.id - managed_identity_resource_id = azurerm_user_assigned_identity.test.id - is_default = true - } - storage_account { - storage_container_id = "${azurerm_storage_container.test.id}" - storage_account_key = "${azurerm_storage_account.test.primary_access_key}" - is_default = false - } - roles { - head_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - worker_node { - vm_size = "Standard_D4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 2 - } - zookeeper_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - } -} -`, template, data.RandomString, data.RandomInteger) -} - -func testAccAzureRMHDInsightHadoopCluster_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_storage_account" "test" { - name = "acctestsa%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_storage_container" "test" { - name = "acctest" - storage_account_name = azurerm_storage_account.test.name - container_access_type = "private" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func testAccAzureRMHDInsightHadoopCluster_gen2template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_storage_account" "gen2test" { - name = "accgen2test%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_kind = "StorageV2" - account_tier = "Standard" - account_replication_type = "LRS" - is_hns_enabled = true -} - -resource "azurerm_storage_data_lake_gen2_filesystem" "gen2test" { - name = "acctest" - storage_account_id = azurerm_storage_account.gen2test.id -} - -resource "azurerm_user_assigned_identity" "test" { - resource_group_name = "${azurerm_resource_group.test.name}" - location = "${azurerm_resource_group.test.location}" - - name = "test-identity" -} - -data "azurerm_subscription" "primary" {} - - -resource "azurerm_role_assignment" "test" { - scope = "${data.azurerm_subscription.primary.id}" - role_definition_name = "Storage Blob Data Owner" - principal_id = "${azurerm_user_assigned_identity.test.principal_id}" -} - -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func testAccAzureRMHDInsightHadoopCluster_tls(data acceptance.TestData) string { - template := testAccAzureRMHDInsightHadoopCluster_template(data) - return fmt.Sprintf(` -%s -resource "azurerm_hdinsight_hadoop_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - tls_min_version = "1.2" - component_version { - hadoop = "3.1" - } - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" 
- } - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - roles { - head_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - worker_node { - vm_size = "Standard_D4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 2 - } - zookeeper_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightHadoopCluster_allMetastores(data acceptance.TestData) string { - template := testAccAzureRMHDInsightHadoopCluster_template(data) - return fmt.Sprintf(` -%s -resource "azurerm_sql_server" "test" { - name = "acctestsql-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - administrator_login = "sql_admin" - administrator_login_password = "TerrAform123!" - version = "12.0" -} -resource "azurerm_sql_database" "hive" { - name = "hive" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - collation = "SQL_Latin1_General_CP1_CI_AS" - create_mode = "Default" - requested_service_objective_name = "GP_Gen5_2" -} -resource "azurerm_sql_database" "oozie" { - name = "oozie" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - collation = "SQL_Latin1_General_CP1_CI_AS" - create_mode = "Default" - requested_service_objective_name = "GP_Gen5_2" -} -resource "azurerm_sql_database" "ambari" { - name = "ambari" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - collation = "SQL_Latin1_General_CP1_CI_AS" - create_mode = "Default" - requested_service_objective_name = "GP_Gen5_2" -} -resource "azurerm_sql_firewall_rule" "AzureServices" { - name = "allow-azure-services" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_sql_server.test.name - start_ip_address = "0.0.0.0" - end_ip_address = "0.0.0.0" -} -resource "azurerm_hdinsight_hadoop_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - component_version { - hadoop = "3.1" - } - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - roles { - head_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - worker_node { - vm_size = "Standard_D4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 2 - } - zookeeper_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- } - } - metastores { - hive { - server = azurerm_sql_server.test.fully_qualified_domain_name - database_name = azurerm_sql_database.hive.name - username = azurerm_sql_server.test.administrator_login - password = azurerm_sql_server.test.administrator_login_password - } - oozie { - server = azurerm_sql_server.test.fully_qualified_domain_name - database_name = azurerm_sql_database.oozie.name - username = azurerm_sql_server.test.administrator_login - password = azurerm_sql_server.test.administrator_login_password - } - ambari { - server = azurerm_sql_server.test.fully_qualified_domain_name - database_name = azurerm_sql_database.ambari.name - username = azurerm_sql_server.test.administrator_login - password = azurerm_sql_server.test.administrator_login_password - } - } -} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMHDInsightHadoopCluster_hiveMetastore(data acceptance.TestData) string { - template := testAccAzureRMHDInsightHadoopCluster_template(data) - return fmt.Sprintf(` -%s -resource "azurerm_sql_server" "test" { - name = "acctestsql-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - administrator_login = "sql_admin" - administrator_login_password = "TerrAform123!" - version = "12.0" -} -resource "azurerm_sql_database" "hive" { - name = "hive" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - collation = "SQL_Latin1_General_CP1_CI_AS" - create_mode = "Default" - requested_service_objective_name = "GP_Gen5_2" -} -resource "azurerm_sql_firewall_rule" "AzureServices" { - name = "allow-azure-services" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_sql_server.test.name - start_ip_address = "0.0.0.0" - end_ip_address = "0.0.0.0" -} -resource "azurerm_hdinsight_hadoop_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - component_version { - hadoop = "3.1" - } - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - roles { - head_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - worker_node { - vm_size = "Standard_D4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 2 - } - zookeeper_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- } - } - metastores { - hive { - server = azurerm_sql_server.test.fully_qualified_domain_name - database_name = azurerm_sql_database.hive.name - username = azurerm_sql_server.test.administrator_login - password = azurerm_sql_server.test.administrator_login_password - } - } -} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMHDInsightHadoopCluster_monitor(data acceptance.TestData) string { - template := testAccAzureRMHDInsightHadoopCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_log_analytics_workspace" "test" { - name = "acctestLAW-%s-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "PerGB2018" -} - -resource "azurerm_hdinsight_hadoop_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - hadoop = "3.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_D4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 2 - } - - zookeeper_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - } - - monitor { - log_analytics_workspace_id = azurerm_log_analytics_workspace.test.workspace_id - primary_key = azurerm_log_analytics_workspace.test.primary_shared_key - } -} -`, template, data.RandomString, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMHDInsightHadoopCluster_updateGateway(data acceptance.TestData) string { - template := testAccAzureRMHDInsightHadoopCluster_template(data) - return fmt.Sprintf(` -%s -resource "azurerm_hdinsight_hadoop_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - component_version { - hadoop = "3.1" - } - gateway { - username = "acctestusrgw" - password = "TerrAformne3!" - } - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - roles { - head_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - worker_node { - vm_size = "Standard_D4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 2 - } - zookeeper_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- } - } -} -`, template, data.RandomInteger) -} diff --git a/azurerm/internal/services/hdinsight/tests/hdinsight_hbase_cluster_resource_test.go b/azurerm/internal/services/hdinsight/tests/hdinsight_hbase_cluster_resource_test.go deleted file mode 100644 index 98b67907ef30..000000000000 --- a/azurerm/internal/services/hdinsight/tests/hdinsight_hbase_cluster_resource_test.go +++ /dev/null @@ -1,1239 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccAzureRMHDInsightHBaseCluster_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hbase_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightHBaseCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightHBaseCluster_gen2basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hbase_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightHBaseCluster_gen2basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightHBaseCluster_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hbase_cluster", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightHBaseCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMHDInsightHBaseCluster_requiresImport), - }, - }) -} - -func TestAccAzureRMHDInsightHBaseCluster_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hbase_cluster", "test") - 
resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightHBaseCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - { - Config: testAccAzureRMHDInsightHBaseCluster_updated(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightHBaseCluster_sshKeys(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hbase_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightHBaseCluster_sshKeys(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("storage_account", - "roles.0.head_node.0.ssh_keys", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.ssh_keys", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.ssh_keys", - "roles.0.zookeeper_node.0.vm_size"), - }, - }) -} - -func TestAccAzureRMHDInsightHBaseCluster_virtualNetwork(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hbase_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightHBaseCluster_virtualNetwork(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightHBaseCluster_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hbase_cluster", "test") - resource.ParallelTest(t, 
resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightHBaseCluster_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightHBaseCluster_tls(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hbase_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightHBaseCluster_tls(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightHBaseCluster_allMetastores(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hbase_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightHBaseCluster_allMetastores(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account", - "metastores.0.hive.0.password", - "metastores.0.oozie.0.password", - "metastores.0.ambari.0.password"), - }, - }) -} - -func TestAccAzureRMHDInsightHBaseCluster_hiveMetastore(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hbase_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightHBaseCluster_hiveMetastore(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - 
resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - }, - }) -} - -func TestAccAzureRMHDInsightHBaseCluster_updateMetastore(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hbase_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightHBaseCluster_hiveMetastore(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account", - "metastores.0.hive.0.password", - "metastores.0.oozie.0.password", - "metastores.0.ambari.0.password"), - { - Config: testAccAzureRMHDInsightHBaseCluster_allMetastores(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account", - "metastores.0.hive.0.password", - "metastores.0.oozie.0.password", - "metastores.0.ambari.0.password"), - }, - }) -} - -func TestAccAzureRMHDInsightHBaseCluster_monitor(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hbase_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightHBaseCluster_monitor(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightHBaseCluster_updateMonitor(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_hbase_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - // No monitor - { - Config: testAccAzureRMHDInsightHBaseCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - 
resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - // Add monitor - { - Config: testAccAzureRMHDInsightHBaseCluster_monitor(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - // Change Log Analytics Workspace for the monitor - { - PreConfig: func() { - data.RandomString += "new" - }, - Config: testAccAzureRMHDInsightHBaseCluster_monitor(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - // Remove monitor - { - Config: testAccAzureRMHDInsightHBaseCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func testAccAzureRMHDInsightHBaseCluster_basic(data acceptance.TestData) string { - template := testAccAzureRMHDInsightHBaseCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_hbase_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - hbase = "2.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 2 - } - - zookeeper_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- } - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightHBaseCluster_gen2basic(data acceptance.TestData) string { - template := testAccAzureRMHDInsightHBaseCluster_gen2template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_hbase_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = "${azurerm_resource_group.test.name}" - location = "${azurerm_resource_group.test.location}" - cluster_version = "4.0" - tier = "Standard" - - component_version { - hbase = "2.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account_gen2 { - storage_resource_id = azurerm_storage_account.gen2test.id - filesystem_id = azurerm_storage_data_lake_gen2_filesystem.gen2test.id - managed_identity_resource_id = azurerm_user_assigned_identity.test.id - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 2 - } - - zookeeper_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightHBaseCluster_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMHDInsightHBaseCluster_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_hbase_cluster" "import" { - name = azurerm_hdinsight_hbase_cluster.test.name - resource_group_name = azurerm_hdinsight_hbase_cluster.test.resource_group_name - location = azurerm_hdinsight_hbase_cluster.test.location - cluster_version = azurerm_hdinsight_hbase_cluster.test.cluster_version - tier = azurerm_hdinsight_hbase_cluster.test.tier - dynamic "component_version" { - for_each = azurerm_hdinsight_hbase_cluster.test.component_version - content { - hbase = component_version.value.hbase - } - } - dynamic "gateway" { - for_each = azurerm_hdinsight_hbase_cluster.test.gateway - content { - enabled = gateway.value.enabled - password = gateway.value.password - username = gateway.value.username - } - } - dynamic "storage_account" { - for_each = azurerm_hdinsight_hbase_cluster.test.storage_account - content { - is_default = storage_account.value.is_default - storage_account_key = storage_account.value.storage_account_key - storage_container_id = storage_account.value.storage_container_id - } - } - dynamic "roles" { - for_each = azurerm_hdinsight_hbase_cluster.test.roles - content { - dynamic "head_node" { - for_each = lookup(roles.value, "head_node", []) - content { - password = lookup(head_node.value, "password", null) - subnet_id = lookup(head_node.value, "subnet_id", null) - username = head_node.value.username - virtual_network_id = lookup(head_node.value, "virtual_network_id", null) - vm_size = head_node.value.vm_size - } - } - - dynamic "worker_node" { - for_each = lookup(roles.value, "worker_node", []) - content { - password = lookup(worker_node.value, "password", null) - subnet_id = lookup(worker_node.value, "subnet_id", null) - target_instance_count = worker_node.value.target_instance_count - username = worker_node.value.username - virtual_network_id = lookup(worker_node.value, "virtual_network_id", null) - vm_size = worker_node.value.vm_size - } - } - - dynamic "zookeeper_node" { - for_each = lookup(roles.value, "zookeeper_node", []) - content { - password = lookup(zookeeper_node.value, "password", null) - 
subnet_id = lookup(zookeeper_node.value, "subnet_id", null) - username = zookeeper_node.value.username - virtual_network_id = lookup(zookeeper_node.value, "virtual_network_id", null) - vm_size = zookeeper_node.value.vm_size - } - } - } - } -} -`, template) -} - -func testAccAzureRMHDInsightHBaseCluster_sshKeys(data acceptance.TestData) string { - template := testAccAzureRMHDInsightHBaseCluster_template(data) - return fmt.Sprintf(` -%s - -variable "ssh_key" { - default = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCqaZoyiz1qbdOQ8xEf6uEu1cCwYowo5FHtsBhqLoDnnp7KUTEBN+L2NxRIfQ781rxV6Iq5jSav6b2Q8z5KiseOlvKA/RF2wqU0UPYqQviQhLmW6THTpmrv/YkUCuzxDpsH7DUDhZcwySLKVVe0Qm3+5N2Ta6UYH3lsDf9R9wTP2K/+vAnflKebuypNlmocIvakFWoZda18FOmsOoIVXQ8HWFNCuw9ZCunMSN62QGamCe3dL5cXlkgHYv7ekJE15IA9aOJcM7e90oeTqo+7HTcWfdu0qQqPWY5ujyMw/llas8tsXY85LFqRnr3gJ02bAscjc477+X+j/gkpFoN1QEmt terraform@demo.tld" -} - -resource "azurerm_hdinsight_hbase_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - hbase = "2.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - ssh_keys = [var.ssh_key] - } - - worker_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - ssh_keys = [var.ssh_key] - target_instance_count = 3 - } - - zookeeper_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - ssh_keys = [var.ssh_key] - } - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightHBaseCluster_updated(data acceptance.TestData) string { - template := testAccAzureRMHDInsightHBaseCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_hbase_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - hbase = "2.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 5 - } - - zookeeper_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- } - } - - tags = { - Hello = "World" - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightHBaseCluster_virtualNetwork(data acceptance.TestData) string { - template := testAccAzureRMHDInsightHBaseCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_virtual_network" "test" { - name = "acctestvirtnet%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctestsubnet%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.2.0/24" -} - -resource "azurerm_hdinsight_hbase_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - hbase = "2.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - worker_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 3 - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - zookeeper_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - } -} -`, template, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMHDInsightHBaseCluster_complete(data acceptance.TestData) string { - template := testAccAzureRMHDInsightHBaseCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_virtual_network" "test" { - name = "acctestvirtnet%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctestsubnet%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.2.0/24" -} - -resource "azurerm_hdinsight_hbase_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - hbase = "2.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - worker_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- target_instance_count = 3 - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - zookeeper_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - } - - tags = { - Hello = "World" - } -} -`, template, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMHDInsightHBaseCluster_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_storage_account" "test" { - name = "acctestsa%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_storage_container" "test" { - name = "acctest" - storage_account_name = azurerm_storage_account.test.name - container_access_type = "private" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func testAccAzureRMHDInsightHBaseCluster_gen2template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_storage_account" "gen2test" { - depends_on = [azurerm_role_assignment.test] - - name = "accgen2test%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_kind = "StorageV2" - account_tier = "Standard" - account_replication_type = "LRS" - is_hns_enabled = true -} - -resource "azurerm_storage_data_lake_gen2_filesystem" "gen2test" { - name = "acctest" - storage_account_id = azurerm_storage_account.gen2test.id -} - -resource "azurerm_user_assigned_identity" "test" { - resource_group_name = "${azurerm_resource_group.test.name}" - location = "${azurerm_resource_group.test.location}" - - name = "test-identity" -} - -data "azurerm_subscription" "primary" {} - -resource "azurerm_role_assignment" "test" { - scope = "${data.azurerm_subscription.primary.id}" - role_definition_name = "Storage Blob Data Owner" - principal_id = "${azurerm_user_assigned_identity.test.principal_id}" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func testAccAzureRMHDInsightHBaseCluster_tls(data acceptance.TestData) string { - template := testAccAzureRMHDInsightHBaseCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_hbase_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - tls_min_version = "1.2" - - component_version { - hbase = "2.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- target_instance_count = 2 - } - - zookeeper_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightHBaseCluster_allMetastores(data acceptance.TestData) string { - template := testAccAzureRMHDInsightHBaseCluster_template(data) - return fmt.Sprintf(` -%s -resource "azurerm_sql_server" "test" { - name = "acctestsql-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - administrator_login = "sql_admin" - administrator_login_password = "TerrAform123!" - version = "12.0" -} -resource "azurerm_sql_database" "hive" { - name = "hive" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - collation = "SQL_Latin1_General_CP1_CI_AS" - create_mode = "Default" - requested_service_objective_name = "GP_Gen5_2" -} -resource "azurerm_sql_database" "oozie" { - name = "oozie" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - collation = "SQL_Latin1_General_CP1_CI_AS" - create_mode = "Default" - requested_service_objective_name = "GP_Gen5_2" -} -resource "azurerm_sql_database" "ambari" { - name = "ambari" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - collation = "SQL_Latin1_General_CP1_CI_AS" - create_mode = "Default" - requested_service_objective_name = "GP_Gen5_2" -} -resource "azurerm_sql_firewall_rule" "AzureServices" { - name = "allow-azure-services" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_sql_server.test.name - start_ip_address = "0.0.0.0" - end_ip_address = "0.0.0.0" -} -resource "azurerm_hdinsight_hbase_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - component_version { - hbase = "2.1" - } - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - roles { - head_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - worker_node { - vm_size = "Standard_D4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 2 - } - zookeeper_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- } - } - metastores { - hive { - server = azurerm_sql_server.test.fully_qualified_domain_name - database_name = azurerm_sql_database.hive.name - username = azurerm_sql_server.test.administrator_login - password = azurerm_sql_server.test.administrator_login_password - } - oozie { - server = azurerm_sql_server.test.fully_qualified_domain_name - database_name = azurerm_sql_database.oozie.name - username = azurerm_sql_server.test.administrator_login - password = azurerm_sql_server.test.administrator_login_password - } - ambari { - server = azurerm_sql_server.test.fully_qualified_domain_name - database_name = azurerm_sql_database.ambari.name - username = azurerm_sql_server.test.administrator_login - password = azurerm_sql_server.test.administrator_login_password - } - } -} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMHDInsightHBaseCluster_hiveMetastore(data acceptance.TestData) string { - template := testAccAzureRMHDInsightHBaseCluster_template(data) - return fmt.Sprintf(` -%s -resource "azurerm_sql_server" "test" { - name = "acctestsql-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - administrator_login = "sql_admin" - administrator_login_password = "TerrAform123!" - version = "12.0" -} -resource "azurerm_sql_database" "hive" { - name = "hive" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - collation = "SQL_Latin1_General_CP1_CI_AS" - create_mode = "Default" - requested_service_objective_name = "GP_Gen5_2" -} -resource "azurerm_sql_firewall_rule" "AzureServices" { - name = "allow-azure-services" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_sql_server.test.name - start_ip_address = "0.0.0.0" - end_ip_address = "0.0.0.0" -} -resource "azurerm_hdinsight_hbase_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - component_version { - hbase = "2.1" - } - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - roles { - head_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - worker_node { - vm_size = "Standard_D4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 2 - } - zookeeper_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- } - } - metastores { - hive { - server = azurerm_sql_server.test.fully_qualified_domain_name - database_name = azurerm_sql_database.hive.name - username = azurerm_sql_server.test.administrator_login - password = azurerm_sql_server.test.administrator_login_password - } - } -} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMHDInsightHBaseCluster_monitor(data acceptance.TestData) string { - template := testAccAzureRMHDInsightHBaseCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_log_analytics_workspace" "test" { - name = "acctestLAW-%s-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "PerGB2018" -} - -resource "azurerm_hdinsight_hbase_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - hbase = "2.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 2 - } - - zookeeper_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - } - - monitor { - log_analytics_workspace_id = azurerm_log_analytics_workspace.test.workspace_id - primary_key = azurerm_log_analytics_workspace.test.primary_shared_key - } -} -`, template, data.RandomString, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/hdinsight/tests/hdinsight_interactive_query_cluster_resource_test.go b/azurerm/internal/services/hdinsight/tests/hdinsight_interactive_query_cluster_resource_test.go deleted file mode 100644 index d7eab06cccc4..000000000000 --- a/azurerm/internal/services/hdinsight/tests/hdinsight_interactive_query_cluster_resource_test.go +++ /dev/null @@ -1,1239 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccAzureRMHDInsightInteractiveQueryCluster_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_interactive_query_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightInteractiveQueryCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func 
TestAccAzureRMHDInsightInteractiveQueryCluster_gen2basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_interactive_query_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightInteractiveQueryCluster_gen2basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightInteractiveQueryCluster_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_interactive_query_cluster", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightInteractiveQueryCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMHDInsightInteractiveQueryCluster_requiresImport), - }, - }) -} - -func TestAccAzureRMHDInsightInteractiveQueryCluster_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_interactive_query_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightInteractiveQueryCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - { - Config: testAccAzureRMHDInsightInteractiveQueryCluster_updated(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightInteractiveQueryCluster_sshKeys(t *testing.T) { - 
data := acceptance.BuildTestData(t, "azurerm_hdinsight_interactive_query_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightInteractiveQueryCluster_sshKeys(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("storage_account", - "roles.0.head_node.0.ssh_keys", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.ssh_keys", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.ssh_keys", - "roles.0.zookeeper_node.0.vm_size"), - }, - }) -} - -func TestAccAzureRMHDInsightInteractiveQueryCluster_virtualNetwork(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_interactive_query_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightInteractiveQueryCluster_virtualNetwork(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightInteractiveQueryCluster_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_interactive_query_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightInteractiveQueryCluster_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightInteractiveQueryCluster_tls(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_interactive_query_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightInteractiveQueryCluster_tls(data), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightInteractiveQueryCluster_allMetastores(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_interactive_query_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightInteractiveQueryCluster_allMetastores(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account", - "metastores.0.hive.0.password", - "metastores.0.oozie.0.password", - "metastores.0.ambari.0.password"), - }, - }) -} - -func TestAccAzureRMHDInsightInteractiveQueryCluster_hiveMetastore(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_interactive_query_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightInteractiveQueryCluster_hiveMetastore(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - }, - }) -} - -func TestAccAzureRMHDInsightInteractiveQueryCluster_updateMetastore(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_interactive_query_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightInteractiveQueryCluster_hiveMetastore(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account", - "metastores.0.hive.0.password", - "metastores.0.oozie.0.password", - "metastores.0.ambari.0.password"), - { - Config: 
testAccAzureRMHDInsightInteractiveQueryCluster_allMetastores(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account", - "metastores.0.hive.0.password", - "metastores.0.oozie.0.password", - "metastores.0.ambari.0.password"), - }, - }) -} - -func TestAccAzureRMHDInsightInteractiveQueryCluster_monitor(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_interactive_query_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightInteractiveQueryCluster_monitor(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightInteractiveQueryCluster_updateMonitor(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_interactive_query_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - // No monitor - { - Config: testAccAzureRMHDInsightInteractiveQueryCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - // Add monitor - { - Config: testAccAzureRMHDInsightInteractiveQueryCluster_monitor(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - // Change Log Analytics Workspace for the monitor - { - PreConfig: func() { - data.RandomString += "new" - }, - Config: testAccAzureRMHDInsightInteractiveQueryCluster_monitor(data), - Check: 
resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - // Remove monitor - { - Config: testAccAzureRMHDInsightInteractiveQueryCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func testAccAzureRMHDInsightInteractiveQueryCluster_basic(data acceptance.TestData) string { - template := testAccAzureRMHDInsightInteractiveQueryCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_interactive_query_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - interactive_hive = "3.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D13_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_D14_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 2 - } - - zookeeper_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightInteractiveQueryCluster_gen2basic(data acceptance.TestData) string { - template := testAccAzureRMHDInsightInteractiveQueryCluster_gen2template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_interactive_query_cluster" "test" { - depends_on = [azurerm_role_assignment.test] - - name = "acctesthdi-%d" - resource_group_name = "${azurerm_resource_group.test.name}" - location = "${azurerm_resource_group.test.location}" - cluster_version = "4.0" - tier = "Standard" - - component_version { - interactive_hive = "3.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account_gen2 { - storage_resource_id = azurerm_storage_account.gen2test.id - filesystem_id = azurerm_storage_data_lake_gen2_filesystem.gen2test.id - managed_identity_resource_id = azurerm_user_assigned_identity.test.id - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D13_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_D14_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- target_instance_count = 2 - } - - zookeeper_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightInteractiveQueryCluster_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMHDInsightInteractiveQueryCluster_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_interactive_query_cluster" "import" { - name = azurerm_hdinsight_interactive_query_cluster.test.name - resource_group_name = azurerm_hdinsight_interactive_query_cluster.test.resource_group_name - location = azurerm_hdinsight_interactive_query_cluster.test.location - cluster_version = azurerm_hdinsight_interactive_query_cluster.test.cluster_version - tier = azurerm_hdinsight_interactive_query_cluster.test.tier - dynamic "component_version" { - for_each = azurerm_hdinsight_interactive_query_cluster.test.component_version - content { - interactive_hive = component_version.value.interactive_hive - } - } - dynamic "gateway" { - for_each = azurerm_hdinsight_interactive_query_cluster.test.gateway - content { - enabled = gateway.value.enabled - password = gateway.value.password - username = gateway.value.username - } - } - dynamic "storage_account" { - for_each = azurerm_hdinsight_interactive_query_cluster.test.storage_account - content { - is_default = storage_account.value.is_default - storage_account_key = storage_account.value.storage_account_key - storage_container_id = storage_account.value.storage_container_id - } - } - dynamic "roles" { - for_each = azurerm_hdinsight_interactive_query_cluster.test.roles - content { - dynamic "head_node" { - for_each = lookup(roles.value, "head_node", []) - content { - password = lookup(head_node.value, "password", null) - subnet_id = lookup(head_node.value, "subnet_id", null) - username = head_node.value.username - virtual_network_id = lookup(head_node.value, "virtual_network_id", null) - vm_size = head_node.value.vm_size - } - } - - dynamic "worker_node" { - for_each = lookup(roles.value, "worker_node", []) - content { - password = lookup(worker_node.value, "password", null) - subnet_id = lookup(worker_node.value, "subnet_id", null) - target_instance_count = worker_node.value.target_instance_count - username = worker_node.value.username - virtual_network_id = lookup(worker_node.value, "virtual_network_id", null) - vm_size = worker_node.value.vm_size - } - } - - dynamic "zookeeper_node" { - for_each = lookup(roles.value, "zookeeper_node", []) - content { - password = lookup(zookeeper_node.value, "password", null) - subnet_id = lookup(zookeeper_node.value, "subnet_id", null) - username = zookeeper_node.value.username - virtual_network_id = lookup(zookeeper_node.value, "virtual_network_id", null) - vm_size = zookeeper_node.value.vm_size - } - } - } - } -} -`, template) -} - -func testAccAzureRMHDInsightInteractiveQueryCluster_sshKeys(data acceptance.TestData) string { - template := testAccAzureRMHDInsightInteractiveQueryCluster_template(data) - return fmt.Sprintf(` -%s - -variable "ssh_key" { - default = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCqaZoyiz1qbdOQ8xEf6uEu1cCwYowo5FHtsBhqLoDnnp7KUTEBN+L2NxRIfQ781rxV6Iq5jSav6b2Q8z5KiseOlvKA/RF2wqU0UPYqQviQhLmW6THTpmrv/YkUCuzxDpsH7DUDhZcwySLKVVe0Qm3+5N2Ta6UYH3lsDf9R9wTP2K/+vAnflKebuypNlmocIvakFWoZda18FOmsOoIVXQ8HWFNCuw9ZCunMSN62QGamCe3dL5cXlkgHYv7ekJE15IA9aOJcM7e90oeTqo+7HTcWfdu0qQqPWY5ujyMw/llas8tsXY85LFqRnr3gJ02bAscjc477+X+j/gkpFoN1QEmt terraform@demo.tld" -} - -resource 
"azurerm_hdinsight_interactive_query_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - interactive_hive = "3.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D13_V2" - username = "acctestusrvm" - ssh_keys = [var.ssh_key] - } - - worker_node { - vm_size = "Standard_D14_V2" - username = "acctestusrvm" - ssh_keys = [var.ssh_key] - target_instance_count = 3 - } - - zookeeper_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - ssh_keys = [var.ssh_key] - } - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightInteractiveQueryCluster_updated(data acceptance.TestData) string { - template := testAccAzureRMHDInsightInteractiveQueryCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_interactive_query_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - interactive_hive = "3.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D13_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_D14_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 5 - } - - zookeeper_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - } - - tags = { - Hello = "World" - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightInteractiveQueryCluster_virtualNetwork(data acceptance.TestData) string { - template := testAccAzureRMHDInsightInteractiveQueryCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_virtual_network" "test" { - name = "acctestvirtnet%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctestsubnet%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.2.0/24" -} - -resource "azurerm_hdinsight_interactive_query_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - interactive_hive = "3.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D13_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - worker_node { - vm_size = "Standard_D14_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 3 - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - zookeeper_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - } -} -`, template, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMHDInsightInteractiveQueryCluster_complete(data acceptance.TestData) string { - template := testAccAzureRMHDInsightInteractiveQueryCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_virtual_network" "test" { - name = "acctestvirtnet%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctestsubnet%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.2.0/24" -} - -resource "azurerm_hdinsight_interactive_query_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - interactive_hive = "3.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D13_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - worker_node { - vm_size = "Standard_D14_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 3 - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - zookeeper_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - } - - tags = { - Hello = "World" - } -} -`, template, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMHDInsightInteractiveQueryCluster_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_storage_account" "test" { - name = "acctestsa%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_storage_container" "test" { - name = "acctest" - storage_account_name = azurerm_storage_account.test.name - container_access_type = "private" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func testAccAzureRMHDInsightInteractiveQueryCluster_gen2template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_storage_account" "gen2test" { - name = "accgen2test%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_kind = "StorageV2" - account_tier = "Standard" - account_replication_type = "LRS" - is_hns_enabled = true -} - -resource "azurerm_storage_data_lake_gen2_filesystem" "gen2test" { - name = "acctest" - storage_account_id = azurerm_storage_account.gen2test.id -} - -resource "azurerm_user_assigned_identity" "test" { - resource_group_name = "${azurerm_resource_group.test.name}" - location = "${azurerm_resource_group.test.location}" - - name = "test-identity" -} - -data "azurerm_subscription" "primary" {} - -resource "azurerm_role_assignment" "test" { - scope = "${data.azurerm_subscription.primary.id}" - role_definition_name = "Storage Blob Data Owner" - principal_id = "${azurerm_user_assigned_identity.test.principal_id}" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func testAccAzureRMHDInsightInteractiveQueryCluster_tls(data acceptance.TestData) string { - template := testAccAzureRMHDInsightInteractiveQueryCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_interactive_query_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - tls_min_version = "1.2" - - component_version { - interactive_hive = "3.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D13_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_D14_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 2 - } - - zookeeper_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- } - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightInteractiveQueryCluster_allMetastores(data acceptance.TestData) string { - template := testAccAzureRMHDInsightInteractiveQueryCluster_template(data) - return fmt.Sprintf(` -%s -resource "azurerm_sql_server" "test" { - name = "acctestsql-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - administrator_login = "sql_admin" - administrator_login_password = "TerrAform123!" - version = "12.0" -} -resource "azurerm_sql_database" "hive" { - name = "hive" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - collation = "SQL_Latin1_General_CP1_CI_AS" - create_mode = "Default" - requested_service_objective_name = "GP_Gen5_2" -} -resource "azurerm_sql_database" "oozie" { - name = "oozie" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - collation = "SQL_Latin1_General_CP1_CI_AS" - create_mode = "Default" - requested_service_objective_name = "GP_Gen5_2" -} -resource "azurerm_sql_database" "ambari" { - name = "ambari" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - collation = "SQL_Latin1_General_CP1_CI_AS" - create_mode = "Default" - requested_service_objective_name = "GP_Gen5_2" -} -resource "azurerm_sql_firewall_rule" "AzureServices" { - name = "allow-azure-services" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_sql_server.test.name - start_ip_address = "0.0.0.0" - end_ip_address = "0.0.0.0" -} -resource "azurerm_hdinsight_interactive_query_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - component_version { - interactive_hive = "3.1" - } - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - roles { - head_node { - vm_size = "Standard_D13_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - worker_node { - vm_size = "Standard_D13_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 2 - } - zookeeper_node { - vm_size = "Standard_D13_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- } - } - metastores { - hive { - server = azurerm_sql_server.test.fully_qualified_domain_name - database_name = azurerm_sql_database.hive.name - username = azurerm_sql_server.test.administrator_login - password = azurerm_sql_server.test.administrator_login_password - } - oozie { - server = azurerm_sql_server.test.fully_qualified_domain_name - database_name = azurerm_sql_database.oozie.name - username = azurerm_sql_server.test.administrator_login - password = azurerm_sql_server.test.administrator_login_password - } - ambari { - server = azurerm_sql_server.test.fully_qualified_domain_name - database_name = azurerm_sql_database.ambari.name - username = azurerm_sql_server.test.administrator_login - password = azurerm_sql_server.test.administrator_login_password - } - } -} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMHDInsightInteractiveQueryCluster_hiveMetastore(data acceptance.TestData) string { - template := testAccAzureRMHDInsightInteractiveQueryCluster_template(data) - return fmt.Sprintf(` -%s -resource "azurerm_sql_server" "test" { - name = "acctestsql-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - administrator_login = "sql_admin" - administrator_login_password = "TerrAform123!" - version = "12.0" -} -resource "azurerm_sql_database" "hive" { - name = "hive" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - collation = "SQL_Latin1_General_CP1_CI_AS" - create_mode = "Default" - requested_service_objective_name = "GP_Gen5_2" -} -resource "azurerm_sql_firewall_rule" "AzureServices" { - name = "allow-azure-services" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_sql_server.test.name - start_ip_address = "0.0.0.0" - end_ip_address = "0.0.0.0" -} -resource "azurerm_hdinsight_interactive_query_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - component_version { - interactive_hive = "3.1" - } - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - roles { - head_node { - vm_size = "Standard_D13_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - worker_node { - vm_size = "Standard_D13_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 2 - } - zookeeper_node { - vm_size = "Standard_D13_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- } - } - metastores { - hive { - server = azurerm_sql_server.test.fully_qualified_domain_name - database_name = azurerm_sql_database.hive.name - username = azurerm_sql_server.test.administrator_login - password = azurerm_sql_server.test.administrator_login_password - } - } -} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMHDInsightInteractiveQueryCluster_monitor(data acceptance.TestData) string { - template := testAccAzureRMHDInsightInteractiveQueryCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_log_analytics_workspace" "test" { - name = "acctestLAW-%s-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "PerGB2018" -} - -resource "azurerm_hdinsight_interactive_query_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - interactive_hive = "3.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D13_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_D14_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 2 - } - - zookeeper_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - } - - monitor { - log_analytics_workspace_id = azurerm_log_analytics_workspace.test.workspace_id - primary_key = azurerm_log_analytics_workspace.test.primary_shared_key - } -} -`, template, data.RandomString, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/hdinsight/tests/hdinsight_kafka_cluster_resource_test.go b/azurerm/internal/services/hdinsight/tests/hdinsight_kafka_cluster_resource_test.go deleted file mode 100644 index 25ad89b0bdde..000000000000 --- a/azurerm/internal/services/hdinsight/tests/hdinsight_kafka_cluster_resource_test.go +++ /dev/null @@ -1,1253 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccAzureRMHDInsightKafkaCluster_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_kafka_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightKafkaCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightKafkaCluster_gen2storage(t 
*testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_kafka_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightKafkaCluster_gen2storage(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightKafkaCluster_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_kafka_cluster", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightKafkaCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - { - Config: testAccAzureRMHDInsightKafkaCluster_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_hdinsight_kafka_cluster"), - }, - }, - }) -} - -func TestAccAzureRMHDInsightKafkaCluster_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_kafka_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightKafkaCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - { - Config: testAccAzureRMHDInsightKafkaCluster_updated(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightKafkaCluster_sshKeys(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_kafka_cluster", "test") - resource.ParallelTest(t, 
resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightKafkaCluster_sshKeys(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("storage_account", - "roles.0.head_node.0.ssh_keys", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.ssh_keys", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.ssh_keys", - "roles.0.zookeeper_node.0.vm_size"), - }, - }) -} - -func TestAccAzureRMHDInsightKafkaCluster_virtualNetwork(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_kafka_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightKafkaCluster_virtualNetwork(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightKafkaCluster_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_kafka_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightKafkaCluster_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightKafkaCluster_tls(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_kafka_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightKafkaCluster_tls(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - 
"roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightKafkaCluster_allMetastores(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_kafka_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightKafkaCluster_allMetastores(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account", - "metastores.0.hive.0.password", - "metastores.0.oozie.0.password", - "metastores.0.ambari.0.password"), - }, - }) -} - -func TestAccAzureRMHDInsightKafkaCluster_hiveMetastore(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_kafka_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightKafkaCluster_hiveMetastore(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - }, - }) -} - -func TestAccAzureRMHDInsightKafkaCluster_updateMetastore(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_kafka_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightKafkaCluster_hiveMetastore(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account", - "metastores.0.hive.0.password", - "metastores.0.oozie.0.password", - "metastores.0.ambari.0.password"), - { - Config: testAccAzureRMHDInsightKafkaCluster_allMetastores(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - 
"roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account", - "metastores.0.hive.0.password", - "metastores.0.oozie.0.password", - "metastores.0.ambari.0.password"), - }, - }) -} - -func TestAccAzureRMHDInsightKafkaCluster_monitor(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_kafka_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightKafkaCluster_monitor(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightKafkaCluster_updateMonitor(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_kafka_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - // No monitor - { - Config: testAccAzureRMHDInsightKafkaCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - // Add monitor - { - Config: testAccAzureRMHDInsightKafkaCluster_monitor(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - // Change Log Analytics Workspace for the monitor - { - PreConfig: func() { - data.RandomString += "new" - }, - Config: testAccAzureRMHDInsightKafkaCluster_monitor(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - // Remove monitor - { - 
Config: testAccAzureRMHDInsightKafkaCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func testAccAzureRMHDInsightKafkaCluster_basic(data acceptance.TestData) string { - template := testAccAzureRMHDInsightKafkaCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_kafka_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - kafka = "2.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 3 - number_of_disks_per_node = 2 - } - - zookeeper_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightKafkaCluster_gen2storage(data acceptance.TestData) string { - template := testAccAzureRMHDInsightKafkaCluster_gen2template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_kafka_cluster" "test" { - depends_on = [azurerm_role_assignment.test] - - name = "acctesthdi-%d" - resource_group_name = "${azurerm_resource_group.test.name}" - location = "${azurerm_resource_group.test.location}" - cluster_version = "4.0" - tier = "Standard" - - component_version { - kafka = "2.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account_gen2 { - storage_resource_id = azurerm_storage_account.gen2test.id - filesystem_id = azurerm_storage_data_lake_gen2_filesystem.gen2test.id - managed_identity_resource_id = azurerm_user_assigned_identity.test.id - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 3 - number_of_disks_per_node = 2 - } - - zookeeper_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- } - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightKafkaCluster_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMHDInsightKafkaCluster_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_kafka_cluster" "import" { - name = azurerm_hdinsight_kafka_cluster.test.name - resource_group_name = azurerm_hdinsight_kafka_cluster.test.resource_group_name - location = azurerm_hdinsight_kafka_cluster.test.location - cluster_version = azurerm_hdinsight_kafka_cluster.test.cluster_version - tier = azurerm_hdinsight_kafka_cluster.test.tier - dynamic "component_version" { - for_each = azurerm_hdinsight_kafka_cluster.test.component_version - content { - kafka = component_version.value.kafka - } - } - dynamic "gateway" { - for_each = azurerm_hdinsight_kafka_cluster.test.gateway - content { - enabled = gateway.value.enabled - password = gateway.value.password - username = gateway.value.username - } - } - dynamic "storage_account" { - for_each = azurerm_hdinsight_kafka_cluster.test.storage_account - content { - is_default = storage_account.value.is_default - storage_account_key = storage_account.value.storage_account_key - storage_container_id = storage_account.value.storage_container_id - } - } - dynamic "roles" { - for_each = azurerm_hdinsight_kafka_cluster.test.roles - content { - dynamic "head_node" { - for_each = lookup(roles.value, "head_node", []) - content { - password = lookup(head_node.value, "password", null) - subnet_id = lookup(head_node.value, "subnet_id", null) - username = head_node.value.username - virtual_network_id = lookup(head_node.value, "virtual_network_id", null) - vm_size = head_node.value.vm_size - } - } - - dynamic "worker_node" { - for_each = lookup(roles.value, "worker_node", []) - content { - number_of_disks_per_node = worker_node.value.number_of_disks_per_node - password = lookup(worker_node.value, "password", null) - subnet_id = lookup(worker_node.value, "subnet_id", null) - target_instance_count = worker_node.value.target_instance_count - username = worker_node.value.username - virtual_network_id = lookup(worker_node.value, "virtual_network_id", null) - vm_size = worker_node.value.vm_size - } - } - - dynamic "zookeeper_node" { - for_each = lookup(roles.value, "zookeeper_node", []) - content { - password = lookup(zookeeper_node.value, "password", null) - subnet_id = lookup(zookeeper_node.value, "subnet_id", null) - username = zookeeper_node.value.username - virtual_network_id = lookup(zookeeper_node.value, "virtual_network_id", null) - vm_size = zookeeper_node.value.vm_size - } - } - } - } -} -`, template) -} - -func testAccAzureRMHDInsightKafkaCluster_sshKeys(data acceptance.TestData) string { - template := testAccAzureRMHDInsightKafkaCluster_template(data) - return fmt.Sprintf(` -%s - -variable "ssh_key" { - default = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCqaZoyiz1qbdOQ8xEf6uEu1cCwYowo5FHtsBhqLoDnnp7KUTEBN+L2NxRIfQ781rxV6Iq5jSav6b2Q8z5KiseOlvKA/RF2wqU0UPYqQviQhLmW6THTpmrv/YkUCuzxDpsH7DUDhZcwySLKVVe0Qm3+5N2Ta6UYH3lsDf9R9wTP2K/+vAnflKebuypNlmocIvakFWoZda18FOmsOoIVXQ8HWFNCuw9ZCunMSN62QGamCe3dL5cXlkgHYv7ekJE15IA9aOJcM7e90oeTqo+7HTcWfdu0qQqPWY5ujyMw/llas8tsXY85LFqRnr3gJ02bAscjc477+X+j/gkpFoN1QEmt terraform@demo.tld" -} - -resource "azurerm_hdinsight_kafka_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - kafka = "2.1" - } - - gateway { - 
enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - ssh_keys = [var.ssh_key] - } - - worker_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - ssh_keys = [var.ssh_key] - target_instance_count = 3 - number_of_disks_per_node = 2 - } - - zookeeper_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - ssh_keys = [var.ssh_key] - } - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightKafkaCluster_updated(data acceptance.TestData) string { - template := testAccAzureRMHDInsightKafkaCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_kafka_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - kafka = "2.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 5 - number_of_disks_per_node = 2 - } - - zookeeper_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - } - - tags = { - Hello = "World" - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightKafkaCluster_virtualNetwork(data acceptance.TestData) string { - template := testAccAzureRMHDInsightKafkaCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_virtual_network" "test" { - name = "acctestvirtnet%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctestsubnet%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.2.0/24" -} - -resource "azurerm_hdinsight_kafka_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - kafka = "2.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - worker_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- target_instance_count = 3 - number_of_disks_per_node = 2 - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - zookeeper_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - } -} -`, template, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMHDInsightKafkaCluster_complete(data acceptance.TestData) string { - template := testAccAzureRMHDInsightKafkaCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_virtual_network" "test" { - name = "acctestvirtnet%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctestsubnet%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.2.0/24" -} - -resource "azurerm_hdinsight_kafka_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - kafka = "2.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - worker_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 3 - number_of_disks_per_node = 2 - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - zookeeper_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - } - - tags = { - Hello = "World" - } -} -`, template, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMHDInsightKafkaCluster_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_storage_account" "test" { - name = "acctestsa%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_storage_container" "test" { - name = "acctest" - storage_account_name = azurerm_storage_account.test.name - container_access_type = "private" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func testAccAzureRMHDInsightKafkaCluster_gen2template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_storage_account" "gen2test" { - name = "accgen2test%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_kind = "StorageV2" - account_tier = "Standard" - account_replication_type = "LRS" - is_hns_enabled = true -} - -resource "azurerm_storage_data_lake_gen2_filesystem" "gen2test" { - name = "acctest" - storage_account_id = azurerm_storage_account.gen2test.id -} - -resource "azurerm_user_assigned_identity" "test" { - resource_group_name = "${azurerm_resource_group.test.name}" - location = "${azurerm_resource_group.test.location}" - - name = "test-identity" -} - -data "azurerm_subscription" "primary" {} - -resource "azurerm_role_assignment" "test" { - scope = "${data.azurerm_subscription.primary.id}" - role_definition_name = "Storage Blob Data Owner" - principal_id = "${azurerm_user_assigned_identity.test.principal_id}" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func testAccAzureRMHDInsightKafkaCluster_tls(data acceptance.TestData) string { - template := testAccAzureRMHDInsightKafkaCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_kafka_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - tls_min_version = "1.2" - - component_version { - kafka = "2.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 3 - number_of_disks_per_node = 2 - } - - zookeeper_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- } - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightKafkaCluster_allMetastores(data acceptance.TestData) string { - template := testAccAzureRMHDInsightKafkaCluster_template(data) - return fmt.Sprintf(` -%s -resource "azurerm_sql_server" "test" { - name = "acctestsql-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - administrator_login = "sql_admin" - administrator_login_password = "TerrAform123!" - version = "12.0" -} -resource "azurerm_sql_database" "hive" { - name = "hive" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - collation = "SQL_Latin1_General_CP1_CI_AS" - create_mode = "Default" - requested_service_objective_name = "GP_Gen5_2" -} -resource "azurerm_sql_database" "oozie" { - name = "oozie" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - collation = "SQL_Latin1_General_CP1_CI_AS" - create_mode = "Default" - requested_service_objective_name = "GP_Gen5_2" -} -resource "azurerm_sql_database" "ambari" { - name = "ambari" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - collation = "SQL_Latin1_General_CP1_CI_AS" - create_mode = "Default" - requested_service_objective_name = "GP_Gen5_2" -} -resource "azurerm_sql_firewall_rule" "AzureServices" { - name = "allow-azure-services" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_sql_server.test.name - start_ip_address = "0.0.0.0" - end_ip_address = "0.0.0.0" -} -resource "azurerm_hdinsight_kafka_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - component_version { - kafka = "2.1" - } - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - roles { - head_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - worker_node { - vm_size = "Standard_D4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 3 - number_of_disks_per_node = 2 - } - zookeeper_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- } - } - metastores { - hive { - server = azurerm_sql_server.test.fully_qualified_domain_name - database_name = azurerm_sql_database.hive.name - username = azurerm_sql_server.test.administrator_login - password = azurerm_sql_server.test.administrator_login_password - } - oozie { - server = azurerm_sql_server.test.fully_qualified_domain_name - database_name = azurerm_sql_database.oozie.name - username = azurerm_sql_server.test.administrator_login - password = azurerm_sql_server.test.administrator_login_password - } - ambari { - server = azurerm_sql_server.test.fully_qualified_domain_name - database_name = azurerm_sql_database.ambari.name - username = azurerm_sql_server.test.administrator_login - password = azurerm_sql_server.test.administrator_login_password - } - } -} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMHDInsightKafkaCluster_hiveMetastore(data acceptance.TestData) string { - template := testAccAzureRMHDInsightKafkaCluster_template(data) - return fmt.Sprintf(` -%s -resource "azurerm_sql_server" "test" { - name = "acctestsql-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - administrator_login = "sql_admin" - administrator_login_password = "TerrAform123!" - version = "12.0" -} -resource "azurerm_sql_database" "hive" { - name = "hive" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - collation = "SQL_Latin1_General_CP1_CI_AS" - create_mode = "Default" - requested_service_objective_name = "GP_Gen5_2" -} -resource "azurerm_sql_firewall_rule" "AzureServices" { - name = "allow-azure-services" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_sql_server.test.name - start_ip_address = "0.0.0.0" - end_ip_address = "0.0.0.0" -} -resource "azurerm_hdinsight_kafka_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - component_version { - kafka = "2.1" - } - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - roles { - head_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - worker_node { - vm_size = "Standard_D4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 3 - number_of_disks_per_node = 2 - } - zookeeper_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- } - } - metastores { - hive { - server = azurerm_sql_server.test.fully_qualified_domain_name - database_name = azurerm_sql_database.hive.name - username = azurerm_sql_server.test.administrator_login - password = azurerm_sql_server.test.administrator_login_password - } - } -} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMHDInsightKafkaCluster_monitor(data acceptance.TestData) string { - template := testAccAzureRMHDInsightKafkaCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_log_analytics_workspace" "test" { - name = "acctestLAW-%s-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "PerGB2018" -} - -resource "azurerm_hdinsight_kafka_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - kafka = "2.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 3 - number_of_disks_per_node = 2 - } - - zookeeper_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - } - - monitor { - log_analytics_workspace_id = azurerm_log_analytics_workspace.test.workspace_id - primary_key = azurerm_log_analytics_workspace.test.primary_shared_key - } -} -`, template, data.RandomString, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/hdinsight/tests/hdinsight_ml_services_cluster_resource_test.go b/azurerm/internal/services/hdinsight/tests/hdinsight_ml_services_cluster_resource_test.go deleted file mode 100644 index 0e42059044de..000000000000 --- a/azurerm/internal/services/hdinsight/tests/hdinsight_ml_services_cluster_resource_test.go +++ /dev/null @@ -1,721 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccAzureRMHDInsightMLServicesCluster_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_ml_services_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightMLServicesCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "edge_ssh_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", 
- "roles.0.edge_node.0.password", - "roles.0.edge_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightMLServicesCluster_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_ml_services_cluster", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightMLServicesCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "edge_ssh_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMHDInsightMLServicesCluster_requiresImport), - }, - }) -} - -func TestAccAzureRMHDInsightMLServicesCluster_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_ml_services_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightMLServicesCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "edge_ssh_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "roles.0.edge_node.0.password", - "roles.0.edge_node.0.vm_size", - "storage_account"), - { - Config: testAccAzureRMHDInsightMLServicesCluster_updated(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "edge_ssh_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "roles.0.edge_node.0.password", - "roles.0.edge_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightMLServicesCluster_sshKeys(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_ml_services_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightMLServicesCluster_sshKeys(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "edge_ssh_endpoint"), - 
resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("storage_account", - "roles.0.head_node.0.ssh_keys", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.ssh_keys", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.ssh_keys", - "roles.0.zookeeper_node.0.vm_size", - "roles.0.edge_node.0.ssh_keys", - "roles.0.edge_node.0.vm_size"), - }, - }) -} - -func TestAccAzureRMHDInsightMLServicesCluster_virtualNetwork(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_ml_services_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightMLServicesCluster_virtualNetwork(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "edge_ssh_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "roles.0.edge_node.0.password", - "roles.0.edge_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightMLServicesCluster_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_ml_services_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightMLServicesCluster_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "edge_ssh_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "roles.0.edge_node.0.password", - "roles.0.edge_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightMLServicesCluster_tls(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_ml_services_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightMLServicesCluster_tls(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "edge_ssh_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, 
"ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "roles.0.edge_node.0.password", - "roles.0.edge_node.0.vm_size", - "storage_account"), - }, - }) -} - -func testAccAzureRMHDInsightMLServicesCluster_basic(data acceptance.TestData) string { - template := testAccAzureRMHDInsightMLServicesCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_ml_services_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "3.6" - tier = "Standard" - rstudio = true - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 2 - } - - zookeeper_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - edge_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightMLServicesCluster_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMHDInsightMLServicesCluster_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_ml_services_cluster" "import" { - name = azurerm_hdinsight_ml_services_cluster.test.name - resource_group_name = azurerm_hdinsight_ml_services_cluster.test.resource_group_name - location = azurerm_hdinsight_ml_services_cluster.test.location - cluster_version = azurerm_hdinsight_ml_services_cluster.test.cluster_version - tier = azurerm_hdinsight_ml_services_cluster.test.tier - dynamic "gateway" { - for_each = azurerm_hdinsight_ml_services_cluster.test.gateway - content { - enabled = gateway.value.enabled - password = gateway.value.password - username = gateway.value.username - } - } - dynamic "storage_account" { - for_each = azurerm_hdinsight_ml_services_cluster.test.storage_account - content { - is_default = storage_account.value.is_default - storage_account_key = storage_account.value.storage_account_key - storage_container_id = storage_account.value.storage_container_id - } - } - dynamic "roles" { - for_each = azurerm_hdinsight_ml_services_cluster.test.roles - content { - dynamic "edge_node" { - for_each = lookup(roles.value, "edge_node", []) - content { - password = lookup(edge_node.value, "password", null) - ssh_keys = lookup(edge_node.value, "ssh_keys", null) - subnet_id = lookup(edge_node.value, "subnet_id", null) - username = edge_node.value.username - virtual_network_id = lookup(edge_node.value, "virtual_network_id", null) - vm_size = edge_node.value.vm_size - } - } - - dynamic "head_node" { - for_each = lookup(roles.value, "head_node", []) - content { - password = lookup(head_node.value, "password", null) - ssh_keys = lookup(head_node.value, "ssh_keys", null) - subnet_id = lookup(head_node.value, "subnet_id", null) - username = head_node.value.username - virtual_network_id = 
lookup(head_node.value, "virtual_network_id", null) - vm_size = head_node.value.vm_size - } - } - - dynamic "worker_node" { - for_each = lookup(roles.value, "worker_node", []) - content { - password = lookup(worker_node.value, "password", null) - ssh_keys = lookup(worker_node.value, "ssh_keys", null) - subnet_id = lookup(worker_node.value, "subnet_id", null) - target_instance_count = worker_node.value.target_instance_count - username = worker_node.value.username - virtual_network_id = lookup(worker_node.value, "virtual_network_id", null) - vm_size = worker_node.value.vm_size - } - } - - dynamic "zookeeper_node" { - for_each = lookup(roles.value, "zookeeper_node", []) - content { - password = lookup(zookeeper_node.value, "password", null) - ssh_keys = lookup(zookeeper_node.value, "ssh_keys", null) - subnet_id = lookup(zookeeper_node.value, "subnet_id", null) - username = zookeeper_node.value.username - virtual_network_id = lookup(zookeeper_node.value, "virtual_network_id", null) - vm_size = zookeeper_node.value.vm_size - } - } - } - } -} -`, template) -} - -func testAccAzureRMHDInsightMLServicesCluster_sshKeys(data acceptance.TestData) string { - template := testAccAzureRMHDInsightMLServicesCluster_template(data) - return fmt.Sprintf(` -%s - -variable "ssh_key" { - default = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCqaZoyiz1qbdOQ8xEf6uEu1cCwYowo5FHtsBhqLoDnnp7KUTEBN+L2NxRIfQ781rxV6Iq5jSav6b2Q8z5KiseOlvKA/RF2wqU0UPYqQviQhLmW6THTpmrv/YkUCuzxDpsH7DUDhZcwySLKVVe0Qm3+5N2Ta6UYH3lsDf9R9wTP2K/+vAnflKebuypNlmocIvakFWoZda18FOmsOoIVXQ8HWFNCuw9ZCunMSN62QGamCe3dL5cXlkgHYv7ekJE15IA9aOJcM7e90oeTqo+7HTcWfdu0qQqPWY5ujyMw/llas8tsXY85LFqRnr3gJ02bAscjc477+X+j/gkpFoN1QEmt terraform@demo.tld" -} - -resource "azurerm_hdinsight_ml_services_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "3.6" - tier = "Standard" - rstudio = true - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - ssh_keys = [var.ssh_key] - } - - worker_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - ssh_keys = [var.ssh_key] - target_instance_count = 3 - } - - zookeeper_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - ssh_keys = [var.ssh_key] - } - - edge_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - ssh_keys = [var.ssh_key] - } - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightMLServicesCluster_updated(data acceptance.TestData) string { - template := testAccAzureRMHDInsightMLServicesCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_ml_services_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "3.6" - tier = "Standard" - rstudio = true - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" 
- } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 5 - } - - zookeeper_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - edge_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - } - - tags = { - Hello = "World" - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightMLServicesCluster_virtualNetwork(data acceptance.TestData) string { - template := testAccAzureRMHDInsightMLServicesCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_virtual_network" "test" { - name = "acctestvirtnet%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctestsubnet%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.2.0/24" -} - -resource "azurerm_hdinsight_ml_services_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "3.6" - tier = "Standard" - rstudio = true - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - worker_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 3 - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - zookeeper_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - edge_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - } -} -`, template, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMHDInsightMLServicesCluster_complete(data acceptance.TestData) string { - template := testAccAzureRMHDInsightMLServicesCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_virtual_network" "test" { - name = "acctestvirtnet%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctestsubnet%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.2.0/24" -} - -resource "azurerm_hdinsight_ml_services_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "3.6" - tier = "Standard" - rstudio = true - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - worker_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 3 - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - zookeeper_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - edge_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - } - - tags = { - Hello = "World" - } -} -`, template, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMHDInsightMLServicesCluster_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_storage_account" "test" { - name = "acctestsa%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_storage_container" "test" { - name = "acctest" - storage_account_name = azurerm_storage_account.test.name - container_access_type = "private" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func testAccAzureRMHDInsightMLServicesCluster_tls(data acceptance.TestData) string { - template := testAccAzureRMHDInsightMLServicesCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_ml_services_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "3.6" - tier = "Standard" - rstudio = true - tls_min_version = "1.2" - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 2 - } - - zookeeper_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - edge_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- } - } -} -`, template, data.RandomInteger) -} diff --git a/azurerm/internal/services/hdinsight/tests/hdinsight_rserver_cluster_resource_test.go b/azurerm/internal/services/hdinsight/tests/hdinsight_rserver_cluster_resource_test.go deleted file mode 100644 index 00fbc4610350..000000000000 --- a/azurerm/internal/services/hdinsight/tests/hdinsight_rserver_cluster_resource_test.go +++ /dev/null @@ -1,721 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccAzureRMHDInsightRServerCluster_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_rserver_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightRServerCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "edge_ssh_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "roles.0.edge_node.0.password", - "roles.0.edge_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightRServerCluster_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_rserver_cluster", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightRServerCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "edge_ssh_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMHDInsightRServerCluster_requiresImport), - }, - }) -} - -func TestAccAzureRMHDInsightRServerCluster_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_rserver_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightRServerCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "edge_ssh_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - 
"roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "roles.0.edge_node.0.password", - "roles.0.edge_node.0.vm_size", - "storage_account"), - { - Config: testAccAzureRMHDInsightRServerCluster_updated(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "edge_ssh_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "roles.0.edge_node.0.password", - "roles.0.edge_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightRServerCluster_sshKeys(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_rserver_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightRServerCluster_sshKeys(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "edge_ssh_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("storage_account", - "roles.0.head_node.0.ssh_keys", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.ssh_keys", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.ssh_keys", - "roles.0.zookeeper_node.0.vm_size", - "roles.0.edge_node.0.ssh_keys", - "roles.0.edge_node.0.vm_size"), - }, - }) -} - -func TestAccAzureRMHDInsightRServerCluster_virtualNetwork(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_rserver_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightRServerCluster_virtualNetwork(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "edge_ssh_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "roles.0.edge_node.0.password", - "roles.0.edge_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightRServerCluster_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_rserver_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: 
testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightRServerCluster_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "edge_ssh_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "roles.0.edge_node.0.password", - "roles.0.edge_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightRServerCluster_tls(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_rserver_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightRServerCluster_tls(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "edge_ssh_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "roles.0.edge_node.0.password", - "roles.0.edge_node.0.vm_size", - "storage_account"), - }, - }) -} - -func testAccAzureRMHDInsightRServerCluster_basic(data acceptance.TestData) string { - template := testAccAzureRMHDInsightRServerCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_rserver_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "3.6" - tier = "Standard" - rstudio = true - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 2 - } - - zookeeper_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - edge_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- } - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightRServerCluster_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMHDInsightRServerCluster_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_rserver_cluster" "import" { - name = azurerm_hdinsight_rserver_cluster.test.name - resource_group_name = azurerm_hdinsight_rserver_cluster.test.resource_group_name - location = azurerm_hdinsight_rserver_cluster.test.location - cluster_version = azurerm_hdinsight_rserver_cluster.test.cluster_version - tier = azurerm_hdinsight_rserver_cluster.test.tier - dynamic "gateway" { - for_each = azurerm_hdinsight_rserver_cluster.test.gateway - content { - enabled = gateway.value.enabled - password = gateway.value.password - username = gateway.value.username - } - } - dynamic "storage_account" { - for_each = azurerm_hdinsight_rserver_cluster.test.storage_account - content { - is_default = storage_account.value.is_default - storage_account_key = storage_account.value.storage_account_key - storage_container_id = storage_account.value.storage_container_id - } - } - dynamic "roles" { - for_each = azurerm_hdinsight_rserver_cluster.test.roles - content { - dynamic "edge_node" { - for_each = lookup(roles.value, "edge_node", []) - content { - password = lookup(edge_node.value, "password", null) - ssh_keys = lookup(edge_node.value, "ssh_keys", null) - subnet_id = lookup(edge_node.value, "subnet_id", null) - username = edge_node.value.username - virtual_network_id = lookup(edge_node.value, "virtual_network_id", null) - vm_size = edge_node.value.vm_size - } - } - - dynamic "head_node" { - for_each = lookup(roles.value, "head_node", []) - content { - password = lookup(head_node.value, "password", null) - ssh_keys = lookup(head_node.value, "ssh_keys", null) - subnet_id = lookup(head_node.value, "subnet_id", null) - username = head_node.value.username - virtual_network_id = lookup(head_node.value, "virtual_network_id", null) - vm_size = head_node.value.vm_size - } - } - - dynamic "worker_node" { - for_each = lookup(roles.value, "worker_node", []) - content { - password = lookup(worker_node.value, "password", null) - ssh_keys = lookup(worker_node.value, "ssh_keys", null) - subnet_id = lookup(worker_node.value, "subnet_id", null) - target_instance_count = worker_node.value.target_instance_count - username = worker_node.value.username - virtual_network_id = lookup(worker_node.value, "virtual_network_id", null) - vm_size = worker_node.value.vm_size - } - } - - dynamic "zookeeper_node" { - for_each = lookup(roles.value, "zookeeper_node", []) - content { - password = lookup(zookeeper_node.value, "password", null) - ssh_keys = lookup(zookeeper_node.value, "ssh_keys", null) - subnet_id = lookup(zookeeper_node.value, "subnet_id", null) - username = zookeeper_node.value.username - virtual_network_id = lookup(zookeeper_node.value, "virtual_network_id", null) - vm_size = zookeeper_node.value.vm_size - } - } - } - } -} -`, template) -} - -func testAccAzureRMHDInsightRServerCluster_sshKeys(data acceptance.TestData) string { - template := testAccAzureRMHDInsightRServerCluster_template(data) - return fmt.Sprintf(` -%s - -variable "ssh_key" { - default = "ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAABAQCqaZoyiz1qbdOQ8xEf6uEu1cCwYowo5FHtsBhqLoDnnp7KUTEBN+L2NxRIfQ781rxV6Iq5jSav6b2Q8z5KiseOlvKA/RF2wqU0UPYqQviQhLmW6THTpmrv/YkUCuzxDpsH7DUDhZcwySLKVVe0Qm3+5N2Ta6UYH3lsDf9R9wTP2K/+vAnflKebuypNlmocIvakFWoZda18FOmsOoIVXQ8HWFNCuw9ZCunMSN62QGamCe3dL5cXlkgHYv7ekJE15IA9aOJcM7e90oeTqo+7HTcWfdu0qQqPWY5ujyMw/llas8tsXY85LFqRnr3gJ02bAscjc477+X+j/gkpFoN1QEmt terraform@demo.tld" -} - -resource "azurerm_hdinsight_rserver_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "3.6" - tier = "Standard" - rstudio = true - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - ssh_keys = [var.ssh_key] - } - - worker_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - ssh_keys = [var.ssh_key] - target_instance_count = 3 - } - - zookeeper_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - ssh_keys = [var.ssh_key] - } - - edge_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - ssh_keys = [var.ssh_key] - } - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightRServerCluster_updated(data acceptance.TestData) string { - template := testAccAzureRMHDInsightRServerCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_rserver_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "3.6" - tier = "Standard" - rstudio = true - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 5 - } - - zookeeper_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - edge_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- } - } - - tags = { - Hello = "World" - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightRServerCluster_virtualNetwork(data acceptance.TestData) string { - template := testAccAzureRMHDInsightRServerCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_virtual_network" "test" { - name = "acctestvirtnet%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctestsubnet%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.2.0/24" -} - -resource "azurerm_hdinsight_rserver_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "3.6" - tier = "Standard" - rstudio = true - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - worker_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 3 - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - zookeeper_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - edge_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - } -} -`, template, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMHDInsightRServerCluster_complete(data acceptance.TestData) string { - template := testAccAzureRMHDInsightRServerCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_virtual_network" "test" { - name = "acctestvirtnet%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctestsubnet%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.2.0/24" -} - -resource "azurerm_hdinsight_rserver_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "3.6" - tier = "Standard" - rstudio = true - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - worker_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 3 - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - zookeeper_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - edge_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - } - - tags = { - Hello = "World" - } -} -`, template, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMHDInsightRServerCluster_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_storage_account" "test" { - name = "acctestsa%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_storage_container" "test" { - name = "acctest" - storage_account_name = azurerm_storage_account.test.name - container_access_type = "private" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func testAccAzureRMHDInsightRServerCluster_tls(data acceptance.TestData) string { - template := testAccAzureRMHDInsightRServerCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_rserver_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "3.6" - tier = "Standard" - tls_min_version = "1.2" - rstudio = true - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 2 - } - - zookeeper_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - edge_node { - vm_size = "Standard_D3_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- } - } -} -`, template, data.RandomInteger) -} diff --git a/azurerm/internal/services/hdinsight/tests/hdinsight_spark_cluster_resource_test.go b/azurerm/internal/services/hdinsight/tests/hdinsight_spark_cluster_resource_test.go deleted file mode 100644 index a341de2ff7f7..000000000000 --- a/azurerm/internal/services/hdinsight/tests/hdinsight_spark_cluster_resource_test.go +++ /dev/null @@ -1,1239 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccAzureRMHDInsightSparkCluster_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_spark_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightSparkCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightSparkCluster_gen2basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_spark_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightSparkCluster_gen2basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightSparkCluster_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_spark_cluster", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightSparkCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMHDInsightSparkCluster_requiresImport), - }, - }) -} - -func TestAccAzureRMHDInsightSparkCluster_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_spark_cluster", "test") - 
resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightSparkCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - { - Config: testAccAzureRMHDInsightSparkCluster_updated(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightSparkCluster_sshKeys(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_spark_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightSparkCluster_sshKeys(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("storage_account", - "roles.0.head_node.0.ssh_keys", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.ssh_keys", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.ssh_keys", - "roles.0.zookeeper_node.0.vm_size"), - }, - }) -} - -func TestAccAzureRMHDInsightSparkCluster_virtualNetwork(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_spark_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightSparkCluster_virtualNetwork(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightSparkCluster_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_spark_cluster", "test") - resource.ParallelTest(t, 
resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightSparkCluster_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightSparkCluster_tls(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_spark_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightSparkCluster_tls(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightSparkCluster_allMetastores(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_spark_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightSparkCluster_allMetastores(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account", - "metastores.0.hive.0.password", - "metastores.0.oozie.0.password", - "metastores.0.ambari.0.password"), - }, - }) -} - -func TestAccAzureRMHDInsightSparkCluster_hiveMetastore(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_spark_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightSparkCluster_hiveMetastore(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - 
resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - }, - }) -} - -func TestAccAzureRMHDInsightSparkCluster_updateMetastore(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_spark_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightSparkCluster_hiveMetastore(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account", - "metastores.0.hive.0.password", - "metastores.0.oozie.0.password", - "metastores.0.ambari.0.password"), - { - Config: testAccAzureRMHDInsightSparkCluster_allMetastores(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account", - "metastores.0.hive.0.password", - "metastores.0.oozie.0.password", - "metastores.0.ambari.0.password"), - }, - }) -} - -func TestAccAzureRMHDInsightSparkCluster_monitor(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_spark_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightSparkCluster_monitor(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightSparkCluster_updateMonitor(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_spark_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - // No monitor - { - Config: testAccAzureRMHDInsightSparkCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - 
resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - // Add monitor - { - Config: testAccAzureRMHDInsightSparkCluster_monitor(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - // Change Log Analytics Workspace for the monitor - { - PreConfig: func() { - data.RandomString += "new" - }, - Config: testAccAzureRMHDInsightSparkCluster_monitor(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - // Remove monitor - { - Config: testAccAzureRMHDInsightSparkCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func testAccAzureRMHDInsightSparkCluster_basic(data acceptance.TestData) string { - template := testAccAzureRMHDInsightSparkCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_spark_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - spark = "2.4" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 3 - } - - zookeeper_node { - vm_size = "Medium" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- } - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightSparkCluster_gen2basic(data acceptance.TestData) string { - template := testAccAzureRMHDInsightSparkCluster_gen2template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_spark_cluster" "test" { - depends_on = [azurerm_role_assignment.test] - - name = "acctesthdi-%d" - resource_group_name = "${azurerm_resource_group.test.name}" - location = "${azurerm_resource_group.test.location}" - cluster_version = "4.0" - tier = "Standard" - - component_version { - spark = "2.4" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account_gen2 { - storage_resource_id = azurerm_storage_account.gen2test.id - filesystem_id = azurerm_storage_data_lake_gen2_filesystem.gen2test.id - managed_identity_resource_id = azurerm_user_assigned_identity.test.id - is_default = true - } - - roles { - head_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 3 - } - - zookeeper_node { - vm_size = "Medium" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightSparkCluster_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMHDInsightSparkCluster_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_spark_cluster" "import" { - name = azurerm_hdinsight_spark_cluster.test.name - resource_group_name = azurerm_hdinsight_spark_cluster.test.resource_group_name - location = azurerm_hdinsight_spark_cluster.test.location - cluster_version = azurerm_hdinsight_spark_cluster.test.cluster_version - tier = azurerm_hdinsight_spark_cluster.test.tier - dynamic "component_version" { - for_each = azurerm_hdinsight_spark_cluster.test.component_version - content { - spark = component_version.value.spark - } - } - dynamic "gateway" { - for_each = azurerm_hdinsight_spark_cluster.test.gateway - content { - enabled = gateway.value.enabled - password = gateway.value.password - username = gateway.value.username - } - } - dynamic "storage_account" { - for_each = azurerm_hdinsight_spark_cluster.test.storage_account - content { - is_default = storage_account.value.is_default - storage_account_key = storage_account.value.storage_account_key - storage_container_id = storage_account.value.storage_container_id - } - } - dynamic "roles" { - for_each = azurerm_hdinsight_spark_cluster.test.roles - content { - dynamic "head_node" { - for_each = lookup(roles.value, "head_node", []) - content { - password = lookup(head_node.value, "password", null) - subnet_id = lookup(head_node.value, "subnet_id", null) - username = head_node.value.username - virtual_network_id = lookup(head_node.value, "virtual_network_id", null) - vm_size = head_node.value.vm_size - } - } - - dynamic "worker_node" { - for_each = lookup(roles.value, "worker_node", []) - content { - password = lookup(worker_node.value, "password", null) - subnet_id = lookup(worker_node.value, "subnet_id", null) - target_instance_count = worker_node.value.target_instance_count - username = worker_node.value.username - virtual_network_id = lookup(worker_node.value, "virtual_network_id", null) - vm_size = worker_node.value.vm_size - } - } - - dynamic "zookeeper_node" { - for_each = lookup(roles.value, "zookeeper_node", []) - content { - password = 
lookup(zookeeper_node.value, "password", null) - subnet_id = lookup(zookeeper_node.value, "subnet_id", null) - username = zookeeper_node.value.username - virtual_network_id = lookup(zookeeper_node.value, "virtual_network_id", null) - vm_size = zookeeper_node.value.vm_size - } - } - } - } -} -`, template) -} - -func testAccAzureRMHDInsightSparkCluster_sshKeys(data acceptance.TestData) string { - template := testAccAzureRMHDInsightSparkCluster_template(data) - return fmt.Sprintf(` -%s - -variable "ssh_key" { - default = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCqaZoyiz1qbdOQ8xEf6uEu1cCwYowo5FHtsBhqLoDnnp7KUTEBN+L2NxRIfQ781rxV6Iq5jSav6b2Q8z5KiseOlvKA/RF2wqU0UPYqQviQhLmW6THTpmrv/YkUCuzxDpsH7DUDhZcwySLKVVe0Qm3+5N2Ta6UYH3lsDf9R9wTP2K/+vAnflKebuypNlmocIvakFWoZda18FOmsOoIVXQ8HWFNCuw9ZCunMSN62QGamCe3dL5cXlkgHYv7ekJE15IA9aOJcM7e90oeTqo+7HTcWfdu0qQqPWY5ujyMw/llas8tsXY85LFqRnr3gJ02bAscjc477+X+j/gkpFoN1QEmt terraform@demo.tld" -} - -resource "azurerm_hdinsight_spark_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - spark = "2.4" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - ssh_keys = [var.ssh_key] - } - - worker_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - ssh_keys = [var.ssh_key] - target_instance_count = 3 - } - - zookeeper_node { - vm_size = "Medium" - username = "acctestusrvm" - ssh_keys = [var.ssh_key] - } - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightSparkCluster_updated(data acceptance.TestData) string { - template := testAccAzureRMHDInsightSparkCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_spark_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - spark = "2.4" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 5 - } - - zookeeper_node { - vm_size = "Medium" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- } - } - - tags = { - Hello = "World" - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightSparkCluster_virtualNetwork(data acceptance.TestData) string { - template := testAccAzureRMHDInsightSparkCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_virtual_network" "test" { - name = "acctestvirtnet%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctestsubnet%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.2.0/24" -} - -resource "azurerm_hdinsight_spark_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - spark = "2.4" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - worker_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 3 - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - zookeeper_node { - vm_size = "Medium" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - } -} -`, template, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMHDInsightSparkCluster_complete(data acceptance.TestData) string { - template := testAccAzureRMHDInsightSparkCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_virtual_network" "test" { - name = "acctestvirtnet%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctestsubnet%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.2.0/24" -} - -resource "azurerm_hdinsight_spark_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - spark = "2.4" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - worker_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- target_instance_count = 3 - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - zookeeper_node { - vm_size = "Medium" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - } - - tags = { - Hello = "World" - } -} -`, template, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMHDInsightSparkCluster_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_storage_account" "test" { - name = "acctestsa%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_storage_container" "test" { - name = "acctest" - storage_account_name = azurerm_storage_account.test.name - container_access_type = "private" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func testAccAzureRMHDInsightSparkCluster_gen2template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_storage_account" "gen2test" { - name = "accgen2test%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_kind = "StorageV2" - account_tier = "Standard" - account_replication_type = "LRS" - is_hns_enabled = true -} - -resource "azurerm_storage_data_lake_gen2_filesystem" "gen2test" { - name = "acctest" - storage_account_id = azurerm_storage_account.gen2test.id -} - -resource "azurerm_user_assigned_identity" "test" { - resource_group_name = "${azurerm_resource_group.test.name}" - location = "${azurerm_resource_group.test.location}" - - name = "test-identity" -} - -data "azurerm_subscription" "primary" {} - -resource "azurerm_role_assignment" "test" { - scope = "${data.azurerm_subscription.primary.id}" - role_definition_name = "Storage Blob Data Owner" - principal_id = "${azurerm_user_assigned_identity.test.principal_id}" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func testAccAzureRMHDInsightSparkCluster_tls(data acceptance.TestData) string { - template := testAccAzureRMHDInsightSparkCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_spark_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - tls_min_version = "1.2" - - component_version { - spark = "2.4" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 3 - } - - zookeeper_node { - vm_size = "Medium" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- } - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightSparkCluster_allMetastores(data acceptance.TestData) string { - template := testAccAzureRMHDInsightSparkCluster_template(data) - return fmt.Sprintf(` -%s -resource "azurerm_sql_server" "test" { - name = "acctestsql-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - administrator_login = "sql_admin" - administrator_login_password = "TerrAform123!" - version = "12.0" -} -resource "azurerm_sql_database" "hive" { - name = "hive" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - collation = "SQL_Latin1_General_CP1_CI_AS" - create_mode = "Default" - requested_service_objective_name = "GP_Gen5_2" -} -resource "azurerm_sql_database" "oozie" { - name = "oozie" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - collation = "SQL_Latin1_General_CP1_CI_AS" - create_mode = "Default" - requested_service_objective_name = "GP_Gen5_2" -} -resource "azurerm_sql_database" "ambari" { - name = "ambari" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - collation = "SQL_Latin1_General_CP1_CI_AS" - create_mode = "Default" - requested_service_objective_name = "GP_Gen5_2" -} -resource "azurerm_sql_firewall_rule" "AzureServices" { - name = "allow-azure-services" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_sql_server.test.name - start_ip_address = "0.0.0.0" - end_ip_address = "0.0.0.0" -} -resource "azurerm_hdinsight_spark_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - component_version { - spark = "2.4" - } - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - roles { - head_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - worker_node { - vm_size = "Standard_D4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 2 - } - zookeeper_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- } - } - metastores { - hive { - server = azurerm_sql_server.test.fully_qualified_domain_name - database_name = azurerm_sql_database.hive.name - username = azurerm_sql_server.test.administrator_login - password = azurerm_sql_server.test.administrator_login_password - } - oozie { - server = azurerm_sql_server.test.fully_qualified_domain_name - database_name = azurerm_sql_database.oozie.name - username = azurerm_sql_server.test.administrator_login - password = azurerm_sql_server.test.administrator_login_password - } - ambari { - server = azurerm_sql_server.test.fully_qualified_domain_name - database_name = azurerm_sql_database.ambari.name - username = azurerm_sql_server.test.administrator_login - password = azurerm_sql_server.test.administrator_login_password - } - } -} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMHDInsightSparkCluster_hiveMetastore(data acceptance.TestData) string { - template := testAccAzureRMHDInsightSparkCluster_template(data) - return fmt.Sprintf(` -%s -resource "azurerm_sql_server" "test" { - name = "acctestsql-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - administrator_login = "sql_admin" - administrator_login_password = "TerrAform123!" - version = "12.0" -} -resource "azurerm_sql_database" "hive" { - name = "hive" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - collation = "SQL_Latin1_General_CP1_CI_AS" - create_mode = "Default" - requested_service_objective_name = "GP_Gen5_2" -} -resource "azurerm_sql_firewall_rule" "AzureServices" { - name = "allow-azure-services" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_sql_server.test.name - start_ip_address = "0.0.0.0" - end_ip_address = "0.0.0.0" -} -resource "azurerm_hdinsight_spark_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - component_version { - spark = "2.4" - } - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - roles { - head_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - worker_node { - vm_size = "Standard_D4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 2 - } - zookeeper_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- } - } - metastores { - hive { - server = azurerm_sql_server.test.fully_qualified_domain_name - database_name = azurerm_sql_database.hive.name - username = azurerm_sql_server.test.administrator_login - password = azurerm_sql_server.test.administrator_login_password - } - } -} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMHDInsightSparkCluster_monitor(data acceptance.TestData) string { - template := testAccAzureRMHDInsightSparkCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_log_analytics_workspace" "test" { - name = "acctestLAW-%s-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "PerGB2018" -} - -resource "azurerm_hdinsight_spark_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "4.0" - tier = "Standard" - - component_version { - spark = "2.4" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 3 - } - - zookeeper_node { - vm_size = "Medium" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - } - - monitor { - log_analytics_workspace_id = azurerm_log_analytics_workspace.test.workspace_id - primary_key = azurerm_log_analytics_workspace.test.primary_shared_key - } -} -`, template, data.RandomString, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/hdinsight/tests/hdinsight_storm_cluster_resource_test.go b/azurerm/internal/services/hdinsight/tests/hdinsight_storm_cluster_resource_test.go deleted file mode 100644 index 002537e5e732..000000000000 --- a/azurerm/internal/services/hdinsight/tests/hdinsight_storm_cluster_resource_test.go +++ /dev/null @@ -1,1116 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccAzureRMHDInsightStormCluster_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_storm_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightStormCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightStormCluster_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, 
"azurerm_hdinsight_storm_cluster", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightStormCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMHDInsightStormCluster_requiresImport), - }, - }) -} - -func TestAccAzureRMHDInsightStormCluster_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_storm_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightStormCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - { - Config: testAccAzureRMHDInsightStormCluster_updated(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep( - "roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightStormCluster_sshKeys(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_storm_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightStormCluster_sshKeys(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("storage_account", - "roles.0.head_node.0.ssh_keys", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.ssh_keys", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.ssh_keys", - "roles.0.zookeeper_node.0.vm_size"), - }, - }) -} - -func TestAccAzureRMHDInsightStormCluster_virtualNetwork(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_storm_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: 
testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightStormCluster_virtualNetwork(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightStormCluster_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_storm_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightStormCluster_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightStormCluster_tls(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_storm_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightStormCluster_tls(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightStormCluster_allMetastores(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_storm_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightStormCluster_allMetastores(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - 
"roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account", - "metastores.0.hive.0.password", - "metastores.0.oozie.0.password", - "metastores.0.ambari.0.password"), - }, - }) -} - -func TestAccAzureRMHDInsightStormCluster_hiveMetastore(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_storm_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightStormCluster_hiveMetastore(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - }, - }) -} - -func TestAccAzureRMHDInsightStormCluster_updateMetastore(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_storm_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightStormCluster_hiveMetastore(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account", - "metastores.0.hive.0.password", - "metastores.0.oozie.0.password", - "metastores.0.ambari.0.password"), - { - Config: testAccAzureRMHDInsightStormCluster_allMetastores(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account", - "metastores.0.hive.0.password", - "metastores.0.oozie.0.password", - "metastores.0.ambari.0.password"), - }, - }) -} - -func TestAccAzureRMHDInsightStormCluster_monitor(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_storm_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHDInsightStormCluster_monitor(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - 
"roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func TestAccAzureRMHDInsightStormCluster_updateMonitor(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hdinsight_storm_cluster", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHDInsightClusterDestroy(data.ResourceType), - Steps: []resource.TestStep{ - // No monitor - { - Config: testAccAzureRMHDInsightStormCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - // Add monitor - { - Config: testAccAzureRMHDInsightStormCluster_monitor(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - // Change Log Analytics Workspace for the monitor - { - PreConfig: func() { - data.RandomString += "new" - }, - Config: testAccAzureRMHDInsightStormCluster_monitor(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - // Remove monitor - { - Config: testAccAzureRMHDInsightStormCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHDInsightClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "https_endpoint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ssh_endpoint"), - ), - }, - data.ImportStep("roles.0.head_node.0.password", - "roles.0.head_node.0.vm_size", - "roles.0.worker_node.0.password", - "roles.0.worker_node.0.vm_size", - "roles.0.zookeeper_node.0.password", - "roles.0.zookeeper_node.0.vm_size", - "storage_account"), - }, - }) -} - -func testAccAzureRMHDInsightStormCluster_basic(data acceptance.TestData) string { - template := testAccAzureRMHDInsightStormCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_storm_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "3.6" - tier = "Standard" - - component_version { - storm = "1.1" - } - - gateway { - enabled = true - username = 
"acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 3 - } - - zookeeper_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightStormCluster_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMHDInsightStormCluster_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_storm_cluster" "import" { - name = azurerm_hdinsight_storm_cluster.test.name - resource_group_name = azurerm_hdinsight_storm_cluster.test.resource_group_name - location = azurerm_hdinsight_storm_cluster.test.location - cluster_version = azurerm_hdinsight_storm_cluster.test.cluster_version - tier = azurerm_hdinsight_storm_cluster.test.tier - dynamic "component_version" { - for_each = azurerm_hdinsight_storm_cluster.test.component_version - content { - storm = component_version.value.storm - } - } - dynamic "gateway" { - for_each = azurerm_hdinsight_storm_cluster.test.gateway - content { - enabled = gateway.value.enabled - password = gateway.value.password - username = gateway.value.username - } - } - dynamic "storage_account" { - for_each = azurerm_hdinsight_storm_cluster.test.storage_account - content { - is_default = storage_account.value.is_default - storage_account_key = storage_account.value.storage_account_key - storage_container_id = storage_account.value.storage_container_id - } - } - dynamic "roles" { - for_each = azurerm_hdinsight_storm_cluster.test.roles - content { - dynamic "head_node" { - for_each = lookup(roles.value, "head_node", []) - content { - password = lookup(head_node.value, "password", null) - subnet_id = lookup(head_node.value, "subnet_id", null) - username = head_node.value.username - virtual_network_id = lookup(head_node.value, "virtual_network_id", null) - vm_size = head_node.value.vm_size - } - } - - dynamic "worker_node" { - for_each = lookup(roles.value, "worker_node", []) - content { - password = lookup(worker_node.value, "password", null) - subnet_id = lookup(worker_node.value, "subnet_id", null) - target_instance_count = worker_node.value.target_instance_count - username = worker_node.value.username - virtual_network_id = lookup(worker_node.value, "virtual_network_id", null) - vm_size = worker_node.value.vm_size - } - } - - dynamic "zookeeper_node" { - for_each = lookup(roles.value, "zookeeper_node", []) - content { - password = lookup(zookeeper_node.value, "password", null) - subnet_id = lookup(zookeeper_node.value, "subnet_id", null) - username = zookeeper_node.value.username - virtual_network_id = lookup(zookeeper_node.value, "virtual_network_id", null) - vm_size = zookeeper_node.value.vm_size - } - } - } - } -} -`, template) -} - -func testAccAzureRMHDInsightStormCluster_sshKeys(data acceptance.TestData) string { - template := testAccAzureRMHDInsightStormCluster_template(data) - return fmt.Sprintf(` -%s - -variable "ssh_key" { - default = "ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAABAQCqaZoyiz1qbdOQ8xEf6uEu1cCwYowo5FHtsBhqLoDnnp7KUTEBN+L2NxRIfQ781rxV6Iq5jSav6b2Q8z5KiseOlvKA/RF2wqU0UPYqQviQhLmW6THTpmrv/YkUCuzxDpsH7DUDhZcwySLKVVe0Qm3+5N2Ta6UYH3lsDf9R9wTP2K/+vAnflKebuypNlmocIvakFWoZda18FOmsOoIVXQ8HWFNCuw9ZCunMSN62QGamCe3dL5cXlkgHYv7ekJE15IA9aOJcM7e90oeTqo+7HTcWfdu0qQqPWY5ujyMw/llas8tsXY85LFqRnr3gJ02bAscjc477+X+j/gkpFoN1QEmt terraform@demo.tld" -} - -resource "azurerm_hdinsight_storm_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "3.6" - tier = "Standard" - - component_version { - storm = "1.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - ssh_keys = [var.ssh_key] - } - - worker_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - ssh_keys = [var.ssh_key] - target_instance_count = 3 - } - - zookeeper_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - ssh_keys = [var.ssh_key] - } - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightStormCluster_updated(data acceptance.TestData) string { - template := testAccAzureRMHDInsightStormCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_storm_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "3.6" - tier = "Standard" - - component_version { - storm = "1.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 5 - } - - zookeeper_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - } - - tags = { - Hello = "World" - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightStormCluster_virtualNetwork(data acceptance.TestData) string { - template := testAccAzureRMHDInsightStormCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_virtual_network" "test" { - name = "acctestvirtnet%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctestsubnet%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.2.0/24" -} - -resource "azurerm_hdinsight_storm_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "3.6" - tier = "Standard" - - component_version { - storm = "1.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" 
- } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - worker_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 3 - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - zookeeper_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - } -} -`, template, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMHDInsightStormCluster_complete(data acceptance.TestData) string { - template := testAccAzureRMHDInsightStormCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_virtual_network" "test" { - name = "acctestvirtnet%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctestsubnet%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.2.0/24" -} - -resource "azurerm_hdinsight_storm_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "3.6" - tier = "Standard" - - component_version { - storm = "1.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - worker_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 3 - subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - - zookeeper_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- subnet_id = azurerm_subnet.test.id - virtual_network_id = azurerm_virtual_network.test.id - } - } - - tags = { - Hello = "World" - } -} -`, template, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMHDInsightStormCluster_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_storage_account" "test" { - name = "acctestsa%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_storage_container" "test" { - name = "acctest" - storage_account_name = azurerm_storage_account.test.name - container_access_type = "private" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func testAccAzureRMHDInsightStormCluster_tls(data acceptance.TestData) string { - template := testAccAzureRMHDInsightStormCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hdinsight_storm_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "3.6" - tier = "Standard" - tls_min_version = "1.2" - - component_version { - storm = "1.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 3 - } - - zookeeper_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHDInsightStormCluster_allMetastores(data acceptance.TestData) string { - template := testAccAzureRMHDInsightStormCluster_template(data) - return fmt.Sprintf(` -%s -resource "azurerm_sql_server" "test" { - name = "acctestsql-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - administrator_login = "sql_admin" - administrator_login_password = "TerrAform123!" 
- version = "12.0" -} -resource "azurerm_sql_database" "hive" { - name = "hive" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - collation = "SQL_Latin1_General_CP1_CI_AS" - create_mode = "Default" - requested_service_objective_name = "GP_Gen5_2" -} -resource "azurerm_sql_database" "oozie" { - name = "oozie" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - collation = "SQL_Latin1_General_CP1_CI_AS" - create_mode = "Default" - requested_service_objective_name = "GP_Gen5_2" -} -resource "azurerm_sql_database" "ambari" { - name = "ambari" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - collation = "SQL_Latin1_General_CP1_CI_AS" - create_mode = "Default" - requested_service_objective_name = "GP_Gen5_2" -} -resource "azurerm_sql_firewall_rule" "AzureServices" { - name = "allow-azure-services" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_sql_server.test.name - start_ip_address = "0.0.0.0" - end_ip_address = "0.0.0.0" -} -resource "azurerm_hdinsight_storm_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "3.6" - tier = "Standard" - component_version { - storm = "1.1" - } - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - roles { - head_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - worker_node { - vm_size = "Standard_D4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 2 - } - zookeeper_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - } - metastores { - hive { - server = azurerm_sql_server.test.fully_qualified_domain_name - database_name = azurerm_sql_database.hive.name - username = azurerm_sql_server.test.administrator_login - password = azurerm_sql_server.test.administrator_login_password - } - oozie { - server = azurerm_sql_server.test.fully_qualified_domain_name - database_name = azurerm_sql_database.oozie.name - username = azurerm_sql_server.test.administrator_login - password = azurerm_sql_server.test.administrator_login_password - } - ambari { - server = azurerm_sql_server.test.fully_qualified_domain_name - database_name = azurerm_sql_database.ambari.name - username = azurerm_sql_server.test.administrator_login - password = azurerm_sql_server.test.administrator_login_password - } - } -} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMHDInsightStormCluster_hiveMetastore(data acceptance.TestData) string { - template := testAccAzureRMHDInsightStormCluster_template(data) - return fmt.Sprintf(` -%s -resource "azurerm_sql_server" "test" { - name = "acctestsql-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - administrator_login = "sql_admin" - administrator_login_password = "TerrAform123!" 
- version = "12.0" -} -resource "azurerm_sql_database" "hive" { - name = "hive" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - collation = "SQL_Latin1_General_CP1_CI_AS" - create_mode = "Default" - requested_service_objective_name = "GP_Gen5_2" -} -resource "azurerm_sql_firewall_rule" "AzureServices" { - name = "allow-azure-services" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_sql_server.test.name - start_ip_address = "0.0.0.0" - end_ip_address = "0.0.0.0" -} -resource "azurerm_hdinsight_storm_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "3.6" - tier = "Standard" - component_version { - storm = "1.1" - } - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - roles { - head_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - worker_node { - vm_size = "Standard_D4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 2 - } - zookeeper_node { - vm_size = "Standard_D3_v2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - } - metastores { - hive { - server = azurerm_sql_server.test.fully_qualified_domain_name - database_name = azurerm_sql_database.hive.name - username = azurerm_sql_server.test.administrator_login - password = azurerm_sql_server.test.administrator_login_password - } - } -} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMHDInsightStormCluster_monitor(data acceptance.TestData) string { - template := testAccAzureRMHDInsightStormCluster_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_log_analytics_workspace" "test" { - name = "acctestLAW-%s-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "PerGB2018" -} - -resource "azurerm_hdinsight_storm_cluster" "test" { - name = "acctesthdi-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_version = "3.6" - tier = "Standard" - - component_version { - storm = "1.1" - } - - gateway { - enabled = true - username = "acctestusrgw" - password = "TerrAform123!" - } - - storage_account { - storage_container_id = azurerm_storage_container.test.id - storage_account_key = azurerm_storage_account.test.primary_access_key - is_default = true - } - - roles { - head_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - } - - worker_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" - target_instance_count = 3 - } - - zookeeper_node { - vm_size = "Standard_A4_V2" - username = "acctestusrvm" - password = "AccTestvdSC4daf986!" 
- } - } - - monitor { - log_analytics_workspace_id = azurerm_log_analytics_workspace.test.workspace_id - primary_key = azurerm_log_analytics_workspace.test.primary_shared_key - } -} -`, template, data.RandomString, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/hdinsight/validate/cluster_id.go b/azurerm/internal/services/hdinsight/validate/cluster_id.go new file mode 100644 index 000000000000..6d38e6128d33 --- /dev/null +++ b/azurerm/internal/services/hdinsight/validate/cluster_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hdinsight/parse" +) + +func ClusterID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ClusterID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/hdinsight/validate/cluster_id_test.go b/azurerm/internal/services/hdinsight/validate/cluster_id_test.go new file mode 100644 index 000000000000..51eead56b3be --- /dev/null +++ b/azurerm/internal/services/hdinsight/validate/cluster_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestClusterID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.HDInsight/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.HDInsight/clusters/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.HDInsight/clusters/cluster1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.HDINSIGHT/CLUSTERS/CLUSTER1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ClusterID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/healthcare/healthcare_service_data_source.go b/azurerm/internal/services/healthcare/healthcare_service_data_source.go index c8984a588f7c..ff8e47f0ecd4 100644 --- a/azurerm/internal/services/healthcare/healthcare_service_data_source.go +++ b/azurerm/internal/services/healthcare/healthcare_service_data_source.go @@ -13,9 +13,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmHealthcareService() *schema.Resource { +func 
dataSourceHealthcareService() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmHealthcareServiceRead, + Read: dataSourceHealthcareServiceRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -114,7 +114,7 @@ func dataSourceArmHealthcareService() *schema.Resource { } } -func dataSourceArmHealthcareServiceRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceHealthcareServiceRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).HealthCare.HealthcareServiceClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/healthcare/healthcare_service_data_source_test.go b/azurerm/internal/services/healthcare/healthcare_service_data_source_test.go new file mode 100644 index 000000000000..f8b2d95fa0a6 --- /dev/null +++ b/azurerm/internal/services/healthcare/healthcare_service_data_source_test.go @@ -0,0 +1,43 @@ +package healthcare_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type HealthCareServiceDataSource struct { +} + +func TestAccHealthCareServiceDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_healthcare_service", "test") + r := HealthCareServiceDataSource{} + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("name").Exists(), + check.That(data.ResourceName).Key("location").Exists(), + check.That(data.ResourceName).Key("resource_group_name").Exists(), + check.That(data.ResourceName).Key("kind").Exists(), + check.That(data.ResourceName).Key("cosmosdb_throughput").Exists(), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), + }, + }) +} + +func (HealthCareServiceDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_healthcare_service" "test" { + name = azurerm_healthcare_service.test.name + resource_group_name = azurerm_healthcare_service.test.resource_group_name + location = azurerm_resource_group.test.location +} +`, HealthCareServiceResource{}.basic(data)) +} diff --git a/azurerm/internal/services/healthcare/healthcare_service_resource.go b/azurerm/internal/services/healthcare/healthcare_service_resource.go index c1732076b2f6..6945bb32e8ef 100644 --- a/azurerm/internal/services/healthcare/healthcare_service_resource.go +++ b/azurerm/internal/services/healthcare/healthcare_service_resource.go @@ -18,12 +18,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmHealthcareService() *schema.Resource { +func resourceHealthcareService() *schema.Resource { return &schema.Resource{ - Create: resourceArmHealthcareServiceCreateUpdate, - Read: resourceArmHealthcareServiceRead, - Update: resourceArmHealthcareServiceCreateUpdate, - Delete: resourceArmHealthcareServiceDelete, + Create: resourceHealthcareServiceCreateUpdate, + Read: resourceHealthcareServiceRead, + Update: resourceHealthcareServiceCreateUpdate, + Delete: resourceHealthcareServiceDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -33,7 +33,7 @@ func resourceArmHealthcareService() *schema.Resource { }, Importer: 
azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.HealthcareServiceID(id) + _, err := parse.ServiceID(id) return err }), @@ -137,7 +137,8 @@ func resourceArmHealthcareService() *schema.Resource { "MERGE", "POST", "OPTIONS", - "PUT"}, false), + "PUT", + }, false), }, }, "max_age_in_seconds": { @@ -158,7 +159,7 @@ func resourceArmHealthcareService() *schema.Resource { } } -func resourceArmHealthcareServiceCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceHealthcareServiceCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).HealthCare.HealthcareServiceClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -220,15 +221,15 @@ func resourceArmHealthcareServiceCreateUpdate(d *schema.ResourceData, meta inter d.SetId(*read.ID) - return resourceArmHealthcareServiceRead(d, meta) + return resourceHealthcareServiceRead(d, meta) } -func resourceArmHealthcareServiceRead(d *schema.ResourceData, meta interface{}) error { +func resourceHealthcareServiceRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).HealthCare.HealthcareServiceClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.HealthcareServiceID(d.Id()) + id, err := parse.ServiceID(d.Id()) if err != nil { return err } @@ -276,12 +277,12 @@ func resourceArmHealthcareServiceRead(d *schema.ResourceData, meta interface{}) return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmHealthcareServiceDelete(d *schema.ResourceData, meta interface{}) error { +func resourceHealthcareServiceDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).HealthCare.HealthcareServiceClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.HealthcareServiceID(d.Id()) + id, err := parse.ServiceID(d.Id()) if err != nil { return fmt.Errorf("Error Parsing Azure Resource ID: %+v", err) } diff --git a/azurerm/internal/services/healthcare/healthcare_service_resource_test.go b/azurerm/internal/services/healthcare/healthcare_service_resource_test.go new file mode 100644 index 000000000000..d2b76cb6c360 --- /dev/null +++ b/azurerm/internal/services/healthcare/healthcare_service_resource_test.go @@ -0,0 +1,170 @@ +package healthcare_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/healthcare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type HealthCareServiceResource struct { +} + +func TestAccHealthCareService_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_healthcare_service", "test") + r := HealthCareServiceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccHealthCareService_requiresImport(t *testing.T) { + data := 
acceptance.BuildTestData(t, "azurerm_healthcare_service", "test") + r := HealthCareServiceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccHealthCareService_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_healthcare_service", "test") + r := HealthCareServiceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (HealthCareServiceResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ServiceID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.HealthCare.HealthcareServiceClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Healthcare service %q (resource group: %q): %+v", id.Name, id.ResourceGroup, err) + } + + return utils.Bool(resp.Properties != nil), nil +} + +func (HealthCareServiceResource) basic(data acceptance.TestData) string { + // currently only supported in "ukwest", "northcentralus", "westus2". + location := "westus2" + + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-health-%d" + location = "%s" +} + +resource "azurerm_healthcare_service" "test" { + name = "testacc%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + access_policy_object_ids = [ + data.azurerm_client_config.current.object_id, + ] +} +`, data.RandomInteger, location, data.RandomIntOfLength(17)) // name can only be 24 chars long +} + +func (r HealthCareServiceResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_healthcare_service" "import" { + name = azurerm_healthcare_service.test.name + location = azurerm_healthcare_service.test.location + resource_group_name = azurerm_healthcare_service.test.resource_group_name + + access_policy_object_ids = [ + "${data.azurerm_client_config.current.object_id}", + ] +} +`, r.basic(data)) +} + +func (HealthCareServiceResource) complete(data acceptance.TestData) string { + // currently only supported in "ukwest", "northcentralus", "westus2". 
+ location := "westus2" + + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-health-%d" + location = "%s" +} + +resource "azurerm_healthcare_service" "test" { + name = "testacc%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + tags = { + environment = "production" + purpose = "AcceptanceTests" + } + + access_policy_object_ids = [ + data.azurerm_client_config.current.object_id, + ] + + authentication_configuration { + authority = "https://login.microsoftonline.com/${data.azurerm_client_config.current.tenant_id}" + audience = "https://azurehealthcareapis.com" + smart_proxy_enabled = true + } + + cors_configuration { + allowed_origins = ["http://www.example.com", "http://www.example2.com"] + allowed_headers = ["*"] + allowed_methods = ["GET", "PUT"] + max_age_in_seconds = 500 + allow_credentials = true + } +} +`, data.RandomInteger, location, data.RandomIntOfLength(17)) // name can only be 24 chars long +} diff --git a/azurerm/internal/services/healthcare/parse/healthcare_service.go b/azurerm/internal/services/healthcare/parse/healthcare_service.go deleted file mode 100644 index 13c296234195..000000000000 --- a/azurerm/internal/services/healthcare/parse/healthcare_service.go +++ /dev/null @@ -1,33 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type HealthcareServiceId struct { - ResourceGroup string - Name string -} - -func HealthcareServiceID(input string) (*HealthcareServiceId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Healthcare Service ID %q: %+v", input, err) - } - - service := HealthcareServiceId{ - ResourceGroup: id.ResourceGroup, - } - - if service.Name, err = id.PopSegment("services"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &service, nil -} diff --git a/azurerm/internal/services/healthcare/parse/healthcare_service_test.go b/azurerm/internal/services/healthcare/parse/healthcare_service_test.go deleted file mode 100644 index 25168f87b146..000000000000 --- a/azurerm/internal/services/healthcare/parse/healthcare_service_test.go +++ /dev/null @@ -1,73 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestHealthcareServiceId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *HealthcareServiceId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Service Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.HealthcareApis/services/", - Expected: nil, - }, - { - Name: "Healthcare Service ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.HealthcareApis/services/Service1", - Expected: &HealthcareServiceId{ - Name: "Service1", - ResourceGroup: "resGroup1", - }, - 
}, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.HealthcareApis/Services/Service1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := HealthcareServiceID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/healthcare/parse/service.go b/azurerm/internal/services/healthcare/parse/service.go new file mode 100644 index 000000000000..2d99adfe06aa --- /dev/null +++ b/azurerm/internal/services/healthcare/parse/service.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ServiceId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewServiceID(subscriptionId, resourceGroup, name string) ServiceId { + return ServiceId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id ServiceId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Service", segmentsStr) +} + +func (id ServiceId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.HealthcareApis/services/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// ServiceID parses a Service ID into an ServiceId struct +func ServiceID(input string) (*ServiceId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ServiceId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("services"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/healthcare/parse/service_test.go b/azurerm/internal/services/healthcare/parse/service_test.go new file mode 100644 index 000000000000..68563276c3c4 --- /dev/null +++ b/azurerm/internal/services/healthcare/parse/service_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ServiceId{} + +func TestServiceIDFormatter(t *testing.T) { + actual := NewServiceID("12345678-1234-9876-4563-123456789012", "group1", "service1").ID() + expected := 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.HealthcareApis/services/service1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestServiceID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ServiceId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.HealthcareApis/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.HealthcareApis/services/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.HealthcareApis/services/service1", + Expected: &ServiceId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "group1", + Name: "service1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.HEALTHCAREAPIS/SERVICES/SERVICE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ServiceID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/healthcare/registration.go b/azurerm/internal/services/healthcare/registration.go index d1f842d84a38..61a139007573 100644 --- a/azurerm/internal/services/healthcare/registration.go +++ b/azurerm/internal/services/healthcare/registration.go @@ -21,13 +21,13 @@ func (r Registration) WebsiteCategories() []string { // SupportedDataSources returns the supported Data Sources supported by this Service func (r Registration) SupportedDataSources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_healthcare_service": dataSourceArmHealthcareService(), + "azurerm_healthcare_service": dataSourceHealthcareService(), } } // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_healthcare_service": resourceArmHealthcareService(), + "azurerm_healthcare_service": resourceHealthcareService(), } } diff --git a/azurerm/internal/services/healthcare/resourceids.go b/azurerm/internal/services/healthcare/resourceids.go new file mode 100644 index 000000000000..1b5375fbea28 
--- /dev/null +++ b/azurerm/internal/services/healthcare/resourceids.go @@ -0,0 +1,3 @@ +package healthcare + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Service -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.HealthcareApis/services/service1 diff --git a/azurerm/internal/services/healthcare/tests/healthcare_service_data_source_test.go b/azurerm/internal/services/healthcare/tests/healthcare_service_data_source_test.go deleted file mode 100644 index 6d804288d947..000000000000 --- a/azurerm/internal/services/healthcare/tests/healthcare_service_data_source_test.go +++ /dev/null @@ -1,44 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccAzureRMDataSourceHealthCareService_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_healthcare_service", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHealthCareServiceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataSourceHealthcareService_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "location"), - resource.TestCheckResourceAttrSet(data.ResourceName, "resource_group_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "kind"), - resource.TestCheckResourceAttrSet(data.ResourceName, "cosmosdb_throughput"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - }, - }) -} - -func testAccAzureRMDataSourceHealthcareService_basic(data acceptance.TestData) string { - resource := testAccAzureRMHealthCareService_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_healthcare_service" "test" { - name = azurerm_healthcare_service.test.name - resource_group_name = azurerm_healthcare_service.test.resource_group_name - location = azurerm_resource_group.test.location -} -`, resource) -} diff --git a/azurerm/internal/services/healthcare/tests/healthcare_service_resource_test.go b/azurerm/internal/services/healthcare/tests/healthcare_service_resource_test.go deleted file mode 100644 index 58c2f7edbd99..000000000000 --- a/azurerm/internal/services/healthcare/tests/healthcare_service_resource_test.go +++ /dev/null @@ -1,219 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/healthcare/parse" -) - -func TestAccAzureRMHealthCareService_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_healthcare_service", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHealthCareServiceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHealthCareService_basic(data), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMHealthCareServiceExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMHealthCareService_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_healthcare_service", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHealthCareServiceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHealthCareService_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHealthCareServiceExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMHealthCareService_requiresImport), - }, - }) -} - -func TestAccAzureRMHealthCareService_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_healthcare_service", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHealthCareServiceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHealthCareService_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHealthCareServiceExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMHealthCareServiceExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).HealthCare.HealthcareServiceClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.HealthcareServiceID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.Name) - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Healthcare service %q (resource group: %q) does not exist", id.Name, id.ResourceGroup) - } - - return fmt.Errorf("Bad: Get on healthcareServiceClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMHealthCareServiceDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).HealthCare.HealthcareServiceClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_healthcare_service" { - continue - } - - id, err := parse.HealthcareServiceID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.Name) - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("HealthCare Service still exists:\n%#v", resp.Status) - } - } - - return nil -} - -func testAccAzureRMHealthCareService_basic(data acceptance.TestData) string { - // currently only supported in "ukwest", "northcentralus", "westus2". 
- location := "westus2" - - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-health-%d" - location = "%s" -} - -resource "azurerm_healthcare_service" "test" { - name = "testacc%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - access_policy_object_ids = [ - data.azurerm_client_config.current.object_id, - ] -} -`, data.RandomInteger, location, data.RandomIntOfLength(17)) // name can only be 24 chars long -} - -func testAccAzureRMHealthCareService_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMHealthCareService_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_healthcare_service" "import" { - name = azurerm_healthcare_service.test.name - location = azurerm_healthcare_service.test.location - resource_group_name = azurerm_healthcare_service.test.resource_group_name - - access_policy_object_ids = [ - "${data.azurerm_client_config.current.object_id}", - ] -} -`, template) -} - -func testAccAzureRMHealthCareService_complete(data acceptance.TestData) string { - // currently only supported in "ukwest", "northcentralus", "westus2". - location := "westus2" - - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-health-%d" - location = "%s" -} - -resource "azurerm_healthcare_service" "test" { - name = "testacc%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - tags = { - environment = "production" - purpose = "AcceptanceTests" - } - - access_policy_object_ids = [ - data.azurerm_client_config.current.object_id, - ] - - authentication_configuration { - authority = "https://login.microsoftonline.com/${data.azurerm_client_config.current.tenant_id}" - audience = "https://azurehealthcareapis.com" - smart_proxy_enabled = true - } - - cors_configuration { - allowed_origins = ["http://www.example.com", "http://www.example2.com"] - allowed_headers = ["*"] - allowed_methods = ["GET", "PUT"] - max_age_in_seconds = 500 - allow_credentials = true - } -} -`, data.RandomInteger, location, data.RandomIntOfLength(17)) // name can only be 24 chars long -} diff --git a/azurerm/internal/services/healthcare/validate/service_id.go b/azurerm/internal/services/healthcare/validate/service_id.go new file mode 100644 index 000000000000..88f28918efd0 --- /dev/null +++ b/azurerm/internal/services/healthcare/validate/service_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/healthcare/parse" +) + +func ServiceID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ServiceID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/healthcare/validate/service_id_test.go b/azurerm/internal/services/healthcare/validate/service_id_test.go new file mode 100644 index 000000000000..ad785e3391e0 --- /dev/null +++ b/azurerm/internal/services/healthcare/validate/service_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is 
generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestServiceID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.HealthcareApis/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.HealthcareApis/services/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.HealthcareApis/services/service1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.HEALTHCAREAPIS/SERVICES/SERVICE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ServiceID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/hpccache/hpc_cache_blob_target_resource.go b/azurerm/internal/services/hpccache/hpc_cache_blob_target_resource.go new file mode 100644 index 000000000000..58f9aa048fff --- /dev/null +++ b/azurerm/internal/services/hpccache/hpc_cache_blob_target_resource.go @@ -0,0 +1,209 @@ +package hpccache + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/storagecache/mgmt/2020-03-01/storagecache" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hpccache/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hpccache/validate" + storageValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/storage/validate" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceHPCCacheBlobTarget() *schema.Resource { + return &schema.Resource{ + Create: resourceHPCCacheBlobTargetCreateOrUpdate, + Update: resourceHPCCacheBlobTargetCreateOrUpdate, + Read: resourceHPCCacheBlobTargetRead, + Delete: resourceHPCCacheBlobTargetDelete, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.StorageTargetID(id) + return err + }), + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: 
schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.StorageTargetName, + }, + + "cache_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "namespace_path": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.CacheNamespacePath, + }, + + "storage_container_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: storageValidate.StorageContainerResourceManagerID, + }, + }, + } +} + +func resourceHPCCacheBlobTargetCreateOrUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).HPCCache.StorageTargetsClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + log.Printf("[INFO] preparing arguments for Azure HPC Cache Blob Target creation.") + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + cache := d.Get("cache_name").(string) + + if d.IsNewResource() { + resp, err := client.Get(ctx, resourceGroup, cache, name) + if err != nil { + if !utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Error checking for existing HPC Cache Blob Target %q (Resource Group %q): %+v", name, resourceGroup, err) + } + } + + if !utils.ResponseWasNotFound(resp.Response) { + return tf.ImportAsExistsError("azurerm_hpc_cache_blob_target", *resp.ID) + } + } + + namespacePath := d.Get("namespace_path").(string) + containerId := d.Get("storage_container_id").(string) + + // Construct parameters + namespaceJunction := []storagecache.NamespaceJunction{ + { + NamespacePath: &namespacePath, + TargetPath: utils.String("/"), + }, + } + param := &storagecache.StorageTarget{ + BasicStorageTargetProperties: &storagecache.ClfsTargetProperties{ + Junctions: &namespaceJunction, + TargetType: storagecache.TargetTypeClfs, + Clfs: &storagecache.ClfsTarget{ + Target: utils.String(containerId), + }, + }, + } + + future, err := client.CreateOrUpdate(ctx, resourceGroup, cache, name, param) + if err != nil { + return fmt.Errorf("Error creating HPC Cache Blob Target %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error waiting for creation of HPC Cache Blob Target %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + read, err := client.Get(ctx, resourceGroup, cache, name) + if err != nil { + return fmt.Errorf("Error retrieving HPC Cache Blob Target %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + if read.ID == nil { + return fmt.Errorf("Error retrieving HPC Cache Blob Target %q (Resource Group %q): `id` was nil", name, resourceGroup) + } + + d.SetId(*read.ID) + + return resourceHPCCacheBlobTargetRead(d, meta) +} + +func resourceHPCCacheBlobTargetRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).HPCCache.StorageTargetsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.StorageTargetID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.CacheName, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) 
{ + log.Printf("[DEBUG] HPC Cache Blob Target %q was not found (Resource Group %q, Cache %q) - removing from state!", id.Name, id.ResourceGroup, id.CacheName) + d.SetId("") + return nil + } + + return fmt.Errorf("Error retrieving HPC Cache Blob Target %q (Resource Group %q, Cache %q): %+v", id.Name, id.ResourceGroup, id.CacheName, err) + } + + d.Set("name", id.Name) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("cache_name", id.CacheName) + + if props := resp.BasicStorageTargetProperties; props != nil { + props, ok := props.AsClfsTargetProperties() + if !ok { + return fmt.Errorf("The type of this HPC Cache Target %q (Resource Group %q, Cahe %q) is not a Blob Target", id.Name, id.ResourceGroup, id.CacheName) + } + + storageContainerId := "" + if props.Clfs != nil && props.Clfs.Target != nil { + storageContainerId = *props.Clfs.Target + } + d.Set("storage_container_id", storageContainerId) + + namespacePath := "" + // There is only one namespace path allowed for blob container storage target, + // which maps to the root path of it. + if props.Junctions != nil && len(*props.Junctions) == 1 && (*props.Junctions)[0].NamespacePath != nil { + namespacePath = *(*props.Junctions)[0].NamespacePath + } + d.Set("namespace_path", namespacePath) + } + + return nil +} + +func resourceHPCCacheBlobTargetDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).HPCCache.StorageTargetsClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.StorageTargetID(d.Id()) + if err != nil { + return err + } + + future, err := client.Delete(ctx, id.ResourceGroup, id.CacheName, id.Name) + if err != nil { + return fmt.Errorf("Error deleting HPC Cache Blob Target %q (Resource Group %q, Cahe %q): %+v", id.Name, id.ResourceGroup, id.CacheName, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error waiting for deletion of HPC Cache Blob Target %q (Resource Group %q, Cahe %q): %+v", id.Name, id.ResourceGroup, id.CacheName, err) + } + + return nil +} diff --git a/azurerm/internal/services/hpccache/hpc_cache_blob_target_resource_test.go b/azurerm/internal/services/hpccache/hpc_cache_blob_target_resource_test.go new file mode 100644 index 000000000000..cd65404ef4d9 --- /dev/null +++ b/azurerm/internal/services/hpccache/hpc_cache_blob_target_resource_test.go @@ -0,0 +1,191 @@ +package hpccache_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hpccache/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type HPCCacheBlobTargetResource struct { +} + +func TestAccHPCCacheBlobTarget_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hpc_cache_blob_target", "test") + r := HPCCacheBlobTargetResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccHPCCacheBlobTarget_update(t 
*testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hpc_cache_blob_target", "test") + r := HPCCacheBlobTargetResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.namespace(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccHPCCacheBlobTarget_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hpc_cache_blob_target", "test") + r := HPCCacheBlobTargetResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (HPCCacheBlobTargetResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.StorageTargetID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.HPCCache.StorageTargetsClient.Get(ctx, id.ResourceGroup, id.CacheName, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving HPC Cache Blob Target (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.BasicStorageTargetProperties != nil), nil +} + +func (r HPCCacheBlobTargetResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hpc_cache_blob_target" "test" { + name = "acctest-HPCCTGT-%s" + resource_group_name = azurerm_resource_group.test.name + cache_name = azurerm_hpc_cache.test.name + storage_container_id = azurerm_storage_container.test.resource_manager_id + namespace_path = "/blob_storage1" +} +`, r.template(data), data.RandomString) +} + +func (r HPCCacheBlobTargetResource) namespace(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hpc_cache_blob_target" "test" { + name = "acctest-HPCCTGT-%s" + resource_group_name = azurerm_resource_group.test.name + cache_name = azurerm_hpc_cache.test.name + storage_container_id = azurerm_storage_container.test.resource_manager_id + namespace_path = "/blob_storage2" +} +`, r.template(data), data.RandomString) +} + +func (r HPCCacheBlobTargetResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hpc_cache_blob_target" "import" { + name = azurerm_hpc_cache_blob_target.test.name + resource_group_name = azurerm_hpc_cache_blob_target.test.resource_group_name + cache_name = azurerm_hpc_cache_blob_target.test.cache_name + storage_container_id = azurerm_hpc_cache_blob_target.test.storage_container_id + namespace_path = azurerm_hpc_cache_blob_target.test.namespace_path +} +`, r.basic(data)) +} + +func (HPCCacheBlobTargetResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-storage-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctest-VN-%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctestsub-%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" +} + +resource 
"azurerm_hpc_cache" "test" { + name = "acctest-HPCC-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cache_size_in_gb = 3072 + subnet_id = azurerm_subnet.test.id + sku_name = "Standard_2G" +} + +data "azuread_service_principal" "test" { + display_name = "HPC Cache Resource Provider" +} + +resource "azurerm_storage_account" "test" { + name = "accteststorgacc%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_container" "test" { + name = "acctest-strgctn-%s" + storage_account_name = azurerm_storage_account.test.name +} + +resource "azurerm_role_assignment" "test_storage_account_contrib" { + scope = azurerm_storage_account.test.id + role_definition_name = "Storage Account Contributor" + principal_id = data.azuread_service_principal.test.object_id +} + +resource "azurerm_role_assignment" "test_storage_blob_data_contrib" { + scope = azurerm_storage_account.test.id + role_definition_name = "Storage Blob Data Contributor" + principal_id = data.azuread_service_principal.test.object_id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomString, data.RandomString) +} diff --git a/azurerm/internal/services/hpccache/hpc_cache_nfs_target_resource.go b/azurerm/internal/services/hpccache/hpc_cache_nfs_target_resource.go new file mode 100644 index 000000000000..09e78dd7ea52 --- /dev/null +++ b/azurerm/internal/services/hpccache/hpc_cache_nfs_target_resource.go @@ -0,0 +1,272 @@ +package hpccache + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/storagecache/mgmt/2020-03-01/storagecache" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hpccache/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hpccache/validate" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceHPCCacheNFSTarget() *schema.Resource { + return &schema.Resource{ + Create: resourceHPCCacheNFSTargetCreateOrUpdate, + Update: resourceHPCCacheNFSTargetCreateOrUpdate, + Read: resourceHPCCacheNFSTargetRead, + Delete: resourceHPCCacheNFSTargetDelete, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.StorageTargetID(id) + return err + }), + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.StorageTargetName, + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + 
"cache_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "namespace_junction": { + Type: schema.TypeSet, + Required: true, + MinItems: 1, + // Confirmed with service team that they have a mac of 10 that is enforced by the backend. + MaxItems: 10, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "namespace_path": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.CacheNamespacePath, + }, + "nfs_export": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.CacheNFSExport, + }, + "target_path": { + Type: schema.TypeString, + Optional: true, + Default: "", + ValidateFunc: validate.CacheNFSTargetPath, + }, + }, + }, + }, + + "target_host_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "usage_model": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{ + "READ_HEAVY_INFREQ", + "WRITE_WORKLOAD_15", + "WRITE_AROUND", + }, false), + }, + }, + } +} + +func resourceHPCCacheNFSTargetCreateOrUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).HPCCache.StorageTargetsClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + log.Printf("[INFO] preparing arguments for Azure HPC Cache NFS Target creation.") + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + cache := d.Get("cache_name").(string) + + if d.IsNewResource() { + resp, err := client.Get(ctx, resourceGroup, cache, name) + if err != nil { + if !utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Error checking for existing HPC Cache NFS Target %q (Resource Group %q): %+v", name, resourceGroup, err) + } + } + + if !utils.ResponseWasNotFound(resp.Response) { + return tf.ImportAsExistsError("azurerm_hpc_cache_nfs_target", *resp.ID) + } + } + + // Construct parameters + param := &storagecache.StorageTarget{ + BasicStorageTargetProperties: &storagecache.Nfs3TargetProperties{ + Junctions: expandNamespaceJunctions(d.Get("namespace_junction").(*schema.Set).List()), + TargetType: storagecache.TargetTypeNfs3, + Nfs3: &storagecache.Nfs3Target{ + Target: utils.String(d.Get("target_host_name").(string)), + UsageModel: utils.String(d.Get("usage_model").(string)), + }, + }, + } + + future, err := client.CreateOrUpdate(ctx, resourceGroup, cache, name, param) + if err != nil { + return fmt.Errorf("Error creating HPC Cache NFS Target %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error waiting for creation of HPC Cache NFS Target %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + read, err := client.Get(ctx, resourceGroup, cache, name) + if err != nil { + return fmt.Errorf("Error retrieving HPC Cache NFS Target %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + if read.ID == nil { + return fmt.Errorf("Error retrieving HPC Cache NFS Target %q (Resource Group %q): `id` was nil", name, resourceGroup) + } + + d.SetId(*read.ID) + + return resourceHPCCacheNFSTargetRead(d, meta) +} + +func resourceHPCCacheNFSTargetRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).HPCCache.StorageTargetsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := 
parse.StorageTargetID(d.Id())
+ if err != nil {
+ return err
+ }
+
+ resp, err := client.Get(ctx, id.ResourceGroup, id.CacheName, id.Name)
+ if err != nil {
+ if utils.ResponseWasNotFound(resp.Response) {
+ log.Printf("[DEBUG] HPC Cache NFS Target %q was not found (Resource Group %q, Cache %q) - removing from state!", id.Name, id.ResourceGroup, id.CacheName)
+ d.SetId("")
+ return nil
+ }
+ return fmt.Errorf("Error retrieving HPC Cache NFS Target %q (Resource Group %q, Cache %q): %+v", id.Name, id.ResourceGroup, id.CacheName, err)
+ }
+
+ d.Set("name", id.Name)
+ d.Set("resource_group_name", id.ResourceGroup)
+ d.Set("cache_name", id.CacheName)
+
+ if props := resp.BasicStorageTargetProperties; props != nil {
+ props, ok := props.AsNfs3TargetProperties()
+ if !ok {
+ return fmt.Errorf("The type of this HPC Cache Target %q (Resource Group %q, Cache %q) is not an NFS Target", id.Name, id.ResourceGroup, id.CacheName)
+ }
+ if nfs3 := props.Nfs3; nfs3 != nil {
+ d.Set("target_host_name", nfs3.Target)
+ d.Set("usage_model", nfs3.UsageModel)
+ }
+ if err := d.Set("namespace_junction", flattenNamespaceJunctions(props.Junctions)); err != nil {
+ return fmt.Errorf(`Error setting "namespace_junction" %q (Resource Group %q, Cache %q): %+v`, id.Name, id.ResourceGroup, id.CacheName, err)
+ }
+ }
+
+ return nil
+}
+
+func resourceHPCCacheNFSTargetDelete(d *schema.ResourceData, meta interface{}) error {
+ client := meta.(*clients.Client).HPCCache.StorageTargetsClient
+ ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d)
+ defer cancel()
+
+ id, err := parse.StorageTargetID(d.Id())
+ if err != nil {
+ return err
+ }
+
+ future, err := client.Delete(ctx, id.ResourceGroup, id.CacheName, id.Name)
+ if err != nil {
+ return fmt.Errorf("Error deleting HPC Cache NFS Target %q (Resource Group %q, Cache %q): %+v", id.Name, id.ResourceGroup, id.CacheName, err)
+ }
+
+ if err = future.WaitForCompletionRef(ctx, client.Client); err != nil {
+ return fmt.Errorf("Error waiting for deletion of HPC Cache NFS Target %q (Resource Group %q, Cache %q): %+v", id.Name, id.ResourceGroup, id.CacheName, err)
+ }
+
+ return nil
+}
+
+func expandNamespaceJunctions(input []interface{}) *[]storagecache.NamespaceJunction {
+ result := make([]storagecache.NamespaceJunction, 0)
+
+ for _, v := range input {
+ b := v.(map[string]interface{})
+ result = append(result, storagecache.NamespaceJunction{
+ NamespacePath: utils.String(b["namespace_path"].(string)),
+ NfsExport: utils.String(b["nfs_export"].(string)),
+ TargetPath: utils.String(b["target_path"].(string)),
+ })
+ }
+
+ return &result
+}
+
+func flattenNamespaceJunctions(input *[]storagecache.NamespaceJunction) []interface{} {
+ if input == nil {
+ return []interface{}{}
+ }
+
+ output := make([]interface{}, 0)
+
+ for _, e := range *input {
+ namespacePath := ""
+ if v := e.NamespacePath; v != nil {
+ namespacePath = *v
+ }
+
+ nfsExport := ""
+ if v := e.NfsExport; v != nil {
+ nfsExport = *v
+ }
+
+ targetPath := ""
+ if v := e.TargetPath; v != nil {
+ targetPath = *v
+ }
+
+ output = append(output, map[string]interface{}{
+ "namespace_path": namespacePath,
+ "nfs_export": nfsExport,
+ "target_path": targetPath,
+ })
+ }
+
+ return output
+}
diff --git a/azurerm/internal/services/hpccache/hpc_cache_nfs_target_resource_test.go b/azurerm/internal/services/hpccache/hpc_cache_nfs_target_resource_test.go
new file mode 100644
index 000000000000..5605832299bf
--- /dev/null
+++ b/azurerm/internal/services/hpccache/hpc_cache_nfs_target_resource_test.go
@@ -0,0 +1,279 @@
+package hpccache_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hpccache/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type HPCCacheNFSTargetResource struct { +} + +func TestAccHPCCacheNFSTarget_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hpc_cache_nfs_target", "test") + r := HPCCacheNFSTargetResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccHPCCacheNFSTarget_usageModel(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hpc_cache_nfs_target", "test") + r := HPCCacheNFSTargetResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.usageModel(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccHPCCacheNFSTarget_namespaceJunction(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hpc_cache_nfs_target", "test") + r := HPCCacheNFSTargetResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.namespaceJunction(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccHPCCacheNFSTarget_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hpc_cache_nfs_target", "test") + r := HPCCacheNFSTargetResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (HPCCacheNFSTargetResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.StorageTargetID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.HPCCache.StorageTargetsClient.Get(ctx, id.ResourceGroup, id.CacheName, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving HPC Cache NFS Target (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.BasicStorageTargetProperties != nil), nil +} + +func (r HPCCacheNFSTargetResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hpc_cache_nfs_target" "test" { + name = 
"acctest-HPCCTGT-%s" + resource_group_name = azurerm_resource_group.test.name + cache_name = azurerm_hpc_cache.test.name + target_host_name = azurerm_linux_virtual_machine.test.private_ip_address + usage_model = "READ_HEAVY_INFREQ" + namespace_junction { + namespace_path = "/nfs/a1" + nfs_export = "/export/a" + target_path = "1" + } + namespace_junction { + namespace_path = "/nfs/b" + nfs_export = "/export/b" + } +} +`, r.template(data), data.RandomString) +} + +func (r HPCCacheNFSTargetResource) usageModel(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hpc_cache_nfs_target" "test" { + name = "acctest-HPCCTGT-%s" + resource_group_name = azurerm_resource_group.test.name + cache_name = azurerm_hpc_cache.test.name + target_host_name = azurerm_linux_virtual_machine.test.private_ip_address + usage_model = "WRITE_WORKLOAD_15" + namespace_junction { + namespace_path = "/nfs/a1" + nfs_export = "/export/a" + target_path = "1" + } + namespace_junction { + namespace_path = "/nfs/b" + nfs_export = "/export/b" + } +} +`, r.template(data), data.RandomString) +} + +func (r HPCCacheNFSTargetResource) namespaceJunction(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hpc_cache_nfs_target" "test" { + name = "acctest-HPCCTGT-%s" + resource_group_name = azurerm_resource_group.test.name + cache_name = azurerm_hpc_cache.test.name + target_host_name = azurerm_linux_virtual_machine.test.private_ip_address + usage_model = "WRITE_WORKLOAD_15" + namespace_junction { + namespace_path = "/nfs/a" + nfs_export = "/export/a" + target_path = "" + } +} +`, r.template(data), data.RandomString) +} + +func (r HPCCacheNFSTargetResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hpc_cache_nfs_target" "import" { + name = azurerm_hpc_cache_nfs_target.test.name + resource_group_name = azurerm_hpc_cache_nfs_target.test.resource_group_name + cache_name = azurerm_hpc_cache_nfs_target.test.cache_name + target_host_name = azurerm_hpc_cache_nfs_target.test.target_host_name + usage_model = azurerm_hpc_cache_nfs_target.test.usage_model + namespace_junction { + namespace_path = "/nfs/a1" + nfs_export = "/export/a" + target_path = "1" + } + namespace_junction { + namespace_path = "/nfs/b" + nfs_export = "/export/b" + } +} +`, r.basic(data)) +} + +func (HPCCacheNFSTargetResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_subnet" "testvm" { + name = "acctest-sub-vm-%[2]s" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.3.0/24" +} + +resource "azurerm_network_interface" "test" { + name = "acctest-nic-%[2]s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + ip_configuration { + name = "internal" + subnet_id = azurerm_subnet.testvm.id + private_ip_address_allocation = "Dynamic" + } +} + +locals { + custom_data = < /etc/exports +/export/a *(rw,fsid=0,insecure,no_subtree_check,async) +/export/b *(rw,fsid=0,insecure,no_subtree_check,async) +EOF +systemctl start nfs-server +exportfs -arv +CUSTOM_DATA +} + +resource "azurerm_linux_virtual_machine" "test" { + name = "acctest-vm-%[2]s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + size = "Standard_F2" + admin_username = "adminuser" + admin_password = "P@$$w0rd1234!" 
+ disable_password_authentication = false + + network_interface_ids = [ + azurerm_network_interface.test.id, + ] + + os_disk { + caching = "ReadWrite" + storage_account_type = "Standard_LRS" + } + + source_image_reference { + publisher = "Canonical" + offer = "UbuntuServer" + sku = "18.04-LTS" + version = "latest" + } + + custom_data = base64encode(local.custom_data) +} + +`, HPCCacheResource{}.basic(data), data.RandomString) +} diff --git a/azurerm/internal/services/hpccache/hpc_cache_resource.go b/azurerm/internal/services/hpccache/hpc_cache_resource.go new file mode 100644 index 000000000000..83181e374b51 --- /dev/null +++ b/azurerm/internal/services/hpccache/hpc_cache_resource.go @@ -0,0 +1,239 @@ +package hpccache + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/storagecache/mgmt/2020-03-01/storagecache" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hpccache/parse" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceHPCCache() *schema.Resource { + return &schema.Resource{ + Create: resourceHPCCacheCreateOrUpdate, + Update: resourceHPCCacheCreateOrUpdate, + Read: resourceHPCCacheRead, + Delete: resourceHPCCacheDelete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.CacheID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "location": azure.SchemaLocation(), + + "cache_size_in_gb": { + Type: schema.TypeInt, + Required: true, + ForceNew: true, + ValidateFunc: validation.IntInSlice([]int{ + 3072, + 6144, + 12288, + 24576, + 49152, + }), + }, + + "subnet_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateResourceIDOrEmpty, + }, + + "sku_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringInSlice([]string{ + "Standard_2G", + "Standard_4G", + "Standard_8G", + }, false), + }, + + "mtu": { + Type: schema.TypeInt, + Optional: true, + Default: 1500, + ValidateFunc: validation.IntBetween(576, 1500), + }, + + "root_squash_enabled": { + Type: schema.TypeBool, + Optional: true, + // TODO 3.0: remove "Computed: true" and add "Default: true" + // The old resource has no consistent default for the rootSquash setting. In order not to + // break users, we intentionally mark this property as Computed. + // https://docs.microsoft.com/en-us/azure/hpc-cache/configuration#configure-root-squash. 
+ Computed: true, + }, + + "mount_addresses": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + }, + } +} + +func resourceHPCCacheCreateOrUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).HPCCache.CachesClient + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + log.Printf("[INFO] preparing arguments for Azure HPC Cache creation.") + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + + if d.IsNewResource() { + existing, err := client.Get(ctx, resourceGroup, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("Error checking for presence of existing HPC Cache %q (Resource Group %q): %s", name, resourceGroup, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_hpc_cache", *existing.ID) + } + } + + location := d.Get("location").(string) + cacheSize := d.Get("cache_size_in_gb").(int) + subnet := d.Get("subnet_id").(string) + skuName := d.Get("sku_name").(string) + rootSquash := d.Get("root_squash_enabled").(bool) + mtu := d.Get("mtu").(int) + + cache := &storagecache.Cache{ + Name: utils.String(name), + Location: utils.String(location), + CacheProperties: &storagecache.CacheProperties{ + CacheSizeGB: utils.Int32(int32(cacheSize)), + Subnet: utils.String(subnet), + NetworkSettings: &storagecache.CacheNetworkSettings{ + Mtu: utils.Int32(int32(mtu)), + }, + SecuritySettings: &storagecache.CacheSecuritySettings{ + RootSquash: &rootSquash, + }, + }, + Sku: &storagecache.CacheSku{ + Name: utils.String(skuName), + }, + } + + future, err := client.CreateOrUpdate(ctx, resourceGroup, name, cache) + if err != nil { + return fmt.Errorf("Error creating HPC Cache %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error waiting for HPC Cache %q (Resource Group %q) to finish provisioning: %+v", name, resourceGroup, err) + } + + resp, err := client.Get(ctx, resourceGroup, name) + if err != nil { + return fmt.Errorf("Error retrieving HPC Cache %q (Resource Group %q): %+v", name, resourceGroup, err) + } + if resp.ID == nil { + return fmt.Errorf("Cannot read ID for HPC Cache %q (Resource Group %q): %+v", name, resourceGroup, err) + } + d.SetId(*resp.ID) + + return resourceHPCCacheRead(d, meta) +} + +func resourceHPCCacheRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).HPCCache.CachesClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.CacheID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[DEBUG] HPC Cache %q was not found in Resource Group %q - removing from state!", id.Name, id.ResourceGroup) + d.SetId("") + return nil + } + + return fmt.Errorf("Error retrieving HPC Cache %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + d.Set("name", id.Name) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("location", resp.Location) + + if props := resp.CacheProperties; props != nil { + d.Set("cache_size_in_gb", props.CacheSizeGB) + d.Set("subnet_id", props.Subnet) + d.Set("mount_addresses", utils.FlattenStringSlice(props.MountAddresses)) + if props.NetworkSettings != nil { + d.Set("mtu", 
props.NetworkSettings.Mtu) + } + if props.SecuritySettings != nil { + d.Set("root_squash_enabled", props.SecuritySettings.RootSquash) + } + } + + if sku := resp.Sku; sku != nil { + d.Set("sku_name", sku.Name) + } + + return nil +} + +func resourceHPCCacheDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).HPCCache.CachesClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.CacheID(d.Id()) + if err != nil { + return err + } + + future, err := client.Delete(ctx, id.ResourceGroup, id.Name) + if err != nil { + return fmt.Errorf("Error deleting HPC Cache %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error waiting for deletion of HPC Cache %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + return nil +} diff --git a/azurerm/internal/services/hpccache/hpc_cache_resource_test.go b/azurerm/internal/services/hpccache/hpc_cache_resource_test.go new file mode 100644 index 000000000000..5745e1347b30 --- /dev/null +++ b/azurerm/internal/services/hpccache/hpc_cache_resource_test.go @@ -0,0 +1,215 @@ +package hpccache_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hpccache/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type HPCCacheResource struct { +} + +func TestAccHPCCache_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hpc_cache", "test") + r := HPCCacheResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("mount_addresses.#").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccHPCCache_mtu(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hpc_cache", "test") + r := HPCCacheResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.mtu(data, 1000), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("mount_addresses.#").Exists(), + ), + }, + data.ImportStep(), + { + Config: r.mtu(data, 1500), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("mount_addresses.#").Exists(), + ), + }, + data.ImportStep(), + { + Config: r.mtu(data, 1000), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("mount_addresses.#").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccHPCCache_rootSquash(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hpc_cache", "test") + r := HPCCacheResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.rootSquash(data, false), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + 
check.That(data.ResourceName).Key("mount_addresses.#").Exists(), + ), + }, + data.ImportStep(), + { + Config: r.rootSquash(data, true), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("mount_addresses.#").Exists(), + ), + }, + data.ImportStep(), + { + Config: r.rootSquash(data, true), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("mount_addresses.#").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccHPCCache_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_hpc_cache", "test") + r := HPCCacheResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (HPCCacheResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.CacheID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.HPCCache.CachesClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving HPC Cache (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.CacheProperties != nil), nil +} + +func (r HPCCacheResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hpc_cache" "test" { + name = "acctest-HPCC-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cache_size_in_gb = 3072 + subnet_id = azurerm_subnet.test.id + sku_name = "Standard_2G" +} +`, r.template(data), data.RandomInteger) +} + +func (r HPCCacheResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hpc_cache" "import" { + name = azurerm_hpc_cache.test.name + resource_group_name = azurerm_hpc_cache.test.resource_group_name + location = azurerm_hpc_cache.test.location + cache_size_in_gb = azurerm_hpc_cache.test.cache_size_in_gb + subnet_id = azurerm_hpc_cache.test.subnet_id + sku_name = azurerm_hpc_cache.test.sku_name +} +`, r.template(data)) +} + +func (r HPCCacheResource) mtu(data acceptance.TestData, mtu int) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hpc_cache" "test" { + name = "acctest-HPCC-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cache_size_in_gb = 3072 + subnet_id = azurerm_subnet.test.id + sku_name = "Standard_2G" + mtu = %d +} +`, r.template(data), data.RandomInteger, mtu) +} + +func (r HPCCacheResource) rootSquash(data acceptance.TestData, enable bool) string { + return fmt.Sprintf(` +%s + +resource "azurerm_hpc_cache" "test" { + name = "acctest-HPCC-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cache_size_in_gb = 3072 + subnet_id = azurerm_subnet.test.id + sku_name = "Standard_2G" + root_squash_enabled = %t +} +`, r.template(data), data.RandomInteger, enable) +} + +func (HPCCacheResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-storage-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctest-VN-%d" + address_space = ["10.0.0.0/16"] + location = 
azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctestsub-%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/hpccache/parse/cache.go b/azurerm/internal/services/hpccache/parse/cache.go new file mode 100644 index 000000000000..62bd803ee442 --- /dev/null +++ b/azurerm/internal/services/hpccache/parse/cache.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type CacheId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewCacheID(subscriptionId, resourceGroup, name string) CacheId { + return CacheId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id CacheId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Cache", segmentsStr) +} + +func (id CacheId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.StorageCache/caches/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// CacheID parses a Cache ID into an CacheId struct +func CacheID(input string) (*CacheId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := CacheId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("caches"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/hpccache/parse/cache_test.go b/azurerm/internal/services/hpccache/parse/cache_test.go new file mode 100644 index 000000000000..3c94e718449d --- /dev/null +++ b/azurerm/internal/services/hpccache/parse/cache_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = CacheId{} + +func TestCacheIDFormatter(t *testing.T) { + actual := NewCacheID("12345678-1234-9876-4563-123456789012", "group1", "cache1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.StorageCache/caches/cache1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestCacheID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *CacheId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", 
+ Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.StorageCache/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.StorageCache/caches/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.StorageCache/caches/cache1", + Expected: &CacheId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "group1", + Name: "cache1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.STORAGECACHE/CACHES/CACHE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := CacheID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/hpccache/parse/storage_target.go b/azurerm/internal/services/hpccache/parse/storage_target.go new file mode 100644 index 000000000000..d4d638d9e4ea --- /dev/null +++ b/azurerm/internal/services/hpccache/parse/storage_target.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type StorageTargetId struct { + SubscriptionId string + ResourceGroup string + CacheName string + Name string +} + +func NewStorageTargetID(subscriptionId, resourceGroup, cacheName, name string) StorageTargetId { + return StorageTargetId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + CacheName: cacheName, + Name: name, + } +} + +func (id StorageTargetId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Cache Name %q", id.CacheName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Storage Target", segmentsStr) +} + +func (id StorageTargetId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.StorageCache/caches/%s/storageTargets/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.CacheName, id.Name) +} + +// StorageTargetID parses a StorageTarget ID into an StorageTargetId struct +func StorageTargetID(input string) (*StorageTargetId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := StorageTargetId{ + SubscriptionId: 
id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.CacheName, err = id.PopSegment("caches"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("storageTargets"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/hpccache/parse/storage_target_test.go b/azurerm/internal/services/hpccache/parse/storage_target_test.go new file mode 100644 index 000000000000..2384da6af2d4 --- /dev/null +++ b/azurerm/internal/services/hpccache/parse/storage_target_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = StorageTargetId{} + +func TestStorageTargetIDFormatter(t *testing.T) { + actual := NewStorageTargetID("12345678-1234-9876-4563-123456789012", "group1", "cache1", "target1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.StorageCache/caches/cache1/storageTargets/target1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestStorageTargetID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *StorageTargetId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing CacheName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.StorageCache/", + Error: true, + }, + + { + // missing value for CacheName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.StorageCache/caches/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.StorageCache/caches/cache1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.StorageCache/caches/cache1/storageTargets/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.StorageCache/caches/cache1/storageTargets/target1", + Expected: &StorageTargetId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "group1", + CacheName: "cache1", + Name: "target1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.STORAGECACHE/CACHES/CACHE1/STORAGETARGETS/TARGET1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := 
StorageTargetID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.CacheName != v.Expected.CacheName { + t.Fatalf("Expected %q but got %q for CacheName", v.Expected.CacheName, actual.CacheName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/hpccache/parsers/hpc_cache.go b/azurerm/internal/services/hpccache/parsers/hpc_cache.go deleted file mode 100644 index 88f00af5c947..000000000000 --- a/azurerm/internal/services/hpccache/parsers/hpc_cache.go +++ /dev/null @@ -1,31 +0,0 @@ -package parsers - -import ( - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type HPCCacheID struct { - Name string - ResourceGroup string -} - -func ParseHPCCacheID(input string) (*HPCCacheID, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, err - } - - cache := HPCCacheID{ - ResourceGroup: id.ResourceGroup, - } - - if cache.Name, err = id.PopSegment("caches"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &cache, nil -} diff --git a/azurerm/internal/services/hpccache/parsers/hpc_cache_target.go b/azurerm/internal/services/hpccache/parsers/hpc_cache_target.go deleted file mode 100644 index d1d490de33d6..000000000000 --- a/azurerm/internal/services/hpccache/parsers/hpc_cache_target.go +++ /dev/null @@ -1,38 +0,0 @@ -package parsers - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type HPCCacheTargetId struct { - ResourceGroup string - Cache string - Name string -} - -func HPCCacheTargetID(input string) (*HPCCacheTargetId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse HPC Cache Target ID %q: %+v", input, err) - } - - target := HPCCacheTargetId{ - ResourceGroup: id.ResourceGroup, - } - - if target.Cache, err = id.PopSegment("caches"); err != nil { - return nil, err - } - - if target.Name, err = id.PopSegment("storageTargets"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &target, nil -} diff --git a/azurerm/internal/services/hpccache/parsers/hpc_cache_target_test.go b/azurerm/internal/services/hpccache/parsers/hpc_cache_target_test.go deleted file mode 100644 index 11a055cea256..000000000000 --- a/azurerm/internal/services/hpccache/parsers/hpc_cache_target_test.go +++ /dev/null @@ -1,89 +0,0 @@ -package parsers - -import ( - "testing" -) - -func TestHPCCacheTargetID(t *testing.T) { - testData := []struct { - Name string - Input string - Error bool - Expect *HPCCacheTargetId - }{ - { - Name: "Empty", - Input: "", - Error: true, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Error: true, - }, - { - Name: "No Resource Groups Value", - Input: 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Error: true, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/", - Error: true, - }, - { - Name: "Missing Cache Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.StorageCache/caches/", - Error: true, - }, - { - Name: "With Cache Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.StorageCache/caches/cache1", - Error: true, - }, - { - Name: "Missing Storage Target Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.StorageCache/caches/cache1/storageTargets", - Error: true, - }, - { - Name: "With Storage Target Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.StorageCache/caches/cache1/storageTargets/target1", - Expect: &HPCCacheTargetId{ - ResourceGroup: "resGroup1", - Cache: "cache1", - Name: "target1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.StorageCache/caches/cache1/StorageTargets/target1", - Error: true, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := HPCCacheTargetID(v.Input) - if err != nil { - if v.Error { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) - } - - if actual.Cache != v.Expect.Cache { - t.Fatalf("Expected %q but got %q for Cache", v.Expect.Cache, actual.Cache) - } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) - } - } -} diff --git a/azurerm/internal/services/hpccache/parsers/hpc_cache_test.go b/azurerm/internal/services/hpccache/parsers/hpc_cache_test.go deleted file mode 100644 index 955777a5e869..000000000000 --- a/azurerm/internal/services/hpccache/parsers/hpc_cache_test.go +++ /dev/null @@ -1,73 +0,0 @@ -package parsers - -import ( - "testing" -) - -func TestParseHPCCacheID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *HPCCacheID - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Cache Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.StorageCache/caches/", - Expected: nil, - }, - { - Name: "Cache ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.StorageCache/caches/mycache1", - Expected: &HPCCacheID{ - ResourceGroup: "resGroup1", - Name: "mycache1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.StorageCache/CACHES/mycache1", - Expected: nil, - }, - } - - for _, v := range 
testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := ParseHPCCacheID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/hpccache/registration.go b/azurerm/internal/services/hpccache/registration.go index 6aa6ea76a266..026b6808e723 100644 --- a/azurerm/internal/services/hpccache/registration.go +++ b/azurerm/internal/services/hpccache/registration.go @@ -26,8 +26,8 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource { // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_hpc_cache": resourceArmHPCCache(), - "azurerm_hpc_cache_blob_target": resourceArmHPCCacheBlobTarget(), - "azurerm_hpc_cache_nfs_target": resourceArmHPCCacheNFSTarget(), + "azurerm_hpc_cache": resourceHPCCache(), + "azurerm_hpc_cache_blob_target": resourceHPCCacheBlobTarget(), + "azurerm_hpc_cache_nfs_target": resourceHPCCacheNFSTarget(), } } diff --git a/azurerm/internal/services/hpccache/resource_arm_hpc_cache.go b/azurerm/internal/services/hpccache/resource_arm_hpc_cache.go deleted file mode 100644 index bf6a90871cda..000000000000 --- a/azurerm/internal/services/hpccache/resource_arm_hpc_cache.go +++ /dev/null @@ -1,239 +0,0 @@ -package hpccache - -import ( - "fmt" - "log" - "time" - - "github.com/Azure/azure-sdk-for-go/services/storagecache/mgmt/2020-03-01/storagecache" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hpccache/parsers" - azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmHPCCache() *schema.Resource { - return &schema.Resource{ - Create: resourceArmHPCCacheCreateOrUpdate, - Update: resourceArmHPCCacheCreateOrUpdate, - Read: resourceArmHPCCacheRead, - Delete: resourceArmHPCCacheDelete, - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parsers.ParseHPCCacheID(id) - return err - }), - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - - "resource_group_name": azure.SchemaResourceGroupName(), - - "location": azure.SchemaLocation(), - - "cache_size_in_gb": { - Type: schema.TypeInt, - Required: true, - 
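// Editor's note (illustrative sketch, not part of this diff): the generated
// parse.NewCacheID / parse.CacheID helpers added earlier in this change
// replace the hand-written parsers.ParseHPCCacheID / parsers.HPCCacheTargetID
// removed in this change. A minimal, self-contained usage sketch, assuming
// only those generated signatures; the subscription, group and cache names
// below are placeholders.
package main

import (
	"fmt"

	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hpccache/parse"
)

func main() {
	// Build a full resource ID, e.g. the value a resource would pass to d.SetId().
	id := parse.NewCacheID("00000000-0000-0000-0000-000000000000", "example-rg", "example-cache")
	fmt.Println(id.ID())

	// Parse it back, as the new Read/Delete functions do with d.Id().
	parsed, err := parse.CacheID(id.ID())
	if err != nil {
		panic(err)
	}
	fmt.Printf("resource group: %s, cache: %s\n", parsed.ResourceGroup, parsed.Name)
}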
ForceNew: true, - ValidateFunc: validation.IntInSlice([]int{ - 3072, - 6144, - 12288, - 24576, - 49152, - }), - }, - - "subnet_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateResourceIDOrEmpty, - }, - - "sku_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringInSlice([]string{ - "Standard_2G", - "Standard_4G", - "Standard_8G", - }, false), - }, - - "mtu": { - Type: schema.TypeInt, - Optional: true, - Default: 1500, - ValidateFunc: validation.IntBetween(576, 1500), - }, - - "root_squash_enabled": { - Type: schema.TypeBool, - Optional: true, - // TODO 3.0: remove "Computed: true" and add "Default: true" - // The old resource has no consistent default for the rootSquash setting. In order not to - // break users, we intentionally mark this property as Computed. - // https://docs.microsoft.com/en-us/azure/hpc-cache/configuration#configure-root-squash. - Computed: true, - }, - - "mount_addresses": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Schema{Type: schema.TypeString}, - }, - }, - } -} - -func resourceArmHPCCacheCreateOrUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).HPCCache.CachesClient - ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) - defer cancel() - - log.Printf("[INFO] preparing arguments for Azure HPC Cache creation.") - name := d.Get("name").(string) - resourceGroup := d.Get("resource_group_name").(string) - - if d.IsNewResource() { - existing, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing HPC Cache %q (Resource Group %q): %s", name, resourceGroup, err) - } - } - - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_hpc_cache", *existing.ID) - } - } - - location := d.Get("location").(string) - cacheSize := d.Get("cache_size_in_gb").(int) - subnet := d.Get("subnet_id").(string) - skuName := d.Get("sku_name").(string) - rootSquash := d.Get("root_squash_enabled").(bool) - mtu := d.Get("mtu").(int) - - cache := &storagecache.Cache{ - Name: utils.String(name), - Location: utils.String(location), - CacheProperties: &storagecache.CacheProperties{ - CacheSizeGB: utils.Int32(int32(cacheSize)), - Subnet: utils.String(subnet), - NetworkSettings: &storagecache.CacheNetworkSettings{ - Mtu: utils.Int32(int32(mtu)), - }, - SecuritySettings: &storagecache.CacheSecuritySettings{ - RootSquash: &rootSquash, - }, - }, - Sku: &storagecache.CacheSku{ - Name: utils.String(skuName), - }, - } - - future, err := client.CreateOrUpdate(ctx, resourceGroup, name, cache) - if err != nil { - return fmt.Errorf("Error creating HPC Cache %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for HPC Cache %q (Resource Group %q) to finish provisioning: %+v", name, resourceGroup, err) - } - - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - return fmt.Errorf("Error retrieving HPC Cache %q (Resource Group %q): %+v", name, resourceGroup, err) - } - if resp.ID == nil { - return fmt.Errorf("Cannot read ID for HPC Cache %q (Resource Group %q): %+v", name, resourceGroup, err) - } - d.SetId(*resp.ID) - - return resourceArmHPCCacheRead(d, meta) -} - -func resourceArmHPCCacheRead(d *schema.ResourceData, meta interface{}) error { - client := 
meta.(*clients.Client).HPCCache.CachesClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parsers.ParseHPCCacheID(d.Id()) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.Name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[DEBUG] HPC Cache %q was not found in Resource Group %q - removing from state!", id.Name, id.ResourceGroup) - d.SetId("") - return nil - } - - return fmt.Errorf("Error retrieving HPC Cache %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) - } - - d.Set("name", id.Name) - d.Set("resource_group_name", id.ResourceGroup) - d.Set("location", resp.Location) - - if props := resp.CacheProperties; props != nil { - d.Set("cache_size_in_gb", props.CacheSizeGB) - d.Set("subnet_id", props.Subnet) - d.Set("mount_addresses", utils.FlattenStringSlice(props.MountAddresses)) - if props.NetworkSettings != nil { - d.Set("mtu", props.NetworkSettings.Mtu) - } - if props.SecuritySettings != nil { - d.Set("root_squash_enabled", props.SecuritySettings.RootSquash) - } - } - - if sku := resp.Sku; sku != nil { - d.Set("sku_name", sku.Name) - } - - return nil -} - -func resourceArmHPCCacheDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).HPCCache.CachesClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parsers.ParseHPCCacheID(d.Id()) - if err != nil { - return err - } - - future, err := client.Delete(ctx, id.ResourceGroup, id.Name) - if err != nil { - return fmt.Errorf("Error deleting HPC Cache %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for deletion of HPC Cache %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) - } - - return nil -} diff --git a/azurerm/internal/services/hpccache/resource_arm_hpc_cache_blob_target.go b/azurerm/internal/services/hpccache/resource_arm_hpc_cache_blob_target.go deleted file mode 100644 index 1d784a802f25..000000000000 --- a/azurerm/internal/services/hpccache/resource_arm_hpc_cache_blob_target.go +++ /dev/null @@ -1,209 +0,0 @@ -package hpccache - -import ( - "fmt" - "log" - "time" - - "github.com/Azure/azure-sdk-for-go/services/storagecache/mgmt/2020-03-01/storagecache" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hpccache/parsers" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hpccache/validate" - storageValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/storage/validate" - azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmHPCCacheBlobTarget() *schema.Resource { - return &schema.Resource{ - Create: resourceArmHPCCacheBlobTargetCreateOrUpdate, - Update: 
resourceArmHPCCacheBlobTargetCreateOrUpdate, - Read: resourceArmHPCCacheBlobTargetRead, - Delete: resourceArmHPCCacheBlobTargetDelete, - - Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parsers.HPCCacheTargetID(id) - return err - }), - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validate.HPCCacheTargetName, - }, - - "cache_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - - "resource_group_name": azure.SchemaResourceGroupName(), - - "namespace_path": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validate.HPCCacheNamespacePath, - }, - - "storage_container_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: storageValidate.StorageContainerResourceManagerID, - }, - }, - } -} - -func resourceArmHPCCacheBlobTargetCreateOrUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).HPCCache.StorageTargetsClient - ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - - log.Printf("[INFO] preparing arguments for Azure HPC Cache Blob Target creation.") - name := d.Get("name").(string) - resourceGroup := d.Get("resource_group_name").(string) - cache := d.Get("cache_name").(string) - - if d.IsNewResource() { - resp, err := client.Get(ctx, resourceGroup, cache, name) - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Error checking for existing HPC Cache Blob Target %q (Resource Group %q): %+v", name, resourceGroup, err) - } - } - - if !utils.ResponseWasNotFound(resp.Response) { - return tf.ImportAsExistsError("azurerm_hpc_cache_blob_target", *resp.ID) - } - } - - namespacePath := d.Get("namespace_path").(string) - containerId := d.Get("storage_container_id").(string) - - // Construct parameters - namespaceJunction := []storagecache.NamespaceJunction{ - { - NamespacePath: &namespacePath, - TargetPath: utils.String("/"), - }, - } - param := &storagecache.StorageTarget{ - BasicStorageTargetProperties: &storagecache.ClfsTargetProperties{ - Junctions: &namespaceJunction, - TargetType: storagecache.TargetTypeClfs, - Clfs: &storagecache.ClfsTarget{ - Target: utils.String(containerId), - }, - }, - } - - future, err := client.CreateOrUpdate(ctx, resourceGroup, cache, name, param) - if err != nil { - return fmt.Errorf("Error creating HPC Cache Blob Target %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for creation of HPC Cache Blob Target %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - read, err := client.Get(ctx, resourceGroup, cache, name) - if err != nil { - return fmt.Errorf("Error retrieving HPC Cache Blob Target %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - if read.ID == nil { - return fmt.Errorf("Error retrieving HPC Cache Blob Target %q (Resource Group %q): `id` was nil", name, resourceGroup) - } - - d.SetId(*read.ID) - - return resourceArmHPCCacheBlobTargetRead(d, meta) -} - -func resourceArmHPCCacheBlobTargetRead(d *schema.ResourceData, meta 
interface{}) error { - client := meta.(*clients.Client).HPCCache.StorageTargetsClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parsers.HPCCacheTargetID(d.Id()) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.Cache, id.Name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[DEBUG] HPC Cache Blob Target %q was not found (Resource Group %q, Cahe %q) - removing from state!", id.Name, id.ResourceGroup, id.Cache) - d.SetId("") - return nil - } - - return fmt.Errorf("Error retrieving HPC Cache Blob Target %q (Resource Group %q, Cahe %q): %+v", id.Name, id.ResourceGroup, id.Cache, err) - } - - d.Set("name", id.Name) - d.Set("resource_group_name", id.ResourceGroup) - d.Set("cache_name", id.Cache) - - if props := resp.BasicStorageTargetProperties; props != nil { - props, ok := props.AsClfsTargetProperties() - if !ok { - return fmt.Errorf("The type of this HPC Cache Target %q (Resource Group %q, Cahe %q) is not a Blob Target", id.Name, id.ResourceGroup, id.Cache) - } - - storageContainerId := "" - if props.Clfs != nil && props.Clfs.Target != nil { - storageContainerId = *props.Clfs.Target - } - d.Set("storage_container_id", storageContainerId) - - namespacePath := "" - // There is only one namespace path allowed for blob container storage target, - // which maps to the root path of it. - if props.Junctions != nil && len(*props.Junctions) == 1 && (*props.Junctions)[0].NamespacePath != nil { - namespacePath = *(*props.Junctions)[0].NamespacePath - } - d.Set("namespace_path", namespacePath) - } - - return nil -} - -func resourceArmHPCCacheBlobTargetDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).HPCCache.StorageTargetsClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parsers.HPCCacheTargetID(d.Id()) - if err != nil { - return err - } - - future, err := client.Delete(ctx, id.ResourceGroup, id.Cache, id.Name) - if err != nil { - return fmt.Errorf("Error deleting HPC Cache Blob Target %q (Resource Group %q, Cahe %q): %+v", id.Name, id.ResourceGroup, id.Cache, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for deletion of HPC Cache Blob Target %q (Resource Group %q, Cahe %q): %+v", id.Name, id.ResourceGroup, id.Cache, err) - } - - return nil -} diff --git a/azurerm/internal/services/hpccache/resource_arm_hpc_cache_nfs_target.go b/azurerm/internal/services/hpccache/resource_arm_hpc_cache_nfs_target.go deleted file mode 100644 index 950f2e3cbbfa..000000000000 --- a/azurerm/internal/services/hpccache/resource_arm_hpc_cache_nfs_target.go +++ /dev/null @@ -1,272 +0,0 @@ -package hpccache - -import ( - "fmt" - "log" - "time" - - "github.com/Azure/azure-sdk-for-go/services/storagecache/mgmt/2020-03-01/storagecache" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hpccache/parsers" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hpccache/validate" - azSchema 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmHPCCacheNFSTarget() *schema.Resource { - return &schema.Resource{ - Create: resourceArmHPCCacheNFSTargetCreateOrUpdate, - Update: resourceArmHPCCacheNFSTargetCreateOrUpdate, - Read: resourceArmHPCCacheNFSTargetRead, - Delete: resourceArmHPCCacheNFSTargetDelete, - - Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parsers.HPCCacheTargetID(id) - return err - }), - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validate.HPCCacheTargetName, - }, - - "resource_group_name": azure.SchemaResourceGroupName(), - - "cache_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - - "namespace_junction": { - Type: schema.TypeSet, - Required: true, - MinItems: 1, - // Confirmed with service team that they have a mac of 10 that is enforced by the backend. - MaxItems: 10, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "namespace_path": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validate.HPCCacheNamespacePath, - }, - "nfs_export": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validate.HPCCacheNFSExport, - }, - "target_path": { - Type: schema.TypeString, - Optional: true, - Default: "", - ValidateFunc: validate.HPCCacheNFSTargetPath, - }, - }, - }, - }, - - "target_host_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - - "usage_model": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringInSlice([]string{ - "READ_HEAVY_INFREQ", - "WRITE_WORKLOAD_15", - "WRITE_AROUND", - }, false), - }, - }, - } -} - -func resourceArmHPCCacheNFSTargetCreateOrUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).HPCCache.StorageTargetsClient - ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - - log.Printf("[INFO] preparing arguments for Azure HPC Cache NFS Target creation.") - name := d.Get("name").(string) - resourceGroup := d.Get("resource_group_name").(string) - cache := d.Get("cache_name").(string) - - if d.IsNewResource() { - resp, err := client.Get(ctx, resourceGroup, cache, name) - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Error checking for existing HPC Cache NFS Target %q (Resource Group %q): %+v", name, resourceGroup, err) - } - } - - if !utils.ResponseWasNotFound(resp.Response) { - return tf.ImportAsExistsError("azurerm_hpc_cache_nfs_target", *resp.ID) - } - } - - // Construct parameters - param := &storagecache.StorageTarget{ - BasicStorageTargetProperties: &storagecache.Nfs3TargetProperties{ - Junctions: expandNamespaceJunctions(d.Get("namespace_junction").(*schema.Set).List()), - TargetType: storagecache.TargetTypeNfs3, - Nfs3: &storagecache.Nfs3Target{ - Target: utils.String(d.Get("target_host_name").(string)), - UsageModel: 
utils.String(d.Get("usage_model").(string)), - }, - }, - } - - future, err := client.CreateOrUpdate(ctx, resourceGroup, cache, name, param) - if err != nil { - return fmt.Errorf("Error creating HPC Cache NFS Target %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for creation of HPC Cache NFS Target %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - read, err := client.Get(ctx, resourceGroup, cache, name) - if err != nil { - return fmt.Errorf("Error retrieving HPC Cache NFS Target %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - if read.ID == nil { - return fmt.Errorf("Error retrieving HPC Cache NFS Target %q (Resource Group %q): `id` was nil", name, resourceGroup) - } - - d.SetId(*read.ID) - - return resourceArmHPCCacheNFSTargetRead(d, meta) -} - -func resourceArmHPCCacheNFSTargetRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).HPCCache.StorageTargetsClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parsers.HPCCacheTargetID(d.Id()) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.Cache, id.Name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[DEBUG] HPC Cache NFS Target %q was not found (Resource Group %q, Cahe %q) - removing from state!", id.Name, id.ResourceGroup, id.Cache) - d.SetId("") - return nil - } - return fmt.Errorf("Error retrieving HPC Cache NFS Target %q (Resource Group %q, Cahe %q): %+v", id.Name, id.ResourceGroup, id.Cache, err) - } - - d.Set("name", id.Name) - d.Set("resource_group_name", id.ResourceGroup) - d.Set("cache_name", id.Cache) - - if props := resp.BasicStorageTargetProperties; props != nil { - props, ok := props.AsNfs3TargetProperties() - if !ok { - return fmt.Errorf("The type of this HPC Cache Target %q (Resource Group %q, Cahe %q) is not a NFS Target", id.Name, id.ResourceGroup, id.Cache) - } - if nfs3 := props.Nfs3; nfs3 != nil { - d.Set("target_host_name", nfs3.Target) - d.Set("usage_model", nfs3.UsageModel) - } - if err := d.Set("namespace_junction", flattenNamespaceJunctions(props.Junctions)); err != nil { - return fmt.Errorf(`Error setting "namespace_junction" %q (Resource Group %q, Cahe %q): %+v`, id.Name, id.ResourceGroup, id.Cache, err) - } - } - - return nil -} - -func resourceArmHPCCacheNFSTargetDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).HPCCache.StorageTargetsClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parsers.HPCCacheTargetID(d.Id()) - if err != nil { - return err - } - - future, err := client.Delete(ctx, id.ResourceGroup, id.Cache, id.Name) - if err != nil { - return fmt.Errorf("Error deleting HPC Cache NFS Target %q (Resource Group %q, Cahe %q): %+v", id.Name, id.ResourceGroup, id.Cache, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for deletion of HPC Cache NFS Target %q (Resource Group %q, Cahe %q): %+v", id.Name, id.ResourceGroup, id.Cache, err) - } - - return nil -} - -func expandNamespaceJunctions(input []interface{}) *[]storagecache.NamespaceJunction { - result := make([]storagecache.NamespaceJunction, 0) - - for _, v := range input { - b := v.(map[string]interface{}) - result = append(result, storagecache.NamespaceJunction{ - 
NamespacePath: utils.String(b["namespace_path"].(string)), - NfsExport: utils.String(b["nfs_export"].(string)), - TargetPath: utils.String(b["target_path"].(string)), - }) - } - - return &result -} - -func flattenNamespaceJunctions(input *[]storagecache.NamespaceJunction) []interface{} { - if input == nil { - return []interface{}{} - } - - output := make([]interface{}, 0) - - for _, e := range *input { - namespacePath := "" - if v := e.NamespacePath; v != nil { - namespacePath = *v - } - - nfsExport := "" - if v := e.NfsExport; v != nil { - nfsExport = *v - } - - targetPath := "" - if v := e.TargetPath; v != nil { - targetPath = *v - } - - output = append(output, map[string]interface{}{ - "namespace_path": namespacePath, - "nfs_export": nfsExport, - "target_path": targetPath, - }) - } - - return output -} diff --git a/azurerm/internal/services/hpccache/resourceids.go b/azurerm/internal/services/hpccache/resourceids.go new file mode 100644 index 000000000000..7df32a6fec75 --- /dev/null +++ b/azurerm/internal/services/hpccache/resourceids.go @@ -0,0 +1,4 @@ +package hpccache + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Cache -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.StorageCache/caches/cache1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=StorageTarget -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.StorageCache/caches/cache1/storageTargets/target1 diff --git a/azurerm/internal/services/hpccache/tests/resource_arm_hpc_cache_blob_target_test.go b/azurerm/internal/services/hpccache/tests/resource_arm_hpc_cache_blob_target_test.go deleted file mode 100644 index 40d01b32404e..000000000000 --- a/azurerm/internal/services/hpccache/tests/resource_arm_hpc_cache_blob_target_test.go +++ /dev/null @@ -1,239 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hpccache/parsers" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMHPCCacheBlobTarget_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hpc_cache_blob_target", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHPCCacheBlobTargetDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHPCCacheBlobTarget_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHPCCacheBlobTargetExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMHPCCacheBlobTarget_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hpc_cache_blob_target", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHPCCacheBlobTargetDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHPCCacheBlobTarget_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHPCCacheBlobTargetExists(data.ResourceName), - ), - }, 
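// Editor's note (illustrative sketch, not part of this diff): the resourceids.go
// file added above declares the go:generate directives behind the generated
// parse/cache.go and parse/storage_target.go files; running "go generate" in the
// hpccache package re-runs that generator. Separately, the hand-rolled
// testCheckAzureRMHPCCacheBlobTargetExists/Destroy helpers deleted in this change
// are superseded by the typed Exists pattern used in hpc_cache_resource_test.go.
// A sketch of the blob-target equivalent follows, assuming only the client and
// parse signatures shown in this diff; the type name is hypothetical and the
// renamed blob-target test file itself is not part of this hunk.
package hpccache_test

import (
	"context"
	"fmt"

	"github.com/hashicorp/terraform-plugin-sdk/terraform"
	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hpccache/parse"
	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
)

type HPCCacheBlobTargetResource struct{}

func (HPCCacheBlobTargetResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) {
	// Parse the composite cache/storage-target ID stored in state.
	id, err := parse.StorageTargetID(state.ID)
	if err != nil {
		return nil, err
	}

	// Look the target up and report whether it exists in Azure.
	resp, err := clients.HPCCache.StorageTargetsClient.Get(ctx, id.ResourceGroup, id.CacheName, id.Name)
	if err != nil {
		return nil, fmt.Errorf("retrieving HPC Cache Blob Target (%s): %+v", id.String(), err)
	}

	return utils.Bool(resp.BasicStorageTargetProperties != nil), nil
}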
- data.ImportStep(), - { - Config: testAccAzureRMHPCCacheBlobTarget_namespace(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHPCCacheBlobTargetExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMHPCCacheBlobTarget_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hpc_cache_blob_target", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHPCCacheBlobTargetDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHPCCacheBlobTarget_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHPCCacheBlobTargetExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMHPCCacheBlobTarget_requiresImport), - }, - }) -} - -func testCheckAzureRMHPCCacheBlobTargetExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("HPC Cache Blob Target not found: %s", resourceName) - } - - id, err := parsers.HPCCacheTargetID(rs.Primary.ID) - if err != nil { - return err - } - - client := acceptance.AzureProvider.Meta().(*clients.Client).HPCCache.StorageTargetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - if resp, err := client.Get(ctx, id.ResourceGroup, id.Cache, id.Name); err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: HPC Cache Blob Target %q (Resource Group %q) does not exist", id.Name, id.ResourceGroup) - } - return fmt.Errorf("Bad: Get on Storage.StorageTargetsClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMHPCCacheBlobTargetDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).HPCCache.StorageTargetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_hpc_cache_blob_target" { - continue - } - - id, err := parsers.HPCCacheTargetID(rs.Primary.ID) - if err != nil { - return err - } - - if resp, err := client.Get(ctx, id.ResourceGroup, id.Cache, id.Name); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Get on Storage.StorageTargetsClient: %+v", err) - } - } - - return nil - } - - return nil -} - -func testAccAzureRMHPCCacheBlobTarget_basic(data acceptance.TestData) string { - template := testAccAzureRMHPCCacheBlobTarget_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hpc_cache_blob_target" "test" { - name = "acctest-HPCCTGT-%s" - resource_group_name = azurerm_resource_group.test.name - cache_name = azurerm_hpc_cache.test.name - storage_container_id = azurerm_storage_container.test.resource_manager_id - namespace_path = "/blob_storage1" -} -`, template, data.RandomString) -} - -func testAccAzureRMHPCCacheBlobTarget_namespace(data acceptance.TestData) string { - template := testAccAzureRMHPCCacheBlobTarget_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hpc_cache_blob_target" "test" { - name = "acctest-HPCCTGT-%s" - resource_group_name = azurerm_resource_group.test.name - cache_name = azurerm_hpc_cache.test.name - storage_container_id = azurerm_storage_container.test.resource_manager_id - namespace_path = "/blob_storage2" -} -`, template, data.RandomString) -} - -func testAccAzureRMHPCCacheBlobTarget_requiresImport(data 
acceptance.TestData) string { - template := testAccAzureRMHPCCacheBlobTarget_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hpc_cache_blob_target" "import" { - name = azurerm_hpc_cache_blob_target.test.name - resource_group_name = azurerm_hpc_cache_blob_target.test.resource_group_name - cache_name = azurerm_hpc_cache_blob_target.test.cache_name - storage_container_id = azurerm_hpc_cache_blob_target.test.storage_container_id - namespace_path = azurerm_hpc_cache_blob_target.test.namespace_path -} -`, template) -} - -func testAccAzureRMHPCCacheBlobTarget_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-storage-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctest-VN-%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctestsub-%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.2.0/24" -} - -resource "azurerm_hpc_cache" "test" { - name = "acctest-HPCC-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cache_size_in_gb = 3072 - subnet_id = azurerm_subnet.test.id - sku_name = "Standard_2G" -} - -data "azuread_service_principal" "test" { - display_name = "HPC Cache Resource Provider" -} - -resource "azurerm_storage_account" "test" { - name = "accteststorgacc%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_storage_container" "test" { - name = "acctest-strgctn-%s" - storage_account_name = azurerm_storage_account.test.name -} - -resource "azurerm_role_assignment" "test_storage_account_contrib" { - scope = azurerm_storage_account.test.id - role_definition_name = "Storage Account Contributor" - principal_id = data.azuread_service_principal.test.object_id -} - -resource "azurerm_role_assignment" "test_storage_blob_data_contrib" { - scope = azurerm_storage_account.test.id - role_definition_name = "Storage Blob Data Contributor" - principal_id = data.azuread_service_principal.test.object_id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomString, data.RandomString) -} diff --git a/azurerm/internal/services/hpccache/tests/resource_arm_hpc_cache_nfs_target_test.go b/azurerm/internal/services/hpccache/tests/resource_arm_hpc_cache_nfs_target_test.go deleted file mode 100644 index 9d1db760bbd9..000000000000 --- a/azurerm/internal/services/hpccache/tests/resource_arm_hpc_cache_nfs_target_test.go +++ /dev/null @@ -1,332 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hpccache/parsers" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMHPCCacheNFSTarget_basic(t *testing.T) { - data := 
acceptance.BuildTestData(t, "azurerm_hpc_cache_nfs_target", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHPCCacheNFSTargetDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHPCCacheNFSTarget_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHPCCacheNFSTargetExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMHPCCacheNFSTarget_usageModel(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hpc_cache_nfs_target", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHPCCacheNFSTargetDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHPCCacheNFSTarget_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHPCCacheNFSTargetExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMHPCCacheNFSTarget_usageModel(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHPCCacheNFSTargetExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMHPCCacheNFSTarget_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHPCCacheNFSTargetExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMHPCCacheNFSTarget_namespaceJunction(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hpc_cache_nfs_target", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHPCCacheNFSTargetDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHPCCacheNFSTarget_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHPCCacheNFSTargetExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMHPCCacheNFSTarget_namespaceJunction(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHPCCacheNFSTargetExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMHPCCacheNFSTarget_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHPCCacheNFSTargetExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMHPCCacheNFSTarget_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hpc_cache_nfs_target", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHPCCacheNFSTargetDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHPCCacheNFSTarget_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHPCCacheNFSTargetExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMHPCCacheNFSTarget_requiresImport), - }, - }) -} - -func testCheckAzureRMHPCCacheNFSTargetExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).HPCCache.StorageTargetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("HPC Cache NFS Target not found: %s", resourceName) - } - - id, err := 
parsers.HPCCacheTargetID(rs.Primary.ID) - if err != nil { - return err - } - - if resp, err := client.Get(ctx, id.ResourceGroup, id.Cache, id.Name); err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: HPC Cache NFS Target %q (Resource Group %q) does not exist", id.Name, id.ResourceGroup) - } - return fmt.Errorf("Bad: Get on Storage.StorageTargetsClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMHPCCacheNFSTargetDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).HPCCache.StorageTargetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_hpc_cache_nfs_target" { - continue - } - - id, err := parsers.HPCCacheTargetID(rs.Primary.ID) - if err != nil { - return err - } - - if resp, err := client.Get(ctx, id.ResourceGroup, id.Cache, id.Name); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Get on Storage.StorageTargetsClient: %+v", err) - } - } - - return nil - } - - return nil -} - -func testAccAzureRMHPCCacheNFSTarget_basic(data acceptance.TestData) string { - template := testAccAzureRMHPCCacheNFSTarget_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hpc_cache_nfs_target" "test" { - name = "acctest-HPCCTGT-%s" - resource_group_name = azurerm_resource_group.test.name - cache_name = azurerm_hpc_cache.test.name - target_host_name = azurerm_linux_virtual_machine.test.private_ip_address - usage_model = "READ_HEAVY_INFREQ" - namespace_junction { - namespace_path = "/nfs/a1" - nfs_export = "/export/a" - target_path = "1" - } - namespace_junction { - namespace_path = "/nfs/b" - nfs_export = "/export/b" - } -} -`, template, data.RandomString) -} - -func testAccAzureRMHPCCacheNFSTarget_usageModel(data acceptance.TestData) string { - template := testAccAzureRMHPCCacheNFSTarget_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hpc_cache_nfs_target" "test" { - name = "acctest-HPCCTGT-%s" - resource_group_name = azurerm_resource_group.test.name - cache_name = azurerm_hpc_cache.test.name - target_host_name = azurerm_linux_virtual_machine.test.private_ip_address - usage_model = "WRITE_WORKLOAD_15" - namespace_junction { - namespace_path = "/nfs/a1" - nfs_export = "/export/a" - target_path = "1" - } - namespace_junction { - namespace_path = "/nfs/b" - nfs_export = "/export/b" - } -} -`, template, data.RandomString) -} - -func testAccAzureRMHPCCacheNFSTarget_namespaceJunction(data acceptance.TestData) string { - template := testAccAzureRMHPCCacheNFSTarget_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hpc_cache_nfs_target" "test" { - name = "acctest-HPCCTGT-%s" - resource_group_name = azurerm_resource_group.test.name - cache_name = azurerm_hpc_cache.test.name - target_host_name = azurerm_linux_virtual_machine.test.private_ip_address - usage_model = "WRITE_WORKLOAD_15" - namespace_junction { - namespace_path = "/nfs/a" - nfs_export = "/export/a" - target_path = "" - } -} -`, template, data.RandomString) -} - -func testAccAzureRMHPCCacheNFSTarget_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMHPCCacheNFSTarget_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hpc_cache_nfs_target" "import" { - name = azurerm_hpc_cache_nfs_target.test.name - resource_group_name = azurerm_hpc_cache_nfs_target.test.resource_group_name - cache_name = azurerm_hpc_cache_nfs_target.test.cache_name - target_host_name = 
azurerm_hpc_cache_nfs_target.test.target_host_name - usage_model = azurerm_hpc_cache_nfs_target.test.usage_model - namespace_junction { - namespace_path = "/nfs/a1" - nfs_export = "/export/a" - target_path = "1" - } - namespace_junction { - namespace_path = "/nfs/b" - nfs_export = "/export/b" - } -} -`, template) -} - -func testAccAzureRMHPCCacheNFSTarget_template(data acceptance.TestData) string { - return fmt.Sprintf(` -%s - -resource "azurerm_subnet" "testvm" { - name = "acctest-sub-vm-%[2]s" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.3.0/24" -} - -resource "azurerm_network_interface" "test" { - name = "acctest-nic-%[2]s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - ip_configuration { - name = "internal" - subnet_id = azurerm_subnet.testvm.id - private_ip_address_allocation = "Dynamic" - } -} - -locals { - custom_data = < /etc/exports -/export/a *(rw,fsid=0,insecure,no_subtree_check,async) -/export/b *(rw,fsid=0,insecure,no_subtree_check,async) -EOF -systemctl start nfs-server -exportfs -arv -CUSTOM_DATA -} - -resource "azurerm_linux_virtual_machine" "test" { - name = "acctest-vm-%[2]s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - size = "Standard_F2" - admin_username = "adminuser" - admin_password = "P@$$w0rd1234!" - disable_password_authentication = false - - network_interface_ids = [ - azurerm_network_interface.test.id, - ] - - os_disk { - caching = "ReadWrite" - storage_account_type = "Standard_LRS" - } - - source_image_reference { - publisher = "Canonical" - offer = "UbuntuServer" - sku = "18.04-LTS" - version = "latest" - } - - custom_data = base64encode(local.custom_data) -} - -`, testAccAzureRMHPCCache_basic(data), data.RandomString) -} diff --git a/azurerm/internal/services/hpccache/tests/resource_arm_hpc_cache_test.go b/azurerm/internal/services/hpccache/tests/resource_arm_hpc_cache_test.go deleted file mode 100644 index c75d0936fe04..000000000000 --- a/azurerm/internal/services/hpccache/tests/resource_arm_hpc_cache_test.go +++ /dev/null @@ -1,266 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMHPCCache_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hpc_cache", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHPCCacheDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHPCCache_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHPCCacheExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "mount_addresses.#"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMHPCCache_mtu(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hpc_cache", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHPCCacheDestroy, - Steps: []resource.TestStep{ - { - Config: 
testAccAzureRMHPCCache_mtu(data, 1000), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHPCCacheExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "mount_addresses.#"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMHPCCache_mtu(data, 1500), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHPCCacheExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "mount_addresses.#"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMHPCCache_mtu(data, 1000), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHPCCacheExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "mount_addresses.#"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMHPCCache_rootSquash(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hpc_cache", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHPCCacheDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHPCCache_rootSquash(data, false), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHPCCacheExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "mount_addresses.#"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMHPCCache_rootSquash(data, true), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHPCCacheExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "mount_addresses.#"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMHPCCache_rootSquash(data, true), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHPCCacheExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "mount_addresses.#"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMHPCCache_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_hpc_cache", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMHPCCacheDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMHPCCache_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMHPCCacheExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMHPCCahce_requiresImport), - }, - }) -} - -func testCheckAzureRMHPCCacheExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).HPCCache.CachesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, name) - if err != nil { - return fmt.Errorf("Bad: Get on storageCacheCachesClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: HPC Cache %q (resource group: %q) does not exist", name, resourceGroup) - } - - return nil - } -} - -func testCheckAzureRMHPCCacheDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).HPCCache.CachesClient - ctx := 
acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_hpc_cache" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - resp, err := conn.Get(ctx, resourceGroup, name) - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("HPC Cache still exists:\n%#v", resp) - } - } - - return nil -} - -func testAccAzureRMHPCCache_basic(data acceptance.TestData) string { - template := testAccAzureRMHPCCache_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hpc_cache" "test" { - name = "acctest-HPCC-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cache_size_in_gb = 3072 - subnet_id = azurerm_subnet.test.id - sku_name = "Standard_2G" -} -`, template, data.RandomInteger) -} - -func testAccAzureRMHPCCahce_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMHPCCache_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hpc_cache" "import" { - name = azurerm_hpc_cache.test.name - resource_group_name = azurerm_hpc_cache.test.resource_group_name - location = azurerm_hpc_cache.test.location - cache_size_in_gb = azurerm_hpc_cache.test.cache_size_in_gb - subnet_id = azurerm_hpc_cache.test.subnet_id - sku_name = azurerm_hpc_cache.test.sku_name -} -`, template) -} - -func testAccAzureRMHPCCache_mtu(data acceptance.TestData, mtu int) string { - template := testAccAzureRMHPCCache_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hpc_cache" "test" { - name = "acctest-HPCC-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cache_size_in_gb = 3072 - subnet_id = azurerm_subnet.test.id - sku_name = "Standard_2G" - mtu = %d -} -`, template, data.RandomInteger, mtu) -} - -func testAccAzureRMHPCCache_rootSquash(data acceptance.TestData, enable bool) string { - template := testAccAzureRMHPCCache_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_hpc_cache" "test" { - name = "acctest-HPCC-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cache_size_in_gb = 3072 - subnet_id = azurerm_subnet.test.id - sku_name = "Standard_2G" - root_squash_enabled = %t -} -`, template, data.RandomInteger, enable) -} - -func testAccAzureRMHPCCache_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-storage-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctest-VN-%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctestsub-%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.2.0/24" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/hpccache/validate/cache_id.go b/azurerm/internal/services/hpccache/validate/cache_id.go new file mode 100644 index 000000000000..91e57b90b02a --- /dev/null +++ b/azurerm/internal/services/hpccache/validate/cache_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 
'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hpccache/parse" +) + +func CacheID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.CacheID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/hpccache/validate/cache_id_test.go b/azurerm/internal/services/hpccache/validate/cache_id_test.go new file mode 100644 index 000000000000..538506ca83e7 --- /dev/null +++ b/azurerm/internal/services/hpccache/validate/cache_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestCacheID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.StorageCache/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.StorageCache/caches/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.StorageCache/caches/cache1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.STORAGECACHE/CACHES/CACHE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := CacheID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/hpccache/validate/cache_namespace_path.go b/azurerm/internal/services/hpccache/validate/cache_namespace_path.go new file mode 100644 index 000000000000..7698e04ea5b5 --- /dev/null +++ b/azurerm/internal/services/hpccache/validate/cache_namespace_path.go @@ -0,0 +1,5 @@ +package validate + +func CacheNamespacePath(i interface{}, k string) (warnings []string, errs []error) { + return absolutePath(i, k) +} diff --git a/azurerm/internal/services/hpccache/validate/cache_nfs_export.go b/azurerm/internal/services/hpccache/validate/cache_nfs_export.go new file mode 100644 index 000000000000..97a0e955bd63 --- /dev/null +++ b/azurerm/internal/services/hpccache/validate/cache_nfs_export.go @@ -0,0 +1,5 @@ +package validate + +func CacheNFSExport(i interface{}, k string) (warnings []string, errs []error) { + return absolutePath(i, k) +} diff --git a/azurerm/internal/services/hpccache/validate/cache_nfs_target_path.go b/azurerm/internal/services/hpccache/validate/cache_nfs_target_path.go new file mode 100644 index 000000000000..5e33499a68d4 --- /dev/null +++ 
b/azurerm/internal/services/hpccache/validate/cache_nfs_target_path.go @@ -0,0 +1,5 @@ +package validate + +func CacheNFSTargetPath(i interface{}, k string) (warnings []string, errs []error) { + return relativePath(i, k) +} diff --git a/azurerm/internal/services/hpccache/validate/hpc_cache_target.go b/azurerm/internal/services/hpccache/validate/hpc_cache_target.go deleted file mode 100644 index ebb4be25e617..000000000000 --- a/azurerm/internal/services/hpccache/validate/hpc_cache_target.go +++ /dev/null @@ -1,60 +0,0 @@ -package validate - -import ( - "fmt" - "regexp" - "strings" -) - -func HPCCacheTargetName(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - exp := `^[-0-9a-zA-Z_]{1,31}$` - p := regexp.MustCompile(exp) - if !p.MatchString(v) { - errors = append(errors, fmt.Errorf(`cache target name doesn't comply with regexp: "%s"`, exp)) - } - - return warnings, errors -} - -func HPCCacheNamespacePath(i interface{}, k string) (warnings []string, errs []error) { - return absolutePath(i, k) -} - -func HPCCacheNFSExport(i interface{}, k string) (warnings []string, errs []error) { - return absolutePath(i, k) -} - -func HPCCacheNFSTargetPath(i interface{}, k string) (warnings []string, errs []error) { - return relativePath(i, k) -} - -func absolutePath(i interface{}, k string) (warnings []string, errs []error) { - v, ok := i.(string) - if !ok { - errs = append(errs, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if !strings.HasPrefix(v, "/") { - errs = append(errs, fmt.Errorf(`%s path should start with "/"`, k)) - } - return warnings, errs -} - -func relativePath(i interface{}, k string) (warnings []string, errs []error) { - v, ok := i.(string) - if !ok { - errs = append(errs, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if strings.HasPrefix(v, "/") { - errs = append(errs, fmt.Errorf(`%s path should not start with "/"`, k)) - } - return warnings, errs -} diff --git a/azurerm/internal/services/hpccache/validate/internal.go b/azurerm/internal/services/hpccache/validate/internal.go new file mode 100644 index 000000000000..667041afb6de --- /dev/null +++ b/azurerm/internal/services/hpccache/validate/internal.go @@ -0,0 +1,32 @@ +package validate + +import ( + "fmt" + "strings" +) + +func absolutePath(i interface{}, k string) (warnings []string, errs []error) { + v, ok := i.(string) + if !ok { + errs = append(errs, fmt.Errorf("expected type of %q to be string", k)) + return + } + + if !strings.HasPrefix(v, "/") { + errs = append(errs, fmt.Errorf(`%s path should start with "/"`, k)) + } + return warnings, errs +} + +func relativePath(i interface{}, k string) (warnings []string, errs []error) { + v, ok := i.(string) + if !ok { + errs = append(errs, fmt.Errorf("expected type of %q to be string", k)) + return + } + + if strings.HasPrefix(v, "/") { + errs = append(errs, fmt.Errorf(`%s path should not start with "/"`, k)) + } + return warnings, errs +} diff --git a/azurerm/internal/services/hpccache/validate/storage_target_id.go b/azurerm/internal/services/hpccache/validate/storage_target_id.go new file mode 100644 index 000000000000..4809a5cbfa4d --- /dev/null +++ b/azurerm/internal/services/hpccache/validate/storage_target_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hpccache/parse" +) + +func StorageTargetID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.StorageTargetID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/hpccache/validate/storage_target_id_test.go b/azurerm/internal/services/hpccache/validate/storage_target_id_test.go new file mode 100644 index 000000000000..4f25f9bc2c43 --- /dev/null +++ b/azurerm/internal/services/hpccache/validate/storage_target_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestStorageTargetID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing CacheName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.StorageCache/", + Valid: false, + }, + + { + // missing value for CacheName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.StorageCache/caches/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.StorageCache/caches/cache1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.StorageCache/caches/cache1/storageTargets/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.StorageCache/caches/cache1/storageTargets/target1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.STORAGECACHE/CACHES/CACHE1/STORAGETARGETS/TARGET1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := StorageTargetID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/hpccache/validate/storage_target_name.go b/azurerm/internal/services/hpccache/validate/storage_target_name.go new file mode 100644 index 000000000000..e5d34e32a6ae --- /dev/null +++ b/azurerm/internal/services/hpccache/validate/storage_target_name.go @@ -0,0 +1,22 @@ +package validate + +import ( + "fmt" + "regexp" +) + +func StorageTargetName(i interface{}, k string) (warnings []string, errors []error) { + v, ok := i.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) + return + } + exp := `^[-0-9a-zA-Z_]{1,31}$` + p := regexp.MustCompile(exp) + if !p.MatchString(v) { + // TODO: make this error message less user hostile + errors = 
append(errors, fmt.Errorf(`cache target name doesn't comply with regexp: "%s"`, exp)) + } + + return warnings, errors +} diff --git a/azurerm/internal/services/hsm/dedicated_hardware_security_module_resource.go b/azurerm/internal/services/hsm/dedicated_hardware_security_module_resource.go new file mode 100644 index 000000000000..92813656c8d3 --- /dev/null +++ b/azurerm/internal/services/hsm/dedicated_hardware_security_module_resource.go @@ -0,0 +1,323 @@ +package hsm + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/preview/hardwaresecuritymodules/mgmt/2018-10-31-preview/hardwaresecuritymodules" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + azValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hsm/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hsm/validate" + networkValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceDedicatedHardwareSecurityModule() *schema.Resource { + return &schema.Resource{ + Create: resourceDedicatedHardwareSecurityModuleCreate, + Read: resourceDedicatedHardwareSecurityModuleRead, + Update: resourceDedicatedHardwareSecurityModuleUpdate, + Delete: resourceDedicatedHardwareSecurityModuleDelete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.DedicatedHardwareSecurityModuleID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DedicatedHardwareSecurityModuleName, + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "location": azure.SchemaLocation(), + + "sku_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringInSlice([]string{ + string(hardwaresecuritymodules.SafeNetLunaNetworkHSMA790), + }, false), + }, + + "network_profile": { + Type: schema.TypeList, + Required: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "network_interface_private_ip_addresses": { + Type: schema.TypeSet, + Required: true, + ForceNew: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: azValidate.IPv4Address, + }, + }, + + "subnet_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: 
networkValidate.SubnetID, + }, + }, + }, + }, + + "stamp_id": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.StringInSlice([]string{ + "stamp1", + "stamp2", + }, false), + }, + + "zones": azure.SchemaZones(), + + "tags": tags.Schema(), + }, + } +} + +func resourceDedicatedHardwareSecurityModuleCreate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).HSM.DedicatedHsmClient + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + + existing, err := client.Get(ctx, resourceGroup, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for present of existing Dedicated Hardware Security Module %q (Resource Group %q): %+v", name, resourceGroup, err) + } + } + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_dedicated_hardware_security_module", *existing.ID) + } + + parameters := hardwaresecuritymodules.DedicatedHsm{ + Location: utils.String(location.Normalize(d.Get("location").(string))), + DedicatedHsmProperties: &hardwaresecuritymodules.DedicatedHsmProperties{ + NetworkProfile: expandDedicatedHsmNetworkProfile(d.Get("network_profile").([]interface{})), + }, + Sku: &hardwaresecuritymodules.Sku{ + Name: hardwaresecuritymodules.Name(d.Get("sku_name").(string)), + }, + Tags: tags.Expand(d.Get("tags").(map[string]interface{})), + } + + if v, ok := d.GetOk("stamp_id"); ok { + parameters.DedicatedHsmProperties.StampID = utils.String(v.(string)) + } + + if v, ok := d.GetOk("zones"); ok { + parameters.Zones = azure.ExpandZones(v.([]interface{})) + } + + future, err := client.CreateOrUpdate(ctx, resourceGroup, name, parameters) + if err != nil { + return fmt.Errorf("creating Dedicated Hardware Security Module %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting on creating future for Dedicated Hardware Security Module %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + resp, err := client.Get(ctx, resourceGroup, name) + if err != nil { + return fmt.Errorf("retrieving Dedicated Hardware Security Module %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + if resp.ID == nil || *resp.ID == "" { + return fmt.Errorf("empty or nil ID returned for Dedicated Hardware Security Module %q (Resource Group %q) ID", name, resourceGroup) + } + + d.SetId(*resp.ID) + return resourceDedicatedHardwareSecurityModuleRead(d, meta) +} + +func resourceDedicatedHardwareSecurityModuleRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).HSM.DedicatedHsmClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DedicatedHardwareSecurityModuleID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.DedicatedHSMName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[INFO] Dedicated Hardware Security Module %q does not exist - removing from state", d.Id()) + d.SetId("") + return nil + } + + return fmt.Errorf("retrieving Dedicate Hardware Security Module %q (Resource Group %q): %+v", id.DedicatedHSMName, id.ResourceGroup, err) + } + + d.Set("name", id.DedicatedHSMName) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("location", 
location.NormalizeNilable(resp.Location)) + + if props := resp.DedicatedHsmProperties; props != nil { + if err := d.Set("network_profile", flattenDedicatedHsmNetworkProfile(props.NetworkProfile)); err != nil { + return fmt.Errorf("setting network_profile: %+v", err) + } + d.Set("stamp_id", props.StampID) + } + + if sku := resp.Sku; sku != nil { + d.Set("sku_name", sku.Name) + } + + if err := d.Set("zones", resp.Zones); err != nil { + return fmt.Errorf("setting `zones`: %+v", err) + } + + return tags.FlattenAndSet(d, resp.Tags) +} + +func resourceDedicatedHardwareSecurityModuleUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).HSM.DedicatedHsmClient + ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DedicatedHardwareSecurityModuleID(d.Id()) + if err != nil { + return err + } + + parameters := hardwaresecuritymodules.DedicatedHsmPatchParameters{} + if d.HasChange("tags") { + parameters.Tags = tags.Expand(d.Get("tags").(map[string]interface{})) + } + + future, err := client.Update(ctx, id.ResourceGroup, id.DedicatedHSMName, parameters) + if err != nil { + return fmt.Errorf("updating Dedicate Hardware Security Module %q (Resource Group %q): %+v", id.DedicatedHSMName, id.ResourceGroup, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting on updating future for Dedicate Hardware Security Module %q (Resource Group %q): %+v", id.DedicatedHSMName, id.ResourceGroup, err) + } + + return resourceDedicatedHardwareSecurityModuleRead(d, meta) +} + +func resourceDedicatedHardwareSecurityModuleDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).HSM.DedicatedHsmClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DedicatedHardwareSecurityModuleID(d.Id()) + if err != nil { + return err + } + + future, err := client.Delete(ctx, id.ResourceGroup, id.DedicatedHSMName) + if err != nil { + return fmt.Errorf("deleting Dedicated Hardware Security Module %q (Resource Group %q): %+v", id.DedicatedHSMName, id.ResourceGroup, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting on deleting future for Dedicated Hardware Security Module %q (Resource Group %q): %+v", id.DedicatedHSMName, id.ResourceGroup, err) + } + + return nil +} + +func expandDedicatedHsmNetworkProfile(input []interface{}) *hardwaresecuritymodules.NetworkProfile { + if len(input) == 0 { + return nil + } + + v := input[0].(map[string]interface{}) + + result := hardwaresecuritymodules.NetworkProfile{ + Subnet: &hardwaresecuritymodules.APIEntityReference{ + ID: utils.String(v["subnet_id"].(string)), + }, + NetworkInterfaces: expandDedicatedHsmNetworkInterfacePrivateIPAddresses(v["network_interface_private_ip_addresses"].(*schema.Set).List()), + } + + return &result +} + +func expandDedicatedHsmNetworkInterfacePrivateIPAddresses(input []interface{}) *[]hardwaresecuritymodules.NetworkInterface { + results := make([]hardwaresecuritymodules.NetworkInterface, 0) + + for _, item := range input { + if item != nil { + result := hardwaresecuritymodules.NetworkInterface{ + PrivateIPAddress: utils.String(item.(string)), + } + + results = append(results, result) + } + } + + return &results +} + +func flattenDedicatedHsmNetworkProfile(input *hardwaresecuritymodules.NetworkProfile) []interface{} { + if input == nil { + return make([]interface{}, 
0) + } + + var subnetId string + if input.Subnet != nil && input.Subnet.ID != nil { + subnetId = *input.Subnet.ID + } + + return []interface{}{ + map[string]interface{}{ + "network_interface_private_ip_addresses": flattenDedicatedHsmNetworkInterfacePrivateIPAddresses(input.NetworkInterfaces), + "subnet_id": subnetId, + }, + } +} + +func flattenDedicatedHsmNetworkInterfacePrivateIPAddresses(input *[]hardwaresecuritymodules.NetworkInterface) []interface{} { + results := make([]interface{}, 0) + if input == nil { + return results + } + + for _, item := range *input { + if item.PrivateIPAddress != nil { + results = append(results, *item.PrivateIPAddress) + } + } + + return results +} diff --git a/azurerm/internal/services/hsm/dedicated_hardware_security_module_resource_test.go b/azurerm/internal/services/hsm/dedicated_hardware_security_module_resource_test.go new file mode 100644 index 000000000000..dcfc8ed42563 --- /dev/null +++ b/azurerm/internal/services/hsm/dedicated_hardware_security_module_resource_test.go @@ -0,0 +1,245 @@ +package hsm_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hsm/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DedicatedHardwareSecurityModuleResource struct { +} + +func TestAccDedicatedHardwareSecurityModule_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dedicated_hardware_security_module", "test") + r := DedicatedHardwareSecurityModuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDedicatedHardwareSecurityModule_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dedicated_hardware_security_module", "test") + r := DedicatedHardwareSecurityModuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccDedicatedHardwareSecurityModule_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dedicated_hardware_security_module", "test") + r := DedicatedHardwareSecurityModuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDedicatedHardwareSecurityModule_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dedicated_hardware_security_module", "test") + r := DedicatedHardwareSecurityModuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + 
check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (r DedicatedHardwareSecurityModuleResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_dedicated_hardware_security_module" "import" { + name = azurerm_dedicated_hardware_security_module.test.name + resource_group_name = azurerm_dedicated_hardware_security_module.test.resource_group_name + location = azurerm_dedicated_hardware_security_module.test.location + sku_name = azurerm_dedicated_hardware_security_module.test.sku_name + stamp_id = azurerm_dedicated_hardware_security_module.test.stamp_id + + network_profile { + network_interface_private_ip_addresses = azurerm_dedicated_hardware_security_module.test.network_profile[0].network_interface_private_ip_addresses + subnet_id = azurerm_dedicated_hardware_security_module.test.network_profile[0].subnet_id + } +} +`, r.basic(data)) +} + +func (DedicatedHardwareSecurityModuleResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.DedicatedHardwareSecurityModuleID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.HSM.DedicatedHsmClient.Get(ctx, id.ResourceGroup, id.DedicatedHSMName) + if err != nil { + return nil, fmt.Errorf("retrieving Dedicated HardwareSecurityModule %q (resource group: %q): %+v", id.DedicatedHSMName, id.ResourceGroup, err) + } + + return utils.Bool(resp.DedicatedHsmProperties != nil), nil +} + +func (DedicatedHardwareSecurityModuleResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-hsm-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctest-vnet-%d" + address_space = ["10.2.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctest-computesubnet-%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefixes = ["10.2.0.0/24"] +} + +resource "azurerm_subnet" "test2" { + name = "acctest-hsmsubnet-%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefixes = ["10.2.1.0/24"] + + delegation { + name = "first" + + service_delegation { + name = "Microsoft.HardwareSecurityModules/dedicatedHSMs" + + actions = [ + "Microsoft.Network/networkinterfaces/*", + "Microsoft.Network/virtualNetworks/subnets/join/action", + ] + } + } +} + +resource "azurerm_subnet" "test3" { + name = "gatewaysubnet" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefixes = ["10.2.255.0/26"] +} + +resource "azurerm_public_ip" "test" { + name = "acctest-pip-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Dynamic" +} + +resource "azurerm_virtual_network_gateway" "test" { + name = "acctest-vnetgateway-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + type = "ExpressRoute" + vpn_type = "PolicyBased" + sku = "Standard" + + ip_configuration { + public_ip_address_id = azurerm_public_ip.test.id + private_ip_address_allocation = "Dynamic" + subnet_id = azurerm_subnet.test3.id + 
} +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (DedicatedHardwareSecurityModuleResource) basic(data acceptance.TestData) string { + template := DedicatedHardwareSecurityModuleResource{}.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_dedicated_hardware_security_module" "test" { + name = "acctest-hsm-%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "SafeNet Luna Network HSM A790" + + network_profile { + network_interface_private_ip_addresses = ["10.2.1.8"] + subnet_id = azurerm_subnet.test2.id + } + + stamp_id = "stamp2" + + depends_on = [azurerm_virtual_network_gateway.test] +} +`, template, data.RandomString) +} + +func (DedicatedHardwareSecurityModuleResource) complete(data acceptance.TestData) string { + template := DedicatedHardwareSecurityModuleResource{}.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_dedicated_hardware_security_module" "test" { + name = "acctest-hsm-%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "SafeNet Luna Network HSM A790" + + network_profile { + network_interface_private_ip_addresses = ["10.2.1.8"] + subnet_id = azurerm_subnet.test2.id + } + + stamp_id = "stamp2" + + tags = { + env = "Test" + } + + depends_on = [azurerm_virtual_network_gateway.test] +} +`, template, data.RandomString) +} diff --git a/azurerm/internal/services/hsm/parse/dedicated_hardware_security_module.go b/azurerm/internal/services/hsm/parse/dedicated_hardware_security_module.go index eb66e1812f3c..b8ca2f915a7e 100644 --- a/azurerm/internal/services/hsm/parse/dedicated_hardware_security_module.go +++ b/azurerm/internal/services/hsm/parse/dedicated_hardware_security_module.go @@ -1,27 +1,63 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type DedicatedHardwareSecurityModuleId struct { - ResourceGroup string - Name string + SubscriptionId string + ResourceGroup string + DedicatedHSMName string +} + +func NewDedicatedHardwareSecurityModuleID(subscriptionId, resourceGroup, dedicatedHSMName string) DedicatedHardwareSecurityModuleId { + return DedicatedHardwareSecurityModuleId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + DedicatedHSMName: dedicatedHSMName, + } +} + +func (id DedicatedHardwareSecurityModuleId) String() string { + segments := []string{ + fmt.Sprintf("Dedicated H S M Name %q", id.DedicatedHSMName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Dedicated Hardware Security Module", segmentsStr) } +func (id DedicatedHardwareSecurityModuleId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.HardwareSecurityModules/dedicatedHSMs/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.DedicatedHSMName) +} + +// DedicatedHardwareSecurityModuleID parses a DedicatedHardwareSecurityModule ID into an DedicatedHardwareSecurityModuleId struct func DedicatedHardwareSecurityModuleID(input string) (*DedicatedHardwareSecurityModuleId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("parsing DedicatedHardwareSecurityModule ID 
%q: %+v", input, err) + return nil, err + } + + resourceId := DedicatedHardwareSecurityModuleId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - dedicatedHardwareSecurityModule := DedicatedHardwareSecurityModuleId{ - ResourceGroup: id.ResourceGroup, + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if dedicatedHardwareSecurityModule.Name, err = id.PopSegment("dedicatedHSMs"); err != nil { + if resourceId.DedicatedHSMName, err = id.PopSegment("dedicatedHSMs"); err != nil { return nil, err } @@ -29,5 +65,5 @@ func DedicatedHardwareSecurityModuleID(input string) (*DedicatedHardwareSecurity return nil, err } - return &dedicatedHardwareSecurityModule, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/hsm/parse/dedicated_hardware_security_module_test.go b/azurerm/internal/services/hsm/parse/dedicated_hardware_security_module_test.go index c60b822bf3ff..58d39f0546a5 100644 --- a/azurerm/internal/services/hsm/parse/dedicated_hardware_security_module_test.go +++ b/azurerm/internal/services/hsm/parse/dedicated_hardware_security_module_test.go @@ -1,72 +1,112 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" ) +var _ resourceid.Formatter = DedicatedHardwareSecurityModuleId{} + +func TestDedicatedHardwareSecurityModuleIDFormatter(t *testing.T) { + actual := NewDedicatedHardwareSecurityModuleID("12345678-1234-9876-4563-123456789012", "group1", "hsm1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.HardwareSecurityModules/dedicatedHSMs/hsm1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + func TestDedicatedHardwareSecurityModuleID(t *testing.T) { testData := []struct { - Name string Input string + Error bool Expected *DedicatedHardwareSecurityModuleId }{ + { - Name: "Empty", - Input: "", - Expected: nil, + // empty + Input: "", + Error: true, }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, + // missing SubscriptionId + Input: "/", + Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, }, + { - Name: "Missing DedicatedHardwareSecurityModule Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.HardwareSecurityModules/dedicatedHSMs", - Expected: nil, + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, }, + { - Name: "DedicatedHardwareSecurityModule ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.HardwareSecurityModules/dedicatedHSMs/hsm1", + // missing DedicatedHSMName + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.HardwareSecurityModules/", + Error: true, + }, + + { + // missing value for DedicatedHSMName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.HardwareSecurityModules/dedicatedHSMs/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.HardwareSecurityModules/dedicatedHSMs/hsm1", Expected: &DedicatedHardwareSecurityModuleId{ - ResourceGroup: "group1", - Name: "hsm1", + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "group1", + DedicatedHSMName: "hsm1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.HardwareSecurityModules/DedicatedHSMs/hsm1", - Expected: nil, + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.HARDWARESECURITYMODULES/DEDICATEDHSMS/HSM1", + Error: true, }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q..", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := DedicatedHardwareSecurityModuleID(v.Input) if err != nil { - if v.Expected == nil { + if v.Error { continue } - t.Fatalf("Expected a value but got an error: %s", err) + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") } + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } if actual.ResourceGroup != v.Expected.ResourceGroup { t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + if actual.DedicatedHSMName != v.Expected.DedicatedHSMName { + t.Fatalf("Expected %q but got %q for DedicatedHSMName", v.Expected.DedicatedHSMName, actual.DedicatedHSMName) } } } diff --git a/azurerm/internal/services/hsm/registration.go b/azurerm/internal/services/hsm/registration.go index c97f8449eb77..dc6796e52f57 100644 --- a/azurerm/internal/services/hsm/registration.go +++ b/azurerm/internal/services/hsm/registration.go @@ -24,6 +24,6 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource { // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_dedicated_hardware_security_module": resourceArmDedicatedHardwareSecurityModule(), + "azurerm_dedicated_hardware_security_module": resourceDedicatedHardwareSecurityModule(), } } diff --git a/azurerm/internal/services/hsm/resource_arm_dedicated_hardware_security_module.go b/azurerm/internal/services/hsm/resource_arm_dedicated_hardware_security_module.go deleted file mode 100644 index e0df7290f428..000000000000 --- a/azurerm/internal/services/hsm/resource_arm_dedicated_hardware_security_module.go +++ /dev/null @@ -1,323 +0,0 @@ -package hsm - -import ( - "fmt" - "log" - "time" - - "github.com/Azure/azure-sdk-for-go/services/preview/hardwaresecuritymodules/mgmt/2018-10-31-preview/hardwaresecuritymodules" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - azValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hsm/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hsm/validate" - networkValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" - azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmDedicatedHardwareSecurityModule() *schema.Resource { - return &schema.Resource{ - Create: resourceArmDedicatedHardwareSecurityModuleCreate, - Read: resourceArmDedicatedHardwareSecurityModuleRead, - Update: resourceArmDedicatedHardwareSecurityModuleUpdate, - Delete: resourceArmDedicatedHardwareSecurityModuleDelete, - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.DedicatedHardwareSecurityModuleID(id) - return err - }), - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validate.DedicatedHardwareSecurityModuleName, - }, - - "resource_group_name": azure.SchemaResourceGroupName(), - - "location": azure.SchemaLocation(), - - "sku_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringInSlice([]string{ - string(hardwaresecuritymodules.SafeNetLunaNetworkHSMA790), - }, false), - }, - - "network_profile": { - Type: schema.TypeList, - Required: true, - MaxItems: 1, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "network_interface_private_ip_addresses": { - Type: schema.TypeSet, - Required: true, - ForceNew: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - ValidateFunc: azValidate.IPv4Address, - }, - }, - - "subnet_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: networkValidate.SubnetID, - }, - }, - }, - }, - - "stamp_id": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - ValidateFunc: validation.StringInSlice([]string{ - "stamp1", - "stamp2", - }, false), - }, - - "zones": azure.SchemaZones(), - - "tags": tags.Schema(), - }, - } -} - -func resourceArmDedicatedHardwareSecurityModuleCreate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).HSM.DedicatedHsmClient - ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := d.Get("name").(string) - resourceGroup := d.Get("resource_group_name").(string) - - existing, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - 
return fmt.Errorf("checking for present of existing Dedicated Hardware Security Module %q (Resource Group %q): %+v", name, resourceGroup, err) - } - } - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_dedicated_hardware_security_module", *existing.ID) - } - - parameters := hardwaresecuritymodules.DedicatedHsm{ - Location: utils.String(location.Normalize(d.Get("location").(string))), - DedicatedHsmProperties: &hardwaresecuritymodules.DedicatedHsmProperties{ - NetworkProfile: expandArmDedicatedHsmNetworkProfile(d.Get("network_profile").([]interface{})), - }, - Sku: &hardwaresecuritymodules.Sku{ - Name: hardwaresecuritymodules.Name(d.Get("sku_name").(string)), - }, - Tags: tags.Expand(d.Get("tags").(map[string]interface{})), - } - - if v, ok := d.GetOk("stamp_id"); ok { - parameters.DedicatedHsmProperties.StampID = utils.String(v.(string)) - } - - if v, ok := d.GetOk("zones"); ok { - parameters.Zones = azure.ExpandZones(v.([]interface{})) - } - - future, err := client.CreateOrUpdate(ctx, resourceGroup, name, parameters) - if err != nil { - return fmt.Errorf("creating Dedicated Hardware Security Module %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("waiting on creating future for Dedicated Hardware Security Module %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - return fmt.Errorf("retrieving Dedicated Hardware Security Module %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - if resp.ID == nil || *resp.ID == "" { - return fmt.Errorf("empty or nil ID returned for Dedicated Hardware Security Module %q (Resource Group %q) ID", name, resourceGroup) - } - - d.SetId(*resp.ID) - return resourceArmDedicatedHardwareSecurityModuleRead(d, meta) -} - -func resourceArmDedicatedHardwareSecurityModuleRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).HSM.DedicatedHsmClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.DedicatedHardwareSecurityModuleID(d.Id()) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.Name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[INFO] Dedicated Hardware Security Module %q does not exist - removing from state", d.Id()) - d.SetId("") - return nil - } - - return fmt.Errorf("retrieving Dedicate Hardware Security Module %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) - } - - d.Set("name", id.Name) - d.Set("resource_group_name", id.ResourceGroup) - d.Set("location", location.NormalizeNilable(resp.Location)) - - if props := resp.DedicatedHsmProperties; props != nil { - if err := d.Set("network_profile", flattenArmDedicatedHsmNetworkProfile(props.NetworkProfile)); err != nil { - return fmt.Errorf("setting network_profile: %+v", err) - } - d.Set("stamp_id", props.StampID) - } - - if sku := resp.Sku; sku != nil { - d.Set("sku_name", sku.Name) - } - - if err := d.Set("zones", resp.Zones); err != nil { - return fmt.Errorf("setting `zones`: %+v", err) - } - - return tags.FlattenAndSet(d, resp.Tags) -} - -func resourceArmDedicatedHardwareSecurityModuleUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).HSM.DedicatedHsmClient - ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, 
err := parse.DedicatedHardwareSecurityModuleID(d.Id()) - if err != nil { - return err - } - - parameters := hardwaresecuritymodules.DedicatedHsmPatchParameters{} - if d.HasChange("tags") { - parameters.Tags = tags.Expand(d.Get("tags").(map[string]interface{})) - } - - future, err := client.Update(ctx, id.ResourceGroup, id.Name, parameters) - if err != nil { - return fmt.Errorf("updating Dedicate Hardware Security Module %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("waiting on updating future for Dedicate Hardware Security Module %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) - } - - return resourceArmDedicatedHardwareSecurityModuleRead(d, meta) -} - -func resourceArmDedicatedHardwareSecurityModuleDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).HSM.DedicatedHsmClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.DedicatedHardwareSecurityModuleID(d.Id()) - if err != nil { - return err - } - - future, err := client.Delete(ctx, id.ResourceGroup, id.Name) - if err != nil { - return fmt.Errorf("deleting Dedicated Hardware Security Module %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("waiting on deleting future for Dedicated Hardware Security Module %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) - } - - return nil -} - -func expandArmDedicatedHsmNetworkProfile(input []interface{}) *hardwaresecuritymodules.NetworkProfile { - if len(input) == 0 { - return nil - } - - v := input[0].(map[string]interface{}) - - result := hardwaresecuritymodules.NetworkProfile{ - Subnet: &hardwaresecuritymodules.APIEntityReference{ - ID: utils.String(v["subnet_id"].(string)), - }, - NetworkInterfaces: expandArmDedicatedHsmNetworkInterfacePrivateIPAddresses(v["network_interface_private_ip_addresses"].(*schema.Set).List()), - } - - return &result -} - -func expandArmDedicatedHsmNetworkInterfacePrivateIPAddresses(input []interface{}) *[]hardwaresecuritymodules.NetworkInterface { - results := make([]hardwaresecuritymodules.NetworkInterface, 0) - - for _, item := range input { - if item != nil { - result := hardwaresecuritymodules.NetworkInterface{ - PrivateIPAddress: utils.String(item.(string)), - } - - results = append(results, result) - } - } - - return &results -} - -func flattenArmDedicatedHsmNetworkProfile(input *hardwaresecuritymodules.NetworkProfile) []interface{} { - if input == nil { - return make([]interface{}, 0) - } - - var subnetId string - if input.Subnet != nil && input.Subnet.ID != nil { - subnetId = *input.Subnet.ID - } - - return []interface{}{ - map[string]interface{}{ - "network_interface_private_ip_addresses": flattenArmDedicatedHsmNetworkInterfacePrivateIPAddresses(input.NetworkInterfaces), - "subnet_id": subnetId, - }, - } -} - -func flattenArmDedicatedHsmNetworkInterfacePrivateIPAddresses(input *[]hardwaresecuritymodules.NetworkInterface) []interface{} { - results := make([]interface{}, 0) - if input == nil { - return results - } - - for _, item := range *input { - if item.PrivateIPAddress != nil { - results = append(results, *item.PrivateIPAddress) - } - } - - return results -} diff --git a/azurerm/internal/services/hsm/resourceids.go b/azurerm/internal/services/hsm/resourceids.go new file mode 100644 index 
000000000000..378ff1bd5837 --- /dev/null +++ b/azurerm/internal/services/hsm/resourceids.go @@ -0,0 +1,3 @@ +package hsm + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=DedicatedHardwareSecurityModule -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.HardwareSecurityModules/dedicatedHSMs/hsm1 diff --git a/azurerm/internal/services/hsm/tests/resource_arm_dedicated_hardware_security_module_test.go b/azurerm/internal/services/hsm/tests/resource_arm_dedicated_hardware_security_module_test.go deleted file mode 100644 index 03b34c35ae80..000000000000 --- a/azurerm/internal/services/hsm/tests/resource_arm_dedicated_hardware_security_module_test.go +++ /dev/null @@ -1,287 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hsm/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMDedicatedHardwareSecurityModule_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dedicated_hardware_security_module", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDedicatedHardwareSecurityModuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDedicatedHardwareSecurityModule_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDedicatedHardwareSecurityModuleExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDedicatedHardwareSecurityModule_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dedicated_hardware_security_module", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDedicatedHardwareSecurityModuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDedicatedHardwareSecurityModule_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDedicatedHardwareSecurityModuleExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMDedicatedHardwareSecurityModule_requiresImport), - }, - }) -} - -func TestAccAzureRMDedicatedHardwareSecurityModule_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dedicated_hardware_security_module", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDedicatedHardwareSecurityModuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDedicatedHardwareSecurityModule_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDedicatedHardwareSecurityModuleExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMDedicatedHardwareSecurityModule_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_dedicated_hardware_security_module", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: 
acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDedicatedHardwareSecurityModuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDedicatedHardwareSecurityModule_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDedicatedHardwareSecurityModuleExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMDedicatedHardwareSecurityModule_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDedicatedHardwareSecurityModuleExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testAccAzureRMDedicatedHardwareSecurityModule_requiresImport(data acceptance.TestData) string { - config := testAccAzureRMDedicatedHardwareSecurityModule_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_dedicated_hardware_security_module" "import" { - name = azurerm_dedicated_hardware_security_module.test.name - resource_group_name = azurerm_dedicated_hardware_security_module.test.resource_group_name - location = azurerm_dedicated_hardware_security_module.test.location - sku_name = azurerm_dedicated_hardware_security_module.test.sku_name - stamp_id = azurerm_dedicated_hardware_security_module.test.stamp_id - - network_profile { - network_interface_private_ip_addresses = azurerm_dedicated_hardware_security_module.test.network_profile[0].network_interface_private_ip_addresses - subnet_id = azurerm_dedicated_hardware_security_module.test.network_profile[0].subnet_id - } -} -`, config) -} - -func testCheckAzureRMDedicatedHardwareSecurityModuleExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).HSM.DedicatedHsmClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("dedicated hardware security module not found: %s", resourceName) - } - id, err := parse.DedicatedHardwareSecurityModuleID(rs.Primary.ID) - if err != nil { - return err - } - if resp, err := client.Get(ctx, id.ResourceGroup, id.Name); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("bad: Dedicated HardwareSecurityModule %q does not exist", id.Name) - } - return fmt.Errorf("bad: Get on HardwareSecurityModules.DedicatedHsmClient: %+v", err) - } - return nil - } -} - -func testCheckAzureRMDedicatedHardwareSecurityModuleDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).HSM.DedicatedHsmClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_dedicated_hardware_security_module" { - continue - } - id, err := parse.DedicatedHardwareSecurityModuleID(rs.Primary.ID) - if err != nil { - return err - } - if resp, err := client.Get(ctx, id.ResourceGroup, id.Name); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("bad: Get on HardwareSecurityModules.DedicatedHsmClient: %+v", err) - } - } - return nil - } - return nil -} - -func testAccAzureRMDedicatedHardwareSecurityModule_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-hsm-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctest-vnet-%d" - address_space = ["10.2.0.0/16"] - location = azurerm_resource_group.test.location - 
resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctest-computesubnet-%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefixes = ["10.2.0.0/24"] -} - -resource "azurerm_subnet" "test2" { - name = "acctest-hsmsubnet-%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefixes = ["10.2.1.0/24"] - - delegation { - name = "first" - - service_delegation { - name = "Microsoft.HardwareSecurityModules/dedicatedHSMs" - - actions = [ - "Microsoft.Network/networkinterfaces/*", - "Microsoft.Network/virtualNetworks/subnets/join/action", - ] - } - } -} - -resource "azurerm_subnet" "test3" { - name = "gatewaysubnet" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefixes = ["10.2.255.0/26"] -} - -resource "azurerm_public_ip" "test" { - name = "acctest-pip-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Dynamic" -} - -resource "azurerm_virtual_network_gateway" "test" { - name = "acctest-vnetgateway-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - type = "ExpressRoute" - vpn_type = "PolicyBased" - sku = "Standard" - - ip_configuration { - public_ip_address_id = azurerm_public_ip.test.id - private_ip_address_allocation = "Dynamic" - subnet_id = azurerm_subnet.test3.id - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMDedicatedHardwareSecurityModule_basic(data acceptance.TestData) string { - template := testAccAzureRMDedicatedHardwareSecurityModule_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_dedicated_hardware_security_module" "test" { - name = "acctest-hsm-%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku_name = "SafeNet Luna Network HSM A790" - - network_profile { - network_interface_private_ip_addresses = ["10.2.1.8"] - subnet_id = azurerm_subnet.test2.id - } - - stamp_id = "stamp2" - - depends_on = [azurerm_virtual_network_gateway.test] -} -`, template, data.RandomString) -} - -func testAccAzureRMDedicatedHardwareSecurityModule_complete(data acceptance.TestData) string { - template := testAccAzureRMDedicatedHardwareSecurityModule_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_dedicated_hardware_security_module" "test" { - name = "acctest-hsm-%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku_name = "SafeNet Luna Network HSM A790" - - network_profile { - network_interface_private_ip_addresses = ["10.2.1.8"] - subnet_id = azurerm_subnet.test2.id - } - - stamp_id = "stamp2" - - tags = { - env = "Test" - } - - depends_on = [azurerm_virtual_network_gateway.test] -} -`, template, data.RandomString) -} diff --git a/azurerm/internal/services/hsm/validate/dedicated_hardware_security_module_id.go b/azurerm/internal/services/hsm/validate/dedicated_hardware_security_module_id.go new file mode 100644 index 000000000000..9dcb5b32f901 --- /dev/null +++ b/azurerm/internal/services/hsm/validate/dedicated_hardware_security_module_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this 
file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/hsm/parse" +) + +func DedicatedHardwareSecurityModuleID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.DedicatedHardwareSecurityModuleID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/hsm/validate/dedicated_hardware_security_module_id_test.go b/azurerm/internal/services/hsm/validate/dedicated_hardware_security_module_id_test.go new file mode 100644 index 000000000000..23a7d6c5c14d --- /dev/null +++ b/azurerm/internal/services/hsm/validate/dedicated_hardware_security_module_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestDedicatedHardwareSecurityModuleID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing DedicatedHSMName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.HardwareSecurityModules/", + Valid: false, + }, + + { + // missing value for DedicatedHSMName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.HardwareSecurityModules/dedicatedHSMs/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.HardwareSecurityModules/dedicatedHSMs/hsm1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.HARDWARESECURITYMODULES/DEDICATEDHSMS/HSM1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := DedicatedHardwareSecurityModuleID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/hsm/validate/dedicated_hardware_security_module.go b/azurerm/internal/services/hsm/validate/dedicated_hardware_security_module_name.go similarity index 100% rename from azurerm/internal/services/hsm/validate/dedicated_hardware_security_module.go rename to azurerm/internal/services/hsm/validate/dedicated_hardware_security_module_name.go diff --git a/azurerm/internal/services/hsm/validate/dedicated_hardware_security_module_test.go b/azurerm/internal/services/hsm/validate/dedicated_hardware_security_module_name_test.go similarity index 100% rename from azurerm/internal/services/hsm/validate/dedicated_hardware_security_module_test.go rename to azurerm/internal/services/hsm/validate/dedicated_hardware_security_module_name_test.go diff --git a/azurerm/internal/services/iotcentral/iotcentral_application_resource.go 
b/azurerm/internal/services/iotcentral/iotcentral_application_resource.go index f3c50245e07d..fb8cc2e56418 100644 --- a/azurerm/internal/services/iotcentral/iotcentral_application_resource.go +++ b/azurerm/internal/services/iotcentral/iotcentral_application_resource.go @@ -18,12 +18,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmIotCentralApplication() *schema.Resource { +func resourceIotCentralApplication() *schema.Resource { return &schema.Resource{ - Create: resourceArmIotCentralAppCreate, - Read: resourceArmIotCentralAppRead, - Update: resourceArmIotCentralAppUpdate, - Delete: resourceArmIotCentralAppDelete, + Create: resourceIotCentralAppCreate, + Read: resourceIotCentralAppRead, + Update: resourceIotCentralAppUpdate, + Delete: resourceIotCentralAppDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -41,7 +41,7 @@ func resourceArmIotCentralApplication() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.IotCentralAppName, + ValidateFunc: validate.ApplicationName, }, "location": azure.SchemaLocation(), @@ -51,14 +51,14 @@ func resourceArmIotCentralApplication() *schema.Resource { "sub_domain": { Type: schema.TypeString, Required: true, - ValidateFunc: validate.IotCentralAppSubdomain, + ValidateFunc: validate.ApplicationSubdomain, }, "display_name": { Type: schema.TypeString, Optional: true, Computed: true, - ValidateFunc: validate.IotCentralAppDisplayName, + ValidateFunc: validate.ApplicationDisplayName, }, "sku": { @@ -76,7 +76,7 @@ func resourceArmIotCentralApplication() *schema.Resource { Type: schema.TypeString, Optional: true, Computed: true, - ValidateFunc: validate.IotCentralAppTemplateName, + ValidateFunc: validate.ApplicationTemplateName, }, "tags": tags.Schema(), @@ -84,7 +84,7 @@ func resourceArmIotCentralApplication() *schema.Resource { } } -func resourceArmIotCentralAppCreate(d *schema.ResourceData, meta interface{}) error { +func resourceIotCentralAppCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTCentral.AppsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -153,22 +153,22 @@ func resourceArmIotCentralAppCreate(d *schema.ResourceData, meta interface{}) er } d.SetId(*response.ID) - return resourceArmIotCentralAppRead(d, meta) + return resourceIotCentralAppRead(d, meta) } -func resourceArmIotCentralAppUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceIotCentralAppUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTCentral.AppsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.IoTCentralApplicationID(d.Id()) + id, err := parse.ApplicationID(d.Id()) if err != nil { return err } displayName := d.Get("display_name").(string) if displayName == "" { - displayName = id.Name + displayName = id.IoTAppName } subdomain := d.Get("sub_domain").(string) @@ -181,45 +181,45 @@ func resourceArmIotCentralAppUpdate(d *schema.ResourceData, meta interface{}) er Template: &template, }, } - future, err := client.Update(ctx, id.ResourceGroup, id.Name, appPatch) + future, err := client.Update(ctx, id.ResourceGroup, id.IoTAppName, appPatch) if err != nil { - return fmt.Errorf("Error update Iot Central Application %q (Resource Group %q). 
%+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("Error update Iot Central Application %q (Resource Group %q). %+v", id.IoTAppName, id.ResourceGroup, err) } if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for the completion of update Iot Central Application %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("Error waiting for the completion of update Iot Central Application %q (Resource Group %q): %+v", id.IoTAppName, id.ResourceGroup, err) } - resp, err := client.Get(ctx, id.ResourceGroup, id.Name) + resp, err := client.Get(ctx, id.ResourceGroup, id.IoTAppName) if err != nil { - return fmt.Errorf("Error retrieving IoT Central Application %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("Error retrieving IoT Central Application %q (Resource Group %q): %+v", id.IoTAppName, id.ResourceGroup, err) } if resp.ID == nil || *resp.ID == "" { - return fmt.Errorf("Cannot read IoT Central Application %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("Cannot read IoT Central Application %q (Resource Group %q): %+v", id.IoTAppName, id.ResourceGroup, err) } d.SetId(*resp.ID) - return resourceArmIotCentralAppRead(d, meta) + return resourceIotCentralAppRead(d, meta) } -func resourceArmIotCentralAppRead(d *schema.ResourceData, meta interface{}) error { +func resourceIotCentralAppRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTCentral.AppsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.IoTCentralApplicationID(d.Id()) + id, err := parse.ApplicationID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.Name) + resp, err := client.Get(ctx, id.ResourceGroup, id.IoTAppName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("Error retrieving IoT Central Application %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("Error retrieving IoT Central Application %q (Resource Group %q): %+v", id.IoTAppName, id.ResourceGroup, err) } d.Set("name", resp.Name) @@ -241,24 +241,24 @@ func resourceArmIotCentralAppRead(d *schema.ResourceData, meta interface{}) erro return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmIotCentralAppDelete(d *schema.ResourceData, meta interface{}) error { +func resourceIotCentralAppDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTCentral.AppsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.IoTCentralApplicationID(d.Id()) + id, err := parse.ApplicationID(d.Id()) if err != nil { return err } - resp, err := client.Delete(ctx, id.ResourceGroup, id.Name) + resp, err := client.Delete(ctx, id.ResourceGroup, id.IoTAppName) if err != nil { if !response.WasNotFound(resp.Response()) { - return fmt.Errorf("Error delete Iot Central Application %q (Resource Group %q). %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("Error delete Iot Central Application %q (Resource Group %q). %+v", id.IoTAppName, id.ResourceGroup, err) } } if err := resp.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error delete Iot Central Application %q Resource Group %q). %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("Error delete Iot Central Application %q Resource Group %q). 
%+v", id.IoTAppName, id.ResourceGroup, err) } return nil } diff --git a/azurerm/internal/services/iotcentral/iotcentral_application_resource_test.go b/azurerm/internal/services/iotcentral/iotcentral_application_resource_test.go new file mode 100644 index 000000000000..c8f2e364de2a --- /dev/null +++ b/azurerm/internal/services/iotcentral/iotcentral_application_resource_test.go @@ -0,0 +1,193 @@ +package iotcentral_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/iotcentral/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type IoTCentralApplicationResource struct { +} + +func TestAccIoTCentralApplication_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iotcentral_application", "test") + r := IoTCentralApplicationResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("sku").HasValue("ST1"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIoTCentralApplication_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iotcentral_application", "test") + r := IoTCentralApplicationResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("template").HasValue("iotc-default@1.0.0"), + check.That(data.ResourceName).Key("tags.ENV").HasValue("Test"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIoTCentralApplication_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iotcentral_application", "test") + r := IoTCentralApplicationResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("sku").HasValue("ST1"), + check.That(data.ResourceName).Key("tags.ENV").HasValue("Test"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIoTCentralApplication_requiresImportErrorStep(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iotcentral_application", "test") + r := IoTCentralApplicationResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_iotcentral_application"), + }, + }) +} + +func (IoTCentralApplicationResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ApplicationID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.IoTCentral.AppsClient.Get(ctx, id.ResourceGroup, id.IoTAppName) + if err != nil { 
+ return nil, fmt.Errorf("retrieving IoT Central Application %q (Resource Group %q): %+v", id.IoTAppName, id.ResourceGroup, err) + } + + return utils.Bool(resp.AppProperties != nil), nil +} + +func (IoTCentralApplicationResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%[1]d" + location = "%[2]s" +} + +resource "azurerm_iotcentral_application" "test" { + name = "acctest-iotcentralapp-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sub_domain = "acctest-iotcentralapp-%[1]d" + sku = "ST1" +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (IoTCentralApplicationResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%[2]d" + location = "%[1]s" +} + +resource "azurerm_iotcentral_application" "test" { + name = "acctest-iotcentralapp-%[2]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sub_domain = "acctest-iotcentralapp-%[2]d" + display_name = "acctest-iotcentralapp-%[2]d" + sku = "ST1" + template = "iotc-default@1.0.0" + tags = { + ENV = "Test" + } +} +`, data.Locations.Primary, data.RandomInteger) +} + +func (IoTCentralApplicationResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%[2]d" + location = "%[1]s" +} + +resource "azurerm_iotcentral_application" "test" { + name = "acctest-iotcentralapp-%[2]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + sub_domain = "acctest-iotcentralapp-%[2]d" + display_name = "acctest-iotcentralapp-%[2]d" + sku = "ST1" + tags = { + ENV = "Test" + } +} +`, data.Locations.Primary, data.RandomInteger) +} + +func (IoTCentralApplicationResource) requiresImport(data acceptance.TestData) string { + template := IoTCentralApplicationResource{}.basic(data) + return fmt.Sprintf(` +%s +resource "azurerm_iotcentral_application" "import" { + name = azurerm_iotcentral_application.test.name + resource_group_name = azurerm_iotcentral_application.test.resource_group_name + location = azurerm_iotcentral_application.test.location + sub_domain = azurerm_iotcentral_application.test.sub_domain + display_name = azurerm_iotcentral_application.test.display_name + sku = "ST1" +} +`, template) +} diff --git a/azurerm/internal/services/iotcentral/parse/application.go b/azurerm/internal/services/iotcentral/parse/application.go new file mode 100644 index 000000000000..50a0dd621ad7 --- /dev/null +++ b/azurerm/internal/services/iotcentral/parse/application.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ApplicationId struct { + SubscriptionId string + ResourceGroup string + IoTAppName string +} + +func NewApplicationID(subscriptionId, resourceGroup, ioTAppName string) ApplicationId { + return ApplicationId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + IoTAppName: ioTAppName, + } +} + +func (id ApplicationId) String() string { + segments := []string{ + fmt.Sprintf("Io T App Name %q",
id.IoTAppName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Application", segmentsStr) +} + +func (id ApplicationId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.IoTCentral/IoTApps/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.IoTAppName) +} + +// ApplicationID parses a Application ID into an ApplicationId struct +func ApplicationID(input string) (*ApplicationId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ApplicationId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.IoTAppName, err = id.PopSegment("IoTApps"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/iotcentral/parse/application_test.go b/azurerm/internal/services/iotcentral/parse/application_test.go new file mode 100644 index 000000000000..5a55e0505e6c --- /dev/null +++ b/azurerm/internal/services/iotcentral/parse/application_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ApplicationId{} + +func TestApplicationIDFormatter(t *testing.T) { + actual := NewApplicationID("12345678-1234-9876-4563-123456789012", "resGroup1", "app1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.IoTCentral/IoTApps/app1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestApplicationID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ApplicationId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing IoTAppName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.IoTCentral/", + Error: true, + }, + + { + // missing value for IoTAppName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.IoTCentral/IoTApps/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.IoTCentral/IoTApps/app1", + Expected: &ApplicationId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + IoTAppName: "app1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.IOTCENTRAL/IOTAPPS/APP1", + Error: true, + }, + } 
+ + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ApplicationID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.IoTAppName != v.Expected.IoTAppName { + t.Fatalf("Expected %q but got %q for IoTAppName", v.Expected.IoTAppName, actual.IoTAppName) + } + } +} diff --git a/azurerm/internal/services/iotcentral/parse/iotcentral_application.go b/azurerm/internal/services/iotcentral/parse/iotcentral_application.go deleted file mode 100644 index 5cce5e047314..000000000000 --- a/azurerm/internal/services/iotcentral/parse/iotcentral_application.go +++ /dev/null @@ -1,33 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type IoTCentralApplicationId struct { - ResourceGroup string - Name string -} - -func IoTCentralApplicationID(input string) (*IoTCentralApplicationId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse IoT Central Application ID %q: %+v", input, err) - } - - server := IoTCentralApplicationId{ - ResourceGroup: id.ResourceGroup, - } - - if server.Name, err = id.PopSegment("IoTApps"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &server, nil -} diff --git a/azurerm/internal/services/iotcentral/parse/iotcentral_application_test.go b/azurerm/internal/services/iotcentral/parse/iotcentral_application_test.go deleted file mode 100644 index eb7e27b0dc60..000000000000 --- a/azurerm/internal/services/iotcentral/parse/iotcentral_application_test.go +++ /dev/null @@ -1,73 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestIotCentralApplicationID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *IoTCentralApplicationId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing IoT Central Application Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.IoTCentral/IoTApps/", - Expected: nil, - }, - { - Name: "IoT Central Application ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.IoTCentral/IoTApps/App1", - Expected: &IoTCentralApplicationId{ - Name: "App1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.IoTCentral/IotApps/App1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - 
actual, err := IoTCentralApplicationID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/iotcentral/registration.go b/azurerm/internal/services/iotcentral/registration.go index bfa321dcd032..03677b0f4c59 100644 --- a/azurerm/internal/services/iotcentral/registration.go +++ b/azurerm/internal/services/iotcentral/registration.go @@ -19,7 +19,7 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource { // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_iotcentral_application": resourceArmIotCentralApplication(), + "azurerm_iotcentral_application": resourceIotCentralApplication(), } } diff --git a/azurerm/internal/services/iotcentral/resourceids.go b/azurerm/internal/services/iotcentral/resourceids.go new file mode 100644 index 000000000000..622665c6f3a4 --- /dev/null +++ b/azurerm/internal/services/iotcentral/resourceids.go @@ -0,0 +1,3 @@ +package iotcentral + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Application -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.IoTCentral/IoTApps/app1 diff --git a/azurerm/internal/services/iotcentral/tests/iotcentral_application_resource_test.go b/azurerm/internal/services/iotcentral/tests/iotcentral_application_resource_test.go deleted file mode 100644 index 45ba95f74c0b..000000000000 --- a/azurerm/internal/services/iotcentral/tests/iotcentral_application_resource_test.go +++ /dev/null @@ -1,238 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMIoTCentralApplication_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iotcentral_application", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotCentralApplicationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotCentralApplication_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotCentralApplicationExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku", "ST1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMIoTCentralApplication_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iotcentral_application", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotCentralApplicationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotCentralApplication_complete(data), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMIotCentralApplicationExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "template", "iotc-default@1.0.0"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.ENV", "Test"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMIoTCentralApplication_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iotcentral_application", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotCentralApplicationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotCentralApplication_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotCentralApplicationExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMIotCentralApplication_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotCentralApplicationExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku", "ST1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.ENV", "Test"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMIoTCentralApplication_requiresImportErrorStep(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iotcentral_application", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotCentralApplicationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotCentralApplication_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotCentralApplicationExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMIotCentralApplication_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_iotcentral_application"), - }, - }, - }) -} - -func testCheckAzureRMIotCentralApplicationExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - appName := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for IoT Central Application: %s", appName) - } - - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTCentral.AppsClient - resp, err := client.Get(ctx, resourceGroup, appName) - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: IoT Central Application %q (Resource Group %q) does not exist", appName, resourceGroup) - } - - return fmt.Errorf("Bad: Get IoT Central Application: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMIotCentralApplicationDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTCentral.AppsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_iotcentral_application" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - if resp, err := client.Get(ctx, resourceGroup, name); err != nil { - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Bad: Get IoT Central 
Application: %+v", err) - } - } - } - return nil -} - -func testAccAzureRMIotCentralApplication_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%[1]d" - location = "%[2]s" -} - -resource "azurerm_iotcentral_application" "test" { - name = "acctest-iotcentralapp-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sub_domain = "acctest-iotcentralapp-%[1]d" - sku = "ST1" -} -`, data.RandomInteger, data.Locations.Primary) -} - -func testAccAzureRMIotCentralApplication_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%[2]d" - location = "%[1]s" -} - -resource "azurerm_iotcentral_application" "test" { - name = "acctest-iotcentralapp-%[2]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sub_domain = "acctest-iotcentralapp-%[2]d" - display_name = "acctest-iotcentralapp-%[2]d" - sku = "ST1" - template = "iotc-default@1.0.0" - tags = { - ENV = "Test" - } -} -`, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMIotCentralApplication_update(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%[2]d" - location = "%[1]s" -} - -resource "azurerm_iotcentral_application" "test" { - name = "acctest-iotcentralapp-%[2]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - sub_domain = "acctest-iotcentralapp-%[2]d" - display_name = "acctest-iotcentralapp-%[2]d" - sku = "ST1" - tags = { - ENV = "Test" - } -} -`, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMIotCentralApplication_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMIotCentralApplication_basic(data) - return fmt.Sprintf(` -%s -resource "azurerm_iotcentral_application" "import" { - name = azurerm_iotcentral_application.test.name - resource_group_name = azurerm_iotcentral_application.test.resource_group_name - location = azurerm_iotcentral_application.test.location - sub_domain = azurerm_iotcentral_application.test.sub_domain - display_name = azurerm_iotcentral_application.test.display_name - sku = "ST1" -} -`, template) -} diff --git a/azurerm/internal/services/iotcentral/validate/application_display_name.go b/azurerm/internal/services/iotcentral/validate/application_display_name.go new file mode 100644 index 000000000000..9fea240ce77f --- /dev/null +++ b/azurerm/internal/services/iotcentral/validate/application_display_name.go @@ -0,0 +1,15 @@ +package validate + +import ( + "fmt" + "regexp" +) + +func ApplicationDisplayName(v interface{}, k string) (warnings []string, errors []error) { + value := v.(string) + + if matched := regexp.MustCompile(`^.{1,200}$`).Match([]byte(value)); !matched { + errors = append(errors, fmt.Errorf("test: %s, %q length should between 1~200", k, v)) + } + return warnings, errors +} diff --git a/azurerm/internal/services/iotcentral/validate/application_display_name_test.go b/azurerm/internal/services/iotcentral/validate/application_display_name_test.go new file mode 100644 index 000000000000..8ade21391a8f --- /dev/null +++ b/azurerm/internal/services/iotcentral/validate/application_display_name_test.go @@ -0,0 +1,112 @@ 
+package validate + +import "testing" + +func TestApplicationDisplayName(t *testing.T) { + testData := []struct { + Value string + Error bool + }{ + { + Value: "a", + Error: false, + }, + { + Value: "A", + Error: false, + }, + { + Value: "1", + Error: false, + }, + { + Value: "1-", + Error: false, + }, + { + Value: "a-", + Error: false, + }, + { + Value: "A-", + Error: false, + }, + { + Value: "a1-", + Error: false, + }, + { + Value: "1a-", + Error: false, + }, + { + Value: "aA-", + Error: false, + }, + { + Value: "Aa-", + Error: false, + }, + { + Value: "-", + Error: false, + }, + { + Value: "-1", + Error: false, + }, + { + Value: "_-a", + Error: false, + }, + { + Value: "#$%$#!", + Error: false, + }, + { + Value: "AA", + Error: false, + }, + { + Value: "AA-1", + Error: false, + }, + { + Value: "AA-a", + Error: false, + }, + { + Value: "A1-", + Error: false, + }, + { + Value: "AA-A", + Error: false, + }, + { + Value: "AA-aA", + Error: false, + }, + { + Value: "abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde1234", + Error: false, + }, + + { + Value: "", + Error: true, + }, + { + Value: "adcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdssdavcadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdssdavcc", + Error: true, + }, + } + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Value) + + _, err := ApplicationDisplayName(v.Value, "unit test") + if err != nil && !v.Error { + t.Fatalf("Expected pass but got an error: %s", err) + } + } +} diff --git a/azurerm/internal/services/iotcentral/validate/application_id.go b/azurerm/internal/services/iotcentral/validate/application_id.go new file mode 100644 index 000000000000..4afa2f71f402 --- /dev/null +++ b/azurerm/internal/services/iotcentral/validate/application_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/iotcentral/parse" +) + +func ApplicationID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ApplicationID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/iotcentral/validate/application_id_test.go b/azurerm/internal/services/iotcentral/validate/application_id_test.go new file mode 100644 index 000000000000..82485c638fb4 --- /dev/null +++ b/azurerm/internal/services/iotcentral/validate/application_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestApplicationID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing IoTAppName + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.IoTCentral/", + Valid: false, + }, + + { + // missing value for IoTAppName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.IoTCentral/IoTApps/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.IoTCentral/IoTApps/app1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.IOTCENTRAL/IOTAPPS/APP1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ApplicationID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/iotcentral/validate/application_name.go b/azurerm/internal/services/iotcentral/validate/application_name.go new file mode 100644 index 000000000000..16fb655aff94 --- /dev/null +++ b/azurerm/internal/services/iotcentral/validate/application_name.go @@ -0,0 +1,16 @@ +package validate + +import ( + "fmt" + "regexp" +) + +func ApplicationName(v interface{}, k string) (warnings []string, errors []error) { + value := v.(string) + + // Portal: The value must contain only alphanumeric characters or the following: - + if matched := regexp.MustCompile(`^[a-z\d][a-z\d-]{0,61}[a-z\d]$`).Match([]byte(value)); !matched { + errors = append(errors, fmt.Errorf("test: %s, %q may only contain alphanumeric characters and dashes, length between 2-63", k, v)) + } + return warnings, errors +} diff --git a/azurerm/internal/services/iotcentral/validate/application_name_test.go b/azurerm/internal/services/iotcentral/validate/application_name_test.go new file mode 100644 index 000000000000..9da763154a06 --- /dev/null +++ b/azurerm/internal/services/iotcentral/validate/application_name_test.go @@ -0,0 +1,140 @@ +package validate + +import "testing" + +func TestApplicationName(t *testing.T) { + testData := []struct { + Value string + Error bool + }{ + { + Value: "a1", + Error: false, + }, + { + Value: "11", + Error: false, + }, + { + Value: "1a", + Error: false, + }, + { + Value: "aa", + Error: false, + }, + { + Value: "1-1", + Error: false, + }, + { + Value: "aaa-aa", + Error: false, + }, + { + Value: "a--a-aa", + Error: false, + }, + { + Value: "a1-1", + Error: false, + }, + { + Value: "a1-a", + Error: false, + }, + { + Value: "1a-1", + Error: false, + }, + { + Value: "1a-a-1-2", + Error: false, + }, + { + Value: "abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde", + Error: false, + }, + { + Value: "abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde123", + Error: false, + }, + { + Value: "a", + Error: true, + }, + { + Value: "1", + Error: true, + }, + { + Value: "1-", + Error: true, + }, + { + Value: "a-", + Error: true, + }, + { + Value: "a1-", + Error: true, + }, + { + Value: "1a--1-1-a-", + Error: true, + }, + { + Value: "aa-", + Error: true, + }, + { + Value: "a1-", + Error: true, + }, + { + Value: "1a--a1-", + Error: true, + }, + { + Value: "aa-", + Error: true, + }, + { + Value: "-", + Error: true, + }, + { + Value: "-1", + Error: true, + }, + { + Value: "-a", + Error: true, + }, + { + Value: "AA", + Error: true, + }, + { + Value: "AA-1", + Error: true, + }, + { + Value: "AA-a", + Error: true, + }, + { + Value: 
"abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde1234", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Value) + + _, err := ApplicationName(v.Value, "unit test") + if err != nil && !v.Error { + t.Fatalf("Expected pass but got an error: %s", err) + } + } +} diff --git a/azurerm/internal/services/iotcentral/validate/application_subdomain.go b/azurerm/internal/services/iotcentral/validate/application_subdomain.go new file mode 100644 index 000000000000..8533e38ea05c --- /dev/null +++ b/azurerm/internal/services/iotcentral/validate/application_subdomain.go @@ -0,0 +1,16 @@ +package validate + +import ( + "fmt" + "regexp" +) + +func ApplicationSubdomain(v interface{}, k string) (warnings []string, errors []error) { + value := v.(string) + + // Portal: The value must contain only alphanumeric characters or the following: - + if matched := regexp.MustCompile(`^[a-z\d][a-z\d-]{0,61}[a-z\d]$`).Match([]byte(value)); !matched { + errors = append(errors, fmt.Errorf("test: %s, %q may only contain alphanumeric characters and dashes, length between 2-63", k, v)) + } + return warnings, errors +} diff --git a/azurerm/internal/services/iotcentral/validate/application_subdomain_test.go b/azurerm/internal/services/iotcentral/validate/application_subdomain_test.go new file mode 100644 index 000000000000..c22d59f37e85 --- /dev/null +++ b/azurerm/internal/services/iotcentral/validate/application_subdomain_test.go @@ -0,0 +1,152 @@ +package validate + +import "testing" + +func TestApplicationSubdomain(t *testing.T) { + testData := []struct { + Value string + Error bool + }{ + { + Value: "a1", + Error: false, + }, + { + Value: "11", + Error: false, + }, + { + Value: "1a", + Error: false, + }, + { + Value: "aa", + Error: false, + }, + { + Value: "1-1", + Error: false, + }, + { + Value: "a-a", + Error: false, + }, + { + Value: "a1-1", + Error: false, + }, + { + Value: "a1-a", + Error: false, + }, + { + Value: "1a-1", + Error: false, + }, + { + Value: "1a-a", + Error: false, + }, + { + Value: "a1-11", + Error: false, + }, + { + Value: "aa-11", + Error: false, + }, + { + Value: "11-1a", + Error: false, + }, + { + Value: "11-a1", + Error: false, + }, + { + Value: "abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde", + Error: false, + }, + { + Value: "abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde123", + Error: false, + }, + { + Value: "a", + Error: true, + }, + { + Value: "1", + Error: true, + }, + { + Value: "1-", + Error: true, + }, + { + Value: "a-", + Error: true, + }, + { + Value: "a1-", + Error: true, + }, + { + Value: "1a-", + Error: true, + }, + { + Value: "aa-", + Error: true, + }, + { + Value: "-", + Error: true, + }, + { + Value: "-1", + Error: true, + }, + { + Value: "-a", + Error: true, + }, + { + Value: "AA", + Error: true, + }, + { + Value: "AA-1", + Error: true, + }, + { + Value: "AA-a", + Error: true, + }, + { + Value: "A1-", + Error: true, + }, + { + Value: "AA-A", + Error: true, + }, + { + Value: "AA-aA", + Error: true, + }, + { + Value: "abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde1234", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Value) + + _, err := ApplicationSubdomain(v.Value, "unit test") + if err != nil && !v.Error { + t.Fatalf("Expected pass but got an error: %s", err) + } + } +} diff --git a/azurerm/internal/services/iotcentral/validate/application_template_name.go b/azurerm/internal/services/iotcentral/validate/application_template_name.go new file 
mode 100644 index 000000000000..9978eff23b43 --- /dev/null +++ b/azurerm/internal/services/iotcentral/validate/application_template_name.go @@ -0,0 +1,15 @@ +package validate + +import ( + "fmt" + "regexp" +) + +func ApplicationTemplateName(v interface{}, k string) (warnings []string, errors []error) { + value := v.(string) + + if matched := regexp.MustCompile(`^.{1,50}$`).Match([]byte(value)); !matched { + errors = append(errors, fmt.Errorf("test: %s, %q length should between 1~50", k, v)) + } + return warnings, errors +} diff --git a/azurerm/internal/services/iotcentral/validate/application_template_name_test.go b/azurerm/internal/services/iotcentral/validate/application_template_name_test.go new file mode 100644 index 000000000000..6917ee5f9f49 --- /dev/null +++ b/azurerm/internal/services/iotcentral/validate/application_template_name_test.go @@ -0,0 +1,74 @@ +package validate + +import ( + "testing" +) + +func TestApplicationTemplateName(t *testing.T) { + testData := []struct { + Value string + Error bool + }{ + { + Value: "a", + Error: false, + }, + { + Value: "A", + Error: false, + }, + { + Value: "1", + Error: false, + }, + { + Value: "1-", + Error: false, + }, + { + Value: "a-", + Error: false, + }, + { + Value: "A-", + Error: false, + }, + { + Value: "-", + Error: false, + }, + { + Value: "-1", + Error: false, + }, + { + Value: "_-a", + Error: false, + }, + { + Value: "#$%$#!", + Error: false, + }, + { + Value: "abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde", + Error: false, + }, + { + Value: "", + Error: true, + }, + { + Value: "abcdeabcdeabcdeabcde@$#%abcdeabcdeadeabcdeabcdeabcdeabcde-1a", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Value) + + _, err := ApplicationTemplateName(v.Value, "unit test") + if err != nil && !v.Error { + t.Fatalf("Expected pass but got an error: %s", err) + } + } +} diff --git a/azurerm/internal/services/iotcentral/validate/iotcentral_app.go b/azurerm/internal/services/iotcentral/validate/iotcentral_app.go deleted file mode 100644 index bfda4ecb46bf..000000000000 --- a/azurerm/internal/services/iotcentral/validate/iotcentral_app.go +++ /dev/null @@ -1,44 +0,0 @@ -package validate - -import ( - "fmt" - "regexp" -) - -func IotCentralAppName(v interface{}, k string) (warnings []string, errors []error) { - value := v.(string) - - // Portal: The value must contain only alphanumeric characters or the following: - - if matched := regexp.MustCompile(`^[a-z\d][a-z\d-]{0,61}[a-z\d]$`).Match([]byte(value)); !matched { - errors = append(errors, fmt.Errorf("test: %s, %q may only contain alphanumeric characters and dashes, length between 2-63", k, v)) - } - return warnings, errors -} - -func IotCentralAppSubdomain(v interface{}, k string) (warnings []string, errors []error) { - value := v.(string) - - // Portal: The value must contain only alphanumeric characters or the following: - - if matched := regexp.MustCompile(`^[a-z\d][a-z\d-]{0,61}[a-z\d]$`).Match([]byte(value)); !matched { - errors = append(errors, fmt.Errorf("test: %s, %q may only contain alphanumeric characters and dashes, length between 2-63", k, v)) - } - return warnings, errors -} - -func IotCentralAppDisplayName(v interface{}, k string) (warnings []string, errors []error) { - value := v.(string) - - if matched := regexp.MustCompile(`^.{1,200}$`).Match([]byte(value)); !matched { - errors = append(errors, fmt.Errorf("test: %s, %q length should between 1~200", k, v)) - } - return warnings, errors -} - -func IotCentralAppTemplateName(v interface{}, k 
string) (warnings []string, errors []error) { - value := v.(string) - - if matched := regexp.MustCompile(`^.{1,50}$`).Match([]byte(value)); !matched { - errors = append(errors, fmt.Errorf("test: %s, %q length should between 1~50", k, v)) - } - return warnings, errors -} diff --git a/azurerm/internal/services/iotcentral/validate/iotcentral_app_test.go b/azurerm/internal/services/iotcentral/validate/iotcentral_app_test.go deleted file mode 100644 index f9ce1cc36bdb..000000000000 --- a/azurerm/internal/services/iotcentral/validate/iotcentral_app_test.go +++ /dev/null @@ -1,468 +0,0 @@ -package validate - -import ( - "testing" -) - -func TestIotCentralAppName(t *testing.T) { - testData := []struct { - Value string - Error bool - }{ - { - Value: "a1", - Error: false, - }, - { - Value: "11", - Error: false, - }, - { - Value: "1a", - Error: false, - }, - { - Value: "aa", - Error: false, - }, - { - Value: "1-1", - Error: false, - }, - { - Value: "aaa-aa", - Error: false, - }, - { - Value: "a--a-aa", - Error: false, - }, - { - Value: "a1-1", - Error: false, - }, - { - Value: "a1-a", - Error: false, - }, - { - Value: "1a-1", - Error: false, - }, - { - Value: "1a-a-1-2", - Error: false, - }, - { - Value: "abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde", - Error: false, - }, - { - Value: "abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde123", - Error: false, - }, { - Value: "a", - Error: true, - }, - { - Value: "1", - Error: true, - }, - { - Value: "1-", - Error: true, - }, - { - Value: "a-", - Error: true, - }, - { - Value: "a1-", - Error: true, - }, - { - Value: "1a--1-1-a-", - Error: true, - }, - { - Value: "aa-", - Error: true, - }, - { - Value: "a1-", - Error: true, - }, - { - Value: "1a--a1-", - Error: true, - }, - { - Value: "aa-", - Error: true, - }, - { - Value: "-", - Error: true, - }, - { - Value: "-1", - Error: true, - }, - { - Value: "-a", - Error: true, - }, - { - Value: "AA", - Error: true, - }, - { - Value: "AA-1", - Error: true, - }, - { - Value: "AA-a", - Error: true, - }, - { - Value: "abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde1234", - Error: true, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Value) - - _, err := IotCentralAppName(v.Value, "unit test") - if err != nil && !v.Error { - t.Fatalf("Expected pass but got an error: %s", err) - } - } -} - -func TestIotCentralAppSubdomain(t *testing.T) { - testData := []struct { - Value string - Error bool - }{ - { - Value: "a1", - Error: false, - }, - { - Value: "11", - Error: false, - }, - { - Value: "1a", - Error: false, - }, - { - Value: "aa", - Error: false, - }, - { - Value: "1-1", - Error: false, - }, - { - Value: "a-a", - Error: false, - }, - { - Value: "a1-1", - Error: false, - }, - { - Value: "a1-a", - Error: false, - }, - { - Value: "1a-1", - Error: false, - }, - { - Value: "1a-a", - Error: false, - }, - { - Value: "a1-11", - Error: false, - }, - { - Value: "aa-11", - Error: false, - }, - { - Value: "11-1a", - Error: false, - }, - { - Value: "11-a1", - Error: false, - }, - { - Value: "abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde", - Error: false, - }, - { - Value: "abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde123", - Error: false, - }, - { - Value: "a", - Error: true, - }, - { - Value: "1", - Error: true, - }, - { - Value: "1-", - Error: true, - }, - { - Value: "a-", - Error: true, - }, - { - Value: "a1-", - Error: true, - }, - { - Value: "1a-", - Error: true, - }, - { - Value: "aa-", - Error: true, - }, - { - Value: "-", - 
Error: true, - }, - { - Value: "-1", - Error: true, - }, - { - Value: "-a", - Error: true, - }, - { - Value: "AA", - Error: true, - }, - { - Value: "AA-1", - Error: true, - }, - { - Value: "AA-a", - Error: true, - }, - { - Value: "A1-", - Error: true, - }, - { - Value: "AA-A", - Error: true, - }, - { - Value: "AA-aA", - Error: true, - }, - { - Value: "abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde1234", - Error: true, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Value) - - _, err := IotCentralAppSubdomain(v.Value, "unit test") - if err != nil && !v.Error { - t.Fatalf("Expected pass but got an error: %s", err) - } - } -} - -func TestIotCentralAppDisplayName(t *testing.T) { - testData := []struct { - Value string - Error bool - }{ - { - Value: "a", - Error: false, - }, - { - Value: "A", - Error: false, - }, - { - Value: "1", - Error: false, - }, - { - Value: "1-", - Error: false, - }, - { - Value: "a-", - Error: false, - }, - { - Value: "A-", - Error: false, - }, - { - Value: "a1-", - Error: false, - }, - { - Value: "1a-", - Error: false, - }, - { - Value: "aA-", - Error: false, - }, - { - Value: "Aa-", - Error: false, - }, - { - Value: "-", - Error: false, - }, - { - Value: "-1", - Error: false, - }, - { - Value: "_-a", - Error: false, - }, - { - Value: "#$%$#!", - Error: false, - }, - { - Value: "AA", - Error: false, - }, - { - Value: "AA-1", - Error: false, - }, - { - Value: "AA-a", - Error: false, - }, - { - Value: "A1-", - Error: false, - }, - { - Value: "AA-A", - Error: false, - }, - { - Value: "AA-aA", - Error: false, - }, - { - Value: "abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde1234", - Error: false, - }, - - { - Value: "", - Error: true, - }, - { - Value: "adcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdssdavcadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdsadcdssdavcc", - Error: true, - }, - } - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Value) - - _, err := IotCentralAppDisplayName(v.Value, "unit test") - if err != nil && !v.Error { - t.Fatalf("Expected pass but got an error: %s", err) - } - } -} - -func TestIotCentralAppTemplateName(t *testing.T) { - testData := []struct { - Value string - Error bool - }{ - { - Value: "a", - Error: false, - }, - { - Value: "A", - Error: false, - }, - { - Value: "1", - Error: false, - }, - { - Value: "1-", - Error: false, - }, - { - Value: "a-", - Error: false, - }, - { - Value: "A-", - Error: false, - }, - { - Value: "-", - Error: false, - }, - { - Value: "-1", - Error: false, - }, - { - Value: "_-a", - Error: false, - }, - { - Value: "#$%$#!", - Error: false, - }, - { - Value: "abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde", - Error: false, - }, - { - Value: "", - Error: true, - }, - { - Value: "abcdeabcdeabcdeabcde@$#%abcdeabcdeadeabcdeabcdeabcdeabcde-1a", - Error: true, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Value) - - _, err := IotCentralAppTemplateName(v.Value, "unit test") - if err != nil && !v.Error { - t.Fatalf("Expected pass but got an error: %s", err) - } - } -} diff --git a/azurerm/internal/services/iothub/iothub_consumer_group_resource.go b/azurerm/internal/services/iothub/iothub_consumer_group_resource.go index bc8d9390c0e7..0b9820c8aa1e 100644 --- a/azurerm/internal/services/iothub/iothub_consumer_group_resource.go +++ b/azurerm/internal/services/iothub/iothub_consumer_group_resource.go @@ -15,11 +15,11 @@ import ( 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmIotHubConsumerGroup() *schema.Resource { +func resourceIotHubConsumerGroup() *schema.Resource { return &schema.Resource{ - Create: resourceArmIotHubConsumerGroupCreate, - Read: resourceArmIotHubConsumerGroupRead, - Delete: resourceArmIotHubConsumerGroupDelete, + Create: resourceIotHubConsumerGroupCreate, + Read: resourceIotHubConsumerGroupRead, + Delete: resourceIotHubConsumerGroupDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -57,7 +57,7 @@ func resourceArmIotHubConsumerGroup() *schema.Resource { } } -func resourceArmIotHubConsumerGroupCreate(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubConsumerGroupCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.ResourceClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -99,10 +99,10 @@ func resourceArmIotHubConsumerGroupCreate(d *schema.ResourceData, meta interface d.SetId(*read.ID) - return resourceArmIotHubConsumerGroupRead(d, meta) + return resourceIotHubConsumerGroupRead(d, meta) } -func resourceArmIotHubConsumerGroupRead(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubConsumerGroupRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.ResourceClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -134,7 +134,7 @@ func resourceArmIotHubConsumerGroupRead(d *schema.ResourceData, meta interface{} return nil } -func resourceArmIotHubConsumerGroupDelete(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubConsumerGroupDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.ResourceClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() @@ -152,7 +152,6 @@ func resourceArmIotHubConsumerGroupDelete(d *schema.ResourceData, meta interface defer locks.UnlockByName(iotHubName, IothubResourceName) resp, err := client.DeleteEventHubConsumerGroup(ctx, resourceGroup, iotHubName, endpointName, name) - if err != nil { if !utils.ResponseWasNotFound(resp) { return fmt.Errorf("Error deleting Consumer Group %q (Endpoint %q / IoTHub %q / Resource Group %q): %+v", name, endpointName, iotHubName, resourceGroup, err) diff --git a/azurerm/internal/services/iothub/iothub_consumer_group_resource_test.go b/azurerm/internal/services/iothub/iothub_consumer_group_resource_test.go new file mode 100644 index 000000000000..ffdf37cb8121 --- /dev/null +++ b/azurerm/internal/services/iothub/iothub_consumer_group_resource_test.go @@ -0,0 +1,162 @@ +package iothub_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type IotHubConsumerGroupResource struct { +} + +func TestAccIotHubConsumerGroup_events(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub_consumer_group", "test") + r 
:= IotHubConsumerGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "events"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("eventhub_endpoint_name").HasValue("events"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIotHubConsumerGroup_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub_consumer_group", "test") + r := IotHubConsumerGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "events"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("eventhub_endpoint_name").HasValue("events"), + ), + }, + { + Config: r.requiresImport(data, "events"), + ExpectError: acceptance.RequiresImportError("azurerm_iothub_consumer_group"), + }, + }) +} + +func TestAccIotHubConsumerGroup_operationsMonitoringEvents(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub_consumer_group", "test") + r := IotHubConsumerGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "operationsMonitoringEvents"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("eventhub_endpoint_name").HasValue("operationsMonitoringEvents"), + ), + }, data.ImportStep(), + }) +} + +func TestAccIotHubConsumerGroup_withSharedAccessPolicy(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub_consumer_group", "test") + r := IotHubConsumerGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withSharedAccessPolicy(data, "events"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t IotHubConsumerGroupResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + iotHubName := id.Path["IotHubs"] + endpointName := id.Path["eventHubEndpoints"] + name := id.Path["ConsumerGroups"] + + resp, err := clients.IoTHub.ResourceClient.GetEventHubConsumerGroup(ctx, resourceGroup, iotHubName, endpointName, name) + if err != nil { + return nil, fmt.Errorf("reading IotHuB Consumer Group (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (IotHubConsumerGroupResource) basic(data acceptance.TestData, eventName string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_iothub" "test" { + name = "acctestIoTHub-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "B1" + capacity = "1" + } + + tags = { + purpose = "testing" + } +} + +resource "azurerm_iothub_consumer_group" "test" { + name = "test" + iothub_name = azurerm_iothub.test.name + eventhub_endpoint_name = "%s" + resource_group_name = azurerm_resource_group.test.name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, eventName) +} + +func (r IotHubConsumerGroupResource) requiresImport(data acceptance.TestData, eventName string) string { + return fmt.Sprintf(` +%s + +resource "azurerm_iothub_consumer_group" "import" { + name = 
azurerm_iothub_consumer_group.test.name + iothub_name = azurerm_iothub_consumer_group.test.iothub_name + eventhub_endpoint_name = azurerm_iothub_consumer_group.test.eventhub_endpoint_name + resource_group_name = azurerm_iothub_consumer_group.test.resource_group_name +} +`, r.basic(data, eventName)) +} + +func (r IotHubConsumerGroupResource) withSharedAccessPolicy(data acceptance.TestData, eventName string) string { + return fmt.Sprintf(` +%s + +resource "azurerm_iothub_shared_access_policy" "test" { + name = "acctestSharedAccessPolicy" + resource_group_name = azurerm_resource_group.test.name + iothub_name = azurerm_iothub.test.name + service_connect = true +} +`, r.basic(data, eventName)) +} diff --git a/azurerm/internal/services/iothub/iothub_dps_certificate_resource.go b/azurerm/internal/services/iothub/iothub_dps_certificate_resource.go index 6d3864f806a1..517fe71f4c54 100644 --- a/azurerm/internal/services/iothub/iothub_dps_certificate_resource.go +++ b/azurerm/internal/services/iothub/iothub_dps_certificate_resource.go @@ -15,12 +15,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmIotHubDPSCertificate() *schema.Resource { +func resourceIotHubDPSCertificate() *schema.Resource { return &schema.Resource{ - Create: resourceArmIotHubDPSCertificateCreateUpdate, - Read: resourceArmIotHubDPSCertificateRead, - Update: resourceArmIotHubDPSCertificateCreateUpdate, - Delete: resourceArmIotHubDPSCertificateDelete, + Create: resourceIotHubDPSCertificateCreateUpdate, + Read: resourceIotHubDPSCertificateRead, + Update: resourceIotHubDPSCertificateCreateUpdate, + Delete: resourceIotHubDPSCertificateDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -60,7 +60,7 @@ func resourceArmIotHubDPSCertificate() *schema.Resource { } } -func resourceArmIotHubDPSCertificateCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubDPSCertificateCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.DPSCertificateClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -101,10 +101,10 @@ func resourceArmIotHubDPSCertificateCreateUpdate(d *schema.ResourceData, meta in d.SetId(*resp.ID) - return resourceArmIotHubDPSCertificateRead(d, meta) + return resourceIotHubDPSCertificateRead(d, meta) } -func resourceArmIotHubDPSCertificateRead(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubDPSCertificateRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.DPSCertificateClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -134,7 +134,7 @@ func resourceArmIotHubDPSCertificateRead(d *schema.ResourceData, meta interface{ return nil } -func resourceArmIotHubDPSCertificateDelete(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubDPSCertificateDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.DPSCertificateClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/iothub/iothub_dps_certificate_resource_test.go b/azurerm/internal/services/iothub/iothub_dps_certificate_resource_test.go new file mode 100644 index 000000000000..806fd9f51532 --- /dev/null +++ b/azurerm/internal/services/iothub/iothub_dps_certificate_resource_test.go @@ -0,0 +1,172 @@ +package iothub_test + +import ( + 
"context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type IotHubDPSCertificateResource struct { +} + +func TestAccIotHubDPSCertificate_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub_dps_certificate", "test") + r := IotHubDPSCertificateResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("certificate_content"), + }) +} + +func TestAccIotHubDPSCertificate_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub_dps_certificate", "test") + r := IotHubDPSCertificateResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_iothub_dps_certificate"), + }, + }) +} + +func TestAccIotHubDPSCertificate_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub_dps_certificate", "test") + r := IotHubDPSCertificateResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("certificate_content"), + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("certificate_content"), + }) +} + +func (t IotHubDPSCertificateResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + iotDPSName := id.Path["provisioningServices"] + name := id.Path["certificates"] + + resp, err := clients.IoTHub.DPSCertificateClient.Get(ctx, name, resourceGroup, iotDPSName, "") + if err != nil { + return nil, fmt.Errorf("reading IotHuB DPS Certificate (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (IotHubDPSCertificateResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_iothub_dps" "test" { + name = "acctestIoTDPS-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "S1" + capacity = "1" + } +} + +resource "azurerm_iothub_dps_certificate" "test" { + name = "acctestIoTDPSCertificate-%d" + resource_group_name = azurerm_resource_group.test.name + iot_dps_name = azurerm_iothub_dps.test.name + + certificate_content = filebase64("testdata/batch_certificate.cer") +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r 
IotHubDPSCertificateResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_iothub_dps_certificate" "import" { + name = azurerm_iothub_dps_certificate.test.name + resource_group_name = azurerm_resource_group.test.name + iot_dps_name = azurerm_iothub_dps.test.name + + certificate_content = filebase64("testdata/batch_certificate.cer") +} +`, r.basic(data)) +} + +func (IotHubDPSCertificateResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_iothub_dps" "test" { + name = "acctestIoTDPS-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "S1" + capacity = "1" + } + + tags = { + purpose = "testing" + } +} + +resource "azurerm_iothub_dps_certificate" "test" { + name = "acctestIoTDPSCertificate-%d" + resource_group_name = azurerm_resource_group.test.name + iot_dps_name = azurerm_iothub_dps.test.name + + certificate_content = filebase64("testdata/application_gateway_test.cer") +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/iothub/iothub_dps_data_source.go b/azurerm/internal/services/iothub/iothub_dps_data_source.go index a46d08f9727c..b7d5dc41868a 100644 --- a/azurerm/internal/services/iothub/iothub_dps_data_source.go +++ b/azurerm/internal/services/iothub/iothub_dps_data_source.go @@ -13,7 +13,7 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmIotHubDPS() *schema.Resource { +func dataSourceIotHubDPS() *schema.Resource { return &schema.Resource{ Read: dataSourceIotHubDPSRead, diff --git a/azurerm/internal/services/iothub/iothub_dps_data_source_test.go b/azurerm/internal/services/iothub/iothub_dps_data_source_test.go new file mode 100644 index 000000000000..4c81f9df3020 --- /dev/null +++ b/azurerm/internal/services/iothub/iothub_dps_data_source_test.go @@ -0,0 +1,41 @@ +package iothub_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type IotHubDPSDataSource struct { +} + +func TestAccDataSourceIotHubDPS_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_iothub_dps", "test") + r := IotHubDPSDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("allocation_policy").Exists(), + check.That(data.ResourceName).Key("device_provisioning_host_name").Exists(), + check.That(data.ResourceName).Key("id_scope").Exists(), + check.That(data.ResourceName).Key("service_operations_host_name").Exists(), + ), + }, + }) +} + +func (IotHubDPSDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_iothub_dps" "test" { + name = azurerm_iothub_dps.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, IotHubDPSResource{}.basic(data)) +} diff --git a/azurerm/internal/services/iothub/iothub_dps_resource.go b/azurerm/internal/services/iothub/iothub_dps_resource.go index 8e786c858911..a3c6e3d1f896 100644 --- 
a/azurerm/internal/services/iothub/iothub_dps_resource.go +++ b/azurerm/internal/services/iothub/iothub_dps_resource.go @@ -24,12 +24,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmIotHubDPS() *schema.Resource { +func resourceIotHubDPS() *schema.Resource { return &schema.Resource{ - Create: resourceArmIotHubDPSCreateUpdate, - Read: resourceArmIotHubDPSRead, - Update: resourceArmIotHubDPSCreateUpdate, - Delete: resourceArmIotHubDPSDelete, + Create: resourceIotHubDPSCreateUpdate, + Read: resourceIotHubDPSRead, + Update: resourceIotHubDPSCreateUpdate, + Delete: resourceIotHubDPSDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -147,7 +147,7 @@ func resourceArmIotHubDPS() *schema.Resource { } } -func resourceArmIotHubDPSCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubDPSCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.DPSResourceClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -198,10 +198,10 @@ func resourceArmIotHubDPSCreateUpdate(d *schema.ResourceData, meta interface{}) d.SetId(*resp.ID) - return resourceArmIotHubDPSRead(d, meta) + return resourceIotHubDPSRead(d, meta) } -func resourceArmIotHubDPSRead(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubDPSRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.DPSResourceClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -251,7 +251,7 @@ func resourceArmIotHubDPSRead(d *schema.ResourceData, meta interface{}) error { return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmIotHubDPSDelete(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubDPSDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.DPSResourceClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/iothub/iothub_dps_resource_test.go b/azurerm/internal/services/iothub/iothub_dps_resource_test.go new file mode 100644 index 000000000000..13b4618d2cc2 --- /dev/null +++ b/azurerm/internal/services/iothub/iothub_dps_resource_test.go @@ -0,0 +1,254 @@ +package iothub_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type IotHubDPSResource struct { +} + +func TestAccIotHubDPS_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub_dps", "test") + r := IotHubDPSResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("allocation_policy").Exists(), + check.That(data.ResourceName).Key("device_provisioning_host_name").Exists(), + check.That(data.ResourceName).Key("id_scope").Exists(), + 
check.That(data.ResourceName).Key("service_operations_host_name").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIotHubDPS_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub_dps", "test") + r := IotHubDPSResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_iothub_dps"), + }, + }) +} + +func TestAccIotHubDPS_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub_dps", "test") + r := IotHubDPSResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIotHubDPS_linkedHubs(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub_dps", "test") + r := IotHubDPSResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.linkedHubs(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.linkedHubsUpdated(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t IotHubDPSResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + name := id.Path["provisioningServices"] + // the name path can use the ProvisioningServices in older iterations + if name == "" { + name = id.Path["ProvisioningServices"] + } + + resp, err := clients.IoTHub.DPSResourceClient.Get(ctx, name, resourceGroup) + if err != nil { + return nil, fmt.Errorf("reading IotHuB DPS (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (IotHubDPSResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_iothub_dps" "test" { + name = "acctestIoTDPS-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "S1" + capacity = "1" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r IotHubDPSResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_iothub_dps" "import" { + name = azurerm_iothub_dps.test.name + resource_group_name = azurerm_iothub_dps.test.resource_group_name + location = azurerm_iothub_dps.test.location + + sku { + name = "S1" + capacity = "1" + } +} +`, r.basic(data)) +} + +func (IotHubDPSResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_iothub_dps" "test" { + name = "acctestIoTDPS-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "S1" + 
capacity = "1" + } + + tags = { + purpose = "testing" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (IotHubDPSResource) linkedHubs(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_iothub_dps" "test" { + name = "acctestIoTDPS-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "S1" + capacity = "1" + } + + linked_hub { + connection_string = "HostName=test.azure-devices.net;SharedAccessKeyName=iothubowner;SharedAccessKey=booo" + location = azurerm_resource_group.test.location + allocation_weight = 15 + apply_allocation_policy = true + } + + linked_hub { + connection_string = "HostName=test2.azure-devices.net;SharedAccessKeyName=iothubowner2;SharedAccessKey=key2" + location = azurerm_resource_group.test.location + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (IotHubDPSResource) linkedHubsUpdated(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_iothub_dps" "test" { + name = "acctestIoTDPS-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "S1" + capacity = "1" + } + + linked_hub { + connection_string = "HostName=test.azure-devices.net;SharedAccessKeyName=iothubowner;SharedAccessKey=booo" + location = azurerm_resource_group.test.location + allocation_weight = 150 + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/iothub/iothub_dps_shared_access_policy_data_source_test.go b/azurerm/internal/services/iothub/iothub_dps_shared_access_policy_data_source_test.go new file mode 100644 index 000000000000..65f1cac8895d --- /dev/null +++ b/azurerm/internal/services/iothub/iothub_dps_shared_access_policy_data_source_test.go @@ -0,0 +1,67 @@ +package iothub_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type IotHubDpsSharedAccessPolicyDataSource struct { +} + +func TestAccDataSourceIotHubDpsSharedAccessPolicy_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_iothub_dps_shared_access_policy", "test") + r := IotHubDpsSharedAccessPolicyDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("primary_key").Exists(), + check.That(data.ResourceName).Key("primary_connection_string").Exists(), + check.That(data.ResourceName).Key("secondary_key").Exists(), + check.That(data.ResourceName).Key("secondary_connection_string").Exists(), + ), + }, + }) +} + +func (IotHubDpsSharedAccessPolicyDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_iothub_dps" "test" { + name = "acctestIoTDPS-%d" + resource_group_name = azurerm_resource_group.test.name + location = 
azurerm_resource_group.test.location + + sku { + name = "S1" + capacity = "1" + } +} + +resource "azurerm_iothub_dps_shared_access_policy" "test" { + resource_group_name = azurerm_resource_group.test.name + iothub_dps_name = azurerm_iothub_dps.test.name + name = "acctest" + service_config = true +} + +data "azurerm_iothub_dps_shared_access_policy" "test" { + name = azurerm_iothub_dps_shared_access_policy.test.name + iothub_dps_name = azurerm_iothub_dps.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/iothub/iothub_dps_shared_access_policy_resource.go b/azurerm/internal/services/iothub/iothub_dps_shared_access_policy_resource.go index c7a677591c8d..e395afa5e1a4 100644 --- a/azurerm/internal/services/iothub/iothub_dps_shared_access_policy_resource.go +++ b/azurerm/internal/services/iothub/iothub_dps_shared_access_policy_resource.go @@ -18,12 +18,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmIotHubDPSSharedAccessPolicy() *schema.Resource { +func resourceIotHubDPSSharedAccessPolicy() *schema.Resource { return &schema.Resource{ - Create: resourceArmIotHubDPSSharedAccessPolicyCreateUpdate, - Read: resourceArmIotHubDPSSharedAccessPolicyRead, - Update: resourceArmIotHubDPSSharedAccessPolicyCreateUpdate, - Delete: resourceArmIotHubDPSSharedAccessPolicyDelete, + Create: resourceIotHubDPSSharedAccessPolicyCreateUpdate, + Read: resourceIotHubDPSSharedAccessPolicyRead, + Update: resourceIotHubDPSSharedAccessPolicyCreateUpdate, + Delete: resourceIotHubDPSSharedAccessPolicyDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -109,7 +109,7 @@ func resourceArmIotHubDPSSharedAccessPolicy() *schema.Resource { } } -func resourceArmIotHubDPSSharedAccessPolicyCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubDPSSharedAccessPolicyCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.DPSResourceClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -192,10 +192,10 @@ func resourceArmIotHubDPSSharedAccessPolicyCreateUpdate(d *schema.ResourceData, d.SetId(resourceID) - return resourceArmIotHubDPSSharedAccessPolicyRead(d, meta) + return resourceIotHubDPSSharedAccessPolicyRead(d, meta) } -func resourceArmIotHubDPSSharedAccessPolicyRead(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubDPSSharedAccessPolicyRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.DPSResourceClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -254,7 +254,7 @@ func resourceArmIotHubDPSSharedAccessPolicyRead(d *schema.ResourceData, meta int return nil } -func resourceArmIotHubDPSSharedAccessPolicyDelete(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubDPSSharedAccessPolicyDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.DPSResourceClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/iothub/iothub_dps_shared_access_policy_resource_test.go b/azurerm/internal/services/iothub/iothub_dps_shared_access_policy_resource_test.go new file mode 100644 index 000000000000..55e49d33724e --- /dev/null +++ 
b/azurerm/internal/services/iothub/iothub_dps_shared_access_policy_resource_test.go @@ -0,0 +1,220 @@ +package iothub_test + +import ( + "context" + "fmt" + "regexp" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type IotHubDpsSharedAccessPolicyResource struct { +} + +func TestAccIotHubDpsSharedAccessPolicy_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub_dps_shared_access_policy", "test") + r := IotHubDpsSharedAccessPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue("acctest"), + check.That(data.ResourceName).Key("enrollment_read").HasValue("false"), + check.That(data.ResourceName).Key("enrollment_write").HasValue("false"), + check.That(data.ResourceName).Key("registration_read").HasValue("false"), + check.That(data.ResourceName).Key("registration_write").HasValue("false"), + check.That(data.ResourceName).Key("service_config").HasValue("true"), + check.That(data.ResourceName).Key("primary_key").Exists(), + check.That(data.ResourceName).Key("primary_connection_string").Exists(), + check.That(data.ResourceName).Key("secondary_key").Exists(), + check.That(data.ResourceName).Key("secondary_connection_string").Exists(), + ), + }, + }) +} + +func TestAccIotHubDpsSharedAccessPolicy_writeWithoutRead(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub_dps_shared_access_policy", "test") + r := IotHubDpsSharedAccessPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.writeWithoutRead(data), + ExpectError: regexp.MustCompile("If `registration_write` is set to true, `registration_read` must also be set to true"), + }, + }) +} + +func TestAccIotHubDpsSharedAccessPolicy_enrollmentReadWithoutRegistration(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub_dps_shared_access_policy", "test") + r := IotHubDpsSharedAccessPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.enrollmentReadWithoutRegistration(data), + ExpectError: regexp.MustCompile("If `enrollment_read` is set to true, `registration_read` must also be set to true"), + }, + }) +} + +func TestAccIotHubDpsSharedAccessPolicy_enrollmentWriteWithoutOthers(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub_dps_shared_access_policy", "test") + r := IotHubDpsSharedAccessPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.enrollmentWriteWithoutOthers(data), + ExpectError: regexp.MustCompile("If `enrollment_write` is set to true, `enrollment_read`, `registration_read`, and `registration_write` must also be set to true"), + }, + }) +} + +func (IotHubDpsSharedAccessPolicyResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource 
"azurerm_iothub_dps" "test" { + name = "acctestIoTDPS-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "S1" + capacity = "1" + } +} + +resource "azurerm_iothub_dps_shared_access_policy" "test" { + resource_group_name = azurerm_resource_group.test.name + iothub_dps_name = azurerm_iothub_dps.test.name + name = "acctest" + service_config = true +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (IotHubDpsSharedAccessPolicyResource) writeWithoutRead(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_iothub_dps" "test" { + name = "acctestIoTDPS-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "S1" + capacity = "1" + } +} + +resource "azurerm_iothub_dps_shared_access_policy" "test" { + resource_group_name = azurerm_resource_group.test.name + iothub_dps_name = azurerm_iothub_dps.test.name + name = "acctest" + registration_write = true +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (IotHubDpsSharedAccessPolicyResource) enrollmentReadWithoutRegistration(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_iothub_dps" "test" { + name = "acctestIoTDPS-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "S1" + capacity = "1" + } +} + +resource "azurerm_iothub_dps_shared_access_policy" "test" { + resource_group_name = azurerm_resource_group.test.name + iothub_dps_name = azurerm_iothub_dps.test.name + name = "acctest" + enrollment_read = true +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (IotHubDpsSharedAccessPolicyResource) enrollmentWriteWithoutOthers(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_iothub_dps" "test" { + name = "acctestIoTDPS-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "S1" + capacity = "1" + } +} + +resource "azurerm_iothub_dps_shared_access_policy" "test" { + resource_group_name = azurerm_resource_group.test.name + iothub_dps_name = azurerm_iothub_dps.test.name + name = "acctest" + enrollment_write = true +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (t IotHubDpsSharedAccessPolicyResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + iothubDpsName := id.Path["provisioningServices"] + keyName := id.Path["keys"] + + accessPolicy, err := clients.IoTHub.DPSResourceClient.ListKeysForKeyName(ctx, iothubDpsName, keyName, resourceGroup) + if err != nil { + return nil, fmt.Errorf("loading Shared Access Policy (%s): %+v", id, err) + } + + return utils.Bool(accessPolicy.PrimaryKey != nil), nil +} diff --git a/azurerm/internal/services/iothub/iothub_endpoint_eventhub_resource.go 
b/azurerm/internal/services/iothub/iothub_endpoint_eventhub_resource.go index f3ec331050f3..3298d472fd9b 100644 --- a/azurerm/internal/services/iothub/iothub_endpoint_eventhub_resource.go +++ b/azurerm/internal/services/iothub/iothub_endpoint_eventhub_resource.go @@ -17,12 +17,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmIotHubEndpointEventHub() *schema.Resource { +func resourceIotHubEndpointEventHub() *schema.Resource { return &schema.Resource{ - Create: resourceArmIotHubEndpointEventHubCreateUpdate, - Read: resourceArmIotHubEndpointEventHubRead, - Update: resourceArmIotHubEndpointEventHubCreateUpdate, - Delete: resourceArmIotHubEndpointEventHubDelete, + Create: resourceIotHubEndpointEventHubCreateUpdate, + Read: resourceIotHubEndpointEventHubRead, + Update: resourceIotHubEndpointEventHubCreateUpdate, + Delete: resourceIotHubEndpointEventHubDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -68,7 +68,7 @@ func resourceArmIotHubEndpointEventHub() *schema.Resource { } } -func resourceArmIotHubEndpointEventHubCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubEndpointEventHubCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.ResourceClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -146,10 +146,10 @@ func resourceArmIotHubEndpointEventHubCreateUpdate(d *schema.ResourceData, meta } d.SetId(resourceId) - return resourceArmIotHubEndpointEventHubRead(d, meta) + return resourceIotHubEndpointEventHubRead(d, meta) } -func resourceArmIotHubEndpointEventHubRead(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubEndpointEventHubRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.ResourceClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -189,7 +189,7 @@ func resourceArmIotHubEndpointEventHubRead(d *schema.ResourceData, meta interfac return nil } -func resourceArmIotHubEndpointEventHubDelete(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubEndpointEventHubDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.ResourceClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/iothub/iothub_endpoint_eventhub_resource_test.go b/azurerm/internal/services/iothub/iothub_endpoint_eventhub_resource_test.go new file mode 100644 index 000000000000..f797ef46287d --- /dev/null +++ b/azurerm/internal/services/iothub/iothub_endpoint_eventhub_resource_test.go @@ -0,0 +1,155 @@ +package iothub_test + +import ( + "context" + "fmt" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type IotHubEndpointEventHubResource struct { +} + +func TestAccIotHubEndpointEventHub_basic(t *testing.T) { + data := acceptance.BuildTestData(t, 
"azurerm_iothub_endpoint_eventhub", "test") + r := IotHubEndpointEventHubResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIotHubEndpointEventHub_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub_endpoint_eventhub", "test") + r := IotHubEndpointEventHubResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_iothub_endpoint_eventhub"), + }, + }) +} + +func (IotHubEndpointEventHubResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-iothub-%[1]d" + location = "%[2]s" +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Basic" +} + +resource "azurerm_eventhub" "test" { + name = "acctesteventhub-%[1]d" + namespace_name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + partition_count = 2 + message_retention = 1 +} + +resource "azurerm_eventhub_authorization_rule" "test" { + name = "acctest-%[1]d" + namespace_name = azurerm_eventhub_namespace.test.name + eventhub_name = azurerm_eventhub.test.name + resource_group_name = azurerm_resource_group.test.name + + listen = false + send = true + manage = false +} + +resource "azurerm_iothub" "test" { + name = "acctestIoTHub-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "B1" + capacity = "1" + } + + tags = { + purpose = "testing" + } +} + +resource "azurerm_iothub_endpoint_eventhub" "test" { + resource_group_name = azurerm_resource_group.test.name + iothub_name = azurerm_iothub.test.name + name = "acctest" + + connection_string = azurerm_eventhub_authorization_rule.test.primary_connection_string +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (r IotHubEndpointEventHubResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_iothub_endpoint_eventhub" "import" { + resource_group_name = azurerm_resource_group.test.name + iothub_name = azurerm_iothub.test.name + name = "acctest" + + connection_string = azurerm_eventhub_authorization_rule.test.primary_connection_string +} +`, r.basic(data)) +} + +func (t IotHubEndpointEventHubResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + iothubName := id.Path["IotHubs"] + endpointName := id.Path["Endpoints"] + + iothub, err := clients.IoTHub.ResourceClient.Get(ctx, resourceGroup, iothubName) + if err != nil || iothub.Properties == nil || iothub.Properties.Routing == nil || iothub.Properties.Routing.Endpoints == nil { + return nil, fmt.Errorf("reading IotHuB Endpoint Eventhub (%s): %+v", id, err) + } + + if endpoints := iothub.Properties.Routing.Endpoints.EventHubs; endpoints != nil { + for _, endpoint := range *endpoints { + if 
existingEndpointName := endpoint.Name; existingEndpointName != nil { + if strings.EqualFold(*existingEndpointName, endpointName) { + return utils.Bool(true), nil + } + } + } + } + + return utils.Bool(false), nil +} diff --git a/azurerm/internal/services/iothub/iothub_endpoint_servicebus_queue_resource.go b/azurerm/internal/services/iothub/iothub_endpoint_servicebus_queue_resource.go index 5f5bf3ba7881..35b76717a5aa 100644 --- a/azurerm/internal/services/iothub/iothub_endpoint_servicebus_queue_resource.go +++ b/azurerm/internal/services/iothub/iothub_endpoint_servicebus_queue_resource.go @@ -17,12 +17,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmIotHubEndpointServiceBusQueue() *schema.Resource { +func resourceIotHubEndpointServiceBusQueue() *schema.Resource { return &schema.Resource{ - Create: resourceArmIotHubEndpointServiceBusQueueCreateUpdate, - Read: resourceArmIotHubEndpointServiceBusQueueRead, - Update: resourceArmIotHubEndpointServiceBusQueueCreateUpdate, - Delete: resourceArmIotHubEndpointServiceBusQueueDelete, + Create: resourceIotHubEndpointServiceBusQueueCreateUpdate, + Read: resourceIotHubEndpointServiceBusQueueRead, + Update: resourceIotHubEndpointServiceBusQueueCreateUpdate, + Delete: resourceIotHubEndpointServiceBusQueueDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -68,7 +68,7 @@ func resourceArmIotHubEndpointServiceBusQueue() *schema.Resource { } } -func resourceArmIotHubEndpointServiceBusQueueCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubEndpointServiceBusQueueCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.ResourceClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -147,16 +147,15 @@ func resourceArmIotHubEndpointServiceBusQueueCreateUpdate(d *schema.ResourceData d.SetId(resourceId) - return resourceArmIotHubEndpointServiceBusQueueRead(d, meta) + return resourceIotHubEndpointServiceBusQueueRead(d, meta) } -func resourceArmIotHubEndpointServiceBusQueueRead(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubEndpointServiceBusQueueRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.ResourceClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() parsedIothubEndpointId, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { return err } @@ -191,13 +190,12 @@ func resourceArmIotHubEndpointServiceBusQueueRead(d *schema.ResourceData, meta i return nil } -func resourceArmIotHubEndpointServiceBusQueueDelete(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubEndpointServiceBusQueueDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.ResourceClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() parsedIothubEndpointId, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { return err } diff --git a/azurerm/internal/services/iothub/iothub_endpoint_servicebus_queue_resource_test.go b/azurerm/internal/services/iothub/iothub_endpoint_servicebus_queue_resource_test.go new file mode 100644 index 000000000000..94d3317ce325 --- /dev/null +++ b/azurerm/internal/services/iothub/iothub_endpoint_servicebus_queue_resource_test.go @@ -0,0 +1,155 @@ +package iothub_test + +import ( + "context" + "fmt" + "strings" + "testing" + + 
"github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type IotHubEndpointServiceBusQueueResource struct { +} + +func TestAccIotHubEndpointServiceBusQueue_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub_endpoint_servicebus_queue", "test") + r := IotHubEndpointServiceBusQueueResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIotHubEndpointServiceBusQueue_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub_endpoint_servicebus_queue", "test") + r := IotHubEndpointServiceBusQueueResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_iothub_endpoint_servicebus_queue"), + }, + }) +} + +func (IotHubEndpointServiceBusQueueResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-iothub-%[1]d" + location = "%[2]s" +} + +resource "azurerm_servicebus_namespace" "test" { + name = "acctest-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" +} + +resource "azurerm_servicebus_queue" "test" { + name = "acctest-%[1]d" + resource_group_name = azurerm_resource_group.test.name + namespace_name = azurerm_servicebus_namespace.test.name + + enable_partitioning = true +} + +resource "azurerm_servicebus_queue_authorization_rule" "test" { + name = "acctest-%[1]d" + namespace_name = azurerm_servicebus_namespace.test.name + queue_name = azurerm_servicebus_queue.test.name + resource_group_name = azurerm_resource_group.test.name + + listen = false + send = true + manage = false +} + +resource "azurerm_iothub" "test" { + name = "acctestIoTHub-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "B1" + capacity = "1" + } + + tags = { + purpose = "testing" + } +} + +resource "azurerm_iothub_endpoint_servicebus_queue" "test" { + resource_group_name = azurerm_resource_group.test.name + iothub_name = azurerm_iothub.test.name + name = "acctest" + + connection_string = azurerm_servicebus_queue_authorization_rule.test.primary_connection_string +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (r IotHubEndpointServiceBusQueueResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_iothub_endpoint_servicebus_queue" "import" { + resource_group_name = azurerm_resource_group.test.name + iothub_name = azurerm_iothub.test.name + name = "acctest" + + connection_string = azurerm_servicebus_queue_authorization_rule.test.primary_connection_string 
+} +`, r.basic(data)) +} + +func (t IotHubEndpointServiceBusQueueResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + iothubName := id.Path["IotHubs"] + endpointName := id.Path["Endpoints"] + + iothub, err := clients.IoTHub.ResourceClient.Get(ctx, resourceGroup, iothubName) + if err != nil || iothub.Properties == nil || iothub.Properties.Routing == nil || iothub.Properties.Routing.Endpoints == nil { + return nil, fmt.Errorf("reading IotHuB Endpoint Service Bus (%s): %+v", id, err) + } + + if endpoints := iothub.Properties.Routing.Endpoints.ServiceBusQueues; endpoints != nil { + for _, endpoint := range *endpoints { + if existingEndpointName := endpoint.Name; existingEndpointName != nil { + if strings.EqualFold(*existingEndpointName, endpointName) { + return utils.Bool(true), nil + } + } + } + } + + return utils.Bool(false), nil +} diff --git a/azurerm/internal/services/iothub/iothub_endpoint_servicebus_topic_resource.go b/azurerm/internal/services/iothub/iothub_endpoint_servicebus_topic_resource.go index b38b3737b7ba..3dcf9e514c48 100644 --- a/azurerm/internal/services/iothub/iothub_endpoint_servicebus_topic_resource.go +++ b/azurerm/internal/services/iothub/iothub_endpoint_servicebus_topic_resource.go @@ -17,12 +17,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmIotHubEndpointServiceBusTopic() *schema.Resource { +func resourceIotHubEndpointServiceBusTopic() *schema.Resource { return &schema.Resource{ - Create: resourceArmIotHubEndpointServiceBusTopicCreateUpdate, - Read: resourceArmIotHubEndpointServiceBusTopicRead, - Update: resourceArmIotHubEndpointServiceBusTopicCreateUpdate, - Delete: resourceArmIotHubEndpointServiceBusTopicDelete, + Create: resourceIotHubEndpointServiceBusTopicCreateUpdate, + Read: resourceIotHubEndpointServiceBusTopicRead, + Update: resourceIotHubEndpointServiceBusTopicCreateUpdate, + Delete: resourceIotHubEndpointServiceBusTopicDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -68,7 +68,7 @@ func resourceArmIotHubEndpointServiceBusTopic() *schema.Resource { } } -func resourceArmIotHubEndpointServiceBusTopicCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubEndpointServiceBusTopicCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.ResourceClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -147,16 +147,15 @@ func resourceArmIotHubEndpointServiceBusTopicCreateUpdate(d *schema.ResourceData d.SetId(resourceId) - return resourceArmIotHubEndpointServiceBusTopicRead(d, meta) + return resourceIotHubEndpointServiceBusTopicRead(d, meta) } -func resourceArmIotHubEndpointServiceBusTopicRead(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubEndpointServiceBusTopicRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.ResourceClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() parsedIothubEndpointId, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { return err } @@ -191,13 +190,12 @@ func resourceArmIotHubEndpointServiceBusTopicRead(d *schema.ResourceData, meta i return nil } -func resourceArmIotHubEndpointServiceBusTopicDelete(d *schema.ResourceData, meta 
interface{}) error { +func resourceIotHubEndpointServiceBusTopicDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.ResourceClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() parsedIothubEndpointId, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { return err } diff --git a/azurerm/internal/services/iothub/iothub_endpoint_servicebus_topic_resource_test.go b/azurerm/internal/services/iothub/iothub_endpoint_servicebus_topic_resource_test.go new file mode 100644 index 000000000000..73f5e27a6f89 --- /dev/null +++ b/azurerm/internal/services/iothub/iothub_endpoint_servicebus_topic_resource_test.go @@ -0,0 +1,153 @@ +package iothub_test + +import ( + "context" + "fmt" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type IotHubEndpointServiceBusTopicResource struct { +} + +func TestAccIotHubEndpointServiceBusTopic_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub_endpoint_servicebus_topic", "test") + r := IotHubEndpointServiceBusTopicResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIotHubEndpointServiceBusTopic_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub_endpoint_servicebus_topic", "test") + r := IotHubEndpointServiceBusTopicResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_iothub_endpoint_servicebus_topic"), + }, + }) +} + +func (IotHubEndpointServiceBusTopicResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-iothub-%[1]d" + location = "%[2]s" +} + +resource "azurerm_servicebus_namespace" "test" { + name = "acctest-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" +} + +resource "azurerm_servicebus_topic" "test" { + name = "acctestservicebustopic-%[1]d" + namespace_name = azurerm_servicebus_namespace.test.name + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_servicebus_topic_authorization_rule" "test" { + name = "acctest-%[1]d" + namespace_name = azurerm_servicebus_namespace.test.name + topic_name = azurerm_servicebus_topic.test.name + resource_group_name = azurerm_resource_group.test.name + + listen = false + send = true + manage = false +} + +resource "azurerm_iothub" "test" { + name = "acctestIoTHub-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "B1" + capacity = "1" + } + + 
tags = { + purpose = "testing" + } +} + +resource "azurerm_iothub_endpoint_servicebus_topic" "test" { + resource_group_name = azurerm_resource_group.test.name + iothub_name = azurerm_iothub.test.name + name = "acctest" + + connection_string = azurerm_servicebus_topic_authorization_rule.test.primary_connection_string +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (r IotHubEndpointServiceBusTopicResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_iothub_endpoint_servicebus_topic" "import" { + resource_group_name = azurerm_resource_group.test.name + iothub_name = azurerm_iothub.test.name + name = "acctest" + + connection_string = azurerm_servicebus_topic_authorization_rule.test.primary_connection_string +} +`, r.basic(data)) +} + +func (t IotHubEndpointServiceBusTopicResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + iothubName := id.Path["IotHubs"] + endpointName := id.Path["Endpoints"] + + iothub, err := clients.IoTHub.ResourceClient.Get(ctx, resourceGroup, iothubName) + if err != nil || iothub.Properties == nil || iothub.Properties.Routing == nil || iothub.Properties.Routing.Endpoints == nil { + return nil, fmt.Errorf("reading IotHuB Endpoint Service Bus Topics (%s): %+v", id, err) + } + + if endpoints := iothub.Properties.Routing.Endpoints.ServiceBusTopics; endpoints != nil { + for _, endpoint := range *endpoints { + if existingEndpointName := endpoint.Name; existingEndpointName != nil { + if strings.EqualFold(*existingEndpointName, endpointName) { + return utils.Bool(true), nil + } + } + } + } + + return utils.Bool(false), nil +} diff --git a/azurerm/internal/services/iothub/iothub_endpoint_storage_container_resource.go b/azurerm/internal/services/iothub/iothub_endpoint_storage_container_resource.go index d7b49804c2d5..3db143f9c68e 100644 --- a/azurerm/internal/services/iothub/iothub_endpoint_storage_container_resource.go +++ b/azurerm/internal/services/iothub/iothub_endpoint_storage_container_resource.go @@ -19,12 +19,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmIotHubEndpointStorageContainer() *schema.Resource { +func resourceIotHubEndpointStorageContainer() *schema.Resource { return &schema.Resource{ - Create: resourceArmIotHubEndpointStorageContainerCreateUpdate, - Read: resourceArmIotHubEndpointStorageContainerRead, - Update: resourceArmIotHubEndpointStorageContainerCreateUpdate, - Delete: resourceArmIotHubEndpointStorageContainerDelete, + Create: resourceIotHubEndpointStorageContainerCreateUpdate, + Read: resourceIotHubEndpointStorageContainerRead, + Update: resourceIotHubEndpointStorageContainerCreateUpdate, + Delete: resourceIotHubEndpointStorageContainerDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -104,7 +104,7 @@ func resourceArmIotHubEndpointStorageContainer() *schema.Resource { } } -func resourceArmIotHubEndpointStorageContainerCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubEndpointStorageContainerCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.ResourceClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -197,10 +197,10 @@ func 
resourceArmIotHubEndpointStorageContainerCreateUpdate(d *schema.ResourceDat d.SetId(resourceId) - return resourceArmIotHubEndpointStorageContainerRead(d, meta) + return resourceIotHubEndpointStorageContainerRead(d, meta) } -func resourceArmIotHubEndpointStorageContainerRead(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubEndpointStorageContainerRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.ResourceClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -245,7 +245,7 @@ func resourceArmIotHubEndpointStorageContainerRead(d *schema.ResourceData, meta return nil } -func resourceArmIotHubEndpointStorageContainerDelete(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubEndpointStorageContainerDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.ResourceClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/iothub/iothub_endpoint_storage_container_resource_test.go b/azurerm/internal/services/iothub/iothub_endpoint_storage_container_resource_test.go new file mode 100644 index 000000000000..1a50d380f2ac --- /dev/null +++ b/azurerm/internal/services/iothub/iothub_endpoint_storage_container_resource_test.go @@ -0,0 +1,159 @@ +package iothub_test + +import ( + "context" + "fmt" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type IotHubEndpointStorageContainerResource struct { +} + +func TestAccIotHubEndpointStorageContainer_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub_endpoint_storage_container", "test") + r := IotHubEndpointStorageContainerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("file_name_format").HasValue("{iothub}/{partition}_{YYYY}_{MM}_{DD}_{HH}_{mm}"), + check.That(data.ResourceName).Key("batch_frequency_in_seconds").HasValue("60"), + check.That(data.ResourceName).Key("max_chunk_size_in_bytes").HasValue("10485760"), + check.That(data.ResourceName).Key("encoding").HasValue("JSON"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIotHubEndpointStorageContainer_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub_endpoint_storage_container", "test") + r := IotHubEndpointStorageContainerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_iothub_endpoint_storage_container"), + }, + }) +} + +func (IotHubEndpointStorageContainerResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource 
"azurerm_resource_group" "test" { + name = "acctestRG-iothub-%[1]d" + location = "%[2]s" +} + +resource "azurerm_storage_account" "test" { + name = "acc%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_container" "test" { + name = "acctestcont" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource "azurerm_iothub" "test" { + name = "acctestIoTHub-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "B1" + capacity = "1" + } + + tags = { + purpose = "testing" + } +} + +resource "azurerm_iothub_endpoint_storage_container" "test" { + resource_group_name = azurerm_resource_group.test.name + iothub_name = azurerm_iothub.test.name + name = "acctest" + + container_name = "acctestcont" + connection_string = azurerm_storage_account.test.primary_blob_connection_string + + file_name_format = "{iothub}/{partition}_{YYYY}_{MM}_{DD}_{HH}_{mm}" + batch_frequency_in_seconds = 60 + max_chunk_size_in_bytes = 10485760 + encoding = "JSON" +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (r IotHubEndpointStorageContainerResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_iothub_endpoint_storage_container" "import" { + resource_group_name = azurerm_resource_group.test.name + iothub_name = azurerm_iothub.test.name + name = "acctest" + + container_name = "acctestcont" + connection_string = azurerm_storage_account.test.primary_blob_connection_string + + file_name_format = "{iothub}/{partition}_{YYYY}_{MM}_{DD}_{HH}_{mm}" + batch_frequency_in_seconds = 60 + max_chunk_size_in_bytes = 10485760 + encoding = "JSON" +} +`, r.basic(data)) +} + +func (t IotHubEndpointStorageContainerResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + iothubName := id.Path["IotHubs"] + endpointName := id.Path["Endpoints"] + + iothub, err := clients.IoTHub.ResourceClient.Get(ctx, resourceGroup, iothubName) + if err != nil || iothub.Properties == nil || iothub.Properties.Routing == nil || iothub.Properties.Routing.Endpoints == nil { + return nil, fmt.Errorf("reading IotHuB Endpoint Storage Container (%s): %+v", id, err) + } + + if endpoints := iothub.Properties.Routing.Endpoints.StorageContainers; endpoints != nil { + for _, endpoint := range *endpoints { + if existingEndpointName := endpoint.Name; existingEndpointName != nil { + if strings.EqualFold(*existingEndpointName, endpointName) { + return utils.Bool(true), nil + } + } + } + } + + return utils.Bool(false), nil +} diff --git a/azurerm/internal/services/iothub/iothub_fallback_route_resource.go b/azurerm/internal/services/iothub/iothub_fallback_route_resource.go index d72cbb956915..08560a32adf0 100644 --- a/azurerm/internal/services/iothub/iothub_fallback_route_resource.go +++ b/azurerm/internal/services/iothub/iothub_fallback_route_resource.go @@ -14,12 +14,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmIotHubFallbackRoute() *schema.Resource { +func resourceIotHubFallbackRoute() *schema.Resource { return &schema.Resource{ - Create: resourceArmIotHubFallbackRouteCreateUpdate, - Read: 
resourceArmIotHubFallbackRouteRead, - Update: resourceArmIotHubFallbackRouteCreateUpdate, - Delete: resourceArmIotHubFallbackRouteDelete, + Create: resourceIotHubFallbackRouteCreateUpdate, + Read: resourceIotHubFallbackRouteRead, + Update: resourceIotHubFallbackRouteCreateUpdate, + Delete: resourceIotHubFallbackRouteDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -68,7 +68,7 @@ func resourceArmIotHubFallbackRoute() *schema.Resource { } } -func resourceArmIotHubFallbackRouteCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubFallbackRouteCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.ResourceClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -116,10 +116,10 @@ func resourceArmIotHubFallbackRouteCreateUpdate(d *schema.ResourceData, meta int resourceId := fmt.Sprintf("%s/FallbackRoute/defined", *iothub.ID) d.SetId(resourceId) - return resourceArmIotHubFallbackRouteRead(d, meta) + return resourceIotHubFallbackRouteRead(d, meta) } -func resourceArmIotHubFallbackRouteRead(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubFallbackRouteRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.ResourceClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -153,13 +153,12 @@ func resourceArmIotHubFallbackRouteRead(d *schema.ResourceData, meta interface{} return nil } -func resourceArmIotHubFallbackRouteDelete(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubFallbackRouteDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.ResourceClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() parsedIothubRouteId, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { return err } diff --git a/azurerm/internal/services/iothub/iothub_fallback_route_resource_test.go b/azurerm/internal/services/iothub/iothub_fallback_route_resource_test.go new file mode 100644 index 000000000000..2815d0d1f350 --- /dev/null +++ b/azurerm/internal/services/iothub/iothub_fallback_route_resource_test.go @@ -0,0 +1,202 @@ +package iothub_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type IotHubFallbackRouteResource struct { +} + +// NOTE: this resource intentionally doesn't support Requires Import +// since a fallback route is created by default + +func TestAccIotHubFallbackRoute_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub_fallback_route", "test") + r := IotHubFallbackRouteResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIotHubFallbackRoute_update(t *testing.T) { + data := 
acceptance.BuildTestData(t, "azurerm_iothub_fallback_route", "test") + r := IotHubFallbackRouteResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t IotHubFallbackRouteResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + iothubName := id.Path["IotHubs"] + + resp, err := clients.IoTHub.ResourceClient.Get(ctx, iothubName, resourceGroup) + if err != nil || resp.Properties == nil || resp.Properties.Routing == nil || resp.Properties.Routing.FallbackRoute == nil { + return nil, fmt.Errorf("reading IotHuB Route (%s): %+v", id, err) + } + + return utils.Bool(resp.Properties.Routing.FallbackRoute.Name != nil), nil +} + +func (IotHubFallbackRouteResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-iothub-%[1]d" + location = "%[2]s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestsa%[3]s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_container" "test" { + name = "test-%[1]d" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource "azurerm_iothub" "test" { + name = "acctestIoTHub-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "S1" + capacity = "1" + } + + tags = { + purpose = "testing" + } +} + +resource "azurerm_iothub_endpoint_storage_container" "test" { + resource_group_name = azurerm_resource_group.test.name + iothub_name = azurerm_iothub.test.name + name = "acctest" + + connection_string = azurerm_storage_account.test.primary_blob_connection_string + batch_frequency_in_seconds = 60 + max_chunk_size_in_bytes = 10485760 + container_name = azurerm_storage_container.test.name + encoding = "Avro" + file_name_format = "{iothub}/{partition}_{YYYY}_{MM}_{DD}_{HH}_{mm}" +} + +resource "azurerm_iothub_fallback_route" "test" { + resource_group_name = azurerm_resource_group.test.name + iothub_name = azurerm_iothub.test.name + + condition = "true" + endpoint_names = [azurerm_iothub_endpoint_storage_container.test.name] + enabled = true +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (IotHubFallbackRouteResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-iothub-%[1]d" + location = "%[2]s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestsa%[3]s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_container" "test" { + name = "test-%[1]d" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource 
"azurerm_iothub" "test" { + name = "acctestIoTHub-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "S1" + capacity = "1" + } + + tags = { + purpose = "testing" + } +} + +resource "azurerm_iothub_endpoint_storage_container" "test" { + resource_group_name = azurerm_resource_group.test.name + iothub_name = azurerm_iothub.test.name + name = "acctest" + + connection_string = azurerm_storage_account.test.primary_blob_connection_string + batch_frequency_in_seconds = 60 + max_chunk_size_in_bytes = 10485760 + container_name = azurerm_storage_container.test.name + encoding = "Avro" + file_name_format = "{iothub}/{partition}_{YYYY}_{MM}_{DD}_{HH}_{mm}" +} + +resource "azurerm_iothub_fallback_route" "test" { + resource_group_name = azurerm_resource_group.test.name + iothub_name = azurerm_iothub.test.name + + condition = "true" + endpoint_names = [azurerm_iothub_endpoint_storage_container.test.name] + enabled = false +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} diff --git a/azurerm/internal/services/iothub/iothub_resource.go b/azurerm/internal/services/iothub/iothub_resource.go index 48d992a3daa8..445c016ad7b6 100644 --- a/azurerm/internal/services/iothub/iothub_resource.go +++ b/azurerm/internal/services/iothub/iothub_resource.go @@ -53,12 +53,12 @@ func supressWhenAll(fs ...schema.SchemaDiffSuppressFunc) schema.SchemaDiffSuppre } } -func resourceArmIotHub() *schema.Resource { +func resourceIotHub() *schema.Resource { return &schema.Resource{ - Create: resourceArmIotHubCreateUpdate, - Read: resourceArmIotHubRead, - Update: resourceArmIotHubCreateUpdate, - Delete: resourceArmIotHubDelete, + Create: resourceIotHubCreateUpdate, + Read: resourceIotHubRead, + Update: resourceIotHubCreateUpdate, + Delete: resourceIotHubDelete, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { _, err := parse.IotHubID(id) @@ -417,6 +417,15 @@ func resourceArmIotHub() *schema.Resource { }, }, + "min_tls_version": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.StringInSlice([]string{ + "1.2", + }, false), + }, + "public_network_access_enabled": { Type: schema.TypeBool, Optional: true, @@ -455,7 +464,7 @@ func resourceArmIotHub() *schema.Resource { } } -func resourceArmIotHubCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.ResourceClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -552,6 +561,10 @@ func resourceArmIotHubCreateUpdate(d *schema.ResourceData, meta interface{}) err } } + if v, ok := d.GetOk("min_tls_version"); ok { + props.Properties.MinTLSVersion = utils.String(v.(string)) + } + future, err := client.CreateOrUpdate(ctx, resourceGroup, name, props, "") if err != nil { return fmt.Errorf("Error creating/updating IotHub %q (Resource Group %q): %+v", name, resourceGroup, err) @@ -568,10 +581,10 @@ func resourceArmIotHubCreateUpdate(d *schema.ResourceData, meta interface{}) err d.SetId(*resp.ID) - return resourceArmIotHubRead(d, meta) + return resourceIotHubRead(d, meta) } -func resourceArmIotHubRead(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.ResourceClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer 
cancel() @@ -651,6 +664,8 @@ func resourceArmIotHubRead(d *schema.ResourceData, meta interface{}) error { if enabled := properties.PublicNetworkAccess; enabled != "" { d.Set("public_network_access_enabled", enabled == devices.Enabled) } + + d.Set("min_tls_version", properties.MinTLSVersion) } d.Set("name", id.Name) @@ -666,7 +681,7 @@ func resourceArmIotHubRead(d *schema.ResourceData, meta interface{}) error { return tags.FlattenAndSet(d, hub.Tags) } -func resourceArmIotHubDelete(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubDelete(d *schema.ResourceData, meta interface{}) error { id, err := parse.IotHubID(d.Id()) if err != nil { return err @@ -1139,6 +1154,7 @@ func validateIoTHubFileNameFormat(v interface{}, k string) (warnings []string, e return warnings, errors } + func expandIPFilterRules(d *schema.ResourceData) *[]devices.IPFilterRule { ipFilterRuleList := d.Get("ip_filter_rule").(*schema.Set).List() if len(ipFilterRuleList) == 0 { diff --git a/azurerm/internal/services/iothub/iothub_resource_test.go b/azurerm/internal/services/iothub/iothub_resource_test.go new file mode 100644 index 000000000000..a29fccc3b6d3 --- /dev/null +++ b/azurerm/internal/services/iothub/iothub_resource_test.go @@ -0,0 +1,783 @@ +package iothub_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/iothub/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type IotHubResource struct { +} + +func TestAccIotHub_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub", "test") + r := IotHubResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIotHub_ipFilterRules(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub", "test") + r := IotHubResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.ipFilterRules(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIotHub_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub", "test") + r := IotHubResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_iothub"), + }, + }) +} + +func TestAccIotHub_standard(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub", "test") + r := IotHubResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.standard(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIotHub_customRoutes(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub", "test") + r := IotHubResource{} + + 
data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.customRoutes(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("endpoint.#").HasValue("2"), + check.That(data.ResourceName).Key("endpoint.0.type").HasValue("AzureIotHub.StorageContainer"), + check.That(data.ResourceName).Key("endpoint.1.type").HasValue("AzureIotHub.EventHub"), + check.That(data.ResourceName).Key("route.#").HasValue("2"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIotHub_removeEndpointsAndRoutes(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub", "test") + r := IotHubResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.customRoutes(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.removeEndpointsAndRoutes(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIotHub_fileUpload(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub", "test") + r := IotHubResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.fileUpload(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("file_upload.#").HasValue("1"), + check.That(data.ResourceName).Key("file_upload.0.lock_duration").HasValue("PT5M"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIotHub_withDifferentEndpointResourceGroup(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub", "test") + r := IotHubResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withDifferentEndpointResourceGroup(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIotHub_fallbackRoute(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub", "test") + r := IotHubResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.fallbackRoute(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("fallback_route.0.source").HasValue("DeviceMessages"), + check.That(data.ResourceName).Key("fallback_route.0.endpoint_names.#").HasValue("1"), + check.That(data.ResourceName).Key("fallback_route.0.enabled").HasValue("true"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIotHub_publicAccess(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub", "test") + r := IotHubResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.publicAccessEnabled(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.publicAccessDisabled(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIotHub_minTLSVersion(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub", "test") + r := IotHubResource{} + + 
data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.minTLSVersion(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t IotHubResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.IotHubID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.IoTHub.ResourceClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("reading IotHuB (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (IotHubResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-iothub-%d" + location = "%s" +} + +resource "azurerm_iothub" "test" { + name = "acctestIoTHub-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "B1" + capacity = "1" + } + + tags = { + purpose = "testing" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r IotHubResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_iothub" "import" { + name = azurerm_iothub.test.name + resource_group_name = azurerm_iothub.test.resource_group_name + location = azurerm_iothub.test.location + + sku { + name = "B1" + capacity = "1" + } + + tags = { + purpose = "testing" + } +} +`, r.basic(data)) +} + +func (IotHubResource) standard(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_iothub" "test" { + name = "acctestIoTHub-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "S1" + capacity = "1" + } + + tags = { + purpose = "testing" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (IotHubResource) ipFilterRules(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_iothub" "test" { + name = "acctestIoTHub-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "S1" + capacity = "1" + } + + ip_filter_rule { + name = "test" + ip_mask = "10.0.0.0/31" + action = "Accept" + } + + tags = { + purpose = "testing" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (IotHubResource) customRoutes(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestsa%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_container" "test" { + name = "test" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource "azurerm_eventhub_namespace" "test" { + resource_group_name = azurerm_resource_group.test.name + location = 
azurerm_resource_group.test.location + name = "acctest-%d" + sku = "Basic" +} + +resource "azurerm_eventhub" "test" { + name = "acctest" + resource_group_name = azurerm_resource_group.test.name + namespace_name = azurerm_eventhub_namespace.test.name + partition_count = 2 + message_retention = 1 +} + +resource "azurerm_eventhub_authorization_rule" "test" { + resource_group_name = azurerm_resource_group.test.name + namespace_name = azurerm_eventhub_namespace.test.name + eventhub_name = azurerm_eventhub.test.name + name = "acctest" + send = true +} + +resource "azurerm_iothub" "test" { + name = "acctestIoTHub-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "S1" + capacity = "1" + } + + event_hub_retention_in_days = 7 + event_hub_partition_count = 77 + + endpoint { + type = "AzureIotHub.StorageContainer" + connection_string = azurerm_storage_account.test.primary_blob_connection_string + name = "export" + batch_frequency_in_seconds = 60 + max_chunk_size_in_bytes = 10485760 + container_name = azurerm_storage_container.test.name + encoding = "Avro" + file_name_format = "{iothub}/{partition}_{YYYY}_{MM}_{DD}_{HH}_{mm}" + resource_group_name = azurerm_resource_group.test.name + } + + endpoint { + type = "AzureIotHub.EventHub" + connection_string = azurerm_eventhub_authorization_rule.test.primary_connection_string + name = "export2" + resource_group_name = azurerm_resource_group.test.name + } + + route { + name = "export" + source = "DeviceMessages" + condition = "true" + endpoint_names = ["export"] + enabled = true + } + + route { + name = "export2" + source = "DeviceMessages" + condition = "true" + endpoint_names = ["export2"] + enabled = true + } + + tags = { + purpose = "testing" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger) +} + +func (IotHubResource) removeEndpointsAndRoutes(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestsa%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_container" "test" { + name = "test" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource "azurerm_eventhub_namespace" "test" { + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + name = "acctest-%d" + sku = "Basic" +} + +resource "azurerm_eventhub" "test" { + name = "acctest" + resource_group_name = azurerm_resource_group.test.name + namespace_name = azurerm_eventhub_namespace.test.name + partition_count = 2 + message_retention = 1 +} + +resource "azurerm_eventhub_authorization_rule" "test" { + resource_group_name = azurerm_resource_group.test.name + namespace_name = azurerm_eventhub_namespace.test.name + eventhub_name = azurerm_eventhub.test.name + name = "acctest" + send = true +} + +resource "azurerm_iothub" "test" { + name = "acctestIoTHub-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "S1" + capacity = "1" + } + + event_hub_retention_in_days = 7 + event_hub_partition_count = 77 + + endpoint = [] + + route = [] + + tags = { + 
purpose = "testing" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger) +} + +func (IotHubResource) fallbackRoute(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_iothub" "test" { + name = "acctestIoTHub-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "S1" + capacity = "1" + } + + fallback_route { + source = "DeviceMessages" + endpoint_names = ["events"] + enabled = true + } + + tags = { + purpose = "testing" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (IotHubResource) fileUpload(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestsa%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_container" "test" { + name = "test" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource "azurerm_iothub" "test" { + name = "acctestIoTHub-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "S1" + capacity = "1" + } + + file_upload { + connection_string = azurerm_storage_account.test.primary_blob_connection_string + container_name = azurerm_storage_container.test.name + notifications = true + max_delivery_count = 12 + sas_ttl = "PT2H" + default_ttl = "PT3H" + lock_duration = "PT5M" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) +} + +func (IotHubResource) publicAccessEnabled(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-iothub-%d" + location = "%s" +} + +resource "azurerm_iothub" "test" { + name = "acctestIoTHub-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "B1" + capacity = "1" + } + + public_network_access_enabled = true + + tags = { + purpose = "testing" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (IotHubResource) publicAccessDisabled(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-iothub-%d" + location = "%s" +} + +resource "azurerm_iothub" "test" { + name = "acctestIoTHub-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "B1" + capacity = "1" + } + + public_network_access_enabled = false + + tags = { + purpose = "testing" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (IotHubResource) withDifferentEndpointResourceGroup(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-iothub-%d" + location = "%s" +} + +resource "azurerm_resource_group" "test2" { + name = 
"acctestRG2-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestsa%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_container" "test" { + name = "test" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource "azurerm_eventhub_namespace" "test" { + resource_group_name = azurerm_resource_group.test2.name + location = azurerm_resource_group.test2.location + name = "acctest-%d" + sku = "Basic" +} + +resource "azurerm_eventhub" "test" { + name = "acctest" + resource_group_name = azurerm_resource_group.test2.name + namespace_name = azurerm_eventhub_namespace.test.name + partition_count = 2 + message_retention = 1 +} + +resource "azurerm_eventhub_authorization_rule" "test" { + resource_group_name = azurerm_resource_group.test2.name + namespace_name = azurerm_eventhub_namespace.test.name + eventhub_name = azurerm_eventhub.test.name + name = "acctest" + send = true +} + +resource "azurerm_iothub" "test" { + name = "acctestIoTHub-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "S1" + capacity = "1" + } + + event_hub_retention_in_days = 7 + event_hub_partition_count = 77 + + endpoint { + type = "AzureIotHub.StorageContainer" + connection_string = azurerm_storage_account.test.primary_blob_connection_string + name = "export" + batch_frequency_in_seconds = 60 + max_chunk_size_in_bytes = 10485760 + container_name = azurerm_storage_container.test.name + encoding = "Avro" + file_name_format = "{iothub}/{partition}_{YYYY}_{MM}_{DD}_{HH}_{mm}" + resource_group_name = azurerm_resource_group.test.name + } + + endpoint { + type = "AzureIotHub.EventHub" + connection_string = azurerm_eventhub_authorization_rule.test.primary_connection_string + name = "export2" + resource_group_name = azurerm_resource_group.test2.name + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger) +} + +func (IotHubResource) minTLSVersion(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-iothub-%d" + location = "%s" +} + +resource "azurerm_iothub" "test" { + name = "acctestIoTHub-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + min_tls_version = "1.2" + + sku { + name = "B1" + capacity = "1" + } + + tags = { + purpose = "testing" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/iothub/iothub_route_resource.go b/azurerm/internal/services/iothub/iothub_route_resource.go index 577c85c0c753..2285e9564d75 100644 --- a/azurerm/internal/services/iothub/iothub_route_resource.go +++ b/azurerm/internal/services/iothub/iothub_route_resource.go @@ -18,12 +18,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmIotHubRoute() *schema.Resource { +func resourceIotHubRoute() *schema.Resource { return &schema.Resource{ - Create: resourceArmIotHubRouteCreateUpdate, - Read: resourceArmIotHubRouteRead, - Update: resourceArmIotHubRouteCreateUpdate, - Delete: resourceArmIotHubRouteDelete, + Create: resourceIotHubRouteCreateUpdate, + Read: 
resourceIotHubRouteRead, + Update: resourceIotHubRouteCreateUpdate, + Delete: resourceIotHubRouteDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -90,7 +90,7 @@ func resourceArmIotHubRoute() *schema.Resource { } } -func resourceArmIotHubRouteCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubRouteCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.ResourceClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -174,10 +174,10 @@ func resourceArmIotHubRouteCreateUpdate(d *schema.ResourceData, meta interface{} d.SetId(resourceId) - return resourceArmIotHubRouteRead(d, meta) + return resourceIotHubRouteRead(d, meta) } -func resourceArmIotHubRouteRead(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubRouteRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.ResourceClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -220,7 +220,7 @@ func resourceArmIotHubRouteRead(d *schema.ResourceData, meta interface{}) error return nil } -func resourceArmIotHubRouteDelete(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubRouteDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.ResourceClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/iothub/iothub_route_resource_test.go b/azurerm/internal/services/iothub/iothub_route_resource_test.go new file mode 100644 index 000000000000..7db920f6e875 --- /dev/null +++ b/azurerm/internal/services/iothub/iothub_route_resource_test.go @@ -0,0 +1,246 @@ +package iothub_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type IotHubRouteResource struct { +} + +func TestAccIotHubRoute_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub_route", "test") + r := IotHubRouteResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIotHubRoute_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub_route", "test") + r := IotHubRouteResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_iothub_route"), + }, + }) +} + +func TestAccIotHubRoute_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub_route", "test") + r := IotHubRouteResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: 
resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		data.ImportStep(),
+		{
+			Config: r.update(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		data.ImportStep(),
+	})
+}
+
+func (t IotHubRouteResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) {
+	id, err := azure.ParseAzureResourceID(state.ID)
+	if err != nil {
+		return nil, err
+	}
+	resourceGroup := id.ResourceGroup
+	iothubName := id.Path["IotHubs"]
+
+	resp, err := clients.IoTHub.ResourceClient.Get(ctx, resourceGroup, iothubName)
+	if err != nil || resp.Properties == nil || resp.Properties.Routing == nil {
+		return nil, fmt.Errorf("reading IoTHub Route (%s): %+v", id, err)
+	}
+
+	if routes := resp.Properties.Routing.Routes; routes != nil {
+		for _, route := range *routes {
+			if route.Name != nil {
+				return utils.Bool(true), nil
+			}
+		}
+	}
+
+	return utils.Bool(false), nil
+}
+
+func (r IotHubRouteResource) requiresImport(data acceptance.TestData) string {
+	return fmt.Sprintf(`
+%s
+
+resource "azurerm_iothub_route" "import" {
+  resource_group_name = azurerm_resource_group.test.name
+  iothub_name         = azurerm_iothub.test.name
+  name                = "acctest"
+
+  source         = "DeviceMessages"
+  condition      = "true"
+  endpoint_names = [azurerm_iothub_endpoint_storage_container.test.name]
+  enabled        = true
+}
+`, r.basic(data))
+}
+
+func (IotHubRouteResource) basic(data acceptance.TestData) string {
+	return fmt.Sprintf(`
+provider "azurerm" {
+  features {}
+}
+
+resource "azurerm_resource_group" "test" {
+  name     = "acctestRG-iothub-%[1]d"
+  location = "%[2]s"
+}
+
+resource "azurerm_storage_account" "test" {
+  name                     = "acctestsa%[3]s"
+  resource_group_name      = 
azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_container" "test" { + name = "test%[1]d" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource "azurerm_iothub" "test" { + name = "acctestIoTHub%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "S1" + capacity = "1" + } + + tags = { + purpose = "testing" + } +} + +resource "azurerm_iothub_endpoint_storage_container" "test" { + resource_group_name = azurerm_resource_group.test.name + iothub_name = azurerm_iothub.test.name + name = "acctest" + + connection_string = azurerm_storage_account.test.primary_blob_connection_string + batch_frequency_in_seconds = 60 + max_chunk_size_in_bytes = 10485760 + container_name = azurerm_storage_container.test.name + encoding = "Avro" + file_name_format = "{iothub}/{partition}_{YYYY}_{MM}_{DD}_{HH}_{mm}" +} + +resource "azurerm_iothub_route" "test" { + resource_group_name = azurerm_resource_group.test.name + iothub_name = azurerm_iothub.test.name + name = "acctest" + + source = "DeviceLifecycleEvents" + condition = "true" + endpoint_names = [azurerm_iothub_endpoint_storage_container.test.name] + enabled = false +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} diff --git a/azurerm/internal/services/iothub/iothub_shared_access_policy_data_source.go b/azurerm/internal/services/iothub/iothub_shared_access_policy_data_source.go index 5482e485016e..d7c693628a0f 100644 --- a/azurerm/internal/services/iothub/iothub_shared_access_policy_data_source.go +++ b/azurerm/internal/services/iothub/iothub_shared_access_policy_data_source.go @@ -14,9 +14,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmIotHubSharedAccessPolicy() *schema.Resource { +func dataSourceIotHubSharedAccessPolicy() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmIotHubSharedAccessPolicyRead, + Read: dataSourceIotHubSharedAccessPolicyRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -64,7 +64,8 @@ func dataSourceArmIotHubSharedAccessPolicy() *schema.Resource { }, } } -func dataSourceArmIotHubSharedAccessPolicyRead(d *schema.ResourceData, meta interface{}) error { + +func dataSourceIotHubSharedAccessPolicyRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.ResourceClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/iothub/iothub_shared_access_policy_data_source_test.go b/azurerm/internal/services/iothub/iothub_shared_access_policy_data_source_test.go new file mode 100644 index 000000000000..e493f620eb7c --- /dev/null +++ b/azurerm/internal/services/iothub/iothub_shared_access_policy_data_source_test.go @@ -0,0 +1,42 @@ +package iothub_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type IoTHubSharedAccessPolicyDataSource struct { +} + +func TestAccDataSourceIotHubSharedAccessPolicy_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_iothub_shared_access_policy", "test") + r 
:= IoTHubSharedAccessPolicyDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("primary_key").Exists(), + check.That(data.ResourceName).Key("primary_connection_string").Exists(), + check.That(data.ResourceName).Key("secondary_key").Exists(), + check.That(data.ResourceName).Key("secondary_connection_string").Exists(), + ), + }, + }) +} + +func (IoTHubSharedAccessPolicyDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_iothub_shared_access_policy" "test" { + name = azurerm_iothub_shared_access_policy.test.name + resource_group_name = azurerm_resource_group.test.name + iothub_name = azurerm_iothub.test.name +} +`, IoTHubSharedAccessPolicyResource{}.basic(data)) +} diff --git a/azurerm/internal/services/iothub/iothub_shared_access_policy_resource.go b/azurerm/internal/services/iothub/iothub_shared_access_policy_resource.go index ec98e5199158..8c56d632d07e 100644 --- a/azurerm/internal/services/iothub/iothub_shared_access_policy_resource.go +++ b/azurerm/internal/services/iothub/iothub_shared_access_policy_resource.go @@ -18,12 +18,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmIotHubSharedAccessPolicy() *schema.Resource { +func resourceIotHubSharedAccessPolicy() *schema.Resource { return &schema.Resource{ - Create: resourceArmIotHubSharedAccessPolicyCreateUpdate, - Read: resourceArmIotHubSharedAccessPolicyRead, - Update: resourceArmIotHubSharedAccessPolicyCreateUpdate, - Delete: resourceArmIotHubSharedAccessPolicyDelete, + Create: resourceIotHubSharedAccessPolicyCreateUpdate, + Read: resourceIotHubSharedAccessPolicyRead, + Update: resourceIotHubSharedAccessPolicyCreateUpdate, + Delete: resourceIotHubSharedAccessPolicyDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -125,7 +125,7 @@ func iothubSharedAccessPolicyCustomizeDiff(d *schema.ResourceDiff, _ interface{} return } -func resourceArmIotHubSharedAccessPolicyCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubSharedAccessPolicyCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.ResourceClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -202,16 +202,15 @@ func resourceArmIotHubSharedAccessPolicyCreateUpdate(d *schema.ResourceData, met d.SetId(resourceId) - return resourceArmIotHubSharedAccessPolicyRead(d, meta) + return resourceIotHubSharedAccessPolicyRead(d, meta) } -func resourceArmIotHubSharedAccessPolicyRead(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubSharedAccessPolicyRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.ResourceClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() parsedIothubSAPId, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { return err } @@ -258,13 +257,12 @@ func resourceArmIotHubSharedAccessPolicyRead(d *schema.ResourceData, meta interf return nil } -func resourceArmIotHubSharedAccessPolicyDelete(d *schema.ResourceData, meta interface{}) error { +func resourceIotHubSharedAccessPolicyDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).IoTHub.ResourceClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() parsedIothubSAPId, err := 
azure.ParseAzureResourceID(d.Id()) - if err != nil { return err } @@ -320,7 +318,7 @@ type accessRights struct { } func expandAccessRights(d *schema.ResourceData) string { - var possibleAccessRights = []struct { + possibleAccessRights := []struct { schema string right string }{ diff --git a/azurerm/internal/services/iothub/iothub_shared_access_policy_resource_test.go b/azurerm/internal/services/iothub/iothub_shared_access_policy_resource_test.go new file mode 100644 index 000000000000..2bcf93a27c44 --- /dev/null +++ b/azurerm/internal/services/iothub/iothub_shared_access_policy_resource_test.go @@ -0,0 +1,174 @@ +package iothub_test + +import ( + "context" + "fmt" + "regexp" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type IoTHubSharedAccessPolicyResource struct { +} + +func TestAccIotHubSharedAccessPolicy_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub_shared_access_policy", "test") + r := IoTHubSharedAccessPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("registry_read").HasValue("true"), + check.That(data.ResourceName).Key("registry_write").HasValue("true"), + check.That(data.ResourceName).Key("service_connect").HasValue("false"), + check.That(data.ResourceName).Key("device_connect").HasValue("false"), + check.That(data.ResourceName).Key("name").HasValue("acctest"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIotHubSharedAccessPolicy_writeWithoutRead(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub_shared_access_policy", "test") + r := IoTHubSharedAccessPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.writeWithoutRead(data), + ExpectError: regexp.MustCompile("If `registry_write` is set to true, `registry_read` must also be set to true"), + }, + }) +} + +func TestAccIotHubSharedAccessPolicy_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iothub_shared_access_policy", "test") + r := IoTHubSharedAccessPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_iothub_shared_access_policy"), + }, + }) +} + +func (IoTHubSharedAccessPolicyResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_iothub" "test" { + name = "acctestIoTHub-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "B1" + capacity = "1" + } + + tags = { + purpose = "testing" + } +} + +resource "azurerm_iothub_shared_access_policy" "test" { + 
resource_group_name = azurerm_resource_group.test.name + iothub_name = azurerm_iothub.test.name + name = "acctest" + + registry_read = true + registry_write = true +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r IoTHubSharedAccessPolicyResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_iothub_shared_access_policy" "import" { + resource_group_name = azurerm_resource_group.test.name + iothub_name = azurerm_iothub.test.name + name = "acctest" + + registry_read = true + registry_write = true +} +`, r.basic(data)) +} + +func (IoTHubSharedAccessPolicyResource) writeWithoutRead(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_iothub" "test" { + name = "acctestIoTHub-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + sku { + name = "B1" + capacity = "1" + } + + tags = { + purpose = "testing" + } +} + +resource "azurerm_iothub_shared_access_policy" "test" { + resource_group_name = azurerm_resource_group.test.name + iothub_name = azurerm_iothub.test.name + name = "acctest" + + registry_write = true +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (t IoTHubSharedAccessPolicyResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + iothubName := id.Path["IotHubs"] + keyName := id.Path["IotHubKeys"] + + accessPolicy, err := clients.IoTHub.ResourceClient.GetKeysForKeyName(ctx, resourceGroup, iothubName, keyName) + if err != nil { + return nil, fmt.Errorf("loading IotHub Shared Access Policy %q: %+v", id, err) + } + + return utils.Bool(accessPolicy.PrimaryKey != nil), nil +} diff --git a/azurerm/internal/services/iothub/parse/iot_hub.go b/azurerm/internal/services/iothub/parse/iot_hub.go new file mode 100644 index 000000000000..d752fe287828 --- /dev/null +++ b/azurerm/internal/services/iothub/parse/iot_hub.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type IotHubId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewIotHubID(subscriptionId, resourceGroup, name string) IotHubId { + return IotHubId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id IotHubId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Iot Hub", segmentsStr) +} + +func (id IotHubId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Devices/IotHubs/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// IotHubID parses a IotHub ID into an IotHubId struct +func IotHubID(input string) (*IotHubId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := IotHubId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if 
resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("IotHubs"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/iothub/parse/iot_hub_test.go b/azurerm/internal/services/iothub/parse/iot_hub_test.go new file mode 100644 index 000000000000..2d43b44df677 --- /dev/null +++ b/azurerm/internal/services/iothub/parse/iot_hub_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = IotHubId{} + +func TestIotHubIDFormatter(t *testing.T) { + actual := NewIotHubID("12345678-1234-9876-4563-123456789012", "resGroup1", "hub1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Devices/IotHubs/hub1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestIotHubID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *IotHubId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Devices/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Devices/IotHubs/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Devices/IotHubs/hub1", + Expected: &IotHubId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "hub1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DEVICES/IOTHUBS/HUB1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := IotHubID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/iothub/parse/iothub.go 
b/azurerm/internal/services/iothub/parse/iothub.go deleted file mode 100644 index e8d0bed6e10b..000000000000 --- a/azurerm/internal/services/iothub/parse/iothub.go +++ /dev/null @@ -1,31 +0,0 @@ -package parse - -import ( - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type IotHubId struct { - Name string - ResourceGroup string -} - -func IotHubID(input string) (*IotHubId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, err - } - - iothub := IotHubId{ - ResourceGroup: id.ResourceGroup, - } - - if iothub.Name, err = id.PopSegment("IotHubs"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &iothub, nil -} diff --git a/azurerm/internal/services/iothub/parse/iothub_test.go b/azurerm/internal/services/iothub/parse/iothub_test.go deleted file mode 100644 index 0a687af40628..000000000000 --- a/azurerm/internal/services/iothub/parse/iothub_test.go +++ /dev/null @@ -1,73 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestIotHubID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *IotHubId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing IoT Hub Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Devices/IotHubs/", - Expected: nil, - }, - { - Name: "IoT Hub ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Devices/IotHubs/hub1", - Expected: &IotHubId{ - Name: "hub1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Devices/iotHubs/hub1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := IotHubID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/iothub/registration.go b/azurerm/internal/services/iothub/registration.go index b01c1d4d7a93..035ffb60f51f 100644 --- a/azurerm/internal/services/iothub/registration.go +++ b/azurerm/internal/services/iothub/registration.go @@ -21,25 +21,26 @@ func (r Registration) WebsiteCategories() []string { // SupportedDataSources returns the supported Data Sources supported by this Service func (r Registration) SupportedDataSources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_iothub_dps": dataSourceArmIotHubDPS(), + "azurerm_iothub_dps": dataSourceIotHubDPS(), "azurerm_iothub_dps_shared_access_policy": dataSourceIotHubDPSSharedAccessPolicy(), - "azurerm_iothub_shared_access_policy": 
dataSourceArmIotHubSharedAccessPolicy(), + "azurerm_iothub_shared_access_policy": dataSourceIotHubSharedAccessPolicy(), } } // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_iothub_dps": resourceArmIotHubDPS(), - "azurerm_iothub_dps_certificate": resourceArmIotHubDPSCertificate(), - "azurerm_iothub_dps_shared_access_policy": resourceArmIotHubDPSSharedAccessPolicy(), - "azurerm_iothub_consumer_group": resourceArmIotHubConsumerGroup(), - "azurerm_iothub": resourceArmIotHub(), - "azurerm_iothub_fallback_route": resourceArmIotHubFallbackRoute(), - "azurerm_iothub_route": resourceArmIotHubRoute(), - "azurerm_iothub_endpoint_eventhub": resourceArmIotHubEndpointEventHub(), - "azurerm_iothub_endpoint_servicebus_queue": resourceArmIotHubEndpointServiceBusQueue(), - "azurerm_iothub_endpoint_servicebus_topic": resourceArmIotHubEndpointServiceBusTopic(), - "azurerm_iothub_endpoint_storage_container": resourceArmIotHubEndpointStorageContainer(), - "azurerm_iothub_shared_access_policy": resourceArmIotHubSharedAccessPolicy()} + "azurerm_iothub_dps": resourceIotHubDPS(), + "azurerm_iothub_dps_certificate": resourceIotHubDPSCertificate(), + "azurerm_iothub_dps_shared_access_policy": resourceIotHubDPSSharedAccessPolicy(), + "azurerm_iothub_consumer_group": resourceIotHubConsumerGroup(), + "azurerm_iothub": resourceIotHub(), + "azurerm_iothub_fallback_route": resourceIotHubFallbackRoute(), + "azurerm_iothub_route": resourceIotHubRoute(), + "azurerm_iothub_endpoint_eventhub": resourceIotHubEndpointEventHub(), + "azurerm_iothub_endpoint_servicebus_queue": resourceIotHubEndpointServiceBusQueue(), + "azurerm_iothub_endpoint_servicebus_topic": resourceIotHubEndpointServiceBusTopic(), + "azurerm_iothub_endpoint_storage_container": resourceIotHubEndpointStorageContainer(), + "azurerm_iothub_shared_access_policy": resourceIotHubSharedAccessPolicy(), + } } diff --git a/azurerm/internal/services/iothub/resourceids.go b/azurerm/internal/services/iothub/resourceids.go new file mode 100644 index 000000000000..2d9fe48c78cd --- /dev/null +++ b/azurerm/internal/services/iothub/resourceids.go @@ -0,0 +1,3 @@ +package iothub + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=IotHub -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Devices/IotHubs/hub1 diff --git a/azurerm/internal/services/iothub/tests/testdata/application_gateway_test.cer b/azurerm/internal/services/iothub/testdata/application_gateway_test.cer similarity index 100% rename from azurerm/internal/services/iothub/tests/testdata/application_gateway_test.cer rename to azurerm/internal/services/iothub/testdata/application_gateway_test.cer diff --git a/azurerm/internal/services/iothub/tests/testdata/batch_certificate.cer b/azurerm/internal/services/iothub/testdata/batch_certificate.cer similarity index 100% rename from azurerm/internal/services/iothub/tests/testdata/batch_certificate.cer rename to azurerm/internal/services/iothub/testdata/batch_certificate.cer diff --git a/azurerm/internal/services/iothub/tests/iothub_consumer_group_resource_test.go b/azurerm/internal/services/iothub/tests/iothub_consumer_group_resource_test.go deleted file mode 100644 index d8b123457560..000000000000 --- a/azurerm/internal/services/iothub/tests/iothub_consumer_group_resource_test.go +++ /dev/null @@ -1,211 +0,0 @@ -package tests - -import ( - "fmt" - 
"net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMIotHubConsumerGroup_events(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_consumer_group", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubConsumerGroupDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubConsumerGroup_basic(data, "events"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubConsumerGroupExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "eventhub_endpoint_name", "events"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMIotHubConsumerGroup_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_consumer_group", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubConsumerGroupDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubConsumerGroup_basic(data, "events"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubConsumerGroupExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "eventhub_endpoint_name", "events"), - ), - }, - { - Config: testAccAzureRMIotHubConsumerGroup_requiresImport(data, "events"), - ExpectError: acceptance.RequiresImportError("azurerm_iothub_consumer_group"), - }, - }, - }) -} - -func TestAccAzureRMIotHubConsumerGroup_operationsMonitoringEvents(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_consumer_group", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubConsumerGroupDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubConsumerGroup_basic(data, "operationsMonitoringEvents"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubConsumerGroupExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "eventhub_endpoint_name", "operationsMonitoringEvents"), - ), - }, data.ImportStep(), - }, - }) -} - -func TestAccAzureRMIotHubConsumerGroup_withSharedAccessPolicy(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_consumer_group", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubConsumerGroupDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubConsumerGroup_withSharedAccessPolicy(data, "events"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubConsumerGroupExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMIotHubConsumerGroupDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTHub.ResourceClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_iothub_consumer_group" { - continue - } - - 
name := rs.Primary.Attributes["name"] - iotHubName := rs.Primary.Attributes["iothub_name"] - endpointName := rs.Primary.Attributes["eventhub_endpoint_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.GetEventHubConsumerGroup(ctx, resourceGroup, iotHubName, endpointName, name) - - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Consumer Group %q still exists in Endpoint %q / IotHub %q / Resource Group %q", name, endpointName, iotHubName, resourceGroup) - } - } - return nil -} - -func testCheckAzureRMIotHubConsumerGroupExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTHub.ResourceClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - iotHubName := rs.Primary.Attributes["iothub_name"] - endpointName := rs.Primary.Attributes["eventhub_endpoint_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.GetEventHubConsumerGroup(ctx, resourceGroup, iotHubName, endpointName, name) - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Consumer Group %q (Endpoint %q / IotHub %q / Resource Group: %q) does not exist", name, endpointName, iotHubName, resourceGroup) - } - - return fmt.Errorf("Bad: Get on iothubResourceClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMIotHubConsumerGroup_basic(data acceptance.TestData, eventName string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_iothub" "test" { - name = "acctestIoTHub-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "B1" - capacity = "1" - } - - tags = { - purpose = "testing" - } -} - -resource "azurerm_iothub_consumer_group" "test" { - name = "test" - iothub_name = azurerm_iothub.test.name - eventhub_endpoint_name = "%s" - resource_group_name = azurerm_resource_group.test.name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, eventName) -} - -func testAccAzureRMIotHubConsumerGroup_requiresImport(data acceptance.TestData, eventName string) string { - template := testAccAzureRMIotHubConsumerGroup_basic(data, eventName) - return fmt.Sprintf(` -%s - -resource "azurerm_iothub_consumer_group" "import" { - name = azurerm_iothub_consumer_group.test.name - iothub_name = azurerm_iothub_consumer_group.test.iothub_name - eventhub_endpoint_name = azurerm_iothub_consumer_group.test.eventhub_endpoint_name - resource_group_name = azurerm_iothub_consumer_group.test.resource_group_name -} -`, template) -} - -func testAccAzureRMIotHubConsumerGroup_withSharedAccessPolicy(data acceptance.TestData, eventName string) string { - template := testAccAzureRMIotHubConsumerGroup_basic(data, eventName) - return fmt.Sprintf(` -%s - -resource "azurerm_iothub_shared_access_policy" "test" { - name = "acctestSharedAccessPolicy" - resource_group_name = azurerm_resource_group.test.name - iothub_name = azurerm_iothub.test.name - service_connect = true -} -`, template) -} diff --git a/azurerm/internal/services/iothub/tests/iothub_dps_certificate_resource_test.go 
b/azurerm/internal/services/iothub/tests/iothub_dps_certificate_resource_test.go deleted file mode 100644 index ceb782fc658c..000000000000 --- a/azurerm/internal/services/iothub/tests/iothub_dps_certificate_resource_test.go +++ /dev/null @@ -1,218 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMIotHubDPSCertificate_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_dps_certificate", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubDPSCertificateDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubDPSCertificate_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubDPSCertificateExists(data.ResourceName), - ), - }, - data.ImportStep("certificate_content"), - }, - }) -} - -func TestAccAzureRMIotHubDPSCertificate_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_dps_certificate", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubDPSCertificateDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubDPSCertificate_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubDPSCertificateExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMIotHubDPSCertificate_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_iothub_dps_certificate"), - }, - }, - }) -} - -func TestAccAzureRMIotHubDPSCertificate_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_dps_certificate", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubDPSCertificateDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubDPSCertificate_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubDPSCertificateExists(data.ResourceName), - ), - }, - data.ImportStep("certificate_content"), - { - Config: testAccAzureRMIotHubDPSCertificate_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubDPSCertificateExists(data.ResourceName), - ), - }, - data.ImportStep("certificate_content"), - }, - }) -} - -func testCheckAzureRMIotHubDPSCertificateDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTHub.DPSCertificateClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_iothub_dps_certificate" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - iotDPSName := rs.Primary.Attributes["iot_dps_name"] - - resp, err := client.Get(ctx, name, resourceGroup, iotDPSName, "") - - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("IoT Device Provisioning Service Certificate %s still exists in (device 
provisioning service %s / resource group %s)", name, iotDPSName, resourceGroup) - } - } - return nil -} - -func testCheckAzureRMIotHubDPSCertificateExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTHub.DPSCertificateClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - name := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - iotDPSName := rs.Primary.Attributes["iot_dps_name"] - - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for IoT Device Provisioning Service Certificate: %s", name) - } - - resp, err := client.Get(ctx, name, resourceGroup, iotDPSName, "") - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: IoT Device Provisioning Service Certificate %q (Device Provisioning Service %q / Resource Group %q) does not exist", name, iotDPSName, resourceGroup) - } - - return fmt.Errorf("Bad: Get on iothubDPSCertificateClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMIotHubDPSCertificate_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_iothub_dps" "test" { - name = "acctestIoTDPS-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "S1" - capacity = "1" - } -} - -resource "azurerm_iothub_dps_certificate" "test" { - name = "acctestIoTDPSCertificate-%d" - resource_group_name = azurerm_resource_group.test.name - iot_dps_name = azurerm_iothub_dps.test.name - - certificate_content = filebase64("testdata/batch_certificate.cer") -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMIotHubDPSCertificate_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMIotHubDPSCertificate_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_iothub_dps_certificate" "import" { - name = azurerm_iothub_dps_certificate.test.name - resource_group_name = azurerm_resource_group.test.name - iot_dps_name = azurerm_iothub_dps.test.name - - certificate_content = filebase64("testdata/batch_certificate.cer") -} -`, template) -} - -func testAccAzureRMIotHubDPSCertificate_update(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_iothub_dps" "test" { - name = "acctestIoTDPS-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "S1" - capacity = "1" - } - - tags = { - purpose = "testing" - } -} - -resource "azurerm_iothub_dps_certificate" "test" { - name = "acctestIoTDPSCertificate-%d" - resource_group_name = azurerm_resource_group.test.name - iot_dps_name = azurerm_iothub_dps.test.name - - certificate_content = filebase64("testdata/application_gateway_test.cer") -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/iothub/tests/iothub_dps_data_source_test.go 
b/azurerm/internal/services/iothub/tests/iothub_dps_data_source_test.go deleted file mode 100644 index 995e9896b82f..000000000000 --- a/azurerm/internal/services/iothub/tests/iothub_dps_data_source_test.go +++ /dev/null @@ -1,43 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMIotHubDPS_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_iothub_dps", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubDPSDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMIotHubDPS_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "allocation_policy"), - resource.TestCheckResourceAttrSet(data.ResourceName, "device_provisioning_host_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "id_scope"), - resource.TestCheckResourceAttrSet(data.ResourceName, "service_operations_host_name"), - ), - }, - }, - }) -} - -func testAccDataSourceAzureRMIotHubDPS_basic(data acceptance.TestData) string { - template := testAccAzureRMIotHubDPS_basic(data) - - return fmt.Sprintf(` -%s - -data "azurerm_iothub_dps" "test" { - name = azurerm_iothub_dps.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, template) -} diff --git a/azurerm/internal/services/iothub/tests/iothub_dps_resource_test.go b/azurerm/internal/services/iothub/tests/iothub_dps_resource_test.go deleted file mode 100644 index 8d5c7145a288..000000000000 --- a/azurerm/internal/services/iothub/tests/iothub_dps_resource_test.go +++ /dev/null @@ -1,298 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMIotHubDPS_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_dps", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubDPSDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubDPS_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubDPSExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "allocation_policy"), - resource.TestCheckResourceAttrSet(data.ResourceName, "device_provisioning_host_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "id_scope"), - resource.TestCheckResourceAttrSet(data.ResourceName, "service_operations_host_name"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMIotHubDPS_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_dps", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubDPSDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubDPS_basic(data), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMIotHubDPSExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMIotHubDPS_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_iothub_dps"), - }, - }, - }) -} - -func TestAccAzureRMIotHubDPS_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_dps", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubDPSDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubDPS_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubDPSExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMIotHubDPS_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubDPSExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMIotHubDPS_linkedHubs(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_dps", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubDPSDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubDPS_linkedHubs(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubDPSExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMIotHubDPS_linkedHubsUpdated(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubDPSExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMIotHubDPSDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTHub.DPSResourceClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_iothub_dps" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, name) - - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("IoT Device Provisioning Service %s still exists in resource group %s", name, resourceGroup) - } - } - return nil -} - -func testCheckAzureRMIotHubDPSExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTHub.DPSResourceClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - iotdpsName := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for IoT Device Provisioning Service: %s", iotdpsName) - } - - resp, err := client.Get(ctx, iotdpsName, resourceGroup) - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: IoT Device Provisioning Service %q (Resource Group %q) does not exist", iotdpsName, resourceGroup) - } - - return fmt.Errorf("Bad: Get on iothubDPSResourceClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMIotHubDPS_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" 
"test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_iothub_dps" "test" { - name = "acctestIoTDPS-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "S1" - capacity = "1" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMIotHubDPS_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMIotHubDPS_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_iothub_dps" "import" { - name = azurerm_iothub_dps.test.name - resource_group_name = azurerm_iothub_dps.test.resource_group_name - location = azurerm_iothub_dps.test.location - - sku { - name = "S1" - capacity = "1" - } -} -`, template) -} - -func testAccAzureRMIotHubDPS_update(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_iothub_dps" "test" { - name = "acctestIoTDPS-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "S1" - capacity = "1" - } - - tags = { - purpose = "testing" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMIotHubDPS_linkedHubs(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_iothub_dps" "test" { - name = "acctestIoTDPS-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "S1" - capacity = "1" - } - - linked_hub { - connection_string = "HostName=test.azure-devices.net;SharedAccessKeyName=iothubowner;SharedAccessKey=booo" - location = azurerm_resource_group.test.location - allocation_weight = 15 - apply_allocation_policy = true - } - - linked_hub { - connection_string = "HostName=test2.azure-devices.net;SharedAccessKeyName=iothubowner2;SharedAccessKey=key2" - location = azurerm_resource_group.test.location - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMIotHubDPS_linkedHubsUpdated(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_iothub_dps" "test" { - name = "acctestIoTDPS-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "S1" - capacity = "1" - } - - linked_hub { - connection_string = "HostName=test.azure-devices.net;SharedAccessKeyName=iothubowner;SharedAccessKey=booo" - location = azurerm_resource_group.test.location - allocation_weight = 150 - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/iothub/tests/iothub_dps_shared_access_policy_data_source_test.go b/azurerm/internal/services/iothub/tests/iothub_dps_shared_access_policy_data_source_test.go deleted file mode 100644 index 495196340555..000000000000 --- a/azurerm/internal/services/iothub/tests/iothub_dps_shared_access_policy_data_source_test.go +++ /dev/null @@ -1,68 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMIotHubDpsSharedAccessPolicy_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_iothub_dps_shared_access_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubDpsSharedAccessPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMIotHubDpsSharedAccessPolicy_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubDpsSharedAccessPolicyExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_connection_string"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_connection_string"), - ), - }, - }, - }) -} - -func testAccDataSourceAzureRMIotHubDpsSharedAccessPolicy_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_iothub_dps" "test" { - name = "acctestIoTDPS-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "S1" - capacity = "1" - } -} - -resource "azurerm_iothub_dps_shared_access_policy" "test" { - resource_group_name = azurerm_resource_group.test.name - iothub_dps_name = azurerm_iothub_dps.test.name - name = "acctest" - service_config = true -} - -data "azurerm_iothub_dps_shared_access_policy" "test" { - name = azurerm_iothub_dps_shared_access_policy.test.name - iothub_dps_name = azurerm_iothub_dps.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/iothub/tests/iothub_dps_shared_access_policy_resource_test.go b/azurerm/internal/services/iothub/tests/iothub_dps_shared_access_policy_resource_test.go deleted file mode 100644 index da169bc8f863..000000000000 --- a/azurerm/internal/services/iothub/tests/iothub_dps_shared_access_policy_resource_test.go +++ /dev/null @@ -1,272 +0,0 @@ -package tests - -import ( - "fmt" - "regexp" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMIotHubDpsSharedAccessPolicy_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_dps_shared_access_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubDpsSharedAccessPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubDpsSharedAccessPolicy_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubDpsSharedAccessPolicyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, 
"name", "acctest"), - resource.TestCheckResourceAttr(data.ResourceName, "enrollment_read", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "enrollment_write", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "registration_read", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "registration_write", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "service_config", "true"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_connection_string"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_connection_string"), - ), - }, - }, - }) -} - -func TestAccAzureRMIotHubDpsSharedAccessPolicy_writeWithoutRead(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_dps_shared_access_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubDpsSharedAccessPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubDpsSharedAccessPolicy_writeWithoutRead(data), - ExpectError: regexp.MustCompile("If `registration_write` is set to true, `registration_read` must also be set to true"), - }, - }, - }) -} - -func TestAccAzureRMIotHubDpsSharedAccessPolicy_enrollmentReadWithoutRegistration(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_dps_shared_access_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubDpsSharedAccessPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubDpsSharedAccessPolicy_enrollmentReadWithoutRegistration(data), - ExpectError: regexp.MustCompile("If `enrollment_read` is set to true, `registration_read` must also be set to true"), - }, - }, - }) -} - -func TestAccAzureRMIotHubDpsSharedAccessPolicy_enrollmentWriteWithoutOthers(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_dps_shared_access_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubDpsSharedAccessPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubDpsSharedAccessPolicy_enrollmentWriteWithoutOthers(data), - ExpectError: regexp.MustCompile("If `enrollment_write` is set to true, `enrollment_read`, `registration_read`, and `registration_write` must also be set to true"), - }, - }, - }) -} - -func testAccAzureRMIotHubDpsSharedAccessPolicy_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_iothub_dps" "test" { - name = "acctestIoTDPS-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "S1" - capacity = "1" - } -} - -resource "azurerm_iothub_dps_shared_access_policy" "test" { - resource_group_name = azurerm_resource_group.test.name - iothub_dps_name = azurerm_iothub_dps.test.name - name = "acctest" - service_config = true -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func 
testAccAzureRMIotHubDpsSharedAccessPolicy_writeWithoutRead(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_iothub_dps" "test" { - name = "acctestIoTDPS-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "S1" - capacity = "1" - } -} - -resource "azurerm_iothub_dps_shared_access_policy" "test" { - resource_group_name = azurerm_resource_group.test.name - iothub_dps_name = azurerm_iothub_dps.test.name - name = "acctest" - registration_write = true -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMIotHubDpsSharedAccessPolicy_enrollmentReadWithoutRegistration(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_iothub_dps" "test" { - name = "acctestIoTDPS-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "S1" - capacity = "1" - } -} - -resource "azurerm_iothub_dps_shared_access_policy" "test" { - resource_group_name = azurerm_resource_group.test.name - iothub_dps_name = azurerm_iothub_dps.test.name - name = "acctest" - enrollment_read = true -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMIotHubDpsSharedAccessPolicy_enrollmentWriteWithoutOthers(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_iothub_dps" "test" { - name = "acctestIoTDPS-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "S1" - capacity = "1" - } -} - -resource "azurerm_iothub_dps_shared_access_policy" "test" { - resource_group_name = azurerm_resource_group.test.name - iothub_dps_name = azurerm_iothub_dps.test.name - name = "acctest" - enrollment_write = true -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testCheckAzureRMIotHubDpsSharedAccessPolicyExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTHub.DPSResourceClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - _, err := azure.ParseAzureResourceID(rs.Primary.ID) - if err != nil { - return err - } - - keyName := rs.Primary.Attributes["name"] - iothubDpsName := rs.Primary.Attributes["iothub_dps_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - _, err = client.ListKeysForKeyName(ctx, iothubDpsName, keyName, resourceGroup) - if err != nil { - return fmt.Errorf("Bad: No shared access policy %s defined for IotHub DPS %s", keyName, iothubDpsName) - } - - return nil - } -} - -func testCheckAzureRMIotHubDpsSharedAccessPolicyDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTHub.DPSResourceClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != 
"azurerm_iothub_dps_shared_access_policy" { - continue - } - - keyName := rs.Primary.Attributes["name"] - iothubDpsName := rs.Primary.Attributes["iothub_dps_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, iothubDpsName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return fmt.Errorf("Bad: Get on iothubDPSResourceClient: %+v", err) - } - - for _, sharedAccessPolicy := range *resp.Properties.AuthorizationPolicies { - if *sharedAccessPolicy.KeyName == keyName { - return fmt.Errorf("Bad: Shared Access Policy %s still exists on IoTHub DPS %s", keyName, iothubDpsName) - } - } - } - return nil -} diff --git a/azurerm/internal/services/iothub/tests/iothub_endpoint_eventhub_resource_test.go b/azurerm/internal/services/iothub/tests/iothub_endpoint_eventhub_resource_test.go deleted file mode 100644 index 71f7b68edb27..000000000000 --- a/azurerm/internal/services/iothub/tests/iothub_endpoint_eventhub_resource_test.go +++ /dev/null @@ -1,217 +0,0 @@ -package tests - -import ( - "fmt" - "strings" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMIotHubEndpointEventHub_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_endpoint_eventhub", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testAccAzureRMIotHubEndpointStorageContainerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubEndpointEventHub_basic(data), - Check: resource.ComposeTestCheckFunc( - testAccAzureRMIotHubEndpointEventHubExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMIotHubEndpointEventHub_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_endpoint_eventhub", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testAccAzureRMIotHubEndpointEventHubDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubEndpointEventHub_basic(data), - Check: resource.ComposeTestCheckFunc( - testAccAzureRMIotHubEndpointEventHubExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMIotHubEndpointEventHub_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_iothub_endpoint_eventhub"), - }, - }, - }) -} - -func testAccAzureRMIotHubEndpointEventHub_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-iothub-%[1]d" - location = "%[2]s" -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Basic" -} - -resource "azurerm_eventhub" "test" { - name = "acctesteventhub-%[1]d" - namespace_name = azurerm_eventhub_namespace.test.name - resource_group_name = 
azurerm_resource_group.test.name - partition_count = 2 - message_retention = 1 -} - -resource "azurerm_eventhub_authorization_rule" "test" { - name = "acctest-%[1]d" - namespace_name = azurerm_eventhub_namespace.test.name - eventhub_name = azurerm_eventhub.test.name - resource_group_name = azurerm_resource_group.test.name - - listen = false - send = true - manage = false -} - -resource "azurerm_iothub" "test" { - name = "acctestIoTHub-%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "B1" - capacity = "1" - } - - tags = { - purpose = "testing" - } -} - -resource "azurerm_iothub_endpoint_eventhub" "test" { - resource_group_name = azurerm_resource_group.test.name - iothub_name = azurerm_iothub.test.name - name = "acctest" - - connection_string = azurerm_eventhub_authorization_rule.test.primary_connection_string -} -`, data.RandomInteger, data.Locations.Primary) -} - -func testAccAzureRMIotHubEndpointEventHub_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMIotHubEndpointEventHub_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_iothub_endpoint_eventhub" "import" { - resource_group_name = azurerm_resource_group.test.name - iothub_name = azurerm_iothub.test.name - name = "acctest" - - connection_string = azurerm_eventhub_authorization_rule.test.primary_connection_string -} -`, template) -} - -func testAccAzureRMIotHubEndpointEventHubExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTHub.ResourceClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - parsedIothubId, err := azure.ParseAzureResourceID(rs.Primary.ID) - if err != nil { - return err - } - iothubName := parsedIothubId.Path["IotHubs"] - endpointName := parsedIothubId.Path["Endpoints"] - resourceGroup := parsedIothubId.ResourceGroup - - iothub, err := client.Get(ctx, resourceGroup, iothubName) - if err != nil { - if utils.ResponseWasNotFound(iothub.Response) { - return fmt.Errorf("IotHub %q (Resource Group %q) was not found", iothubName, resourceGroup) - } - - return fmt.Errorf("Error loading IotHub %q (Resource Group %q): %+v", iothubName, resourceGroup, err) - } - - if iothub.Properties == nil || iothub.Properties.Routing == nil || iothub.Properties.Routing.Endpoints == nil { - return fmt.Errorf("Bad: No endpoint %s defined for IotHub %s", endpointName, iothubName) - } - endpoints := iothub.Properties.Routing.Endpoints.EventHubs - - if endpoints == nil { - return fmt.Errorf("Bad: No EventHub endpoint %s defined for IotHub %s", endpointName, iothubName) - } - - for _, endpoint := range *endpoints { - if strings.EqualFold(*endpoint.Name, endpointName) { - return nil - } - } - - return fmt.Errorf("Bad: No EventHub endpoint %s defined for IotHub %s", endpointName, iothubName) - } -} - -func testAccAzureRMIotHubEndpointEventHubDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTHub.ResourceClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_iothub_endpoint_eventhub" { - continue - } - endpointName := rs.Primary.Attributes["name"] - iothubName := rs.Primary.Attributes["iothub_name"] - resourceGroup := 
rs.Primary.Attributes["resource_group_name"] - - iothub, err := client.Get(ctx, resourceGroup, iothubName) - if err != nil { - if utils.ResponseWasNotFound(iothub.Response) { - return nil - } - - return fmt.Errorf("Bad: Get on iothubResourceClient: %+v", err) - } - if iothub.Properties == nil || iothub.Properties.Routing == nil || iothub.Properties.Routing.Endpoints == nil { - return nil - } - endpoints := iothub.Properties.Routing.Endpoints.EventHubs - - if endpoints == nil { - return nil - } - - for _, endpoint := range *endpoints { - if existingEndpointName := endpoint.Name; existingEndpointName != nil { - if strings.EqualFold(*existingEndpointName, endpointName) { - return fmt.Errorf("Bad: EventHub endpoint %s still exists on IoTHb %s", endpointName, iothubName) - } - } - } - } - return nil -} diff --git a/azurerm/internal/services/iothub/tests/iothub_endpoint_servicebus_queue_resource_test.go b/azurerm/internal/services/iothub/tests/iothub_endpoint_servicebus_queue_resource_test.go deleted file mode 100644 index 93ec708cfa77..000000000000 --- a/azurerm/internal/services/iothub/tests/iothub_endpoint_servicebus_queue_resource_test.go +++ /dev/null @@ -1,217 +0,0 @@ -package tests - -import ( - "fmt" - "strings" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMIotHubEndpointServiceBusQueue_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_endpoint_servicebus_queue", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testAccAzureRMIotHubEndpointStorageContainerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubEndpointServiceBusQueue_basic(data), - Check: resource.ComposeTestCheckFunc( - testAccAzureRMIotHubEndpointServiceBusQueueExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMIotHubEndpointServiceBusQueue_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_endpoint_servicebus_queue", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testAccAzureRMIotHubEndpointServiceBusQueueDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubEndpointServiceBusQueue_basic(data), - Check: resource.ComposeTestCheckFunc( - testAccAzureRMIotHubEndpointServiceBusQueueExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMIotHubEndpointServiceBusQueue_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_iothub_endpoint_servicebus_queue"), - }, - }, - }) -} - -func testAccAzureRMIotHubEndpointServiceBusQueue_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-iothub-%[1]d" - location = "%[2]s" -} - -resource "azurerm_servicebus_namespace" "test" { - name = "acctest-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = 
azurerm_resource_group.test.name - sku = "Standard" -} - -resource "azurerm_servicebus_queue" "test" { - name = "acctest-%[1]d" - resource_group_name = azurerm_resource_group.test.name - namespace_name = azurerm_servicebus_namespace.test.name - - enable_partitioning = true -} - -resource "azurerm_servicebus_queue_authorization_rule" "test" { - name = "acctest-%[1]d" - namespace_name = azurerm_servicebus_namespace.test.name - queue_name = azurerm_servicebus_queue.test.name - resource_group_name = azurerm_resource_group.test.name - - listen = false - send = true - manage = false -} - -resource "azurerm_iothub" "test" { - name = "acctestIoTHub-%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "B1" - capacity = "1" - } - - tags = { - purpose = "testing" - } -} - -resource "azurerm_iothub_endpoint_servicebus_queue" "test" { - resource_group_name = azurerm_resource_group.test.name - iothub_name = azurerm_iothub.test.name - name = "acctest" - - connection_string = azurerm_servicebus_queue_authorization_rule.test.primary_connection_string -} -`, data.RandomInteger, data.Locations.Primary) -} - -func testAccAzureRMIotHubEndpointServiceBusQueue_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMIotHubEndpointServiceBusQueue_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_iothub_endpoint_servicebus_queue" "import" { - resource_group_name = azurerm_resource_group.test.name - iothub_name = azurerm_iothub.test.name - name = "acctest" - - connection_string = azurerm_servicebus_queue_authorization_rule.test.primary_connection_string -} -`, template) -} - -func testAccAzureRMIotHubEndpointServiceBusQueueExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTHub.ResourceClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - parsedIothubId, err := azure.ParseAzureResourceID(rs.Primary.ID) - if err != nil { - return err - } - iothubName := parsedIothubId.Path["IotHubs"] - endpointName := parsedIothubId.Path["Endpoints"] - resourceGroup := parsedIothubId.ResourceGroup - - iothub, err := client.Get(ctx, resourceGroup, iothubName) - if err != nil { - if utils.ResponseWasNotFound(iothub.Response) { - return fmt.Errorf("IotHub %q (Resource Group %q) was not found", iothubName, resourceGroup) - } - - return fmt.Errorf("Error loading IotHub %q (Resource Group %q): %+v", iothubName, resourceGroup, err) - } - - if iothub.Properties == nil || iothub.Properties.Routing == nil || iothub.Properties.Routing.Endpoints == nil { - return fmt.Errorf("Bad: No endpoint %s defined for IotHub %s", endpointName, iothubName) - } - endpoints := iothub.Properties.Routing.Endpoints.ServiceBusQueues - - if endpoints == nil { - return fmt.Errorf("Bad: No ServiceBus Queue endpoint %s defined for IotHub %s", endpointName, iothubName) - } - - for _, endpoint := range *endpoints { - if strings.EqualFold(*endpoint.Name, endpointName) { - return nil - } - } - - return fmt.Errorf("Bad: No ServiceBus Queue endpoint %s defined for IotHub %s", endpointName, iothubName) - } -} - -func testAccAzureRMIotHubEndpointServiceBusQueueDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTHub.ResourceClient - ctx := 
acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_iothub_endpoint_servicebus_queue" { - continue - } - endpointName := rs.Primary.Attributes["name"] - iothubName := rs.Primary.Attributes["iothub_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - iothub, err := client.Get(ctx, resourceGroup, iothubName) - if err != nil { - if utils.ResponseWasNotFound(iothub.Response) { - return nil - } - - return fmt.Errorf("Bad: Get on iothubResourceClient: %+v", err) - } - if iothub.Properties == nil || iothub.Properties.Routing == nil || iothub.Properties.Routing.Endpoints == nil { - return nil - } - endpoints := iothub.Properties.Routing.Endpoints.ServiceBusQueues - - if endpoints == nil { - return nil - } - - for _, endpoint := range *endpoints { - if existingEndpointName := endpoint.Name; existingEndpointName != nil { - if strings.EqualFold(*existingEndpointName, endpointName) { - return fmt.Errorf("Bad: ServiceBus Queue endpoint %s still exists on IoTHb %s", endpointName, iothubName) - } - } - } - } - return nil -} diff --git a/azurerm/internal/services/iothub/tests/iothub_endpoint_servicebus_topic_resource_test.go b/azurerm/internal/services/iothub/tests/iothub_endpoint_servicebus_topic_resource_test.go deleted file mode 100644 index 7bd4e3831629..000000000000 --- a/azurerm/internal/services/iothub/tests/iothub_endpoint_servicebus_topic_resource_test.go +++ /dev/null @@ -1,216 +0,0 @@ -package tests - -import ( - "fmt" - "strings" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMIotHubEndpointServiceBusTopic_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_endpoint_servicebus_topic", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testAccAzureRMIotHubEndpointStorageContainerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubEndpointServiceBusTopic_basic(data), - Check: resource.ComposeTestCheckFunc( - testAccAzureRMIotHubEndpointServiceBusTopicExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMIotHubEndpointServiceBusTopic_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_endpoint_servicebus_topic", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testAccAzureRMIotHubEndpointServiceBusTopicDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubEndpointServiceBusTopic_basic(data), - Check: resource.ComposeTestCheckFunc( - testAccAzureRMIotHubEndpointServiceBusTopicExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMIotHubEndpointServiceBusTopic_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_iothub_endpoint_servicebus_topic"), - }, - }, - }) -} - -func testAccAzureRMIotHubEndpointServiceBusTopic_basic(data acceptance.TestData) string { - return 
fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-iothub-%[1]d" - location = "%[2]s" -} - -resource "azurerm_servicebus_namespace" "test" { - name = "acctest-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" -} - -resource "azurerm_servicebus_topic" "test" { - name = "acctestservicebustopic-%[1]d" - namespace_name = azurerm_servicebus_namespace.test.name - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_servicebus_topic_authorization_rule" "test" { - name = "acctest-%[1]d" - namespace_name = azurerm_servicebus_namespace.test.name - topic_name = azurerm_servicebus_topic.test.name - resource_group_name = azurerm_resource_group.test.name - - listen = false - send = true - manage = false -} - -resource "azurerm_iothub" "test" { - name = "acctestIoTHub-%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "B1" - capacity = "1" - } - - tags = { - purpose = "testing" - } -} - -resource "azurerm_iothub_endpoint_servicebus_topic" "test" { - resource_group_name = azurerm_resource_group.test.name - iothub_name = azurerm_iothub.test.name - name = "acctest" - - connection_string = azurerm_servicebus_topic_authorization_rule.test.primary_connection_string -} -`, data.RandomInteger, data.Locations.Primary) -} - -func testAccAzureRMIotHubEndpointServiceBusTopic_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMIotHubEndpointServiceBusTopic_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_iothub_endpoint_servicebus_topic" "import" { - resource_group_name = azurerm_resource_group.test.name - iothub_name = azurerm_iothub.test.name - name = "acctest" - - connection_string = azurerm_servicebus_topic_authorization_rule.test.primary_connection_string -} -`, template) -} - -func testAccAzureRMIotHubEndpointServiceBusTopicExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTHub.ResourceClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - parsedIothubId, err := azure.ParseAzureResourceID(rs.Primary.ID) - if err != nil { - return err - } - iothubName := parsedIothubId.Path["IotHubs"] - endpointName := parsedIothubId.Path["Endpoints"] - resourceGroup := parsedIothubId.ResourceGroup - - iothub, err := client.Get(ctx, resourceGroup, iothubName) - if err != nil { - if utils.ResponseWasNotFound(iothub.Response) { - return fmt.Errorf("IotHub %q (Resource Group %q) was not found", iothubName, resourceGroup) - } - - return fmt.Errorf("Error loading IotHub %q (Resource Group %q): %+v", iothubName, resourceGroup, err) - } - - if iothub.Properties == nil || iothub.Properties.Routing == nil || iothub.Properties.Routing.Endpoints == nil { - return fmt.Errorf("Bad: No endpoint %s defined for IotHub %s", endpointName, iothubName) - } - endpoints := iothub.Properties.Routing.Endpoints.ServiceBusTopics - - if endpoints == nil { - return fmt.Errorf("Bad: No ServiceBus Topic endpoint %s defined for IotHub %s", endpointName, iothubName) - } - - for _, endpoint := range *endpoints { - if existingEndpointName := endpoint.Name; existingEndpointName != nil { - if 
strings.EqualFold(*existingEndpointName, endpointName) { - return nil - } - } - } - return fmt.Errorf("Bad: No ServiceBus Topic endpoint %s defined for IotHub %s", endpointName, iothubName) - } -} - -func testAccAzureRMIotHubEndpointServiceBusTopicDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTHub.ResourceClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_iothub_endpoint_servicebus_topic" { - continue - } - - endpointName := rs.Primary.Attributes["name"] - iothubName := rs.Primary.Attributes["iothub_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - iothub, err := client.Get(ctx, resourceGroup, iothubName) - if err != nil { - if utils.ResponseWasNotFound(iothub.Response) { - return nil - } - - return fmt.Errorf("Bad: Get on iothubResourceClient: %+v", err) - } - if iothub.Properties == nil || iothub.Properties.Routing == nil || iothub.Properties.Routing.Endpoints == nil { - return nil - } - endpoints := iothub.Properties.Routing.Endpoints.ServiceBusTopics - - if endpoints == nil { - return nil - } - - for _, endpoint := range *endpoints { - if existingEndpointName := endpoint.Name; existingEndpointName != nil { - if strings.EqualFold(*existingEndpointName, endpointName) { - return fmt.Errorf("Bad: ServiceBus Topic endpoint %s still exists on IoTHb %s", endpointName, iothubName) - } - } - } - } - return nil -} diff --git a/azurerm/internal/services/iothub/tests/iothub_endpoint_storage_container_resource_test.go b/azurerm/internal/services/iothub/tests/iothub_endpoint_storage_container_resource_test.go deleted file mode 100644 index f3b24b657a36..000000000000 --- a/azurerm/internal/services/iothub/tests/iothub_endpoint_storage_container_resource_test.go +++ /dev/null @@ -1,223 +0,0 @@ -package tests - -import ( - "fmt" - "strings" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMIotHubEndpointStorageContainer_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_endpoint_storage_container", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testAccAzureRMIotHubEndpointStorageContainerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubEndpointStorageContainer_basic(data), - Check: resource.ComposeTestCheckFunc( - testAccAzureRMIotHubEndpointStorageContainerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "file_name_format", "{iothub}/{partition}_{YYYY}_{MM}_{DD}_{HH}_{mm}"), - resource.TestCheckResourceAttr(data.ResourceName, "batch_frequency_in_seconds", "60"), - resource.TestCheckResourceAttr(data.ResourceName, "max_chunk_size_in_bytes", "10485760"), - resource.TestCheckResourceAttr(data.ResourceName, "encoding", "JSON"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMIotHubEndpointStorageContainer_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, 
"azurerm_iothub_endpoint_storage_container", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testAccAzureRMIotHubEndpointStorageContainerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubEndpointStorageContainer_basic(data), - Check: resource.ComposeTestCheckFunc( - testAccAzureRMIotHubEndpointStorageContainerExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMIotHubEndpointStorageContainer_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_iothub_endpoint_storage_container"), - }, - }, - }) -} - -func testAccAzureRMIotHubEndpointStorageContainer_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-iothub-%[1]d" - location = "%[2]s" -} - -resource "azurerm_storage_account" "test" { - name = "acc%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_storage_container" "test" { - name = "acctestcont" - storage_account_name = azurerm_storage_account.test.name - container_access_type = "private" -} - -resource "azurerm_iothub" "test" { - name = "acctestIoTHub-%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "B1" - capacity = "1" - } - - tags = { - purpose = "testing" - } -} - -resource "azurerm_iothub_endpoint_storage_container" "test" { - resource_group_name = azurerm_resource_group.test.name - iothub_name = azurerm_iothub.test.name - name = "acctest" - - container_name = "acctestcont" - connection_string = azurerm_storage_account.test.primary_blob_connection_string - - file_name_format = "{iothub}/{partition}_{YYYY}_{MM}_{DD}_{HH}_{mm}" - batch_frequency_in_seconds = 60 - max_chunk_size_in_bytes = 10485760 - encoding = "JSON" -} -`, data.RandomInteger, data.Locations.Primary) -} - -func testAccAzureRMIotHubEndpointStorageContainer_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMIotHubEndpointStorageContainer_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_iothub_endpoint_storage_container" "import" { - resource_group_name = azurerm_resource_group.test.name - iothub_name = azurerm_iothub.test.name - name = "acctest" - - container_name = "acctestcont" - connection_string = azurerm_storage_account.test.primary_blob_connection_string - - file_name_format = "{iothub}/{partition}_{YYYY}_{MM}_{DD}_{HH}_{mm}" - batch_frequency_in_seconds = 60 - max_chunk_size_in_bytes = 10485760 - encoding = "JSON" -} -`, template) -} - -func testAccAzureRMIotHubEndpointStorageContainerExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTHub.ResourceClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - parsedIothubId, err := azure.ParseAzureResourceID(rs.Primary.ID) - if err != nil { - return err - } - - iothubName := parsedIothubId.Path["IotHubs"] - endpointName := parsedIothubId.Path["Endpoints"] - resourceGroup := parsedIothubId.ResourceGroup - iothub, err := client.Get(ctx, resourceGroup, iothubName) - if err 
!= nil { - if utils.ResponseWasNotFound(iothub.Response) { - return fmt.Errorf("IotHub %q (Resource Group %q) was not found", iothubName, resourceGroup) - } - - return fmt.Errorf("Error loading IotHub %q (Resource Group %q): %+v", iothubName, resourceGroup, err) - } - - if iothub.Properties == nil || iothub.Properties.Routing == nil || iothub.Properties.Routing.Endpoints == nil { - return fmt.Errorf("Bad: No endpoint %s defined for IotHub %s", endpointName, iothubName) - } - endpoints := iothub.Properties.Routing.Endpoints.StorageContainers - - if endpoints == nil { - return fmt.Errorf("Bad: No Storage Container endpoint %s defined for IotHub %s", endpointName, iothubName) - } - - for _, endpoint := range *endpoints { - if existingEndpointName := endpoint.Name; existingEndpointName != nil { - if strings.EqualFold(*existingEndpointName, endpointName) { - return nil - } - } - } - - return fmt.Errorf("Bad: No Storage Container endpoint %s defined for IotHub %s", endpointName, iothubName) - } -} - -func testAccAzureRMIotHubEndpointStorageContainerDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTHub.ResourceClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_iothub_endpoint_storage_container" { - continue - } - - endpointName := rs.Primary.Attributes["name"] - iothubName := rs.Primary.Attributes["iothub_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - iothub, err := client.Get(ctx, resourceGroup, iothubName) - if err != nil { - if utils.ResponseWasNotFound(iothub.Response) { - return nil - } - return fmt.Errorf("Bad: Get on iothubResourceClient: %+v", err) - } - if iothub.Properties == nil || iothub.Properties.Routing == nil || iothub.Properties.Routing.Endpoints == nil { - return nil - } - - endpoints := iothub.Properties.Routing.Endpoints.StorageContainers - if endpoints == nil { - return nil - } - - for _, endpoint := range *endpoints { - if existingEndpointName := endpoint.Name; existingEndpointName != nil { - if strings.EqualFold(*existingEndpointName, endpointName) { - return fmt.Errorf("Bad: Storage Container endpoint %s still exists on IoTHb %s", endpointName, iothubName) - } - } - } - } - return nil -} diff --git a/azurerm/internal/services/iothub/tests/iothub_fallback_route_resource_test.go b/azurerm/internal/services/iothub/tests/iothub_fallback_route_resource_test.go deleted file mode 100644 index 09fd167e5bc8..000000000000 --- a/azurerm/internal/services/iothub/tests/iothub_fallback_route_resource_test.go +++ /dev/null @@ -1,252 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -// NOTE: this resource intentionally doesn't support Requires Import -// since a fallback route is created by default - -func TestAccAzureRMIotHubFallbackRoute_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_fallback_route", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: 
acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubFallbackRouteDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubFallbackRoute_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubFallbackRouteExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMIotHubFallbackRoute_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_fallback_route", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubFallbackRouteDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubFallbackRoute_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubFallbackRouteExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMIotHubFallbackRoute_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubFallbackRouteExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMIotHubFallbackRouteDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTHub.ResourceClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_iothub_fallback_route" { - continue - } - - iothubName := rs.Primary.Attributes["iothub_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - iothub, err := client.Get(ctx, resourceGroup, iothubName) - if err != nil { - if utils.ResponseWasNotFound(iothub.Response) { - return nil - } - - return fmt.Errorf("Bad: Get on iothubResourceClient: %+v", err) - } - if iothub.Properties == nil || iothub.Properties.Routing == nil { - return nil - } - if iothub.Properties.Routing.FallbackRoute != nil { - return fmt.Errorf("Bad: fallback route still exists on IoTHb %s", iothubName) - } - } - return nil -} - -func testCheckAzureRMIotHubFallbackRouteExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTHub.ResourceClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - parsedIothubId, err := azure.ParseAzureResourceID(rs.Primary.ID) - if err != nil { - return err - } - iothubName := parsedIothubId.Path["IotHubs"] - resourceGroup := parsedIothubId.ResourceGroup - - iothub, err := client.Get(ctx, resourceGroup, iothubName) - if err != nil { - if utils.ResponseWasNotFound(iothub.Response) { - return fmt.Errorf("IotHub %q (Resource Group %q) was not found", iothubName, resourceGroup) - } - - return fmt.Errorf("Error loading IotHub %q (Resource Group %q): %+v", iothubName, resourceGroup, err) - } - - if iothub.Properties == nil || iothub.Properties.Routing == nil || iothub.Properties.Routing.FallbackRoute == nil { - return fmt.Errorf("Bad: No fallbackroute defined for IotHub %s", iothubName) - } - - return nil - } -} - -func testAccAzureRMIotHubFallbackRoute_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-iothub-%[1]d" - location = "%[2]s" -} - -resource "azurerm_storage_account" "test" { - name = "acctestsa%[3]s" - resource_group_name 
= azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_storage_container" "test" { - name = "test-%[1]d" - storage_account_name = azurerm_storage_account.test.name - container_access_type = "private" -} - -resource "azurerm_iothub" "test" { - name = "acctestIoTHub-%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "S1" - capacity = "1" - } - - tags = { - purpose = "testing" - } -} - -resource "azurerm_iothub_endpoint_storage_container" "test" { - resource_group_name = azurerm_resource_group.test.name - iothub_name = azurerm_iothub.test.name - name = "acctest" - - connection_string = azurerm_storage_account.test.primary_blob_connection_string - batch_frequency_in_seconds = 60 - max_chunk_size_in_bytes = 10485760 - container_name = azurerm_storage_container.test.name - encoding = "Avro" - file_name_format = "{iothub}/{partition}_{YYYY}_{MM}_{DD}_{HH}_{mm}" -} - -resource "azurerm_iothub_fallback_route" "test" { - resource_group_name = azurerm_resource_group.test.name - iothub_name = azurerm_iothub.test.name - - condition = "true" - endpoint_names = [azurerm_iothub_endpoint_storage_container.test.name] - enabled = true -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func testAccAzureRMIotHubFallbackRoute_update(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-iothub-%[1]d" - location = "%[2]s" -} - -resource "azurerm_storage_account" "test" { - name = "acctestsa%[3]s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_storage_container" "test" { - name = "test-%[1]d" - storage_account_name = azurerm_storage_account.test.name - container_access_type = "private" -} - -resource "azurerm_iothub" "test" { - name = "acctestIoTHub-%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "S1" - capacity = "1" - } - - tags = { - purpose = "testing" - } -} - -resource "azurerm_iothub_endpoint_storage_container" "test" { - resource_group_name = azurerm_resource_group.test.name - iothub_name = azurerm_iothub.test.name - name = "acctest" - - connection_string = azurerm_storage_account.test.primary_blob_connection_string - batch_frequency_in_seconds = 60 - max_chunk_size_in_bytes = 10485760 - container_name = azurerm_storage_container.test.name - encoding = "Avro" - file_name_format = "{iothub}/{partition}_{YYYY}_{MM}_{DD}_{HH}_{mm}" -} - -resource "azurerm_iothub_fallback_route" "test" { - resource_group_name = azurerm_resource_group.test.name - iothub_name = azurerm_iothub.test.name - - condition = "true" - endpoint_names = [azurerm_iothub_endpoint_storage_container.test.name] - enabled = false -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} diff --git a/azurerm/internal/services/iothub/tests/iothub_resource_test.go b/azurerm/internal/services/iothub/tests/iothub_resource_test.go deleted file mode 100644 index 42ed6ed521fc..000000000000 --- a/azurerm/internal/services/iothub/tests/iothub_resource_test.go +++ /dev/null @@ -1,812 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - 
"github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMIotHub_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHub_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMIotHub_ipFilterRules(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHub_ipFilterRules(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMIotHub_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHub_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMIotHub_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_iothub"), - }, - }, - }) -} - -func TestAccAzureRMIotHub_standard(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHub_standard(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMIotHub_customRoutes(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHub_customRoutes(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "endpoint.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "endpoint.0.type", "AzureIotHub.StorageContainer"), - resource.TestCheckResourceAttr(data.ResourceName, "endpoint.1.type", "AzureIotHub.EventHub"), - resource.TestCheckResourceAttr(data.ResourceName, "route.#", "2"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMIotHub_removeEndpointsAndRoutes(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: 
func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHub_customRoutes(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMIotHub_removeEndpointsAndRoutes(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMIotHub_fileUpload(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHub_fileUpload(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "file_upload.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "file_upload.0.lock_duration", "PT5M"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMIotHub_withDifferentEndpointResourceGroup(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHub_withDifferentEndpointResourceGroup(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMIotHub_fallbackRoute(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHub_fallbackRoute(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "fallback_route.0.source", "DeviceMessages"), - resource.TestCheckResourceAttr(data.ResourceName, "fallback_route.0.endpoint_names.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "fallback_route.0.enabled", "true"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMIotHub_publicAccess(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHub_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMIotHub_publicAccessEnabled(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMIotHub_publicAccessDisabled(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMIotHub_basic(data), - Check: 
resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMIotHubDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTHub.ResourceClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_iothub" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, name) - - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("IotHub %s still exists in resource group %s", name, resourceGroup) - } - } - return nil -} - -func testCheckAzureRMIotHubExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTHub.ResourceClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - iothubName := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for IotHub: %s", iothubName) - } - - resp, err := client.Get(ctx, resourceGroup, iothubName) - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: IotHub %q (resource group: %q) does not exist", iothubName, resourceGroup) - } - - return fmt.Errorf("Bad: Get on iothubResourceClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMIotHub_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-iothub-%d" - location = "%s" -} - -resource "azurerm_iothub" "test" { - name = "acctestIoTHub-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "B1" - capacity = "1" - } - - tags = { - purpose = "testing" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMIotHub_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMIotHub_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_iothub" "import" { - name = azurerm_iothub.test.name - resource_group_name = azurerm_iothub.test.resource_group_name - location = azurerm_iothub.test.location - - sku { - name = "B1" - capacity = "1" - } - - tags = { - purpose = "testing" - } -} -`, template) -} - -func testAccAzureRMIotHub_standard(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_iothub" "test" { - name = "acctestIoTHub-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "S1" - capacity = "1" - } - - tags = { - purpose = "testing" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMIotHub_ipFilterRules(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" 
-} - -resource "azurerm_iothub" "test" { - name = "acctestIoTHub-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "S1" - capacity = "1" - } - - ip_filter_rule { - name = "test" - ip_mask = "10.0.0.0/31" - action = "Accept" - } - - tags = { - purpose = "testing" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMIotHub_customRoutes(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_storage_account" "test" { - name = "acctestsa%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_storage_container" "test" { - name = "test" - storage_account_name = azurerm_storage_account.test.name - container_access_type = "private" -} - -resource "azurerm_eventhub_namespace" "test" { - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - name = "acctest-%d" - sku = "Basic" -} - -resource "azurerm_eventhub" "test" { - name = "acctest" - resource_group_name = azurerm_resource_group.test.name - namespace_name = azurerm_eventhub_namespace.test.name - partition_count = 2 - message_retention = 1 -} - -resource "azurerm_eventhub_authorization_rule" "test" { - resource_group_name = azurerm_resource_group.test.name - namespace_name = azurerm_eventhub_namespace.test.name - eventhub_name = azurerm_eventhub.test.name - name = "acctest" - send = true -} - -resource "azurerm_iothub" "test" { - name = "acctestIoTHub-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "S1" - capacity = "1" - } - - event_hub_retention_in_days = 7 - event_hub_partition_count = 77 - - endpoint { - type = "AzureIotHub.StorageContainer" - connection_string = azurerm_storage_account.test.primary_blob_connection_string - name = "export" - batch_frequency_in_seconds = 60 - max_chunk_size_in_bytes = 10485760 - container_name = azurerm_storage_container.test.name - encoding = "Avro" - file_name_format = "{iothub}/{partition}_{YYYY}_{MM}_{DD}_{HH}_{mm}" - resource_group_name = azurerm_resource_group.test.name - } - - endpoint { - type = "AzureIotHub.EventHub" - connection_string = azurerm_eventhub_authorization_rule.test.primary_connection_string - name = "export2" - resource_group_name = azurerm_resource_group.test.name - } - - route { - name = "export" - source = "DeviceMessages" - condition = "true" - endpoint_names = ["export"] - enabled = true - } - - route { - name = "export2" - source = "DeviceMessages" - condition = "true" - endpoint_names = ["export2"] - enabled = true - } - - tags = { - purpose = "testing" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMIotHub_removeEndpointsAndRoutes(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_storage_account" "test" { - name = "acctestsa%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = 
"LRS" -} - -resource "azurerm_storage_container" "test" { - name = "test" - storage_account_name = azurerm_storage_account.test.name - container_access_type = "private" -} - -resource "azurerm_eventhub_namespace" "test" { - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - name = "acctest-%d" - sku = "Basic" -} - -resource "azurerm_eventhub" "test" { - name = "acctest" - resource_group_name = azurerm_resource_group.test.name - namespace_name = azurerm_eventhub_namespace.test.name - partition_count = 2 - message_retention = 1 -} - -resource "azurerm_eventhub_authorization_rule" "test" { - resource_group_name = azurerm_resource_group.test.name - namespace_name = azurerm_eventhub_namespace.test.name - eventhub_name = azurerm_eventhub.test.name - name = "acctest" - send = true -} - -resource "azurerm_iothub" "test" { - name = "acctestIoTHub-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "S1" - capacity = "1" - } - - event_hub_retention_in_days = 7 - event_hub_partition_count = 77 - - endpoint = [] - - route = [] - - tags = { - purpose = "testing" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMIotHub_fallbackRoute(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_iothub" "test" { - name = "acctestIoTHub-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "S1" - capacity = "1" - } - - fallback_route { - source = "DeviceMessages" - endpoint_names = ["events"] - enabled = true - } - - tags = { - purpose = "testing" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMIotHub_fileUpload(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_storage_account" "test" { - name = "acctestsa%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_storage_container" "test" { - name = "test" - storage_account_name = azurerm_storage_account.test.name - container_access_type = "private" -} - -resource "azurerm_iothub" "test" { - name = "acctestIoTHub-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "S1" - capacity = "1" - } - - file_upload { - connection_string = azurerm_storage_account.test.primary_blob_connection_string - container_name = azurerm_storage_container.test.name - notifications = true - max_delivery_count = 12 - sas_ttl = "PT2H" - default_ttl = "PT3H" - lock_duration = "PT5M" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) -} - -func testAccAzureRMIotHub_publicAccessEnabled(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-iothub-%d" - location = "%s" -} - -resource "azurerm_iothub" "test" { - name = "acctestIoTHub-%d" - resource_group_name = 
azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "B1" - capacity = "1" - } - - public_network_access_enabled = true - - tags = { - purpose = "testing" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMIotHub_publicAccessDisabled(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-iothub-%d" - location = "%s" -} - -resource "azurerm_iothub" "test" { - name = "acctestIoTHub-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "B1" - capacity = "1" - } - - public_network_access_enabled = false - - tags = { - purpose = "testing" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMIotHub_withDifferentEndpointResourceGroup(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-iothub-%d" - location = "%s" -} - -resource "azurerm_resource_group" "test2" { - name = "acctestRG2-%d" - location = "%s" -} - -resource "azurerm_storage_account" "test" { - name = "acctestsa%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_storage_container" "test" { - name = "test" - storage_account_name = azurerm_storage_account.test.name - container_access_type = "private" -} - -resource "azurerm_eventhub_namespace" "test" { - resource_group_name = azurerm_resource_group.test2.name - location = azurerm_resource_group.test2.location - name = "acctest-%d" - sku = "Basic" -} - -resource "azurerm_eventhub" "test" { - name = "acctest" - resource_group_name = azurerm_resource_group.test2.name - namespace_name = azurerm_eventhub_namespace.test.name - partition_count = 2 - message_retention = 1 -} - -resource "azurerm_eventhub_authorization_rule" "test" { - resource_group_name = azurerm_resource_group.test2.name - namespace_name = azurerm_eventhub_namespace.test.name - eventhub_name = azurerm_eventhub.test.name - name = "acctest" - send = true -} - -resource "azurerm_iothub" "test" { - name = "acctestIoTHub-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "S1" - capacity = "1" - } - - event_hub_retention_in_days = 7 - event_hub_partition_count = 77 - - endpoint { - type = "AzureIotHub.StorageContainer" - connection_string = azurerm_storage_account.test.primary_blob_connection_string - name = "export" - batch_frequency_in_seconds = 60 - max_chunk_size_in_bytes = 10485760 - container_name = azurerm_storage_container.test.name - encoding = "Avro" - file_name_format = "{iothub}/{partition}_{YYYY}_{MM}_{DD}_{HH}_{mm}" - resource_group_name = azurerm_resource_group.test.name - } - - endpoint { - type = "AzureIotHub.EventHub" - connection_string = azurerm_eventhub_authorization_rule.test.primary_connection_string - name = "export2" - resource_group_name = azurerm_resource_group.test2.name - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/iothub/tests/iothub_route_resource_test.go 
b/azurerm/internal/services/iothub/tests/iothub_route_resource_test.go deleted file mode 100644 index 7892fec14a9d..000000000000 --- a/azurerm/internal/services/iothub/tests/iothub_route_resource_test.go +++ /dev/null @@ -1,315 +0,0 @@ -package tests - -import ( - "fmt" - "strings" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMIotHubRoute_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_route", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubRouteDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubRoute_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubRouteExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMIotHubRoute_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_route", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubRouteDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubRoute_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubRouteExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMIotHubRoute_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_iothub_route"), - }, - }, - }) -} - -func TestAccAzureRMIotHubRoute_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_route", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubRouteDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubRoute_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubRouteExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMIotHubRoute_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubRouteExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMIotHubRouteDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTHub.ResourceClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_iothub_route" { - continue - } - - routeName := rs.Primary.Attributes["name"] - iothubName := rs.Primary.Attributes["iothub_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - iothub, err := client.Get(ctx, resourceGroup, iothubName) - if err != nil { - if utils.ResponseWasNotFound(iothub.Response) { - return nil - } - - return fmt.Errorf("Bad: Get on iothubResourceClient: %+v", err) - } - if iothub.Properties == nil || iothub.Properties.Routing == nil { - return nil - } - routes := iothub.Properties.Routing.Routes - - if routes == nil { - 
return nil - } - - for _, route := range *routes { - if strings.EqualFold(*route.Name, routeName) { - return fmt.Errorf("Bad: route %s still exists on IoTHb %s", routeName, iothubName) - } - } - } - return nil -} - -func testCheckAzureRMIotHubRouteExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTHub.ResourceClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - parsedIothubId, err := azure.ParseAzureResourceID(rs.Primary.ID) - if err != nil { - return err - } - iothubName := parsedIothubId.Path["IotHubs"] - routeName := parsedIothubId.Path["Routes"] - resourceGroup := parsedIothubId.ResourceGroup - - iothub, err := client.Get(ctx, resourceGroup, iothubName) - if err != nil { - if utils.ResponseWasNotFound(iothub.Response) { - return fmt.Errorf("IotHub %q (Resource Group %q) was not found", iothubName, resourceGroup) - } - - return fmt.Errorf("Error loading IotHub %q (Resource Group %q): %+v", iothubName, resourceGroup, err) - } - - if iothub.Properties == nil || iothub.Properties.Routing == nil { - return fmt.Errorf("Bad: No route %s defined for IotHub %s", routeName, iothubName) - } - routes := iothub.Properties.Routing.Routes - - if routes == nil { - return fmt.Errorf("Bad: No route %s defined for IotHub %s", routeName, iothubName) - } - - for _, route := range *routes { - if strings.EqualFold(*route.Name, routeName) { - return nil - } - } - - return fmt.Errorf("Bad: No route %s defined for IotHub %s", routeName, iothubName) - } -} - -func testAccAzureRMIotHubRoute_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMIotHubRoute_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_iothub_route" "import" { - resource_group_name = azurerm_resource_group.test.name - iothub_name = azurerm_iothub.test.name - name = "acctest" - - source = "DeviceMessages" - condition = "true" - endpoint_names = [azurerm_iothub_endpoint_storage_container.test.name] - enabled = true -} -`, template) -} - -func testAccAzureRMIotHubRoute_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-iothub-%[1]d" - location = "%[2]s" -} - -resource "azurerm_storage_account" "test" { - name = "acctestsa%[3]s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_storage_container" "test" { - name = "test%[1]d" - storage_account_name = azurerm_storage_account.test.name - container_access_type = "private" -} - -resource "azurerm_iothub" "test" { - name = "acctestIoTHub%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "S1" - capacity = "1" - } - - tags = { - purpose = "testing" - } -} - -resource "azurerm_iothub_endpoint_storage_container" "test" { - resource_group_name = azurerm_resource_group.test.name - iothub_name = azurerm_iothub.test.name - name = "acctest" - - connection_string = azurerm_storage_account.test.primary_blob_connection_string - batch_frequency_in_seconds = 60 - max_chunk_size_in_bytes = 10485760 - container_name = azurerm_storage_container.test.name - encoding = "Avro" - file_name_format = 
"{iothub}/{partition}_{YYYY}_{MM}_{DD}_{HH}_{mm}" -} - -resource "azurerm_iothub_route" "test" { - resource_group_name = azurerm_resource_group.test.name - iothub_name = azurerm_iothub.test.name - name = "acctest" - - source = "DeviceMessages" - condition = "true" - endpoint_names = [azurerm_iothub_endpoint_storage_container.test.name] - enabled = true -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func testAccAzureRMIotHubRoute_update(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-iothub-%[1]d" - location = "%[2]s" -} - -resource "azurerm_storage_account" "test" { - name = "acctestsa%[3]s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_storage_container" "test" { - name = "test%[1]d" - storage_account_name = azurerm_storage_account.test.name - container_access_type = "private" -} - -resource "azurerm_iothub" "test" { - name = "acctestIoTHub%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "S1" - capacity = "1" - } - - tags = { - purpose = "testing" - } -} - -resource "azurerm_iothub_endpoint_storage_container" "test" { - resource_group_name = azurerm_resource_group.test.name - iothub_name = azurerm_iothub.test.name - name = "acctest" - - connection_string = azurerm_storage_account.test.primary_blob_connection_string - batch_frequency_in_seconds = 60 - max_chunk_size_in_bytes = 10485760 - container_name = azurerm_storage_container.test.name - encoding = "Avro" - file_name_format = "{iothub}/{partition}_{YYYY}_{MM}_{DD}_{HH}_{mm}" -} - -resource "azurerm_iothub_route" "test" { - resource_group_name = azurerm_resource_group.test.name - iothub_name = azurerm_iothub.test.name - name = "acctest" - - source = "DeviceLifecycleEvents" - condition = "true" - endpoint_names = [azurerm_iothub_endpoint_storage_container.test.name] - enabled = false -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} diff --git a/azurerm/internal/services/iothub/tests/iothub_shared_access_policy_data_source_test.go b/azurerm/internal/services/iothub/tests/iothub_shared_access_policy_data_source_test.go deleted file mode 100644 index 25fed4bb454d..000000000000 --- a/azurerm/internal/services/iothub/tests/iothub_shared_access_policy_data_source_test.go +++ /dev/null @@ -1,44 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMIotHubSharedAccessPolicy_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_iothub_shared_access_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubSharedAccessPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMIotHubSharedAccessPolicy_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_connection_string"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_key"), - 
resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_connection_string"), - ), - }, - }, - }) -} - -func testAccDataSourceAzureRMIotHubSharedAccessPolicy_basic(data acceptance.TestData) string { - template := testAccAzureRMIotHubSharedAccessPolicy_basic(data) - - return fmt.Sprintf(` -%s - -data "azurerm_iothub_shared_access_policy" "test" { - name = azurerm_iothub_shared_access_policy.test.name - resource_group_name = azurerm_resource_group.test.name - iothub_name = azurerm_iothub.test.name -} -`, template) -} diff --git a/azurerm/internal/services/iothub/tests/iothub_shared_access_policy_resource_test.go b/azurerm/internal/services/iothub/tests/iothub_shared_access_policy_resource_test.go deleted file mode 100644 index 314dccf38c60..000000000000 --- a/azurerm/internal/services/iothub/tests/iothub_shared_access_policy_resource_test.go +++ /dev/null @@ -1,228 +0,0 @@ -package tests - -import ( - "fmt" - "regexp" - "strings" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMIotHubSharedAccessPolicy_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_shared_access_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubSharedAccessPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubSharedAccessPolicy_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIotHubSharedAccessPolicyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "registry_read", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "registry_write", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "service_connect", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "device_connect", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctest"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMIotHubSharedAccessPolicy_writeWithoutRead(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_shared_access_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubSharedAccessPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubSharedAccessPolicy_writeWithoutRead(data), - ExpectError: regexp.MustCompile("If `registry_write` is set to true, `registry_read` must also be set to true"), - }, - }, - }) -} - -func TestAccAzureRMIotHubSharedAccessPolicy_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iothub_shared_access_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIotHubSharedAccessPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIotHubSharedAccessPolicy_basic(data), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMIotHubSharedAccessPolicyExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMIotHubSharedAccessPolicy_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_iothub_shared_access_policy"), - }, - }, - }) -} - -func testAccAzureRMIotHubSharedAccessPolicy_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_iothub" "test" { - name = "acctestIoTHub-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "B1" - capacity = "1" - } - - tags = { - purpose = "testing" - } -} - -resource "azurerm_iothub_shared_access_policy" "test" { - resource_group_name = azurerm_resource_group.test.name - iothub_name = azurerm_iothub.test.name - name = "acctest" - - registry_read = true - registry_write = true -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMIotHubSharedAccessPolicy_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMIotHubSharedAccessPolicy_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_iothub_shared_access_policy" "import" { - resource_group_name = azurerm_resource_group.test.name - iothub_name = azurerm_iothub.test.name - name = "acctest" - - registry_read = true - registry_write = true -} -`, template) -} - -func testAccAzureRMIotHubSharedAccessPolicy_writeWithoutRead(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_iothub" "test" { - name = "acctestIoTHub-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - sku { - name = "B1" - capacity = "1" - } - - tags = { - purpose = "testing" - } -} - -resource "azurerm_iothub_shared_access_policy" "test" { - resource_group_name = azurerm_resource_group.test.name - iothub_name = azurerm_iothub.test.name - name = "acctest" - - registry_write = true -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testCheckAzureRMIotHubSharedAccessPolicyExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTHub.ResourceClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - parsedIothubId, err := azure.ParseAzureResourceID(rs.Primary.ID) - if err != nil { - return err - } - iothubName := parsedIothubId.Path["IotHubs"] - keyName := parsedIothubId.Path["IotHubKeys"] - resourceGroup := parsedIothubId.ResourceGroup - - for accessPolicyIterator, err := client.ListKeysComplete(ctx, resourceGroup, iothubName); accessPolicyIterator.NotDone(); err = accessPolicyIterator.NextWithContext(ctx) { - if err != nil { - return fmt.Errorf("Error loading Shared Access Profiles of IotHub %q (Resource Group %q): %+v", iothubName, resourceGroup, err) - } - - if strings.EqualFold(*accessPolicyIterator.Value().KeyName, keyName) { - return nil - } - } - - return fmt.Errorf("Bad: No shared access policy %s defined for IotHub %s", keyName, iothubName) - } -} - -func testCheckAzureRMIotHubSharedAccessPolicyDestroy(s *terraform.State) error 
{ - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTHub.ResourceClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_iothub_shared_access_policy" { - continue - } - - keyName := rs.Primary.Attributes["name"] - iothubName := rs.Primary.Attributes["iothub_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, iothubName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return fmt.Errorf("Bad: Get on iothubResourceClient: %+v", err) - } - - for _, sharedAccessPolicy := range *resp.Properties.AuthorizationPolicies { - if *sharedAccessPolicy.KeyName == keyName { - return fmt.Errorf("Bad: Shared Access Policy %s still exists on IoTHb %s", keyName, iothubName) - } - } - } - return nil -} diff --git a/azurerm/internal/services/iothub/validate/iot_hub_consumer_group.go b/azurerm/internal/services/iothub/validate/iot_hub_consumer_group.go new file mode 100644 index 000000000000..8064d8eeddcd --- /dev/null +++ b/azurerm/internal/services/iothub/validate/iot_hub_consumer_group.go @@ -0,0 +1,17 @@ +package validate + +import ( + "fmt" + "regexp" +) + +func IoTHubConsumerGroupName(v interface{}, k string) (warnings []string, errors []error) { + value := v.(string) + + // Portal: The value must contain only alphanumeric characters or the following: - . _ + if matched := regexp.MustCompile(`^[0-9a-zA-Z-._]{1,}$`).Match([]byte(value)); !matched { + errors = append(errors, fmt.Errorf("%q may only contain alphanumeric characters and dashes, periods and underscores", k)) + } + + return warnings, errors +} diff --git a/azurerm/internal/services/iothub/validate/iot_hub_consumer_group_test.go b/azurerm/internal/services/iothub/validate/iot_hub_consumer_group_test.go new file mode 100644 index 000000000000..de7db6e04dc9 --- /dev/null +++ b/azurerm/internal/services/iothub/validate/iot_hub_consumer_group_test.go @@ -0,0 +1,33 @@ +package validate + +import "testing" + +func TestIoTHubConsumerGroupName(t *testing.T) { + validNames := []string{ + "valid-name", + "valid02-name", + "validName1", + "-validname1", + "valid_name", + "double-hyphen--valid", + "hello.world", + } + for _, v := range validNames { + _, errors := IoTHubConsumerGroupName(v, "example") + if len(errors) != 0 { + t.Fatalf("%q should be a valid IoT Hub Consumer Group Name: %q", v, errors) + } + } + + invalidNames := []string{ + "", + "invalid!", + "!@£", + } + for _, v := range invalidNames { + _, errors := IoTHubConsumerGroupName(v, "name") + if len(errors) == 0 { + t.Fatalf("%q should be an invalid IoT Hub Consumer Group Name", v) + } + } +} diff --git a/azurerm/internal/services/iothub/validate/iot_hub_endpoint_name.go b/azurerm/internal/services/iothub/validate/iot_hub_endpoint_name.go new file mode 100644 index 000000000000..1b8bab6a20a3 --- /dev/null +++ b/azurerm/internal/services/iothub/validate/iot_hub_endpoint_name.go @@ -0,0 +1,22 @@ +package validate + +import "fmt" + +func IoTHubEndpointName(v interface{}, _ string) (warnings []string, errors []error) { + value := v.(string) + + reservedNames := []string{ + "events", + "operationsMonitoringEvents", + "fileNotifications", + "$default", + } + + for _, name := range reservedNames { + if name == value { + errors = append(errors, fmt.Errorf("The reserved endpoint name %s could not be used as a name for a custom endpoint", name)) + } + } + + return warnings, errors +} diff --git 
a/azurerm/internal/services/iothub/validate/iot_hub_id.go b/azurerm/internal/services/iothub/validate/iot_hub_id.go new file mode 100644 index 000000000000..8ff4e85ec007 --- /dev/null +++ b/azurerm/internal/services/iothub/validate/iot_hub_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/iothub/parse" +) + +func IotHubID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.IotHubID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/iothub/validate/iot_hub_id_test.go b/azurerm/internal/services/iothub/validate/iot_hub_id_test.go new file mode 100644 index 000000000000..13d9349783b9 --- /dev/null +++ b/azurerm/internal/services/iothub/validate/iot_hub_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestIotHubID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Devices/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Devices/IotHubs/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Devices/IotHubs/hub1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DEVICES/IOTHUBS/HUB1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := IotHubID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/iothub/validate/iot_hub_name.go b/azurerm/internal/services/iothub/validate/iot_hub_name.go new file mode 100644 index 000000000000..287092f4ca9d --- /dev/null +++ b/azurerm/internal/services/iothub/validate/iot_hub_name.go @@ -0,0 +1,17 @@ +package validate + +import ( + "fmt" + "regexp" +) + +func IoTHubName(v interface{}, k string) (warnings []string, errors []error) { + value := v.(string) + + // Portal: The value must contain only alphanumeric characters or the following: - + if matched := regexp.MustCompile(`^[0-9a-zA-Z-]{1,}$`).Match([]byte(value)); !matched { + errors = append(errors, fmt.Errorf("%q may only contain alphanumeric characters and dashes", k)) + } + + return warnings, errors +} diff --git a/azurerm/internal/services/iothub/validate/iot_hub_name_test.go 
b/azurerm/internal/services/iothub/validate/iot_hub_name_test.go new file mode 100644 index 000000000000..00fda2ec34e1 --- /dev/null +++ b/azurerm/internal/services/iothub/validate/iot_hub_name_test.go @@ -0,0 +1,33 @@ +package validate + +import "testing" + +func TestIoTHubName(t *testing.T) { + validNames := []string{ + "valid-name", + "valid02-name", + "validName1", + "-validname1", + "double-hyphen--valid", + } + for _, v := range validNames { + _, errors := IoTHubName(v, "example") + if len(errors) != 0 { + t.Fatalf("%q should be a valid IoT Hub Name: %q", v, errors) + } + } + + invalidNames := []string{ + "", + "invalid_name", + "invalid!", + "!@£", + "hello.world", + } + for _, v := range invalidNames { + _, errors := IoTHubName(v, "name") + if len(errors) == 0 { + t.Fatalf("%q should be an invalid IoT Hub Name", v) + } + } +} diff --git a/azurerm/internal/services/iothub/validate/iot_hub_shared_access_policy_name.go b/azurerm/internal/services/iothub/validate/iot_hub_shared_access_policy_name.go new file mode 100644 index 000000000000..dc2f6c04294f --- /dev/null +++ b/azurerm/internal/services/iothub/validate/iot_hub_shared_access_policy_name.go @@ -0,0 +1,24 @@ +package validate + +import ( + "fmt" + "regexp" +) + +func IotHubSharedAccessPolicyName(i interface{}, k string) (_ []string, errors []error) { + v, ok := i.(string) + if !ok { + return nil, append(errors, fmt.Errorf("expected type of %s to be string", k)) + } + + // The name attribute rules are : + // 1. must not be empty. + // 2. must not exceed 64 characters in length. + // 3. can only contain alphanumeric characters, exclamation marks, periods, underscores and hyphens + + if !regexp.MustCompile(`[a-zA-Z0-9!._-]{1,64}`).MatchString(v) { + errors = append(errors, fmt.Errorf("%s must not be empty, and must not exceed 64 characters in length, and can only contain alphanumeric characters, exclamation marks, periods, underscores and hyphens", k)) + } + + return nil, errors +} diff --git a/azurerm/internal/services/iothub/validate/iothub.go b/azurerm/internal/services/iothub/validate/iothub.go deleted file mode 100644 index 519071464c80..000000000000 --- a/azurerm/internal/services/iothub/validate/iothub.go +++ /dev/null @@ -1,82 +0,0 @@ -package validate - -import ( - "fmt" - "regexp" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/iothub/parse" -) - -func IotHubID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.IotHubID(v); err != nil { - errors = append(errors, fmt.Errorf("can not parse %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} - -func IoTHubName(v interface{}, k string) (warnings []string, errors []error) { - value := v.(string) - - // Portal: The value must contain only alphanumeric characters or the following: - - if matched := regexp.MustCompile(`^[0-9a-zA-Z-]{1,}$`).Match([]byte(value)); !matched { - errors = append(errors, fmt.Errorf("%q may only contain alphanumeric characters and dashes", k)) - } - - return warnings, errors -} - -func IoTHubConsumerGroupName(v interface{}, k string) (warnings []string, errors []error) { - value := v.(string) - - // Portal: The value must contain only alphanumeric characters or the following: - . 
_ - if matched := regexp.MustCompile(`^[0-9a-zA-Z-._]{1,}$`).Match([]byte(value)); !matched { - errors = append(errors, fmt.Errorf("%q may only contain alphanumeric characters and dashes, periods and underscores", k)) - } - - return warnings, errors -} - -func IoTHubEndpointName(v interface{}, _ string) (warnings []string, errors []error) { - value := v.(string) - - reservedNames := []string{ - "events", - "operationsMonitoringEvents", - "fileNotifications", - "$default", - } - - for _, name := range reservedNames { - if name == value { - errors = append(errors, fmt.Errorf("The reserved endpoint name %s could not be used as a name for a custom endpoint", name)) - } - } - - return warnings, errors -} - -func IotHubSharedAccessPolicyName(i interface{}, k string) (_ []string, errors []error) { - v, ok := i.(string) - if !ok { - return nil, append(errors, fmt.Errorf("expected type of %s to be string", k)) - } - - // The name attribute rules are : - // 1. must not be empty. - // 2. must not exceed 64 characters in length. - // 3. can only contain alphanumeric characters, exclamation marks, periods, underscores and hyphens - - if !regexp.MustCompile(`[a-zA-Z0-9!._-]{1,64}`).MatchString(v) { - errors = append(errors, fmt.Errorf("%s must not be empty, and must not exceed 64 characters in length, and can only contain alphanumeric characters, exclamation marks, periods, underscores and hyphens", k)) - } - - return nil, errors -} diff --git a/azurerm/internal/services/iothub/validate/iothub_test.go b/azurerm/internal/services/iothub/validate/iothub_test.go deleted file mode 100644 index 120775b2d54e..000000000000 --- a/azurerm/internal/services/iothub/validate/iothub_test.go +++ /dev/null @@ -1,63 +0,0 @@ -package validate - -import "testing" - -func TestIoTHubName(t *testing.T) { - validNames := []string{ - "valid-name", - "valid02-name", - "validName1", - "-validname1", - "double-hyphen--valid", - } - for _, v := range validNames { - _, errors := IoTHubName(v, "example") - if len(errors) != 0 { - t.Fatalf("%q should be a valid IoT Hub Name: %q", v, errors) - } - } - - invalidNames := []string{ - "", - "invalid_name", - "invalid!", - "!@£", - "hello.world", - } - for _, v := range invalidNames { - _, errors := IoTHubName(v, "name") - if len(errors) == 0 { - t.Fatalf("%q should be an invalid IoT Hub Name", v) - } - } -} - -func TestIoTHubConsumerGroupName(t *testing.T) { - validNames := []string{ - "valid-name", - "valid02-name", - "validName1", - "-validname1", - "valid_name", - "double-hyphen--valid", - "hello.world", - } - for _, v := range validNames { - _, errors := IoTHubConsumerGroupName(v, "example") - if len(errors) != 0 { - t.Fatalf("%q should be a valid IoT Hub Consumer Group Name: %q", v, errors) - } - } - - invalidNames := []string{ - "", - "invalid!", - "!@£", - } - for _, v := range invalidNames { - _, errors := IoTHubConsumerGroupName(v, "name") - if len(errors) == 0 { - t.Fatalf("%q should be an invalid IoT Hub Consumer Group Name", v) - } - } -} diff --git a/azurerm/internal/services/iottimeseriesinsights/client/client.go b/azurerm/internal/services/iottimeseriesinsights/client/client.go index e4cc50c1a582..09a1214db78c 100644 --- a/azurerm/internal/services/iottimeseriesinsights/client/client.go +++ b/azurerm/internal/services/iottimeseriesinsights/client/client.go @@ -1,7 +1,7 @@ package client import ( - "github.com/Azure/azure-sdk-for-go/services/preview/timeseriesinsights/mgmt/2018-08-15-preview/timeseriesinsights" + 
"github.com/Azure/azure-sdk-for-go/services/timeseriesinsights/mgmt/2020-05-15/timeseriesinsights" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/common" ) diff --git a/azurerm/internal/services/iottimeseriesinsights/iot_time_series_insights_access_policy_resource.go b/azurerm/internal/services/iottimeseriesinsights/iot_time_series_insights_access_policy_resource.go new file mode 100644 index 000000000000..bf78ca035f4c --- /dev/null +++ b/azurerm/internal/services/iottimeseriesinsights/iot_time_series_insights_access_policy_resource.go @@ -0,0 +1,207 @@ +package iottimeseriesinsights + +import ( + "fmt" + "regexp" + "time" + + "github.com/Azure/azure-sdk-for-go/services/timeseriesinsights/mgmt/2020-05-15/timeseriesinsights" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/iottimeseriesinsights/migration" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/iottimeseriesinsights/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/iottimeseriesinsights/validate" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceIoTTimeSeriesInsightsAccessPolicy() *schema.Resource { + return &schema.Resource{ + Create: resourceIoTTimeSeriesInsightsAccessPolicyCreateUpdate, + Read: resourceIoTTimeSeriesInsightsAccessPolicyRead, + Update: resourceIoTTimeSeriesInsightsAccessPolicyCreateUpdate, + Delete: resourceIoTTimeSeriesInsightsAccessPolicyDelete, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.AccessPolicyID(id) + return err + }), + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + SchemaVersion: 1, + StateUpgraders: []schema.StateUpgrader{ + migration.TimeSeriesInsightsAccessPolicyV0(), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringMatch( + regexp.MustCompile(`^[-\w\._\(\)]+$`), + "IoT Time Series Insights Access Policy name must contain only word characters, periods, underscores, hyphens, and parentheses.", + ), + }, + + "time_series_insights_environment_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.TimeSeriesInsightsEnvironmentID, + }, + + "principal_object_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "description": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "roles": { + Type: schema.TypeSet, + Required: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringInSlice([]string{ + string(timeseriesinsights.Contributor), + string(timeseriesinsights.Reader), + }, false), + }, + }, + }, + } +} + 
+func resourceIoTTimeSeriesInsightsAccessPolicyCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).IoTTimeSeriesInsights.AccessPoliciesClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + environmentId, err := parse.EnvironmentID(d.Get("time_series_insights_environment_id").(string)) + if err != nil { + return err + } + + resourceId := parse.NewAccessPolicyID(subscriptionId, environmentId.ResourceGroup, environmentId.Name, name).ID() + if d.IsNewResource() { + existing, err := client.Get(ctx, environmentId.ResourceGroup, environmentId.Name, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for presence of existing IoT Time Series Insights Access Policy %q (Resource Group %q): %s", name, environmentId.ResourceGroup, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_iot_time_series_insights_access_policy", resourceId) + } + } + + policy := timeseriesinsights.AccessPolicyCreateOrUpdateParameters{ + AccessPolicyResourceProperties: &timeseriesinsights.AccessPolicyResourceProperties{ + Description: utils.String(d.Get("description").(string)), + PrincipalObjectID: utils.String(d.Get("principal_object_id").(string)), + Roles: expandIoTTimeSeriesInsightsAccessPolicyRoles(d.Get("roles").(*schema.Set).List()), + }, + } + + if _, err := client.CreateOrUpdate(ctx, environmentId.ResourceGroup, environmentId.Name, name, policy); err != nil { + return fmt.Errorf("creating/updating IoT Time Series Insights Access Policy %q (Resource Group %q): %+v", name, environmentId.ResourceGroup, err) + } + + d.SetId(resourceId) + return resourceIoTTimeSeriesInsightsAccessPolicyRead(d, meta) +} + +func resourceIoTTimeSeriesInsightsAccessPolicyRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).IoTTimeSeriesInsights.AccessPoliciesClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.AccessPolicyID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.EnvironmentName, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + d.SetId("") + return nil + } + + return fmt.Errorf("retrieving IoT Time Series Insights Access Policy %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + environmentId := parse.NewEnvironmentID(id.SubscriptionId, id.ResourceGroup, id.EnvironmentName).ID() + + d.Set("name", resp.Name) + d.Set("time_series_insights_environment_id", environmentId) + + if props := resp.AccessPolicyResourceProperties; props != nil { + d.Set("description", props.Description) + d.Set("principal_object_id", props.PrincipalObjectID) + d.Set("roles", flattenIoTTimeSeriesInsightsAccessPolicyRoles(resp.Roles)) + } + + return nil +} + +func resourceIoTTimeSeriesInsightsAccessPolicyDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).IoTTimeSeriesInsights.AccessPoliciesClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.AccessPolicyID(d.Id()) + if err != nil { + return err + } + + response, err := client.Delete(ctx, id.ResourceGroup, id.EnvironmentName, id.Name) + if err != nil { + if !utils.ResponseWasNotFound(response) { + return
fmt.Errorf("deleting IoT Time Series Insights Access Policy %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + } + + return nil +} + +func expandIoTTimeSeriesInsightsAccessPolicyRoles(input []interface{}) *[]timeseriesinsights.AccessPolicyRole { + roles := make([]timeseriesinsights.AccessPolicyRole, 0) + + for _, v := range input { + if v == nil { + continue + } + roles = append(roles, timeseriesinsights.AccessPolicyRole(v.(string))) + } + + return &roles +} + +func flattenIoTTimeSeriesInsightsAccessPolicyRoles(input *[]timeseriesinsights.AccessPolicyRole) []interface{} { + result := make([]interface{}, 0) + if input != nil { + for _, item := range *input { + result = append(result, string(item)) + } + } + return result +} diff --git a/azurerm/internal/services/iottimeseriesinsights/iot_time_series_insights_access_policy_resource_test.go b/azurerm/internal/services/iottimeseriesinsights/iot_time_series_insights_access_policy_resource_test.go new file mode 100644 index 000000000000..fdf09e199964 --- /dev/null +++ b/azurerm/internal/services/iottimeseriesinsights/iot_time_series_insights_access_policy_resource_test.go @@ -0,0 +1,129 @@ +package iottimeseriesinsights_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/iottimeseriesinsights/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type IoTTimeSeriesInsightsAccessPolicyResource struct { +} + +func TestAccIoTTimeSeriesInsightsAccessPolicy_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iot_time_series_insights_access_policy", "test") + r := IoTTimeSeriesInsightsAccessPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIoTTimeSeriesInsightsAccessPolicy_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iot_time_series_insights_access_policy", "test") + r := IoTTimeSeriesInsightsAccessPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (IoTTimeSeriesInsightsAccessPolicyResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.AccessPolicyID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.IoTTimeSeriesInsights.AccessPoliciesClient.Get(ctx, id.ResourceGroup, id.EnvironmentName, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving IoT Time Series Insights Access Policy (%q): %+v", id.String(), err) + } + + return
utils.Bool(resp.AccessPolicyResourceProperties != nil), nil +} + +func (IoTTimeSeriesInsightsAccessPolicyResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} +resource "azurerm_resource_group" "test" { + name = "acctestRG-tsi-%d" + location = "%s" +} +resource "azurerm_iot_time_series_insights_standard_environment" "test" { + name = "accTEst_tsie%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "S1_1" + data_retention_time = "P30D" +} +resource "azurerm_iot_time_series_insights_access_policy" "test" { + name = "accTEst_tsiap%d" + time_series_insights_environment_id = azurerm_iot_time_series_insights_standard_environment.test.id + + principal_object_id = "aGUID" + roles = ["Reader"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (IoTTimeSeriesInsightsAccessPolicyResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} +resource "azurerm_resource_group" "test" { + name = "acctestRG-tsi-%d" + location = "%s" +} +resource "azurerm_iot_time_series_insights_standard_environment" "test" { + name = "accTEst_tsie%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "S1_1" + data_retention_time = "P30D" +} +resource "azurerm_iot_time_series_insights_access_policy" "test" { + name = "accTEst_tsiap%d" + time_series_insights_environment_id = azurerm_iot_time_series_insights_standard_environment.test.id + + principal_object_id = "aGUID" + roles = ["Contributor"] + description = "Test Access Policy" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/iottimeseriesinsights/iot_time_series_insights_gen2_environment_resource.go b/azurerm/internal/services/iottimeseriesinsights/iot_time_series_insights_gen2_environment_resource.go new file mode 100644 index 000000000000..ace3d726a2aa --- /dev/null +++ b/azurerm/internal/services/iottimeseriesinsights/iot_time_series_insights_gen2_environment_resource.go @@ -0,0 +1,325 @@ +package iottimeseriesinsights + +import ( + "fmt" + "regexp" + "time" + + "github.com/Azure/azure-sdk-for-go/services/timeseriesinsights/mgmt/2020-05-15/timeseriesinsights" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + azValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/iottimeseriesinsights/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceIoTTimeSeriesInsightsGen2Environment() *schema.Resource { + return &schema.Resource{ + Create: 
resourceIoTTimeSeriesInsightsGen2EnvironmentCreateUpdate, + Read: resourceIoTTimeSeriesInsightsGen2EnvironmentRead, + Update: resourceIoTTimeSeriesInsightsGen2EnvironmentCreateUpdate, + Delete: resourceIoTTimeSeriesInsightsGen2EnvironmentDelete, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.EnvironmentID(id) + return err + }), + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringMatch( + regexp.MustCompile(`^[-\w\._\(\)]+$`), + "IoT Time Series Insights Gen2 Environment name must contain only word characters, periods, underscores, hyphens, and parentheses.", + ), + }, + + "location": azure.SchemaLocation(), + + "resource_group_name": azure.SchemaResourceGroupName(), + + "sku_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringInSlice([]string{ + "L1", + }, false), + }, + + "warm_store_data_retention_time": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: azValidate.ISO8601Duration, + }, + "id_properties": { + Type: schema.TypeSet, + Required: true, + ForceNew: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + "storage": { + Type: schema.TypeList, + MaxItems: 1, + Required: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "key": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + }, + }, + + "data_access_fqdn": { + Type: schema.TypeString, + Computed: true, + }, + + "tags": tags.Schema(), + }, + } +} + +func resourceIoTTimeSeriesInsightsGen2EnvironmentCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).IoTTimeSeriesInsights.EnvironmentsClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + location := azure.NormalizeLocation(d.Get("location").(string)) + resourceGroup := d.Get("resource_group_name").(string) + t := d.Get("tags").(map[string]interface{}) + sku, err := convertEnvironmentSkuName(d.Get("sku_name").(string)) + if err != nil { + return fmt.Errorf("expanding sku: %+v", err) + } + + if d.IsNewResource() { + existing, err := client.Get(ctx, resourceGroup, name, "") + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for presence of existing IoT Time Series Insights Gen2 Environment %q (Resource Group %q): %s", name, resourceGroup, err) + } + } + + if existing.Value != nil { + environment, ok := existing.Value.AsGen2EnvironmentResource() + if !ok { + return fmt.Errorf("existing resource was not an IoT Time Series Insights Gen2 Environment %q (Resource Group %q)", name, resourceGroup) + } + + if environment.ID != nil && *environment.ID != "" { + return tf.ImportAsExistsError("azurerm_iot_time_series_insights_gen2_environment", *environment.ID) + } + } + } + + environment := timeseriesinsights.Gen2EnvironmentCreateOrUpdateParameters{ + Location: &location, + Tags: tags.Expand(t), + Sku: sku, +
Gen2EnvironmentCreationProperties: &timeseriesinsights.Gen2EnvironmentCreationProperties{ + TimeSeriesIDProperties: expandIdProperties(d.Get("id_properties").(*schema.Set).List()), + StorageConfiguration: expandStorage(d.Get("storage").([]interface{})), + }, + } + + if v, ok := d.GetOk("warm_store_data_retention_time"); ok { + environment.WarmStoreConfiguration = &timeseriesinsights.WarmStoreConfigurationProperties{ + DataRetention: utils.String(v.(string)), + } + } + + future, err := client.CreateOrUpdate(ctx, resourceGroup, name, environment) + if err != nil { + return fmt.Errorf("creating/updating IoT Time Series Insights Gen2 Environment %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for completion of IoT Time Series Insights Gen2 Environment %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + resp, err := client.Get(ctx, resourceGroup, name, "") + if err != nil { + return fmt.Errorf("retrieving IoT Time Series Insights Gen2 Environment %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + resource, ok := resp.Value.AsGen2EnvironmentResource() + if !ok { + return fmt.Errorf("resource was not IoT Time Series Insights Gen2 Environment %q (Resource Group %q)", name, resourceGroup) + } + + if resource.ID == nil || *resource.ID == "" { + return fmt.Errorf("cannot read IoT Time Series Insights Gen2 Environment %q (Resource Group %q) ID", name, resourceGroup) + } + + d.SetId(*resource.ID) + + return resourceIoTTimeSeriesInsightsGen2EnvironmentRead(d, meta) +} + +func resourceIoTTimeSeriesInsightsGen2EnvironmentRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).IoTTimeSeriesInsights.EnvironmentsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.EnvironmentID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.Name, "") + if err != nil || resp.Value == nil { + if utils.ResponseWasNotFound(resp.Response) { + d.SetId("") + return nil + } + + return fmt.Errorf("retrieving IoT Time Series Insights Gen2 Environment %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + environment, ok := resp.Value.AsGen2EnvironmentResource() + if !ok { + return fmt.Errorf("existing resource was not an IoT Time Series Insights Gen2 Environment %q (Resource Group %q)", id.Name, id.ResourceGroup) + } + + d.Set("name", environment.Name) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("sku_name", environment.Sku.Name) + d.Set("location", location.NormalizeNilable(environment.Location)) + d.Set("data_access_fqdn", environment.DataAccessFqdn) + if err := d.Set("id_properties", flattenIdProperties(environment.TimeSeriesIDProperties)); err != nil { + return fmt.Errorf("setting `id_properties`: %+v", err) + } + if props := environment.WarmStoreConfiguration; props != nil { + d.Set("warm_store_data_retention_time", props.DataRetention) + } + if err := d.Set("storage", flattenIoTTimeSeriesGen2EnvironmentStorage(environment.StorageConfiguration, d.Get("storage.0.key").(string))); err != nil { + return fmt.Errorf("setting `storage`: %+v", err) + } + + return tags.FlattenAndSet(d, environment.Tags) +} + +func resourceIoTTimeSeriesInsightsGen2EnvironmentDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).IoTTimeSeriesInsights.EnvironmentsClient + ctx, cancel :=
timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.EnvironmentID(d.Id()) + if err != nil { + return err + } + + response, err := client.Delete(ctx, id.ResourceGroup, id.Name) + if err != nil { + if !utils.ResponseWasNotFound(response) { + return fmt.Errorf("deleting IoT Time Series Insights Gen2 Environment %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + } + + return nil +} + +func convertEnvironmentSkuName(skuName string) (*timeseriesinsights.Sku, error) { + var name timeseriesinsights.SkuName + switch skuName { + case "L1": + name = timeseriesinsights.L1 + default: + return nil, fmt.Errorf("sku_name %s has unknown sku tier %s", skuName, skuName) + } + + // Gen2 cannot set capacity manually but SDK requires capacity + capacity := utils.Int32(1) + + return &timeseriesinsights.Sku{ + Name: name, + Capacity: capacity, + }, nil +} + +func expandStorage(input []interface{}) *timeseriesinsights.Gen2StorageConfigurationInput { + if input == nil || input[0] == nil { + return nil + } + storageMap := input[0].(map[string]interface{}) + accountName := storageMap["name"].(string) + managementKey := storageMap["key"].(string) + + return &timeseriesinsights.Gen2StorageConfigurationInput{ + AccountName: &accountName, + ManagementKey: &managementKey, + } +} + +func expandIdProperties(input []interface{}) *[]timeseriesinsights.TimeSeriesIDProperty { + if input == nil || input[0] == nil { + return nil + } + result := make([]timeseriesinsights.TimeSeriesIDProperty, 0) + for _, item := range input { + result = append(result, timeseriesinsights.TimeSeriesIDProperty{ + Name: utils.String(item.(string)), + Type: "String", + }) + } + return &result +} + +func flattenIdProperties(input *[]timeseriesinsights.TimeSeriesIDProperty) []string { + output := make([]string, 0) + if input == nil { + return output + } + + for _, v := range *input { + if v.Name != nil { + output = append(output, *v.Name) + } + } + + return output +} + +func flattenIoTTimeSeriesGen2EnvironmentStorage(input *timeseriesinsights.Gen2StorageConfigurationOutput, key string) []interface{} { + if input == nil { + return []interface{}{} + } + + attr := make(map[string]interface{}) + if input.AccountName != nil { + attr["name"] = *input.AccountName + } + // Key is not returned by the api so we'll set it to the key from config to help with diffs + attr["key"] = key + + return []interface{}{attr} +} diff --git a/azurerm/internal/services/iottimeseriesinsights/iot_time_series_insights_gen2_environment_resource_test.go b/azurerm/internal/services/iottimeseriesinsights/iot_time_series_insights_gen2_environment_resource_test.go new file mode 100644 index 000000000000..064daa272f62 --- /dev/null +++ b/azurerm/internal/services/iottimeseriesinsights/iot_time_series_insights_gen2_environment_resource_test.go @@ -0,0 +1,188 @@ +package iottimeseriesinsights_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/iottimeseriesinsights/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type
IoTTimeSeriesInsightsGen2EnvironmentResource struct { +} + +func TestAccIoTTimeSeriesInsightsGen2Environment_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iot_time_series_insights_gen2_environment", "test") + r := IoTTimeSeriesInsightsGen2EnvironmentResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("storage.0.key"), + }) +} + +func TestAccIoTTimeSeriesInsightsGen2Environment_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iot_time_series_insights_gen2_environment", "test") + r := IoTTimeSeriesInsightsGen2EnvironmentResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("storage.0.key"), + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("storage.0.key"), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + }) +} + +func TestAccIoTTimeSeriesInsightsGen2Environment_multiple_property_ids(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iot_time_series_insights_gen2_environment", "test") + r := IoTTimeSeriesInsightsGen2EnvironmentResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.multiple_property_ids(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("storage.0.key"), + }) +} + +func (IoTTimeSeriesInsightsGen2EnvironmentResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.EnvironmentID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.IoTTimeSeriesInsights.EnvironmentsClient.Get(ctx, id.ResourceGroup, id.Name, "") + if err != nil { + return nil, fmt.Errorf("retrieving IoT Time Series Insights Gen2 Environment (%q): %+v", id.String(), err) + } + + return utils.Bool(!utils.ResponseWasNotFound(resp.Response)), nil +} + +func (IoTTimeSeriesInsightsGen2EnvironmentResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} +resource "azurerm_resource_group" "test" { + name = "acctestRG-tsi-%d" + location = "%s" +} +resource "azurerm_storage_account" "storage" { + name = "acctestsatsi%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_iot_time_series_insights_gen2_environment" "test" { + name = "acctest_tsie%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "L1" + id_properties = ["id"] + + storage { + name = azurerm_storage_account.storage.name + key = azurerm_storage_account.storage.primary_access_key + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) +} + +func (IoTTimeSeriesInsightsGen2EnvironmentResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} +resource "azurerm_resource_group" "test" { + name = "acctestRG-tsi-%d" + location = "%s" +} +resource "azurerm_storage_account" "storage" { + name = 
"acctestsatsi%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_iot_time_series_insights_gen2_environment" "test" { + name = "acctest_tsie%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "L1" + id_properties = ["id"] + + warm_store_data_retention_time = "P30D" + + storage { + name = azurerm_storage_account.storage.name + key = azurerm_storage_account.storage.primary_access_key + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) +} + +func (IoTTimeSeriesInsightsGen2EnvironmentResource) multiple_property_ids(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} +resource "azurerm_resource_group" "test" { + name = "acctestRG-tsi-%d" + location = "%s" +} +resource "azurerm_storage_account" "storage" { + name = "acctestsatsi%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_iot_time_series_insights_gen2_environment" "test" { + name = "acctest_tsie%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "L1" + id_properties = ["id", "secondId"] + + storage { + name = azurerm_storage_account.storage.name + key = azurerm_storage_account.storage.primary_access_key + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) +} diff --git a/azurerm/internal/services/iottimeseriesinsights/iot_time_series_insights_reference_data_set_resource.go b/azurerm/internal/services/iottimeseriesinsights/iot_time_series_insights_reference_data_set_resource.go new file mode 100644 index 000000000000..628a2ccfe6d3 --- /dev/null +++ b/azurerm/internal/services/iottimeseriesinsights/iot_time_series_insights_reference_data_set_resource.go @@ -0,0 +1,243 @@ +package iottimeseriesinsights + +import ( + "fmt" + "regexp" + "strings" + "time" + + "github.com/Azure/azure-sdk-for-go/services/timeseriesinsights/mgmt/2020-05-15/timeseriesinsights" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/iottimeseriesinsights/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/iottimeseriesinsights/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceIoTTimeSeriesInsightsReferenceDataSet() *schema.Resource { + return &schema.Resource{ + Create: resourceIoTTimeSeriesInsightsReferenceDataSetCreateUpdate, + Read: resourceIoTTimeSeriesInsightsReferenceDataSetRead, + Update: resourceIoTTimeSeriesInsightsReferenceDataSetCreateUpdate, 
+ Delete: resourceIoTTimeSeriesInsightsReferenceDataSetDelete, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.ReferenceDataSetID(id) + return err + }), + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringMatch( + regexp.MustCompile(`^[A-Za-z0-9]{3,63}`), + "IoT Time Series Insights Reference Data Set name must contain only alphanumeric characters and be between 3 and 63 characters.", + ), + }, + + "time_series_insights_environment_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.TimeSeriesInsightsEnvironmentID, + }, + + "data_string_comparison_behavior": { + Type: schema.TypeString, + Optional: true, + Default: string(timeseriesinsights.Ordinal), + ValidateFunc: validation.StringInSlice([]string{ + string(timeseriesinsights.Ordinal), + string(timeseriesinsights.OrdinalIgnoreCase), + }, false), + }, + + "key_property": { + Type: schema.TypeSet, + Required: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "type": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{ + string(timeseriesinsights.ReferenceDataKeyPropertyTypeBool), + string(timeseriesinsights.ReferenceDataKeyPropertyTypeDateTime), + string(timeseriesinsights.ReferenceDataKeyPropertyTypeDouble), + string(timeseriesinsights.ReferenceDataKeyPropertyTypeString), + }, false), + }, + }, + }, + }, + + "location": azure.SchemaLocation(), + + "tags": tags.Schema(), + }, + } +} + +func resourceIoTTimeSeriesInsightsReferenceDataSetCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).IoTTimeSeriesInsights.ReferenceDataSetsClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + environmentID := d.Get("time_series_insights_environment_id").(string) + id, err := parse.EnvironmentID(environmentID) + if err != nil { + return err + } + location := azure.NormalizeLocation(d.Get("location").(string)) + t := d.Get("tags").(map[string]interface{}) + + if d.IsNewResource() { + existing, err := client.Get(ctx, id.ResourceGroup, id.Name, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for presence of existing IoT Time Series Insights Reference Data Set %q (Resource Group %q): %s", name, id.ResourceGroup, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_iot_time_series_insights_reference_data_set", *existing.ID) + } + } + + dataset := timeseriesinsights.ReferenceDataSetCreateOrUpdateParameters{ + Location: &location, + Tags: tags.Expand(t), + ReferenceDataSetCreationProperties: ×eriesinsights.ReferenceDataSetCreationProperties{ + DataStringComparisonBehavior: timeseriesinsights.DataStringComparisonBehavior(d.Get("data_string_comparison_behavior").(string)), + KeyProperties: expandIoTTimeSeriesInsightsReferenceDataSetKeyProperties(d.Get("key_property").(*schema.Set).List()), + }, + } + + if _, err := 
client.CreateOrUpdate(ctx, id.ResourceGroup, id.Name, name, dataset); err != nil { + return fmt.Errorf("creating/updating IoT Time Series Insights Reference Data Set %q (Resource Group %q): %+v", name, id.ResourceGroup, err) + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.Name, name) + if err != nil { + return fmt.Errorf("retrieving IoT Time Series Insights Reference Data Set %q (Resource Group %q): %+v", name, id.ResourceGroup, err) + } + + if resp.ID == nil || *resp.ID == "" { + return fmt.Errorf("cannot read IoT Time Series Insights Reference Data Set %q (Resource Group %q) ID", name, id.ResourceGroup) + } + + d.SetId(*resp.ID) + + return resourceIoTTimeSeriesInsightsReferenceDataSetRead(d, meta) +} + +func resourceIoTTimeSeriesInsightsReferenceDataSetRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).IoTTimeSeriesInsights.ReferenceDataSetsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.ReferenceDataSetID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.EnvironmentName, id.Name) + if err != nil || resp.ID == nil { + if utils.ResponseWasNotFound(resp.Response) { + d.SetId("") + return nil + } + + return fmt.Errorf("retrieving IoT Time Series Insights Reference Data Set %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + d.Set("name", resp.Name) + d.Set("time_series_insights_environment_id", strings.Split(d.Id(), "/referenceDataSets")[0]) + if location := resp.Location; location != nil { + d.Set("location", azure.NormalizeLocation(*location)) + } + + if props := resp.ReferenceDataSetResourceProperties; props != nil { + d.Set("data_string_comparison_behavior", string(props.DataStringComparisonBehavior)) + if err := d.Set("key_property", flattenIoTTimeSeriesInsightsReferenceDataSetKeyProperties(props.KeyProperties)); err != nil { + return fmt.Errorf("setting `key_property`: %+v", err) + } + } + + return tags.FlattenAndSet(d, resp.Tags) +} + +func resourceIoTTimeSeriesInsightsReferenceDataSetDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).IoTTimeSeriesInsights.ReferenceDataSetsClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.ReferenceDataSetID(d.Id()) + if err != nil { + return err + } + + response, err := client.Delete(ctx, id.ResourceGroup, id.EnvironmentName, id.Name) + if err != nil { + if !utils.ResponseWasNotFound(response) { + return fmt.Errorf("deleting IoT Time Series Insights Reference Data Set %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + } + + return nil +} + +func expandIoTTimeSeriesInsightsReferenceDataSetKeyProperties(input []interface{}) *[]timeseriesinsights.ReferenceDataSetKeyProperty { + properties := make([]timeseriesinsights.ReferenceDataSetKeyProperty, 0) + + for _, v := range input { + if v == nil { + continue + } + attr := v.(map[string]interface{}) + + properties = append(properties, timeseriesinsights.ReferenceDataSetKeyProperty{ + Type: timeseriesinsights.ReferenceDataKeyPropertyType(attr["type"].(string)), + Name: utils.String(attr["name"].(string)), + }) + } + + return &properties +} + +func flattenIoTTimeSeriesInsightsReferenceDataSetKeyProperties(input *[]timeseriesinsights.ReferenceDataSetKeyProperty) []interface{} { + if input == nil { + return []interface{}{} + } + properties := make([]interface{}, 0) + for _, property := range *input { + 
attr := make(map[string]interface{}) + attr["type"] = string(property.Type) + if name := property.Name; name != nil { + attr["name"] = *property.Name + } + properties = append(properties, attr) + } + + return properties +} diff --git a/azurerm/internal/services/iottimeseriesinsights/iot_time_series_insights_reference_data_set_resource_test.go b/azurerm/internal/services/iottimeseriesinsights/iot_time_series_insights_reference_data_set_resource_test.go new file mode 100644 index 000000000000..b37d743ec543 --- /dev/null +++ b/azurerm/internal/services/iottimeseriesinsights/iot_time_series_insights_reference_data_set_resource_test.go @@ -0,0 +1,144 @@ +package iottimeseriesinsights_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/iottimeseriesinsights/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type IoTTimeSeriesInsightsReferenceDataSetResource struct { +} + +func TestAccIoTTimeSeriesInsightsReferenceDataSet_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iot_time_series_insights_reference_data_set", "test") + r := IoTTimeSeriesInsightsReferenceDataSetResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIoTTimeSeriesInsightsReferenceDataSet_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iot_time_series_insights_reference_data_set", "test") + r := IoTTimeSeriesInsightsReferenceDataSetResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (IoTTimeSeriesInsightsReferenceDataSetResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ReferenceDataSetID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.IoTTimeSeriesInsights.ReferenceDataSetsClient.Get(ctx, id.ResourceGroup, id.EnvironmentName, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving IoT Time Series Insights Reference Data Set (%q): %+v", id.String(), err) + } + + return utils.Bool(resp.ReferenceDataSetResourceProperties != nil), nil +} + +func (IoTTimeSeriesInsightsReferenceDataSetResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} +resource "azurerm_resource_group" "test" { + name = "acctestRG-tsi-%d" + location = "%s" +} +resource "azurerm_iot_time_series_insights_standard_environment" "test" { + name = "accTEst_tsie%d" + location = 
azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "S1_1" + data_retention_time = "P30D" +} + +resource "azurerm_iot_time_series_insights_reference_data_set" "test" { + name = "accTEsttsd%d" + time_series_insights_environment_id = azurerm_iot_time_series_insights_standard_environment.test.id + location = azurerm_resource_group.test.location + + key_property { + name = "keyProperty1" + type = "String" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (IoTTimeSeriesInsightsReferenceDataSetResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} +resource "azurerm_resource_group" "test" { + name = "acctestRG-tsi-%d" + location = "%s" +} +resource "azurerm_iot_time_series_insights_standard_environment" "test" { + name = "accTEst_tsie%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "S1_1" + data_retention_time = "P30D" +} +resource "azurerm_iot_time_series_insights_reference_data_set" "test" { + name = "accTEsttsd%d" + time_series_insights_environment_id = azurerm_iot_time_series_insights_standard_environment.test.id + location = azurerm_resource_group.test.location + + key_property { + name = "keyProperty1" + type = "String" + } + + key_property { + name = "keyProperty2" + type = "Bool" + } + + tags = { + Environment = "Production" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/iottimeseriesinsights/iot_time_series_insights_standard_environment_resource.go b/azurerm/internal/services/iottimeseriesinsights/iot_time_series_insights_standard_environment_resource.go new file mode 100644 index 000000000000..88c351bea384 --- /dev/null +++ b/azurerm/internal/services/iottimeseriesinsights/iot_time_series_insights_standard_environment_resource.go @@ -0,0 +1,294 @@ +package iottimeseriesinsights + +import ( + "fmt" + "regexp" + "strconv" + "strings" + "time" + + "github.com/Azure/azure-sdk-for-go/services/timeseriesinsights/mgmt/2020-05-15/timeseriesinsights" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + azValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/iottimeseriesinsights/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceIoTTimeSeriesInsightsStandardEnvironment() *schema.Resource { + return &schema.Resource{ + Create: resourceIoTTimeSeriesInsightsStandardEnvironmentCreateUpdate, + Read: resourceIoTTimeSeriesInsightsStandardEnvironmentRead, + Update: resourceIoTTimeSeriesInsightsStandardEnvironmentCreateUpdate, + Delete: resourceIoTTimeSeriesInsightsStandardEnvironmentDelete, + Importer: 
azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.EnvironmentID(id) + return err + }), + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringMatch( + regexp.MustCompile(`^[-\w\._\(\)]+$`), + "IoT Time Series Insights Standard Environment name must contain only word characters, periods, underscores, and parentheses.", + ), + }, + + "location": azure.SchemaLocation(), + + "resource_group_name": azure.SchemaResourceGroupName(), + + "sku_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringInSlice([]string{ + "S1_1", + "S1_2", + "S1_3", + "S1_4", + "S1_5", + "S1_6", + "S1_7", + "S1_8", + "S1_9", + "S1_10", + "S2_1", + "S2_2", + "S2_3", + "S2_4", + "S2_5", + "S2_6", + "S2_7", + "S2_8", + "S2_9", + "S2_10", + }, false), + }, + + "data_retention_time": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azValidate.ISO8601Duration, + }, + + "storage_limit_exceeded_behavior": { + Type: schema.TypeString, + Optional: true, + Default: string(timeseriesinsights.PurgeOldData), + ValidateFunc: validation.StringInSlice([]string{ + string(timeseriesinsights.PurgeOldData), + string(timeseriesinsights.PauseIngress), + }, false), + }, + + "partition_key": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.NoZeroValues, + ForceNew: true, + }, + + "tags": tags.Schema(), + }, + } +} + +func resourceIoTTimeSeriesInsightsStandardEnvironmentCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).IoTTimeSeriesInsights.EnvironmentsClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + location := azure.NormalizeLocation(d.Get("location").(string)) + resourceGroup := d.Get("resource_group_name").(string) + t := d.Get("tags").(map[string]interface{}) + sku, err := expandEnvironmentSkuName(d.Get("sku_name").(string)) + if err != nil { + return fmt.Errorf("expanding sku: %+v", err) + } + + if d.IsNewResource() { + existing, err := client.Get(ctx, resourceGroup, name, "") + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for presence of existing IoT Time Series Insights Standard Environment %q (Resource Group %q): %s", name, resourceGroup, err) + } + } + + if existing.Value != nil { + environment, ok := existing.Value.AsGen1EnvironmentResource() + if !ok { + return fmt.Errorf("exisiting resource was not a standard IoT Time Series Insights Standard Environment %q (Resource Group %q)", name, resourceGroup) + } + + if environment.ID != nil && *environment.ID != "" { + return tf.ImportAsExistsError("azurerm_iot_time_series_insights_environment", *environment.ID) + } + } + } + + environment := timeseriesinsights.Gen1EnvironmentCreateOrUpdateParameters{ + Location: &location, + Tags: tags.Expand(t), + Sku: sku, + Gen1EnvironmentCreationProperties: ×eriesinsights.Gen1EnvironmentCreationProperties{ + StorageLimitExceededBehavior: timeseriesinsights.StorageLimitExceededBehavior(d.Get("storage_limit_exceeded_behavior").(string)), + DataRetentionTime: 
utils.String(d.Get("data_retention_time").(string)), + }, + } + + if v, ok := d.GetOk("partition_key"); ok { + partition := make([]timeseriesinsights.TimeSeriesIDProperty, 1) + partition[0] = timeseriesinsights.TimeSeriesIDProperty{ + Name: utils.String(v.(string)), + Type: timeseriesinsights.String, + } + environment.Gen1EnvironmentCreationProperties.PartitionKeyProperties = &partition + } + + future, err := client.CreateOrUpdate(ctx, resourceGroup, name, environment) + if err != nil { + return fmt.Errorf("creating/updating IoT Time Series Insights Standard Environment %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for completion of IoT Time Series Insights Standard Environment %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + resp, err := client.Get(ctx, resourceGroup, name, "") + if err != nil { + return fmt.Errorf("retrieving IoT Time Series Insights Standard Environment %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + resource, ok := resp.Value.AsGen1EnvironmentResource() + if !ok { + return fmt.Errorf("resource was not a standard IoT Time Series Insights Standard Environment %q (Resource Group %q)", name, resourceGroup) + } + + if resource.ID == nil || *resource.ID == "" { + return fmt.Errorf("cannot read IoT Time Series Insights Standard Environment %q (Resource Group %q) ID", name, resourceGroup) + } + + d.SetId(*resource.ID) + + return resourceIoTTimeSeriesInsightsStandardEnvironmentRead(d, meta) +} + +func resourceIoTTimeSeriesInsightsStandardEnvironmentRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).IoTTimeSeriesInsights.EnvironmentsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.EnvironmentID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.Name, "") + if err != nil || resp.Value == nil { + if utils.ResponseWasNotFound(resp.Response) { + d.SetId("") + return nil + } + + return fmt.Errorf("retrieving IoT Time Series Insights Standard Environment %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + environment, ok := resp.Value.AsGen1EnvironmentResource() + if !ok { + return fmt.Errorf("exisiting resource was not a standard IoT Time Series Insights Standard Environment %q (Resource Group %q)", id.Name, id.ResourceGroup) + } + + d.Set("name", environment.Name) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("sku_name", flattenEnvironmentSkuName(environment.Sku)) + if location := environment.Location; location != nil { + d.Set("location", azure.NormalizeLocation(*location)) + } + + if props := environment.Gen1EnvironmentResourceProperties; props != nil { + d.Set("storage_limit_exceeded_behavior", string(props.StorageLimitExceededBehavior)) + d.Set("data_retention_time", props.DataRetentionTime) + + if partition := props.PartitionKeyProperties; partition != nil && len(*partition) > 0 { + for _, v := range *partition { + d.Set("partition_key", v.Name) + } + } + } + + return tags.FlattenAndSet(d, environment.Tags) +} + +func resourceIoTTimeSeriesInsightsStandardEnvironmentDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).IoTTimeSeriesInsights.EnvironmentsClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.EnvironmentID(d.Id()) + if err != nil { + 
return err + } + + response, err := client.Delete(ctx, id.ResourceGroup, id.Name) + if err != nil { + if !utils.ResponseWasNotFound(response) { + return fmt.Errorf("deleting IoT Time Series Insights Standard Environment %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + } + + return nil +} + +func expandEnvironmentSkuName(skuName string) (*timeseriesinsights.Sku, error) { + parts := strings.Split(skuName, "_") + if len(parts) != 2 { + return nil, fmt.Errorf("sku_name (%s) has the worng number of parts (%d) after splitting on _", skuName, len(parts)) + } + + var name timeseriesinsights.SkuName + switch parts[0] { + case "S1": + name = timeseriesinsights.S1 + case "S2": + name = timeseriesinsights.S2 + default: + return nil, fmt.Errorf("sku_name %s has unknown sku tier %s", skuName, parts[0]) + } + + capacity, err := strconv.Atoi(parts[1]) + if err != nil { + return nil, fmt.Errorf("cannot convert skuname %s capcity %s to int", skuName, parts[2]) + } + + return ×eriesinsights.Sku{ + Name: name, + Capacity: utils.Int32(int32(capacity)), + }, nil +} + +func flattenEnvironmentSkuName(input *timeseriesinsights.Sku) string { + if input == nil || input.Capacity == nil { + return "" + } + + return fmt.Sprintf("%s_%d", string(input.Name), *input.Capacity) +} diff --git a/azurerm/internal/services/iottimeseriesinsights/iot_time_series_insights_standard_environment_resource_test.go b/azurerm/internal/services/iottimeseriesinsights/iot_time_series_insights_standard_environment_resource_test.go new file mode 100644 index 000000000000..7fb020d4e765 --- /dev/null +++ b/azurerm/internal/services/iottimeseriesinsights/iot_time_series_insights_standard_environment_resource_test.go @@ -0,0 +1,161 @@ +package iottimeseriesinsights_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/iottimeseriesinsights/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type IoTTimeSeriesInsightsStandardEnvironmentResource struct { +} + +func TestAccIoTTimeSeriesInsightsStandardEnvironment_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iot_time_series_insights_standard_environment", "test") + r := IoTTimeSeriesInsightsStandardEnvironmentResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIoTTimeSeriesInsightsStandardEnvironment_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iot_time_series_insights_standard_environment", "test") + r := IoTTimeSeriesInsightsStandardEnvironmentResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: 
resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIoTTimeSeriesInsightsStandardEnvironment_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_iot_time_series_insights_standard_environment", "test") + r := IoTTimeSeriesInsightsStandardEnvironmentResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (IoTTimeSeriesInsightsStandardEnvironmentResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.EnvironmentID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.IoTTimeSeriesInsights.EnvironmentsClient.Get(ctx, id.ResourceGroup, id.Name, "") + if err != nil { + return nil, fmt.Errorf("retrieving IoT Time Series Insights Insights Standard Environment (%q): %+v", id.String(), err) + } + + return utils.Bool(!utils.ResponseWasNotFound(resp.Response)), nil +} + +func (IoTTimeSeriesInsightsStandardEnvironmentResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} +resource "azurerm_resource_group" "test" { + name = "acctestRG-tsi-%d" + location = "%s" +} +resource "azurerm_iot_time_series_insights_standard_environment" "test" { + name = "accTEst_tsie%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "S1_1" + data_retention_time = "P30D" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (IoTTimeSeriesInsightsStandardEnvironmentResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} +resource "azurerm_resource_group" "test" { + name = "acctestRG-tsi-%d" + location = "%s" +} +resource "azurerm_iot_time_series_insights_standard_environment" "test" { + name = "accTEst_tsie%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "S1_1" + data_retention_time = "P30D" + + storage_limit_exceeded_behavior = "PauseIngress" + + tags = { + Environment = "Production" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (IoTTimeSeriesInsightsStandardEnvironmentResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} +resource "azurerm_resource_group" "test" { + name = "acctestRG-tsi-%d" + location = "%s" +} +resource "azurerm_iot_time_series_insights_standard_environment" "test" { + name = "accTEst_tsie%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "S1_1" + data_retention_time = "P30D" + + storage_limit_exceeded_behavior = "PauseIngress" + partition_key = "foo" + + tags = { + Environment = "Production" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/iottimeseriesinsights/migration/resource_iot_time_series_insights_standard_environment.go b/azurerm/internal/services/iottimeseriesinsights/migration/resource_iot_time_series_insights_standard_environment.go new file mode 100644 index 000000000000..e76d92022ed6 --- /dev/null +++ 
b/azurerm/internal/services/iottimeseriesinsights/migration/resource_iot_time_series_insights_standard_environment.go @@ -0,0 +1,65 @@ +package migration + +import ( + "log" + "strings" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" +) + +func TimeSeriesInsightsAccessPolicyV0() schema.StateUpgrader { + return schema.StateUpgrader{ + Type: timeSeriesInsightsAccessPolicyV0StateMigration().CoreConfigSchema().ImpliedType(), + Upgrade: timeSeriesInsightsAccessPolicyV0StateUpgradeV0ToV1, + Version: 0, + } +} + +func timeSeriesInsightsAccessPolicyV0StateMigration() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + + "time_series_insights_environment_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + + "principal_object_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + + "description": { + Type: schema.TypeString, + Optional: true, + }, + + "roles": { + Type: schema.TypeSet, + Required: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + } +} + +func timeSeriesInsightsAccessPolicyV0StateUpgradeV0ToV1(rawState map[string]interface{}, meta interface{}) (map[string]interface{}, error) { + log.Println("[DEBUG] Migrating ResourceType from v0 to v1 format") + oldId := rawState["id"].(string) + newId := strings.Replace(oldId, "/accesspolicies/", "/accessPolicies/", 1) + + log.Printf("[DEBUG] Updating ID from %q to %q", oldId, newId) + + rawState["id"] = newId + + return rawState, nil +} diff --git a/azurerm/internal/services/iottimeseriesinsights/parse/access_policy.go b/azurerm/internal/services/iottimeseriesinsights/parse/access_policy.go new file mode 100644 index 000000000000..bb0ae0ca32d0 --- /dev/null +++ b/azurerm/internal/services/iottimeseriesinsights/parse/access_policy.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type AccessPolicyId struct { + SubscriptionId string + ResourceGroup string + EnvironmentName string + Name string +} + +func NewAccessPolicyID(subscriptionId, resourceGroup, environmentName, name string) AccessPolicyId { + return AccessPolicyId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + EnvironmentName: environmentName, + Name: name, + } +} + +func (id AccessPolicyId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Environment Name %q", id.EnvironmentName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Access Policy", segmentsStr) +} + +func (id AccessPolicyId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.TimeSeriesInsights/environments/%s/accessPolicies/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.EnvironmentName, id.Name) +} + +// AccessPolicyID parses a AccessPolicy ID into an AccessPolicyId struct +func AccessPolicyID(input string) (*AccessPolicyId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := AccessPolicyId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 
'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.EnvironmentName, err = id.PopSegment("environments"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("accessPolicies"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/iottimeseriesinsights/parse/access_policy_test.go b/azurerm/internal/services/iottimeseriesinsights/parse/access_policy_test.go new file mode 100644 index 000000000000..95d872222dbd --- /dev/null +++ b/azurerm/internal/services/iottimeseriesinsights/parse/access_policy_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = AccessPolicyId{} + +func TestAccessPolicyIDFormatter(t *testing.T) { + actual := NewAccessPolicyID("12345678-1234-9876-4563-123456789012", "resGroup1", "environment1", "policy1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/environment1/accessPolicies/policy1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestAccessPolicyID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *AccessPolicyId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing EnvironmentName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/", + Error: true, + }, + + { + // missing value for EnvironmentName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/environment1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/environment1/accessPolicies/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/environment1/accessPolicies/policy1", + Expected: &AccessPolicyId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + EnvironmentName: "environment1", + Name: "policy1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.TIMESERIESINSIGHTS/ENVIRONMENTS/ENVIRONMENT1/ACCESSPOLICIES/POLICY1", + Error: true, + }, + } + + for _, v := range testData { + 
t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := AccessPolicyID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.EnvironmentName != v.Expected.EnvironmentName { + t.Fatalf("Expected %q but got %q for EnvironmentName", v.Expected.EnvironmentName, actual.EnvironmentName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/iottimeseriesinsights/parse/environment.go b/azurerm/internal/services/iottimeseriesinsights/parse/environment.go new file mode 100644 index 000000000000..06a02f77425a --- /dev/null +++ b/azurerm/internal/services/iottimeseriesinsights/parse/environment.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type EnvironmentId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewEnvironmentID(subscriptionId, resourceGroup, name string) EnvironmentId { + return EnvironmentId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id EnvironmentId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Environment", segmentsStr) +} + +func (id EnvironmentId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.TimeSeriesInsights/environments/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// EnvironmentID parses a Environment ID into an EnvironmentId struct +func EnvironmentID(input string) (*EnvironmentId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := EnvironmentId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("environments"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/iottimeseriesinsights/parse/environment_test.go b/azurerm/internal/services/iottimeseriesinsights/parse/environment_test.go new file mode 100644 index 000000000000..8126319190fd --- /dev/null +++ b/azurerm/internal/services/iottimeseriesinsights/parse/environment_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ 
resourceid.Formatter = EnvironmentId{} + +func TestEnvironmentIDFormatter(t *testing.T) { + actual := NewEnvironmentID("12345678-1234-9876-4563-123456789012", "resGroup1", "environment1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/environment1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestEnvironmentID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *EnvironmentId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/environment1", + Expected: &EnvironmentId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "environment1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.TIMESERIESINSIGHTS/ENVIRONMENTS/ENVIRONMENT1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := EnvironmentID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/iottimeseriesinsights/parse/iot_time_series_insights_access_policy.go b/azurerm/internal/services/iottimeseriesinsights/parse/iot_time_series_insights_access_policy.go deleted file mode 100644 index bfbc1b020826..000000000000 --- a/azurerm/internal/services/iottimeseriesinsights/parse/iot_time_series_insights_access_policy.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type TimeSeriesInsightsAccessPolicyId struct { - ResourceGroup string - Name string - EnvironmentName string -} - -func TimeSeriesInsightsAccessPolicyID(input string) (*TimeSeriesInsightsAccessPolicyId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("parsing Time Series Insights Access Policy ID %q: 
%+v", input, err) - } - - service := TimeSeriesInsightsAccessPolicyId{ - ResourceGroup: id.ResourceGroup, - } - - if service.EnvironmentName, err = id.PopSegment("environments"); err != nil { - return nil, err - } - - if service.Name, err = id.PopSegment("accesspolicies"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &service, nil -} diff --git a/azurerm/internal/services/iottimeseriesinsights/parse/iot_time_series_insights_access_policy_test.go b/azurerm/internal/services/iottimeseriesinsights/parse/iot_time_series_insights_access_policy_test.go deleted file mode 100644 index dc1723a2e44a..000000000000 --- a/azurerm/internal/services/iottimeseriesinsights/parse/iot_time_series_insights_access_policy_test.go +++ /dev/null @@ -1,74 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestTimeSeriesInsightsAccessPolicyId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *TimeSeriesInsightsAccessPolicyId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Time Series Insight AccessPolicy Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/Environment1/accesspolicies/", - Expected: nil, - }, - { - Name: "Time Series Insight Access Policy ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/Environment1/accesspolicies/Policy1", - Expected: &TimeSeriesInsightsAccessPolicyId{ - EnvironmentName: "Environment1", - ResourceGroup: "resGroup1", - Name: "Policy1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/Environments/Environment1/AccessPolicies/Policy1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := TimeSeriesInsightsAccessPolicyID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/iottimeseriesinsights/parse/iot_time_series_insights_environment.go b/azurerm/internal/services/iottimeseriesinsights/parse/iot_time_series_insights_environment.go deleted file mode 100644 index b09122b7be12..000000000000 --- a/azurerm/internal/services/iottimeseriesinsights/parse/iot_time_series_insights_environment.go +++ /dev/null @@ -1,33 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type TimeSeriesInsightsEnvironmentId struct { - ResourceGroup string - Name string -} - -func TimeSeriesInsightsEnvironmentID(input 
string) (*TimeSeriesInsightsEnvironmentId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("parsing Time Series Insights Environment ID %q: %+v", input, err) - } - - service := TimeSeriesInsightsEnvironmentId{ - ResourceGroup: id.ResourceGroup, - } - - if service.Name, err = id.PopSegment("environments"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &service, nil -} diff --git a/azurerm/internal/services/iottimeseriesinsights/parse/iot_time_series_insights_environment_test.go b/azurerm/internal/services/iottimeseriesinsights/parse/iot_time_series_insights_environment_test.go deleted file mode 100644 index 9197231728c8..000000000000 --- a/azurerm/internal/services/iottimeseriesinsights/parse/iot_time_series_insights_environment_test.go +++ /dev/null @@ -1,73 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestTimeSeriesInsightsEnvironmentId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *TimeSeriesInsightsEnvironmentId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Time Series Insight Environment Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/", - Expected: nil, - }, - { - Name: "Time Series Insight Environment ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/Environment1", - Expected: &TimeSeriesInsightsEnvironmentId{ - Name: "Environment1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/Environments/Environment1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := TimeSeriesInsightsEnvironmentID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/iottimeseriesinsights/parse/iot_time_series_insights_reference_data_set.go b/azurerm/internal/services/iottimeseriesinsights/parse/iot_time_series_insights_reference_data_set.go deleted file mode 100644 index 798f4fd016b8..000000000000 --- a/azurerm/internal/services/iottimeseriesinsights/parse/iot_time_series_insights_reference_data_set.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type TimeSeriesInsightsReferenceDataSetId struct { - ResourceGroup string - EnvironmentName string - Name string -} - -func 
TimeSeriesInsightsReferenceDataSetID(input string) (*TimeSeriesInsightsReferenceDataSetId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("parsing Time Series Insights Reference Dataset ID %q: %+v", input, err) - } - - service := TimeSeriesInsightsReferenceDataSetId{ - ResourceGroup: id.ResourceGroup, - } - - if service.EnvironmentName, err = id.PopSegment("environments"); err != nil { - return nil, err - } - - if service.Name, err = id.PopSegment("referenceDataSets"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &service, nil -} diff --git a/azurerm/internal/services/iottimeseriesinsights/parse/iot_time_series_insights_reference_data_set_test.go b/azurerm/internal/services/iottimeseriesinsights/parse/iot_time_series_insights_reference_data_set_test.go deleted file mode 100644 index 9643e7635a5c..000000000000 --- a/azurerm/internal/services/iottimeseriesinsights/parse/iot_time_series_insights_reference_data_set_test.go +++ /dev/null @@ -1,74 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestTimeSeriesInsightsReferenceDataSetId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *TimeSeriesInsightsReferenceDataSetId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Time Series Insight ReferenceDataset Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/Environment1/referenceDataSets/", - Expected: nil, - }, - { - Name: "Time Series Insight Environment ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/Environment1/referenceDataSets/DataSet1", - Expected: &TimeSeriesInsightsReferenceDataSetId{ - Name: "DataSet1", - EnvironmentName: "Environment1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/Environments/Environment1/ReferenceDataSets/DataSet1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := TimeSeriesInsightsReferenceDataSetID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/iottimeseriesinsights/parse/reference_data_set.go b/azurerm/internal/services/iottimeseriesinsights/parse/reference_data_set.go new file mode 100644 index 000000000000..62220fa0f2d9 --- /dev/null +++ b/azurerm/internal/services/iottimeseriesinsights/parse/reference_data_set.go @@ -0,0 +1,75 @@ +package parse + +// 
NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ReferenceDataSetId struct { + SubscriptionId string + ResourceGroup string + EnvironmentName string + Name string +} + +func NewReferenceDataSetID(subscriptionId, resourceGroup, environmentName, name string) ReferenceDataSetId { + return ReferenceDataSetId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + EnvironmentName: environmentName, + Name: name, + } +} + +func (id ReferenceDataSetId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Environment Name %q", id.EnvironmentName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Reference Data Set", segmentsStr) +} + +func (id ReferenceDataSetId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.TimeSeriesInsights/environments/%s/referenceDataSets/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.EnvironmentName, id.Name) +} + +// ReferenceDataSetID parses a ReferenceDataSet ID into an ReferenceDataSetId struct +func ReferenceDataSetID(input string) (*ReferenceDataSetId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ReferenceDataSetId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.EnvironmentName, err = id.PopSegment("environments"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("referenceDataSets"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/iottimeseriesinsights/parse/reference_data_set_test.go b/azurerm/internal/services/iottimeseriesinsights/parse/reference_data_set_test.go new file mode 100644 index 000000000000..c8e9beed68e2 --- /dev/null +++ b/azurerm/internal/services/iottimeseriesinsights/parse/reference_data_set_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ReferenceDataSetId{} + +func TestReferenceDataSetIDFormatter(t *testing.T) { + actual := NewReferenceDataSetID("12345678-1234-9876-4563-123456789012", "resGroup1", "environment1", "dataSet1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/environment1/referenceDataSets/dataSet1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestReferenceDataSetID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ReferenceDataSetId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + 
// missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing EnvironmentName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/", + Error: true, + }, + + { + // missing value for EnvironmentName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/environment1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/environment1/referenceDataSets/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/environment1/referenceDataSets/dataSet1", + Expected: &ReferenceDataSetId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + EnvironmentName: "environment1", + Name: "dataSet1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.TIMESERIESINSIGHTS/ENVIRONMENTS/ENVIRONMENT1/REFERENCEDATASETS/DATASET1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ReferenceDataSetID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.EnvironmentName != v.Expected.EnvironmentName { + t.Fatalf("Expected %q but got %q for EnvironmentName", v.Expected.EnvironmentName, actual.EnvironmentName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/iottimeseriesinsights/registration.go b/azurerm/internal/services/iottimeseriesinsights/registration.go index c2ca9c9a0110..edf798de68b8 100644 --- a/azurerm/internal/services/iottimeseriesinsights/registration.go +++ b/azurerm/internal/services/iottimeseriesinsights/registration.go @@ -26,8 +26,9 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource { // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_iot_time_series_insights_access_policy": resourceArmIoTTimeSeriesInsightsAccessPolicy(), - "azurerm_iot_time_series_insights_standard_environment": resourceArmIoTTimeSeriesInsightsStandardEnvironment(), - "azurerm_iot_time_series_insights_reference_data_set": resourceArmIoTTimeSeriesInsightsReferenceDataSet(), + 
"azurerm_iot_time_series_insights_access_policy": resourceIoTTimeSeriesInsightsAccessPolicy(), + "azurerm_iot_time_series_insights_standard_environment": resourceIoTTimeSeriesInsightsStandardEnvironment(), + "azurerm_iot_time_series_insights_gen2_environment": resourceIoTTimeSeriesInsightsGen2Environment(), + "azurerm_iot_time_series_insights_reference_data_set": resourceIoTTimeSeriesInsightsReferenceDataSet(), } } diff --git a/azurerm/internal/services/iottimeseriesinsights/resource_iot_time_series_insights_access_policy.go b/azurerm/internal/services/iottimeseriesinsights/resource_iot_time_series_insights_access_policy.go deleted file mode 100644 index 0da396cb3f62..000000000000 --- a/azurerm/internal/services/iottimeseriesinsights/resource_iot_time_series_insights_access_policy.go +++ /dev/null @@ -1,209 +0,0 @@ -package iottimeseriesinsights - -import ( - "fmt" - "regexp" - "strings" - "time" - - "github.com/Azure/azure-sdk-for-go/services/preview/timeseriesinsights/mgmt/2018-08-15-preview/timeseriesinsights" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/iottimeseriesinsights/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/iottimeseriesinsights/validate" - azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmIoTTimeSeriesInsightsAccessPolicy() *schema.Resource { - return &schema.Resource{ - Create: resourceArmIoTTimeSeriesInsightsAccessPolicyCreateUpdate, - Read: resourceArmIoTTimeSeriesInsightsAccessPolicyRead, - Update: resourceArmIoTTimeSeriesInsightsAccessPolicyCreateUpdate, - Delete: resourceArmIoTTimeSeriesInsightsAccessPolicyDelete, - Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.TimeSeriesInsightsAccessPolicyID(id) - return err - }), - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringMatch( - regexp.MustCompile(`^[-\w\._\(\)]+$`), - "IoT Time Series Insights Access Policy name must contain only word characters, periods, underscores, hyphens, and parentheses.", - ), - }, - - "time_series_insights_environment_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validate.TimeSeriesInsightsEnvironmentID, - }, - - "principal_object_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - - "description": { - Type: schema.TypeString, - Optional: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - - "roles": { - Type: schema.TypeSet, - Required: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - ValidateFunc: validation.StringInSlice([]string{ - string(timeseriesinsights.Contributor), - 
string(timeseriesinsights.Reader), - }, false), - }, - }, - }, - } -} - -func resourceArmIoTTimeSeriesInsightsAccessPolicyCreateUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).IoTTimeSeriesInsights.AccessPoliciesClient - ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := d.Get("name").(string) - environmentID := d.Get("time_series_insights_environment_id").(string) - id, err := parse.TimeSeriesInsightsEnvironmentID(environmentID) - if err != nil { - return err - } - - if d.IsNewResource() { - existing, err := client.Get(ctx, id.ResourceGroup, id.Name, name) - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for presence of existing IoT Time Series Insights Access Policy %q (Resource Group %q): %s", name, id.ResourceGroup, err) - } - } - - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_iot_time_series_insights_access_policy", *existing.ID) - } - } - - policy := timeseriesinsights.AccessPolicyCreateOrUpdateParameters{ - AccessPolicyResourceProperties: ×eriesinsights.AccessPolicyResourceProperties{ - Description: utils.String(d.Get("description").(string)), - PrincipalObjectID: utils.String(d.Get("principal_object_id").(string)), - Roles: expandIoTTimeSeriesInsightsAccessPolicyRoles(d.Get("roles").(*schema.Set).List()), - }, - } - - if _, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.Name, name, policy); err != nil { - return fmt.Errorf("creating/updating IoT Time Series Insights Access Policy %q (Resource Group %q): %+v", name, id.ResourceGroup, err) - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.Name, name) - if err != nil { - return fmt.Errorf("retrieving IoT Time Series Insights Access Policy %q (Resource Group %q): %+v", name, id.ResourceGroup, err) - } - - if resp.ID == nil || *resp.ID == "" { - return fmt.Errorf("cannot read IoT Time Series Insights Access Policy %q (Resource Group %q) ID", name, id.ResourceGroup) - } - - d.SetId(*resp.ID) - - return resourceArmIoTTimeSeriesInsightsAccessPolicyRead(d, meta) -} - -func resourceArmIoTTimeSeriesInsightsAccessPolicyRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).IoTTimeSeriesInsights.AccessPoliciesClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.TimeSeriesInsightsAccessPolicyID(d.Id()) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.EnvironmentName, id.Name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - d.SetId("") - return nil - } - - return fmt.Errorf("retrieving IoT Time Series Insights Access Policy %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) - } - - d.Set("name", resp.Name) - d.Set("time_series_insights_environment_id", strings.Split(d.Id(), "/accesspolicies")[0]) - - if props := resp.AccessPolicyResourceProperties; props != nil { - d.Set("description", props.Description) - d.Set("principal_object_id", props.PrincipalObjectID) - d.Set("roles", flattenIoTTimeSeriesInsightsAccessPolicyRoles(resp.Roles)) - } - - return nil -} - -func resourceArmIoTTimeSeriesInsightsAccessPolicyDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).IoTTimeSeriesInsights.AccessPoliciesClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := 
parse.TimeSeriesInsightsAccessPolicyID(d.Id()) - if err != nil { - return err - } - - response, err := client.Delete(ctx, id.ResourceGroup, id.EnvironmentName, id.Name) - if err != nil { - if !utils.ResponseWasNotFound(response) { - return fmt.Errorf("deleting IoT Time Series Insights Access Policy %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) - } - } - - return nil -} - -func expandIoTTimeSeriesInsightsAccessPolicyRoles(input []interface{}) *[]timeseriesinsights.AccessPolicyRole { - roles := make([]timeseriesinsights.AccessPolicyRole, 0) - - for _, v := range input { - if v == nil { - continue - } - roles = append(roles, timeseriesinsights.AccessPolicyRole(v.(string))) - } - - return &roles -} - -func flattenIoTTimeSeriesInsightsAccessPolicyRoles(input *[]timeseriesinsights.AccessPolicyRole) []interface{} { - result := make([]interface{}, 0) - if input != nil { - for _, item := range *input { - result = append(result, string(item)) - } - } - return result -} diff --git a/azurerm/internal/services/iottimeseriesinsights/resource_iot_time_series_insights_reference_data_set.go b/azurerm/internal/services/iottimeseriesinsights/resource_iot_time_series_insights_reference_data_set.go deleted file mode 100644 index 5f9bd56a3687..000000000000 --- a/azurerm/internal/services/iottimeseriesinsights/resource_iot_time_series_insights_reference_data_set.go +++ /dev/null @@ -1,243 +0,0 @@ -package iottimeseriesinsights - -import ( - "fmt" - "regexp" - "strings" - "time" - - "github.com/Azure/azure-sdk-for-go/services/preview/timeseriesinsights/mgmt/2018-08-15-preview/timeseriesinsights" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/iottimeseriesinsights/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/iottimeseriesinsights/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" - azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmIoTTimeSeriesInsightsReferenceDataSet() *schema.Resource { - return &schema.Resource{ - Create: resourceArmIoTTimeSeriesInsightsReferenceDataSetCreateUpdate, - Read: resourceArmIoTTimeSeriesInsightsReferenceDataSetRead, - Update: resourceArmIoTTimeSeriesInsightsReferenceDataSetCreateUpdate, - Delete: resourceArmIoTTimeSeriesInsightsReferenceDataSetDelete, - Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.TimeSeriesInsightsReferenceDataSetID(id) - return err - }), - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringMatch( - regexp.MustCompile(`^[A-Za-z0-9]{3,63}`), - 
"IoT Time Series Insights Reference Data Set name must contain only alphanumeric characters and be between 3 and 63 characters.", - ), - }, - - "time_series_insights_environment_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validate.TimeSeriesInsightsEnvironmentID, - }, - - "data_string_comparison_behavior": { - Type: schema.TypeString, - Optional: true, - Default: string(timeseriesinsights.Ordinal), - ValidateFunc: validation.StringInSlice([]string{ - string(timeseriesinsights.Ordinal), - string(timeseriesinsights.OrdinalIgnoreCase), - }, false), - }, - - "key_property": { - Type: schema.TypeSet, - Required: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - "type": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringInSlice([]string{ - string(timeseriesinsights.ReferenceDataKeyPropertyTypeBool), - string(timeseriesinsights.ReferenceDataKeyPropertyTypeDateTime), - string(timeseriesinsights.ReferenceDataKeyPropertyTypeDouble), - string(timeseriesinsights.ReferenceDataKeyPropertyTypeString), - }, false), - }, - }, - }, - }, - - "location": azure.SchemaLocation(), - - "tags": tags.Schema(), - }, - } -} - -func resourceArmIoTTimeSeriesInsightsReferenceDataSetCreateUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).IoTTimeSeriesInsights.ReferenceDataSetsClient - ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := d.Get("name").(string) - environmentID := d.Get("time_series_insights_environment_id").(string) - id, err := parse.TimeSeriesInsightsEnvironmentID(environmentID) - if err != nil { - return err - } - location := azure.NormalizeLocation(d.Get("location").(string)) - t := d.Get("tags").(map[string]interface{}) - - if d.IsNewResource() { - existing, err := client.Get(ctx, id.ResourceGroup, id.Name, name) - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for presence of existing IoT Time Series Insights Reference Data Set %q (Resource Group %q): %s", name, id.ResourceGroup, err) - } - } - - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_iot_time_series_insights_reference_data_set", *existing.ID) - } - } - - dataset := timeseriesinsights.ReferenceDataSetCreateOrUpdateParameters{ - Location: &location, - Tags: tags.Expand(t), - ReferenceDataSetCreationProperties: ×eriesinsights.ReferenceDataSetCreationProperties{ - DataStringComparisonBehavior: timeseriesinsights.DataStringComparisonBehavior(d.Get("data_string_comparison_behavior").(string)), - KeyProperties: expandIoTTimeSeriesInsightsReferenceDataSetKeyProperties(d.Get("key_property").(*schema.Set).List()), - }, - } - - if _, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.Name, name, dataset); err != nil { - return fmt.Errorf("creating/updating IoT Time Series Insights Reference Data Set %q (Resource Group %q): %+v", name, id.ResourceGroup, err) - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.Name, name) - if err != nil { - return fmt.Errorf("retrieving IoT Time Series Insights Reference Data Set %q (Resource Group %q): %+v", name, id.ResourceGroup, err) - } - - if resp.ID == nil || *resp.ID == "" { - return fmt.Errorf("cannot read IoT Time Series Insights Reference Data Set %q (Resource Group %q) ID", name, id.ResourceGroup) - } - - 
d.SetId(*resp.ID) - - return resourceArmIoTTimeSeriesInsightsReferenceDataSetRead(d, meta) -} - -func resourceArmIoTTimeSeriesInsightsReferenceDataSetRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).IoTTimeSeriesInsights.ReferenceDataSetsClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.TimeSeriesInsightsReferenceDataSetID(d.Id()) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.EnvironmentName, id.Name) - if err != nil || resp.ID == nil { - if utils.ResponseWasNotFound(resp.Response) { - d.SetId("") - return nil - } - - return fmt.Errorf("retrieving IoT Time Series Insights Reference Data Set %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) - } - - d.Set("name", resp.Name) - d.Set("time_series_insights_environment_id", strings.Split(d.Id(), "/referenceDataSets")[0]) - if location := resp.Location; location != nil { - d.Set("location", azure.NormalizeLocation(*location)) - } - - if props := resp.ReferenceDataSetResourceProperties; props != nil { - d.Set("data_string_comparison_behavior", string(props.DataStringComparisonBehavior)) - if err := d.Set("key_property", flattenIoTTimeSeriesInsightsReferenceDataSetKeyProperties(props.KeyProperties)); err != nil { - return fmt.Errorf("setting `key_property`: %+v", err) - } - } - - return tags.FlattenAndSet(d, resp.Tags) -} - -func resourceArmIoTTimeSeriesInsightsReferenceDataSetDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).IoTTimeSeriesInsights.ReferenceDataSetsClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.TimeSeriesInsightsReferenceDataSetID(d.Id()) - if err != nil { - return err - } - - response, err := client.Delete(ctx, id.ResourceGroup, id.EnvironmentName, id.Name) - if err != nil { - if !utils.ResponseWasNotFound(response) { - return fmt.Errorf("deleting IoT Time Series Insights Reference Data Set %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) - } - } - - return nil -} - -func expandIoTTimeSeriesInsightsReferenceDataSetKeyProperties(input []interface{}) *[]timeseriesinsights.ReferenceDataSetKeyProperty { - properties := make([]timeseriesinsights.ReferenceDataSetKeyProperty, 0) - - for _, v := range input { - if v == nil { - continue - } - attr := v.(map[string]interface{}) - - properties = append(properties, timeseriesinsights.ReferenceDataSetKeyProperty{ - Type: timeseriesinsights.ReferenceDataKeyPropertyType(attr["type"].(string)), - Name: utils.String(attr["name"].(string)), - }) - } - - return &properties -} - -func flattenIoTTimeSeriesInsightsReferenceDataSetKeyProperties(input *[]timeseriesinsights.ReferenceDataSetKeyProperty) []interface{} { - if input == nil { - return []interface{}{} - } - properties := make([]interface{}, 0) - for _, property := range *input { - attr := make(map[string]interface{}) - attr["type"] = string(property.Type) - if name := property.Name; name != nil { - attr["name"] = *property.Name - } - properties = append(properties, attr) - } - - return properties -} diff --git a/azurerm/internal/services/iottimeseriesinsights/resource_iot_time_series_insights_standard_environment.go b/azurerm/internal/services/iottimeseriesinsights/resource_iot_time_series_insights_standard_environment.go deleted file mode 100644 index 3e7012f5a0bf..000000000000 --- 
a/azurerm/internal/services/iottimeseriesinsights/resource_iot_time_series_insights_standard_environment.go +++ /dev/null @@ -1,294 +0,0 @@ -package iottimeseriesinsights - -import ( - "fmt" - "regexp" - "strconv" - "strings" - "time" - - "github.com/Azure/azure-sdk-for-go/services/preview/timeseriesinsights/mgmt/2018-08-15-preview/timeseriesinsights" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - azValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/iottimeseriesinsights/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" - azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmIoTTimeSeriesInsightsStandardEnvironment() *schema.Resource { - return &schema.Resource{ - Create: resourceArmIoTTimeSeriesInsightsStandardEnvironmentCreateUpdate, - Read: resourceArmIoTTimeSeriesInsightsStandardEnvironmentRead, - Update: resourceArmIoTTimeSeriesInsightsStandardEnvironmentCreateUpdate, - Delete: resourceArmIoTTimeSeriesInsightsStandardEnvironmentDelete, - Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.TimeSeriesInsightsEnvironmentID(id) - return err - }), - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringMatch( - regexp.MustCompile(`^[-\w\._\(\)]+$`), - "IoT Time Series Insights Standard Environment name must contain only word characters, periods, underscores, and parentheses.", - ), - }, - - "location": azure.SchemaLocation(), - - "resource_group_name": azure.SchemaResourceGroupName(), - - "sku_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringInSlice([]string{ - "S1_1", - "S1_2", - "S1_3", - "S1_4", - "S1_5", - "S1_6", - "S1_7", - "S1_8", - "S1_9", - "S1_10", - "S2_1", - "S2_2", - "S2_3", - "S2_4", - "S2_5", - "S2_6", - "S2_7", - "S2_8", - "S2_9", - "S2_10", - }, false), - }, - - "data_retention_time": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azValidate.ISO8601Duration, - }, - - "storage_limit_exceeded_behavior": { - Type: schema.TypeString, - Optional: true, - Default: string(timeseriesinsights.PurgeOldData), - ValidateFunc: validation.StringInSlice([]string{ - string(timeseriesinsights.PurgeOldData), - string(timeseriesinsights.PauseIngress), - }, false), - }, - - "partition_key": { - Type: schema.TypeString, - Optional: true, - ValidateFunc: validation.NoZeroValues, - ForceNew: true, - }, - - "tags": tags.Schema(), - }, - } -} - -func resourceArmIoTTimeSeriesInsightsStandardEnvironmentCreateUpdate(d 
*schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).IoTTimeSeriesInsights.EnvironmentsClient - ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := d.Get("name").(string) - location := azure.NormalizeLocation(d.Get("location").(string)) - resourceGroup := d.Get("resource_group_name").(string) - t := d.Get("tags").(map[string]interface{}) - sku, err := expandEnvironmentSkuName(d.Get("sku_name").(string)) - if err != nil { - return fmt.Errorf("expanding sku: %+v", err) - } - - if d.IsNewResource() { - existing, err := client.Get(ctx, resourceGroup, name, "") - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for presence of existing IoT Time Series Insights Standard Environment %q (Resource Group %q): %s", name, resourceGroup, err) - } - } - - if existing.Value != nil { - environment, ok := existing.Value.AsStandardEnvironmentResource() - if !ok { - return fmt.Errorf("exisiting resource was not a standard IoT Time Series Insights Standard Environment %q (Resource Group %q)", name, resourceGroup) - } - - if environment.ID != nil && *environment.ID != "" { - return tf.ImportAsExistsError("azurerm_iot_time_series_insights_environment", *environment.ID) - } - } - } - - environment := timeseriesinsights.StandardEnvironmentCreateOrUpdateParameters{ - Location: &location, - Tags: tags.Expand(t), - Sku: sku, - StandardEnvironmentCreationProperties: ×eriesinsights.StandardEnvironmentCreationProperties{ - StorageLimitExceededBehavior: timeseriesinsights.StorageLimitExceededBehavior(d.Get("storage_limit_exceeded_behavior").(string)), - DataRetentionTime: utils.String(d.Get("data_retention_time").(string)), - }, - } - - if v, ok := d.GetOk("partition_key"); ok { - partition := make([]timeseriesinsights.TimeSeriesIDProperty, 1) - partition[0] = timeseriesinsights.TimeSeriesIDProperty{ - Name: utils.String(v.(string)), - Type: timeseriesinsights.String, - } - environment.StandardEnvironmentCreationProperties.PartitionKeyProperties = &partition - } - - future, err := client.CreateOrUpdate(ctx, resourceGroup, name, environment) - if err != nil { - return fmt.Errorf("creating/updating IoT Time Series Insights Standard Environment %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("waiting for completion of IoT Time Series Insights Standard Environment %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - resp, err := client.Get(ctx, resourceGroup, name, "") - if err != nil { - return fmt.Errorf("retrieving IoT Time Series Insights Standard Environment %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - resource, ok := resp.Value.AsStandardEnvironmentResource() - if !ok { - return fmt.Errorf("resource was not a standard IoT Time Series Insights Standard Environment %q (Resource Group %q)", name, resourceGroup) - } - - if resource.ID == nil || *resource.ID == "" { - return fmt.Errorf("cannot read IoT Time Series Insights Standard Environment %q (Resource Group %q) ID", name, resourceGroup) - } - - d.SetId(*resource.ID) - - return resourceArmIoTTimeSeriesInsightsStandardEnvironmentRead(d, meta) -} - -func resourceArmIoTTimeSeriesInsightsStandardEnvironmentRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).IoTTimeSeriesInsights.EnvironmentsClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, 
d) - defer cancel() - - id, err := parse.TimeSeriesInsightsEnvironmentID(d.Id()) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.Name, "") - if err != nil || resp.Value == nil { - if utils.ResponseWasNotFound(resp.Response) { - d.SetId("") - return nil - } - - return fmt.Errorf("retrieving IoT Time Series Insights Standard Environment %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) - } - - environment, ok := resp.Value.AsStandardEnvironmentResource() - if !ok { - return fmt.Errorf("exisiting resource was not a standard IoT Time Series Insights Standard Environment %q (Resource Group %q)", id.Name, id.ResourceGroup) - } - - d.Set("name", environment.Name) - d.Set("resource_group_name", id.ResourceGroup) - d.Set("sku_name", flattenEnvironmentSkuName(environment.Sku)) - if location := environment.Location; location != nil { - d.Set("location", azure.NormalizeLocation(*location)) - } - - if props := environment.StandardEnvironmentResourceProperties; props != nil { - d.Set("storage_limit_exceeded_behavior", string(props.StorageLimitExceededBehavior)) - d.Set("data_retention_time", props.DataRetentionTime) - - if partition := props.PartitionKeyProperties; partition != nil && len(*partition) > 0 { - for _, v := range *partition { - d.Set("partition_key", *v.Name) - } - } - } - - return tags.FlattenAndSet(d, environment.Tags) -} - -func resourceArmIoTTimeSeriesInsightsStandardEnvironmentDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).IoTTimeSeriesInsights.EnvironmentsClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.TimeSeriesInsightsEnvironmentID(d.Id()) - if err != nil { - return err - } - - response, err := client.Delete(ctx, id.ResourceGroup, id.Name) - if err != nil { - if !utils.ResponseWasNotFound(response) { - return fmt.Errorf("deleting IoT Time Series Insights Standard Environment %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) - } - } - - return nil -} - -func expandEnvironmentSkuName(skuName string) (*timeseriesinsights.Sku, error) { - parts := strings.Split(skuName, "_") - if len(parts) != 2 { - return nil, fmt.Errorf("sku_name (%s) has the worng number of parts (%d) after splitting on _", skuName, len(parts)) - } - - var name timeseriesinsights.SkuName - switch parts[0] { - case "S1": - name = timeseriesinsights.S1 - case "S2": - name = timeseriesinsights.S2 - default: - return nil, fmt.Errorf("sku_name %s has unknown sku tier %s", skuName, parts[0]) - } - - capacity, err := strconv.Atoi(parts[1]) - if err != nil { - return nil, fmt.Errorf("cannot convert skuname %s capcity %s to int", skuName, parts[2]) - } - - return ×eriesinsights.Sku{ - Name: name, - Capacity: utils.Int32(int32(capacity)), - }, nil -} - -func flattenEnvironmentSkuName(input *timeseriesinsights.Sku) string { - if input == nil || input.Capacity == nil { - return "" - } - - return fmt.Sprintf("%s_%d", string(input.Name), *input.Capacity) -} diff --git a/azurerm/internal/services/iottimeseriesinsights/resourceids.go b/azurerm/internal/services/iottimeseriesinsights/resourceids.go new file mode 100644 index 000000000000..9d7032684afd --- /dev/null +++ b/azurerm/internal/services/iottimeseriesinsights/resourceids.go @@ -0,0 +1,5 @@ +package iottimeseriesinsights + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=AccessPolicy 
-id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/environment1/accessPolicies/policy1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Environment -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/environment1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=ReferenceDataSet -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/environment1/referenceDataSets/dataSet1 diff --git a/azurerm/internal/services/iottimeseriesinsights/tests/resource_iot_time_series_insights_access_policy_test.go b/azurerm/internal/services/iottimeseriesinsights/tests/resource_iot_time_series_insights_access_policy_test.go deleted file mode 100644 index ddee0ab06387..000000000000 --- a/azurerm/internal/services/iottimeseriesinsights/tests/resource_iot_time_series_insights_access_policy_test.go +++ /dev/null @@ -1,170 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/iottimeseriesinsights/parse" -) - -func TestAccAzureRMIoTTimeSeriesInsightsAccessPolicy_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iot_time_series_insights_access_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIoTTimeSeriesInsightsAccessPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIoTTimeSeriesInsightsAccessPolicy_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIoTTimeSeriesInsightsAccessPolicyExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMIoTTimeSeriesInsightsAccessPolicy_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iot_time_series_insights_access_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIoTTimeSeriesInsightsAccessPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIoTTimeSeriesInsightsAccessPolicy_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIoTTimeSeriesInsightsAccessPolicyExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMIoTTimeSeriesInsightsAccessPolicy_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIoTTimeSeriesInsightsAccessPolicyExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMIoTTimeSeriesInsightsAccessPolicy_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIoTTimeSeriesInsightsAccessPolicyExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMIoTTimeSeriesInsightsAccessPolicyExists(name string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := 
acceptance.AzureProvider.Meta().(*clients.Client).IoTTimeSeriesInsights.AccessPoliciesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[name] - if !ok { - return fmt.Errorf("Not found: %s", name) - } - - id, err := parse.TimeSeriesInsightsAccessPolicyID(rs.Primary.ID) - if err != nil { - return err - } - - _, err = client.Get(ctx, id.ResourceGroup, id.EnvironmentName, id.Name) - if err != nil { - return fmt.Errorf("Bad: Get on TimeSeriesInsightsAccessPolicyClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMIoTTimeSeriesInsightsAccessPolicyDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTTimeSeriesInsights.AccessPoliciesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_iot_time_series_insights_access_policy" { - continue - } - - id, err := parse.TimeSeriesInsightsAccessPolicyID(rs.Primary.ID) - if err != nil { - return err - } - resp, err := client.Get(ctx, id.ResourceGroup, id.EnvironmentName, id.Name) - - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("time Series Insights Access Policy still exists: %q", id.Name) - } - } - - return nil -} - -func testAccAzureRMIoTTimeSeriesInsightsAccessPolicy_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} -resource "azurerm_resource_group" "test" { - name = "acctestRG-tsi-%d" - location = "%s" -} -resource "azurerm_iot_time_series_insights_standard_environment" "test" { - name = "accTEst_tsie%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku_name = "S1_1" - data_retention_time = "P30D" -} -resource "azurerm_iot_time_series_insights_access_policy" "test" { - name = "accTEst_tsiap%d" - time_series_insights_environment_id = azurerm_iot_time_series_insights_standard_environment.test.id - - principal_object_id = "aGUID" - roles = ["Reader"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMIoTTimeSeriesInsightsAccessPolicy_update(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} -resource "azurerm_resource_group" "test" { - name = "acctestRG-tsi-%d" - location = "%s" -} -resource "azurerm_iot_time_series_insights_standard_environment" "test" { - name = "accTEst_tsie%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku_name = "S1_1" - data_retention_time = "P30D" -} -resource "azurerm_iot_time_series_insights_access_policy" "test" { - name = "accTEst_tsiap%d" - time_series_insights_environment_id = azurerm_iot_time_series_insights_standard_environment.test.id - - principal_object_id = "aGUID" - roles = ["Contributor"] - description = "Test Access Policy" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/iottimeseriesinsights/tests/resource_iot_time_series_insights_reference_data_set_test.go b/azurerm/internal/services/iottimeseriesinsights/tests/resource_iot_time_series_insights_reference_data_set_test.go deleted file mode 100644 index f48adf0cc231..000000000000 --- 
a/azurerm/internal/services/iottimeseriesinsights/tests/resource_iot_time_series_insights_reference_data_set_test.go +++ /dev/null @@ -1,190 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/iottimeseriesinsights/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMIoTTimeSeriesInsightsReferenceDataSet_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iot_time_series_insights_reference_data_set", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIoTTimeSeriesInsightsReferenceDataSetDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIoTTimeSeriesInsightsReferenceDataSet_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIoTTimeSeriesInsightsReferenceDataSetExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMIoTTimeSeriesInsightsReferenceDataSet_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iot_time_series_insights_reference_data_set", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIoTTimeSeriesInsightsReferenceDataSetDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIoTTimeSeriesInsightsReferenceDataSet_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIoTTimeSeriesInsightsReferenceDataSetExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMIoTTimeSeriesInsightsReferenceDataSet_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIoTTimeSeriesInsightsReferenceDataSetExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMIoTTimeSeriesInsightsReferenceDataSet_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIoTTimeSeriesInsightsReferenceDataSetExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMIoTTimeSeriesInsightsReferenceDataSetExists(name string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTTimeSeriesInsights.ReferenceDataSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[name] - if !ok { - return fmt.Errorf("Not found: %s", name) - } - - id, err := parse.TimeSeriesInsightsReferenceDataSetID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.EnvironmentName, id.Name) - if err != nil { - return fmt.Errorf("Bad: Get on TimeSeriesInsightsReferenceDataSetClient: %+v", err) - } - - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Time Series Insights Reference Data Set %q (resource group: %q) does not exist", id.Name, id.ResourceGroup) - } - - return nil - } -} - -func 
testCheckAzureRMIoTTimeSeriesInsightsReferenceDataSetDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTTimeSeriesInsights.ReferenceDataSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_iot_time_series_insights_reference_data_set" { - continue - } - - id, err := parse.TimeSeriesInsightsReferenceDataSetID(rs.Primary.ID) - if err != nil { - return err - } - resp, err := client.Get(ctx, id.ResourceGroup, id.EnvironmentName, id.Name) - - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("time Series Insights Reference Data Set still exists: %q", id.Name) - } - } - - return nil -} - -func testAccAzureRMIoTTimeSeriesInsightsReferenceDataSet_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} -resource "azurerm_resource_group" "test" { - name = "acctestRG-tsi-%d" - location = "%s" -} -resource "azurerm_iot_time_series_insights_standard_environment" "test" { - name = "accTEst_tsie%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku_name = "S1_1" - data_retention_time = "P30D" -} - -resource "azurerm_iot_time_series_insights_reference_data_set" "test" { - name = "accTEsttsd%d" - time_series_insights_environment_id = azurerm_iot_time_series_insights_standard_environment.test.id - location = azurerm_resource_group.test.location - - key_property { - name = "keyProperty1" - type = "String" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMIoTTimeSeriesInsightsReferenceDataSet_update(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} -resource "azurerm_resource_group" "test" { - name = "acctestRG-tsi-%d" - location = "%s" -} -resource "azurerm_iot_time_series_insights_standard_environment" "test" { - name = "accTEst_tsie%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku_name = "S1_1" - data_retention_time = "P30D" -} -resource "azurerm_iot_time_series_insights_reference_data_set" "test" { - name = "accTEsttsd%d" - time_series_insights_environment_id = azurerm_iot_time_series_insights_standard_environment.test.id - location = azurerm_resource_group.test.location - - key_property { - name = "keyProperty1" - type = "String" - } - - key_property { - name = "keyProperty2" - type = "Bool" - } - - tags = { - Environment = "Production" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/iottimeseriesinsights/tests/resource_iot_time_series_insights_standard_environment_test.go b/azurerm/internal/services/iottimeseriesinsights/tests/resource_iot_time_series_insights_standard_environment_test.go deleted file mode 100644 index 709362d57d8f..000000000000 --- a/azurerm/internal/services/iottimeseriesinsights/tests/resource_iot_time_series_insights_standard_environment_test.go +++ /dev/null @@ -1,211 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/iottimeseriesinsights/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMIoTTimeSeriesInsightsStandardEnvironment_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iot_time_series_insights_standard_environment", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIoTTimeSeriesInsightsStandardEnvironmentDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIoTTimeSeriesInsightsStandardEnvironment_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIoTTimeSeriesInsightsStandardEnvironmentExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMIoTTimeSeriesInsightsStandardEnvironment_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iot_time_series_insights_standard_environment", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIoTTimeSeriesInsightsStandardEnvironmentDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIoTTimeSeriesInsightsStandardEnvironment_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIoTTimeSeriesInsightsStandardEnvironmentExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMIoTTimeSeriesInsightsStandardEnvironment_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIoTTimeSeriesInsightsStandardEnvironmentExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMIoTTimeSeriesInsightsStandardEnvironment_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIoTTimeSeriesInsightsStandardEnvironmentExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMIoTTimeSeriesInsightsStandardEnvironment_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_iot_time_series_insights_standard_environment", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMIoTTimeSeriesInsightsStandardEnvironmentDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMIoTTimeSeriesInsightsStandardEnvironment_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMIoTTimeSeriesInsightsStandardEnvironmentExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMIoTTimeSeriesInsightsStandardEnvironmentExists(name string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTTimeSeriesInsights.EnvironmentsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[name] - if !ok { - return fmt.Errorf("Not found: %s", name) - } - - id, err := parse.TimeSeriesInsightsEnvironmentID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.Name, "") - if err != nil { - return fmt.Errorf("Bad: Get on 
TimeSeriesInsightsStandardEnvironmentClient: %+v", err) - } - - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Time Series Insights Standard Environment %q (resource group: %q) does not exist", id.Name, id.ResourceGroup) - } - - return nil - } -} - -func testCheckAzureRMIoTTimeSeriesInsightsStandardEnvironmentDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).IoTTimeSeriesInsights.EnvironmentsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_iot_time_series_insights_standard_environment" { - continue - } - - id, err := parse.TimeSeriesInsightsEnvironmentID(rs.Primary.ID) - if err != nil { - return err - } - resp, err := client.Get(ctx, id.ResourceGroup, id.Name, "") - - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("time Series Insights Standard Environment still exists: %q", id.Name) - } - } - - return nil -} - -func testAccAzureRMIoTTimeSeriesInsightsStandardEnvironment_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} -resource "azurerm_resource_group" "test" { - name = "acctestRG-tsi-%d" - location = "%s" -} -resource "azurerm_iot_time_series_insights_standard_environment" "test" { - name = "accTEst_tsie%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku_name = "S1_1" - data_retention_time = "P30D" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMIoTTimeSeriesInsightsStandardEnvironment_update(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} -resource "azurerm_resource_group" "test" { - name = "acctestRG-tsi-%d" - location = "%s" -} -resource "azurerm_iot_time_series_insights_standard_environment" "test" { - name = "accTEst_tsie%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku_name = "S1_1" - data_retention_time = "P30D" - - storage_limit_exceeded_behavior = "PauseIngress" - - tags = { - Environment = "Production" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMIoTTimeSeriesInsightsStandardEnvironment_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} -resource "azurerm_resource_group" "test" { - name = "acctestRG-tsi-%d" - location = "%s" -} -resource "azurerm_iot_time_series_insights_standard_environment" "test" { - name = "accTEst_tsie%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku_name = "S1_1" - data_retention_time = "P30D" - - storage_limit_exceeded_behavior = "PauseIngress" - partition_key = "foo" - - tags = { - Environment = "Production" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/iottimeseriesinsights/validate/access_policy_id.go b/azurerm/internal/services/iottimeseriesinsights/validate/access_policy_id.go new file mode 100644 index 000000000000..de26fd32c817 --- /dev/null +++ b/azurerm/internal/services/iottimeseriesinsights/validate/access_policy_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/iottimeseriesinsights/parse" +) + +func AccessPolicyID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.AccessPolicyID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/iottimeseriesinsights/validate/access_policy_id_test.go b/azurerm/internal/services/iottimeseriesinsights/validate/access_policy_id_test.go new file mode 100644 index 000000000000..f0a0618f7c7f --- /dev/null +++ b/azurerm/internal/services/iottimeseriesinsights/validate/access_policy_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestAccessPolicyID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing EnvironmentName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/", + Valid: false, + }, + + { + // missing value for EnvironmentName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/environment1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/environment1/accessPolicies/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/environment1/accessPolicies/policy1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.TIMESERIESINSIGHTS/ENVIRONMENTS/ENVIRONMENT1/ACCESSPOLICIES/POLICY1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := AccessPolicyID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/iottimeseriesinsights/validate/environment_id.go b/azurerm/internal/services/iottimeseriesinsights/validate/environment_id.go new file mode 100644 index 000000000000..22b36219d7b7 --- /dev/null +++ b/azurerm/internal/services/iottimeseriesinsights/validate/environment_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/iottimeseriesinsights/parse" +) + +func EnvironmentID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.EnvironmentID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/iottimeseriesinsights/validate/environment_id_test.go b/azurerm/internal/services/iottimeseriesinsights/validate/environment_id_test.go new file mode 100644 index 000000000000..c60c404cee56 --- /dev/null +++ b/azurerm/internal/services/iottimeseriesinsights/validate/environment_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestEnvironmentID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/environment1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.TIMESERIESINSIGHTS/ENVIRONMENTS/ENVIRONMENT1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := EnvironmentID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/iottimeseriesinsights/validate/reference_data_set_id.go b/azurerm/internal/services/iottimeseriesinsights/validate/reference_data_set_id.go new file mode 100644 index 000000000000..a2a3fcffa1d2 --- /dev/null +++ b/azurerm/internal/services/iottimeseriesinsights/validate/reference_data_set_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/iottimeseriesinsights/parse" +) + +func ReferenceDataSetID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ReferenceDataSetID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/iottimeseriesinsights/validate/reference_data_set_id_test.go 
b/azurerm/internal/services/iottimeseriesinsights/validate/reference_data_set_id_test.go new file mode 100644 index 000000000000..043054cf70c7 --- /dev/null +++ b/azurerm/internal/services/iottimeseriesinsights/validate/reference_data_set_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestReferenceDataSetID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing EnvironmentName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/", + Valid: false, + }, + + { + // missing value for EnvironmentName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/environment1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/environment1/referenceDataSets/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.TimeSeriesInsights/environments/environment1/referenceDataSets/dataSet1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.TIMESERIESINSIGHTS/ENVIRONMENTS/ENVIRONMENT1/REFERENCEDATASETS/DATASET1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ReferenceDataSetID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/iottimeseriesinsights/validate/time_series_insights_environment.go b/azurerm/internal/services/iottimeseriesinsights/validate/time_series_insights_environment.go deleted file mode 100644 index 2edbc944e6aa..000000000000 --- a/azurerm/internal/services/iottimeseriesinsights/validate/time_series_insights_environment.go +++ /dev/null @@ -1,22 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/iottimeseriesinsights/parse" -) - -func TimeSeriesInsightsEnvironmentID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.TimeSeriesInsightsEnvironmentID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} diff --git 
a/azurerm/internal/services/iottimeseriesinsights/validate/time_series_insights_environment_id.go b/azurerm/internal/services/iottimeseriesinsights/validate/time_series_insights_environment_id.go new file mode 100644 index 000000000000..f01b146265bb --- /dev/null +++ b/azurerm/internal/services/iottimeseriesinsights/validate/time_series_insights_environment_id.go @@ -0,0 +1,22 @@ +package validate + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/iottimeseriesinsights/parse" +) + +func TimeSeriesInsightsEnvironmentID(i interface{}, k string) (warnings []string, errors []error) { + v, ok := i.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) + return + } + + if _, err := parse.EnvironmentID(v); err != nil { + errors = append(errors, fmt.Errorf("Can not parse %q as a resource id: %v", k, err)) + return + } + + return warnings, errors +} diff --git a/azurerm/internal/services/iottimeseriesinsights/validate/time_series_insights_environment_test.go b/azurerm/internal/services/iottimeseriesinsights/validate/time_series_insights_environment_id_test.go similarity index 100% rename from azurerm/internal/services/iottimeseriesinsights/validate/time_series_insights_environment_test.go rename to azurerm/internal/services/iottimeseriesinsights/validate/time_series_insights_environment_id_test.go diff --git a/azurerm/internal/services/keyvault/internal.go b/azurerm/internal/services/keyvault/internal.go new file mode 100644 index 000000000000..08bba13f4f73 --- /dev/null +++ b/azurerm/internal/services/keyvault/internal.go @@ -0,0 +1,144 @@ +package keyvault + +import ( + "context" + "fmt" + "log" + "net/http" + "time" + + "github.com/Azure/go-autorest/autorest" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type deleteAndPurgeNestedItem interface { + DeleteNestedItem(ctx context.Context) (autorest.Response, error) + NestedItemHasBeenDeleted(ctx context.Context) (autorest.Response, error) + + PurgeNestedItem(ctx context.Context) (autorest.Response, error) + NestedItemHasBeenPurged(ctx context.Context) (autorest.Response, error) +} + +func deleteAndOptionallyPurge(ctx context.Context, description string, shouldPurge bool, helper deleteAndPurgeNestedItem) error { + timeout, ok := ctx.Deadline() + if !ok { + return fmt.Errorf("context is missing a timeout") + } + + log.Printf("[DEBUG] Deleting %s..", description) + if resp, err := helper.DeleteNestedItem(ctx); err != nil { + if utils.ResponseWasNotFound(resp) { + return nil + } + + return fmt.Errorf("deleting %s: %+v", description, err) + } + log.Printf("[DEBUG] Waiting for %s to finish deleting..", description) + stateConf := &resource.StateChangeConf{ + Pending: []string{"InProgress"}, + Target: []string{"NotFound"}, + Refresh: func() (interface{}, string, error) { + item, err := helper.NestedItemHasBeenDeleted(ctx) + if err != nil { + if utils.ResponseWasNotFound(item) { + return item, "NotFound", nil + } + + return nil, "Error", err + } + + return item, "InProgress", nil + }, + ContinuousTargetOccurence: 3, + PollInterval: 5 * time.Second, + 
Timeout: time.Until(timeout), + } + if _, err := stateConf.WaitForState(); err != nil { + return fmt.Errorf("waiting for %s to be deleted: %+v", description, err) + } + log.Printf("[DEBUG] Deleted %s.", description) + + if !shouldPurge { + log.Printf("[DEBUG] Skipping purging of %s as opted-out..", description) + return nil + } + + log.Printf("[DEBUG] Purging %s..", description) + if _, err := helper.PurgeNestedItem(ctx); err != nil { + return fmt.Errorf("purging %s: %+v", description, err) + } + + log.Printf("[DEBUG] Waiting for %s to finish purging..", description) + stateConf = &resource.StateChangeConf{ + Pending: []string{"InProgress"}, + Target: []string{"NotFound"}, + Refresh: func() (interface{}, string, error) { + item, err := helper.NestedItemHasBeenPurged(ctx) + if err != nil { + if utils.ResponseWasNotFound(item) { + return item, "NotFound", nil + } + + return nil, "Error", err + } + + return item, "InProgress", nil + }, + ContinuousTargetOccurence: 3, + PollInterval: 5 * time.Second, + Timeout: time.Until(timeout), + } + if _, err := stateConf.WaitForState(); err != nil { + return fmt.Errorf("waiting for %s to finish purging: %+v", description, err) + } + log.Printf("[DEBUG] Purged %s.", description) + + return nil +} + +func keyVaultChildItemRefreshFunc(secretUri string) resource.StateRefreshFunc { + return func() (interface{}, string, error) { + log.Printf("[DEBUG] Checking to see if KeyVault Secret %q is available..", secretUri) + + PTransport := &http.Transport{Proxy: http.ProxyFromEnvironment} + + client := &http.Client{ + Transport: PTransport, + } + + conn, err := client.Get(secretUri) + if err != nil { + log.Printf("[DEBUG] Didn't find KeyVault secret at %q", secretUri) + return nil, "pending", fmt.Errorf("Error checking secret at %q: %s", secretUri, err) + } + + defer conn.Body.Close() + + log.Printf("[DEBUG] Found KeyVault Secret %q", secretUri) + return "available", "available", nil + } +} + +func nestedItemResourceImporter(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { + client := meta.(*clients.Client).KeyVault.VaultsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := azure.ParseKeyVaultChildID(d.Id()) + if err != nil { + return []*schema.ResourceData{d}, fmt.Errorf("parsing ID %q for Key Vault Child import: %v", d.Id(), err) + } + + keyVaultId, err := azure.GetKeyVaultIDFromBaseUrl(ctx, client, id.KeyVaultBaseUrl) + if err != nil { + return []*schema.ResourceData{d}, fmt.Errorf("retrieving the Resource ID the Key Vault at URL %q: %s", id.KeyVaultBaseUrl, err) + } + d.Set("key_vault_id", keyVaultId) + + return []*schema.ResourceData{d}, nil +} diff --git a/azurerm/internal/services/keyvault/key_vault_access_policy_data_source.go b/azurerm/internal/services/keyvault/key_vault_access_policy_data_source.go index 9dda28f2f937..b57b6f76920f 100644 --- a/azurerm/internal/services/keyvault/key_vault_access_policy_data_source.go +++ b/azurerm/internal/services/keyvault/key_vault_access_policy_data_source.go @@ -9,9 +9,9 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/helper/validation" ) -func dataSourceArmKeyVaultAccessPolicy() *schema.Resource { +func dataSourceKeyVaultAccessPolicy() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmKeyVaultAccessPolicyRead, + Read: dataSourceKeyVaultAccessPolicyRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -58,7 +58,7 @@ func dataSourceArmKeyVaultAccessPolicy() *schema.Resource { } } 
-func dataSourceArmKeyVaultAccessPolicyRead(d *schema.ResourceData, _ interface{}) error { +func dataSourceKeyVaultAccessPolicyRead(d *schema.ResourceData, _ interface{}) error { name := d.Get("name").(string) templateManagementPermissions := map[string][]string{ "key": { diff --git a/azurerm/internal/services/keyvault/key_vault_access_policy_data_source_test.go b/azurerm/internal/services/keyvault/key_vault_access_policy_data_source_test.go new file mode 100644 index 000000000000..14efad2b7c88 --- /dev/null +++ b/azurerm/internal/services/keyvault/key_vault_access_policy_data_source_test.go @@ -0,0 +1,130 @@ +package keyvault_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type KeyVaultAccessPolicyDataSource struct { +} + +func TestAccDataSourceKeyVaultAccessPolicy_key(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_key_vault_access_policy", "test") + r := KeyVaultAccessPolicyDataSource{} + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.testAccDataSourceKeyVaultAccessPolicy("Key Management"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("key_permissions.#").HasValue("9"), + resource.TestCheckNoResourceAttr(data.ResourceName, "secret_permissions"), + resource.TestCheckNoResourceAttr(data.ResourceName, "certificate_permissions"), + ), + }, + }) +} + +func TestAccDataSourceKeyVaultAccessPolicy_secret(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_key_vault_access_policy", "test") + r := KeyVaultAccessPolicyDataSource{} + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.testAccDataSourceKeyVaultAccessPolicy("Secret Management"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckNoResourceAttr(data.ResourceName, "key_permissions"), + check.That(data.ResourceName).Key("secret_permissions.#").HasValue("7"), + resource.TestCheckNoResourceAttr(data.ResourceName, "certificate_permissions"), + ), + }, + }) +} + +func TestAccDataSourceKeyVaultAccessPolicy_certificate(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_key_vault_access_policy", "test") + r := KeyVaultAccessPolicyDataSource{} + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.testAccDataSourceKeyVaultAccessPolicy("Certificate Management"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckNoResourceAttr(data.ResourceName, "key_permissions"), + resource.TestCheckNoResourceAttr(data.ResourceName, "secret_permissions"), + check.That(data.ResourceName).Key("certificate_permissions.#").HasValue("12"), + ), + }, + }) +} + +func TestAccDataSourceKeyVaultAccessPolicy_keySecret(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_key_vault_access_policy", "test") + r := KeyVaultAccessPolicyDataSource{} + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.testAccDataSourceKeyVaultAccessPolicy("Key & Secret Management"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("key_permissions.#").HasValue("9"), + check.That(data.ResourceName).Key("secret_permissions.#").HasValue("7"), + resource.TestCheckNoResourceAttr(data.ResourceName, "certificate_permissions"), + ), + }, + }) +} + +func TestAccDataSourceKeyVaultAccessPolicy_keyCertificate(t *testing.T) { + data := acceptance.BuildTestData(t, 
"data.azurerm_key_vault_access_policy", "test") + r := KeyVaultAccessPolicyDataSource{} + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.testAccDataSourceKeyVaultAccessPolicy("Key & Certificate Management"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("key_permissions.#").HasValue("9"), + resource.TestCheckNoResourceAttr(data.ResourceName, "secret_permissions"), + check.That(data.ResourceName).Key("certificate_permissions.#").HasValue("12"), + ), + }, + }) +} + +func TestAccDataSourceKeyVaultAccessPolicy_secretCertificate(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_key_vault_access_policy", "test") + r := KeyVaultAccessPolicyDataSource{} + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.testAccDataSourceKeyVaultAccessPolicy("Secret & Certificate Management"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckNoResourceAttr(data.ResourceName, "key_permissions"), + check.That(data.ResourceName).Key("secret_permissions.#").HasValue("7"), + check.That(data.ResourceName).Key("certificate_permissions.#").HasValue("12"), + ), + }, + }) +} + +func TestAccDataSourceKeyVaultAccessPolicy_keySecretCertificate(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_key_vault_access_policy", "test") + r := KeyVaultAccessPolicyDataSource{} + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.testAccDataSourceKeyVaultAccessPolicy("Key, Secret, & Certificate Management"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("key_permissions.#").HasValue("9"), + check.That(data.ResourceName).Key("secret_permissions.#").HasValue("7"), + check.That(data.ResourceName).Key("certificate_permissions.#").HasValue("12"), + ), + }, + }) +} + +func (r KeyVaultAccessPolicyDataSource) testAccDataSourceKeyVaultAccessPolicy(name string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_key_vault_access_policy" "test" { + name = "%s" +} +`, name) +} diff --git a/azurerm/internal/services/keyvault/key_vault_access_policy_resource.go b/azurerm/internal/services/keyvault/key_vault_access_policy_resource.go index 2cd4832533a9..24b6c3e5eaee 100644 --- a/azurerm/internal/services/keyvault/key_vault_access_policy_resource.go +++ b/azurerm/internal/services/keyvault/key_vault_access_policy_resource.go @@ -1,12 +1,14 @@ package keyvault import ( + "context" "fmt" "log" "strings" "time" "github.com/Azure/azure-sdk-for-go/services/keyvault/mgmt/2019-09-01/keyvault" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" uuid "github.com/satori/go.uuid" @@ -18,12 +20,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmKeyVaultAccessPolicy() *schema.Resource { +func resourceKeyVaultAccessPolicy() *schema.Resource { return &schema.Resource{ - Create: resourceArmKeyVaultAccessPolicyCreate, - Read: resourceArmKeyVaultAccessPolicyRead, - Update: resourceArmKeyVaultAccessPolicyUpdate, - Delete: resourceArmKeyVaultAccessPolicyDelete, + Create: resourceKeyVaultAccessPolicyCreate, + Read: resourceKeyVaultAccessPolicyRead, + Update: resourceKeyVaultAccessPolicyUpdate, + Delete: resourceKeyVaultAccessPolicyDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -75,7 +77,7 @@ func resourceArmKeyVaultAccessPolicy() *schema.Resource { } } -func 
resourceArmKeyVaultAccessPolicyCreateOrDelete(d *schema.ResourceData, meta interface{}, action keyvault.AccessPolicyUpdateKind) error { +func resourceKeyVaultAccessPolicyCreateOrDelete(d *schema.ResourceData, meta interface{}, action keyvault.AccessPolicyUpdateKind) error { client := meta.(*clients.Client).KeyVault.VaultsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -203,6 +205,28 @@ func resourceArmKeyVaultAccessPolicyCreateOrDelete(d *schema.ResourceData, meta if _, err = client.UpdateAccessPolicy(ctx, resourceGroup, vaultName, action, parameters); err != nil { return fmt.Errorf("Error updating Access Policy (Object ID %q / Application ID %q) for Key Vault %q (Resource Group %q): %+v", objectId, applicationIdRaw, vaultName, resourceGroup, err) } + stateConf := &resource.StateChangeConf{ + Pending: []string{"notfound", "vaultnotfound"}, + Target: []string{"found"}, + Refresh: accessPolicyRefreshFunc(ctx, client, resourceGroup, vaultName, objectId, applicationIdRaw), + Delay: 5 * time.Second, + ContinuousTargetOccurence: 3, + Timeout: d.Timeout(schema.TimeoutCreate), + } + + if action == keyvault.Remove { + stateConf.Target = []string{"notfound"} + stateConf.Pending = []string{"found", "vaultnotfound"} + stateConf.Timeout = d.Timeout(schema.TimeoutDelete) + } + + if action == keyvault.Replace { + stateConf.Timeout = d.Timeout(schema.TimeoutUpdate) + } + + if _, err := stateConf.WaitForState(); err != nil { + return fmt.Errorf("failed waiting for Key Vault Access Policy (Object ID: %q) to apply: %+v", objectId, err) + } read, err := client.Get(ctx, resourceGroup, vaultName) if err != nil { @@ -220,25 +244,24 @@ func resourceArmKeyVaultAccessPolicyCreateOrDelete(d *schema.ResourceData, meta return nil } -func resourceArmKeyVaultAccessPolicyCreate(d *schema.ResourceData, meta interface{}) error { - return resourceArmKeyVaultAccessPolicyCreateOrDelete(d, meta, keyvault.Add) +func resourceKeyVaultAccessPolicyCreate(d *schema.ResourceData, meta interface{}) error { + return resourceKeyVaultAccessPolicyCreateOrDelete(d, meta, keyvault.Add) } -func resourceArmKeyVaultAccessPolicyDelete(d *schema.ResourceData, meta interface{}) error { - return resourceArmKeyVaultAccessPolicyCreateOrDelete(d, meta, keyvault.Remove) +func resourceKeyVaultAccessPolicyDelete(d *schema.ResourceData, meta interface{}) error { + return resourceKeyVaultAccessPolicyCreateOrDelete(d, meta, keyvault.Remove) } -func resourceArmKeyVaultAccessPolicyUpdate(d *schema.ResourceData, meta interface{}) error { - return resourceArmKeyVaultAccessPolicyCreateOrDelete(d, meta, keyvault.Replace) +func resourceKeyVaultAccessPolicyUpdate(d *schema.ResourceData, meta interface{}) error { + return resourceKeyVaultAccessPolicyCreateOrDelete(d, meta, keyvault.Replace) } -func resourceArmKeyVaultAccessPolicyRead(d *schema.ResourceData, meta interface{}) error { +func resourceKeyVaultAccessPolicyRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).KeyVault.VaultsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { return err } @@ -258,10 +281,7 @@ func resourceArmKeyVaultAccessPolicyRead(d *schema.ResourceData, meta interface{ return fmt.Errorf("Error making Read request on Azure KeyVault %q (Resource Group %q): %+v", vaultName, resGroup, err) } - policy, err := FindKeyVaultAccessPolicy(resp.Properties.AccessPolicies, objectId, applicationId) - if err 
!= nil { - return fmt.Errorf("Error locating Access Policy (Object ID %q / Application ID %q) in Key Vault %q (Resource Group %q)", objectId, applicationId, vaultName, resGroup) - } + policy := FindKeyVaultAccessPolicy(resp.Properties.AccessPolicies, objectId, applicationId) if policy == nil { log.Printf("[ERROR] Access Policy (Object ID %q / Application ID %q) was not found in Key Vault %q (Resource Group %q) - removing from state", objectId, applicationId, vaultName, resGroup) @@ -305,9 +325,9 @@ func resourceArmKeyVaultAccessPolicyRead(d *schema.ResourceData, meta interface{ return nil } -func FindKeyVaultAccessPolicy(policies *[]keyvault.AccessPolicyEntry, objectId string, applicationId string) (*keyvault.AccessPolicyEntry, error) { +func FindKeyVaultAccessPolicy(policies *[]keyvault.AccessPolicyEntry, objectId string, applicationId string) *keyvault.AccessPolicyEntry { if policies == nil { - return nil, nil + return nil } for _, policy := range *policies { @@ -319,11 +339,33 @@ func FindKeyVaultAccessPolicy(policies *[]keyvault.AccessPolicyEntry, objectId s } if strings.EqualFold(aid, applicationId) { - return &policy, nil + return &policy } } } } - return nil, nil + return nil +} + +func accessPolicyRefreshFunc(ctx context.Context, client *keyvault.VaultsClient, resourceGroup string, vaultName string, objectId string, applicationId string) resource.StateRefreshFunc { + return func() (interface{}, string, error) { + log.Printf("[DEBUG] Checking for completion of Access Policy create/update") + + read, err := client.Get(ctx, resourceGroup, vaultName) + if err != nil { + if utils.ResponseWasNotFound(read.Response) { + return "vaultnotfound", "vaultnotfound", fmt.Errorf("failed to find vault %q (resource group %q)", vaultName, resourceGroup) + } + } + + if read.Properties != nil && read.Properties.AccessPolicies != nil { + policy := FindKeyVaultAccessPolicy(read.Properties.AccessPolicies, objectId, applicationId) + if policy != nil { + return "found", "found", nil + } + } + + return "notfound", "notfound", nil + } } diff --git a/azurerm/internal/services/keyvault/key_vault_access_policy_resource_test.go b/azurerm/internal/services/keyvault/key_vault_access_policy_resource_test.go new file mode 100644 index 000000000000..35787e7fecff --- /dev/null +++ b/azurerm/internal/services/keyvault/key_vault_access_policy_resource_test.go @@ -0,0 +1,354 @@ +package keyvault_test + +import ( + "context" + "fmt" + "regexp" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/keyvault" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type KeyVaultAccessPolicyResource struct { +} + +func TestAccKeyVaultAccessPolicy_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_access_policy", "test") + r := KeyVaultAccessPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + 
check.That(data.ResourceName).Key("key_permissions.0").HasValue("get"), + check.That(data.ResourceName).Key("secret_permissions.0").HasValue("get"), + check.That(data.ResourceName).Key("secret_permissions.1").HasValue("set"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccKeyVaultAccessPolicy_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_access_policy", "test") + r := KeyVaultAccessPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("key_permissions.0").HasValue("get"), + check.That(data.ResourceName).Key("secret_permissions.0").HasValue("get"), + check.That(data.ResourceName).Key("secret_permissions.1").HasValue("set"), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_key_vault_access_policy"), + }, + }) +} + +func TestAccKeyVaultAccessPolicy_multiple(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_access_policy", "test_with_application_id") + r := KeyVaultAccessPolicyResource{} + resourceName2 := "azurerm_key_vault_access_policy.test_no_application_id" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.multiple(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("key_permissions.0").HasValue("create"), + check.That(data.ResourceName).Key("key_permissions.1").HasValue("get"), + check.That(data.ResourceName).Key("secret_permissions.0").HasValue("get"), + check.That(data.ResourceName).Key("secret_permissions.1").HasValue("delete"), + check.That(data.ResourceName).Key("certificate_permissions.0").HasValue("create"), + check.That(data.ResourceName).Key("certificate_permissions.1").HasValue("delete"), + resource.TestCheckResourceAttr(resourceName2, "key_permissions.0", "list"), + resource.TestCheckResourceAttr(resourceName2, "key_permissions.1", "encrypt"), + resource.TestCheckResourceAttr(resourceName2, "secret_permissions.0", "list"), + resource.TestCheckResourceAttr(resourceName2, "secret_permissions.1", "delete"), + resource.TestCheckResourceAttr(resourceName2, "certificate_permissions.0", "list"), + resource.TestCheckResourceAttr(resourceName2, "certificate_permissions.1", "delete"), + ), + }, + data.ImportStep(), + { + ResourceName: resourceName2, + ImportState: true, + ImportStateVerify: true, + }, + }) +} + +func TestAccKeyVaultAccessPolicy_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_access_policy", "test") + r := KeyVaultAccessPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("key_permissions.0").HasValue("get"), + check.That(data.ResourceName).Key("secret_permissions.0").HasValue("get"), + check.That(data.ResourceName).Key("secret_permissions.1").HasValue("set"), + ), + }, + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("key_permissions.0").HasValue("list"), + check.That(data.ResourceName).Key("key_permissions.1").HasValue("encrypt"), + ), + }, + }) +} + +func TestAccKeyVaultAccessPolicy_nonExistentVault(t *testing.T) { + data := acceptance.BuildTestData(t, 
"azurerm_key_vault_access_policy", "test") + r := KeyVaultAccessPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.nonExistentVault(data), + ExpectNonEmptyPlan: true, + ExpectError: regexp.MustCompile(`Error retrieving Key Vault`), + }, + }) +} + +func (t KeyVaultAccessPolicyResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resGroup := id.ResourceGroup + vaultName := id.Path["vaults"] + objectId := id.Path["objectId"] + applicationId := id.Path["applicationId"] + + resp, err := clients.KeyVault.VaultsClient.Get(ctx, resGroup, vaultName) + if err != nil { + return nil, fmt.Errorf("reading Key Vault (%s): %+v", id, err) + } + + return utils.Bool(keyvault.FindKeyVaultAccessPolicy(resp.Properties.AccessPolicies, objectId, applicationId) != nil), nil +} + +func (r KeyVaultAccessPolicyResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_key_vault_access_policy" "test" { + key_vault_id = azurerm_key_vault.test.id + + key_permissions = [ + "get", + ] + + secret_permissions = [ + "get", + "set", + ] + + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id +} +`, r.template(data)) +} + +func (r KeyVaultAccessPolicyResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_key_vault_access_policy" "import" { + key_vault_id = azurerm_key_vault.test.id + tenant_id = azurerm_key_vault_access_policy.test.tenant_id + object_id = azurerm_key_vault_access_policy.test.object_id + + key_permissions = [ + "get", + ] + + secret_permissions = [ + "get", + "set", + ] +} +`, r.basic(data)) +} + +func (r KeyVaultAccessPolicyResource) multiple(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_key_vault_access_policy" "test_with_application_id" { + key_vault_id = azurerm_key_vault.test.id + + key_permissions = [ + "create", + "get", + ] + + secret_permissions = [ + "get", + "delete", + ] + + certificate_permissions = [ + "create", + "delete", + ] + + application_id = data.azurerm_client_config.current.client_id + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id +} + +resource "azurerm_key_vault_access_policy" "test_no_application_id" { + key_vault_id = azurerm_key_vault.test.id + + key_permissions = [ + "list", + "encrypt", + ] + + secret_permissions = [ + "list", + "delete", + ] + + certificate_permissions = [ + "list", + "delete", + ] + + storage_permissions = [ + "backup", + "delete", + "deletesas", + "get", + "getsas", + "list", + "listsas", + "purge", + "recover", + "regeneratekey", + "restore", + "set", + "setsas", + "update", + ] + + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id +} +`, r.template(data)) +} + +func (r KeyVaultAccessPolicyResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_key_vault_access_policy" "test" { + key_vault_id = azurerm_key_vault.test.id + + key_permissions = [ + "list", + "encrypt", + ] + + secret_permissions = [] + + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id +} +`, r.template(data)) +} + +func (KeyVaultAccessPolicyResource) template(data acceptance.TestData) 
string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "acctestkv-%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + + sku_name = "premium" + + tags = { + environment = "Production" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (KeyVaultAccessPolicyResource) nonExistentVault(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "acctestkv-%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + + sku_name = "standard" + + tags = { + environment = "Production" + } +} + +resource "azurerm_key_vault_access_policy" "test" { + # Must appear to be URL, but not actually exist - appending a string works + key_vault_id = "${azurerm_key_vault.test.id}NOPE" + + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + + key_permissions = [ + "get", + ] + + secret_permissions = [ + "get", + ] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} diff --git a/azurerm/internal/services/keyvault/key_vault_certificate_data_source.go b/azurerm/internal/services/keyvault/key_vault_certificate_data_source.go index 64218ce1d77e..563675801ef0 100644 --- a/azurerm/internal/services/keyvault/key_vault_certificate_data_source.go +++ b/azurerm/internal/services/keyvault/key_vault_certificate_data_source.go @@ -17,9 +17,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmKeyVaultCertificate() *schema.Resource { +func dataSourceKeyVaultCertificate() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmKeyVaultCertificateRead, + Read: dataSourceKeyVaultCertificateRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -220,7 +220,7 @@ func dataSourceArmKeyVaultCertificate() *schema.Resource { } } -func dataSourceArmKeyVaultCertificateRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceKeyVaultCertificateRead(d *schema.ResourceData, meta interface{}) error { vaultClient := meta.(*clients.Client).KeyVault.VaultsClient client := meta.(*clients.Client).KeyVault.ManagementClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) @@ -288,6 +288,10 @@ func dataSourceArmKeyVaultCertificateRead(d *schema.ResourceData, meta interface } func flattenKeyVaultCertificatePolicyForDataSource(input *keyvault.CertificatePolicy) []interface{} { + if input == nil { + return []interface{}{} + } + policy := make(map[string]interface{}) if params := input.IssuerParameters; params != nil { diff --git a/azurerm/internal/services/keyvault/key_vault_certificate_data_source_test.go b/azurerm/internal/services/keyvault/key_vault_certificate_data_source_test.go new file mode 100644 index 000000000000..20dce7ee0830 --- /dev/null +++ b/azurerm/internal/services/keyvault/key_vault_certificate_data_source_test.go @@ -0,0 +1,75 @@ 
+package keyvault_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type KeyVaultCertificateDataSource struct { +} + +func TestAccDataSourceKeyVaultCertificate_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_key_vault_certificate", "test") + r := KeyVaultCertificateDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("certificate_data").Exists(), + check.That(data.ResourceName).Key("certificate_policy.0.key_properties.0.key_size").HasValue("2048"), + check.That(data.ResourceName).Key("certificate_policy.0.key_properties.0.key_type").HasValue("RSA"), + ), + }, + }) +} + +func TestAccDataSourceKeyVaultCertificate_generated(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_key_vault_certificate", "test") + r := KeyVaultCertificateDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.generated(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("certificate_data").Exists(), + check.That(data.ResourceName).Key("certificate_policy.0.issuer_parameters.0.name").HasValue("Self"), + check.That(data.ResourceName).Key("certificate_policy.0.key_properties.0.exportable").HasValue("true"), + check.That(data.ResourceName).Key("certificate_policy.0.key_properties.0.key_size").HasValue("2048"), + check.That(data.ResourceName).Key("certificate_policy.0.key_properties.0.key_type").HasValue("RSA"), + check.That(data.ResourceName).Key("certificate_policy.0.key_properties.0.reuse_key").HasValue("true"), + check.That(data.ResourceName).Key("certificate_policy.0.lifetime_action.0.action.0.action_type").HasValue("AutoRenew"), + check.That(data.ResourceName).Key("certificate_policy.0.lifetime_action.0.trigger.0.days_before_expiry").HasValue("30"), + check.That(data.ResourceName).Key("certificate_policy.0.secret_properties.0.content_type").HasValue("application/x-pkcs12"), + check.That(data.ResourceName).Key("certificate_policy.0.x509_certificate_properties.0.subject").HasValue("CN=hello-world"), + check.That(data.ResourceName).Key("certificate_policy.0.x509_certificate_properties.0.validity_in_months").HasValue("12"), + ), + }, + }) +} + +func (KeyVaultCertificateDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_key_vault_certificate" "test" { + name = azurerm_key_vault_certificate.test.name + key_vault_id = azurerm_key_vault.test.id +} +`, KeyVaultCertificateResource{}.basicImportPFX(data)) +} + +func (KeyVaultCertificateDataSource) generated(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_key_vault_certificate" "test" { + name = azurerm_key_vault_certificate.test.name + key_vault_id = azurerm_key_vault.test.id +} +`, KeyVaultCertificateResource{}.basicGenerate(data)) +} diff --git a/azurerm/internal/services/keyvault/key_vault_certificate_issuer_data_source.go b/azurerm/internal/services/keyvault/key_vault_certificate_issuer_data_source.go index 885e623255c7..5de1b9866ebd 100644 --- a/azurerm/internal/services/keyvault/key_vault_certificate_issuer_data_source.go +++ b/azurerm/internal/services/keyvault/key_vault_certificate_issuer_data_source.go @@ -12,9 +12,9 @@ import ( 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmKeyVaultCertificateIssuer() *schema.Resource { +func dataSourceKeyVaultCertificateIssuer() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmKeyVaultCertificateIssuerRead, + Read: dataSourceKeyVaultCertificateIssuerRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -24,7 +24,6 @@ func dataSourceArmKeyVaultCertificateIssuer() *schema.Resource { "key_vault_id": { Type: schema.TypeString, Required: true, - ForceNew: true, ValidateFunc: azure.ValidateResourceID, }, @@ -77,7 +76,7 @@ func dataSourceArmKeyVaultCertificateIssuer() *schema.Resource { } } -func dataSourceArmKeyVaultCertificateIssuerRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceKeyVaultCertificateIssuerRead(d *schema.ResourceData, meta interface{}) error { vaultClient := meta.(*clients.Client).KeyVault.VaultsClient client := meta.(*clients.Client).KeyVault.ManagementClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) diff --git a/azurerm/internal/services/keyvault/key_vault_certificate_issuer_data_source_test.go b/azurerm/internal/services/keyvault/key_vault_certificate_issuer_data_source_test.go new file mode 100644 index 000000000000..90cc4bb21d80 --- /dev/null +++ b/azurerm/internal/services/keyvault/key_vault_certificate_issuer_data_source_test.go @@ -0,0 +1,44 @@ +package keyvault_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type KeyVaultCertificateIssuerDataSource struct { +} + +func TestAccDataSourceKeyVaultCertificateIssuer_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_key_vault_certificate_issuer", "test") + r := KeyVaultCertificateIssuerDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("account_id").HasValue("test-account"), + check.That(data.ResourceName).Key("provider_name").HasValue("DigiCert"), + check.That(data.ResourceName).Key("org_id").HasValue("accTestOrg"), + check.That(data.ResourceName).Key("admin.0.email_address").HasValue("admin@contoso.com"), + check.That(data.ResourceName).Key("admin.0.first_name").HasValue("First"), + check.That(data.ResourceName).Key("admin.0.last_name").HasValue("Last"), + check.That(data.ResourceName).Key("admin.0.phone").HasValue("01234567890"), + ), + }, + }) +} + +func (KeyVaultCertificateIssuerDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_key_vault_certificate_issuer" "test" { + name = azurerm_key_vault_certificate_issuer.test.name + key_vault_id = azurerm_key_vault.test.id +} +`, KeyVaultCertificateIssuerResource{}.complete(data)) +} diff --git a/azurerm/internal/services/keyvault/key_vault_certificate_issuer_resource.go b/azurerm/internal/services/keyvault/key_vault_certificate_issuer_resource.go index a6f5e3b6d11e..dab10f1608c4 100644 --- a/azurerm/internal/services/keyvault/key_vault_certificate_issuer_resource.go +++ b/azurerm/internal/services/keyvault/key_vault_certificate_issuer_resource.go @@ -17,14 +17,14 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmKeyVaultCertificateIssuer() 
*schema.Resource { +func resourceKeyVaultCertificateIssuer() *schema.Resource { return &schema.Resource{ - Create: resourceArmKeyVaultCertificateIssuerCreateOrUpdate, - Update: resourceArmKeyVaultCertificateIssuerCreateOrUpdate, - Read: resourceArmKeyVaultCertificateIssuerRead, - Delete: resourceArmKeyVaultCertificateIssuerDelete, + Create: resourceKeyVaultCertificateIssuerCreateOrUpdate, + Update: resourceKeyVaultCertificateIssuerCreateOrUpdate, + Read: resourceKeyVaultCertificateIssuerRead, + Delete: resourceKeyVaultCertificateIssuerDelete, Importer: &schema.ResourceImporter{ - State: resourceArmKeyVaultChildResourceImporter, + State: nestedItemResourceImporter, }, Timeouts: &schema.ResourceTimeout{ @@ -106,7 +106,7 @@ func resourceArmKeyVaultCertificateIssuer() *schema.Resource { } } -func resourceArmKeyVaultCertificateIssuerCreateOrUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceKeyVaultCertificateIssuerCreateOrUpdate(d *schema.ResourceData, meta interface{}) error { vaultClient := meta.(*clients.Client).KeyVault.VaultsClient client := meta.(*clients.Client).KeyVault.ManagementClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) @@ -171,16 +171,16 @@ func resourceArmKeyVaultCertificateIssuerCreateOrUpdate(d *schema.ResourceData, } d.SetId(*resp.ID) - return resourceArmKeyVaultCertificateIssuerRead(d, meta) + return resourceKeyVaultCertificateIssuerRead(d, meta) } -func resourceArmKeyVaultCertificateIssuerRead(d *schema.ResourceData, meta interface{}) error { +func resourceKeyVaultCertificateIssuerRead(d *schema.ResourceData, meta interface{}) error { keyVaultClient := meta.(*clients.Client).KeyVault.VaultsClient client := meta.(*clients.Client).KeyVault.ManagementClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.KeyVaultCertificateIssuerId(d.Id()) + id, err := parse.IssuerID(d.Id()) if err != nil { return err } @@ -236,13 +236,13 @@ func resourceArmKeyVaultCertificateIssuerRead(d *schema.ResourceData, meta inter return nil } -func resourceArmKeyVaultCertificateIssuerDelete(d *schema.ResourceData, meta interface{}) error { +func resourceKeyVaultCertificateIssuerDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).KeyVault.ManagementClient keyVaultClient := meta.(*clients.Client).KeyVault.VaultsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.KeyVaultCertificateIssuerId(d.Id()) + id, err := parse.IssuerID(d.Id()) if err != nil { return err } diff --git a/azurerm/internal/services/keyvault/key_vault_certificate_issuer_resource_test.go b/azurerm/internal/services/keyvault/key_vault_certificate_issuer_resource_test.go new file mode 100644 index 000000000000..186c6067af00 --- /dev/null +++ b/azurerm/internal/services/keyvault/key_vault_certificate_issuer_resource_test.go @@ -0,0 +1,317 @@ +package keyvault_test + +import ( + "context" + "fmt" + "log" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/keyvault/parse" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + 
"github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type KeyVaultCertificateIssuerResource struct { +} + +func TestAccKeyVaultCertificateIssuer_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate_issuer", "test") + r := KeyVaultCertificateIssuerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("password"), + }) +} + +func TestAccKeyVaultCertificateIssuer_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate_issuer", "test") + r := KeyVaultCertificateIssuerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccKeyVaultCertificateIssuer_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate_issuer", "test") + r := KeyVaultCertificateIssuerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("password"), + }) +} + +func TestAccKeyVaultCertificateIssuer_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate_issuer", "test") + r := KeyVaultCertificateIssuerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("password"), + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("password"), + }) +} + +func TestAccKeyVaultCertificateIssuer_disappears(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate_issuer", "test") + r := KeyVaultCertificateIssuerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + testCheckKeyVaultCertificateIssuerDisappears(data.ResourceName), + ), + ExpectNonEmptyPlan: true, + }, + }) +} + +func TestAccKeyVaultCertificateIssuer_disappearsWhenParentKeyVaultDeleted(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate_issuer", "test") + r := KeyVaultCertificateIssuerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckKeyVaultCertificateIssuerDisappears("azurerm_key_vault.test"), + ), + ExpectNonEmptyPlan: true, + }, + }) +} + +func (t KeyVaultCertificateIssuerResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + client := clients.KeyVault.ManagementClient + keyVaultClient := clients.KeyVault.VaultsClient + + id, err := parse.IssuerID(state.ID) + if err != nil { + return nil, err + } + + keyVaultId, err := azure.GetKeyVaultIDFromBaseUrl(ctx, keyVaultClient, id.KeyVaultBaseUrl) + if err != nil || keyVaultId == nil { + return nil, fmt.Errorf("retrieving the Resource ID the Key Vault at URL %q: %s", id.KeyVaultBaseUrl, err) + } + + ok, err := 
azure.KeyVaultExists(ctx, keyVaultClient, *keyVaultId) + if err != nil || !ok { + return nil, fmt.Errorf("checking if key vault %q for Certificate %q in Vault at url %q exists: %v", *keyVaultId, id.Name, id.KeyVaultBaseUrl, err) + } + + resp, err := client.GetCertificateIssuer(ctx, id.KeyVaultBaseUrl, id.Name) + if err != nil { + return nil, fmt.Errorf("failed to make Read request on Azure KeyVault Certificate Issuer %s: %+v", id.Name, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func testCheckKeyVaultCertificateIssuerDisappears(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.ManagementClient + vaultClient := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + // Ensure we have enough information in state to look up in API + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + name := rs.Primary.Attributes["name"] + keyVaultId := rs.Primary.Attributes["key_vault_id"] + vaultBaseUrl, err := azure.GetKeyVaultBaseUrlFromID(ctx, vaultClient, keyVaultId) + if err != nil { + return fmt.Errorf("failed to look up base URI from id %q: %+v", keyVaultId, err) + } + + ok, err = azure.KeyVaultExists(ctx, acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient, keyVaultId) + if err != nil { + return fmt.Errorf("failed to check if key vault %q for Certificate Issuer %q in Vault at url %q exists: %v", keyVaultId, name, vaultBaseUrl, err) + } + if !ok { + log.Printf("[DEBUG] Certificate Issuer %q Key Vault %q was not found in Key Vault at URI %q ", name, keyVaultId, vaultBaseUrl) + return nil + } + + resp, err := client.DeleteCertificateIssuer(ctx, vaultBaseUrl, name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return nil + } + + return fmt.Errorf("Bad: Delete on keyVaultManagementClient: %+v", err) + } + + return nil + } +} + +func (KeyVaultCertificateIssuerResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "acctestkv-%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + + sku_name = "standard" + + access_policy { + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + + certificate_permissions = [ + "delete", + "import", + "get", + "manageissuers", + "setissuers", + ] + + key_permissions = [ + "create", + ] + + secret_permissions = [ + "set", + ] + } +} + +resource "azurerm_key_vault_certificate_issuer" "test" { + name = "acctestKVCI-%d" + key_vault_id = azurerm_key_vault.test.id + provider_name = "OneCertV2-PrivateCA" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) +} + +func (r KeyVaultCertificateIssuerResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_key_vault_certificate_issuer" "import" { + name = azurerm_key_vault_certificate_issuer.test.name + key_vault_id = azurerm_key_vault_certificate_issuer.test.key_vault_id + org_id = 
azurerm_key_vault_certificate_issuer.test.org_id + account_id = azurerm_key_vault_certificate_issuer.test.account_id + password = "test" + provider_name = azurerm_key_vault_certificate_issuer.test.provider_name +} + +`, r.basic(data)) +} + +func (KeyVaultCertificateIssuerResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "acctestkv-%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + + sku_name = "standard" + + access_policy { + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + + certificate_permissions = [ + "delete", + "import", + "get", + "manageissuers", + "setissuers", + ] + + key_permissions = [ + "create", + ] + + secret_permissions = [ + "set", + ] + } +} + +resource "azurerm_key_vault_certificate_issuer" "test" { + name = "acctestKVCI-%d" + key_vault_id = azurerm_key_vault.test.id + account_id = "test-account" + password = "test" + provider_name = "DigiCert" + + org_id = "accTestOrg" + admin { + email_address = "admin@contoso.com" + first_name = "First" + last_name = "Last" + phone = "01234567890" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) +} diff --git a/azurerm/internal/services/keyvault/key_vault_certificate_resource.go b/azurerm/internal/services/keyvault/key_vault_certificate_resource.go index 42041d89738a..ab8086e1c982 100644 --- a/azurerm/internal/services/keyvault/key_vault_certificate_resource.go +++ b/azurerm/internal/services/keyvault/key_vault_certificate_resource.go @@ -2,6 +2,7 @@ package keyvault import ( "context" + "crypto/x509" "encoding/base64" "encoding/hex" "fmt" @@ -11,6 +12,7 @@ import ( "time" "github.com/Azure/azure-sdk-for-go/services/keyvault/2016-10-01/keyvault" + "github.com/Azure/go-autorest/autorest" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" @@ -23,36 +25,15 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -// todo refactor and find a home for this wayward func -func resourceArmKeyVaultChildResourceImporter(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { - client := meta.(*clients.Client).KeyVault.VaultsClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := azure.ParseKeyVaultChildID(d.Id()) - if err != nil { - return []*schema.ResourceData{d}, fmt.Errorf("Error Unable to parse ID (%s) for Key Vault Child import: %v", d.Id(), err) - } - - kvid, err := azure.GetKeyVaultIDFromBaseUrl(ctx, client, id.KeyVaultBaseUrl) - if err != nil { - return []*schema.ResourceData{d}, fmt.Errorf("Error retrieving the Resource ID the Key Vault at URL %q: %s", id.KeyVaultBaseUrl, err) - } - - d.Set("key_vault_id", kvid) - - return []*schema.ResourceData{d}, nil -} - -func resourceArmKeyVaultCertificate() *schema.Resource { +func resourceKeyVaultCertificate() *schema.Resource { return &schema.Resource{ // TODO: support Updating once we have more information about what can be updated - Create: resourceArmKeyVaultCertificateCreate, - 
Read: resourceArmKeyVaultCertificateRead, - Delete: resourceArmKeyVaultCertificateDelete, + Create: resourceKeyVaultCertificateCreate, + Read: resourceKeyVaultCertificateRead, + Delete: resourceKeyVaultCertificateDelete, Importer: &schema.ResourceImporter{ - State: resourceArmKeyVaultChildResourceImporter, + State: nestedItemResourceImporter, }, Timeouts: &schema.ResourceTimeout{ @@ -137,6 +118,7 @@ func resourceArmKeyVaultCertificate() *schema.Resource { ForceNew: true, ValidateFunc: validation.IntInSlice([]int{ 2048, + 3072, 4096, }), }, @@ -233,6 +215,7 @@ func resourceArmKeyVaultCertificate() *schema.Resource { "key_usage": { Type: schema.TypeList, Required: true, + ForceNew: true, Elem: &schema.Schema{ Type: schema.TypeString, ValidateFunc: validation.StringInSlice([]string{ @@ -367,7 +350,7 @@ func resourceArmKeyVaultCertificate() *schema.Resource { } } -func resourceArmKeyVaultCertificateCreate(d *schema.ResourceData, meta interface{}) error { +func resourceKeyVaultCertificateCreate(d *schema.ResourceData, meta interface{}) error { vaultClient := meta.(*clients.Client).KeyVault.VaultsClient client := meta.(*clients.Client).KeyVault.ManagementClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) @@ -469,7 +452,7 @@ func resourceArmKeyVaultCertificateCreate(d *schema.ResourceData, meta interface d.SetId(*resp.ID) - return resourceArmKeyVaultCertificateRead(d, meta) + return resourceKeyVaultCertificateRead(d, meta) } func keyVaultCertificateCreationRefreshFunc(ctx context.Context, client *keyvault.BaseClient, keyVaultBaseUrl string, name string) resource.StateRefreshFunc { @@ -494,7 +477,7 @@ func keyVaultCertificateCreationRefreshFunc(ctx context.Context, client *keyvaul } } -func resourceArmKeyVaultCertificateRead(d *schema.ResourceData, meta interface{}) error { +func resourceKeyVaultCertificateRead(d *schema.ResourceData, meta interface{}) error { keyVaultClient := meta.(*clients.Client).KeyVault.VaultsClient client := meta.(*clients.Client).KeyVault.ManagementClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) @@ -538,7 +521,7 @@ func resourceArmKeyVaultCertificateRead(d *schema.ResourceData, meta interface{} d.Set("name", id.Name) - certificatePolicy := flattenKeyVaultCertificatePolicy(cert.Policy) + certificatePolicy := flattenKeyVaultCertificatePolicy(cert.Policy, cert.Cer) if err := d.Set("certificate_policy", certificatePolicy); err != nil { return fmt.Errorf("Error setting Key Vault Certificate Policy: %+v", err) } @@ -571,7 +554,7 @@ func resourceArmKeyVaultCertificateRead(d *schema.ResourceData, meta interface{} return tags.FlattenAndSet(d, cert.Tags) } -func resourceArmKeyVaultCertificateDelete(d *schema.ResourceData, meta interface{}) error { +func resourceKeyVaultCertificateDelete(d *schema.ResourceData, meta interface{}) error { keyVaultClient := meta.(*clients.Client).KeyVault.VaultsClient client := meta.(*clients.Client).KeyVault.ManagementClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) @@ -600,18 +583,47 @@ func resourceArmKeyVaultCertificateDelete(d *schema.ResourceData, meta interface return nil } - resp, err := client.DeleteCertificate(ctx, id.KeyVaultBaseUrl, id.Name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return fmt.Errorf("Error deleting Certificate %q from Key Vault: %+v", id.Name, err) + shouldPurge := meta.(*clients.Client).Features.KeyVault.PurgeSoftDeleteOnDestroy + description := fmt.Sprintf("Certificate %q (Key Vault %q)", 
id.Name, id.KeyVaultBaseUrl) + deleter := deleteAndPurgeCertificate{ + client: client, + keyVaultUri: id.KeyVaultBaseUrl, + name: id.Name, + } + if err := deleteAndOptionallyPurge(ctx, description, shouldPurge, deleter); err != nil { + return err } return nil } +var _ deleteAndPurgeNestedItem = deleteAndPurgeCertificate{} + +type deleteAndPurgeCertificate struct { + client *keyvault.BaseClient + keyVaultUri string + name string +} + +func (d deleteAndPurgeCertificate) DeleteNestedItem(ctx context.Context) (autorest.Response, error) { + resp, err := d.client.DeleteCertificate(ctx, d.keyVaultUri, d.name) + return resp.Response, err +} + +func (d deleteAndPurgeCertificate) NestedItemHasBeenDeleted(ctx context.Context) (autorest.Response, error) { + resp, err := d.client.GetCertificate(ctx, d.keyVaultUri, d.name, "") + return resp.Response, err +} + +func (d deleteAndPurgeCertificate) PurgeNestedItem(ctx context.Context) (autorest.Response, error) { + return d.client.PurgeDeletedCertificate(ctx, d.keyVaultUri, d.name) +} + +func (d deleteAndPurgeCertificate) NestedItemHasBeenPurged(ctx context.Context) (autorest.Response, error) { + resp, err := d.client.GetDeletedCertificate(ctx, d.keyVaultUri, d.name) + return resp.Response, err +} + func expandKeyVaultCertificatePolicy(d *schema.ResourceData) keyvault.CertificatePolicy { policies := d.Get("certificate_policy").([]interface{}) policyRaw := policies[0].(map[string]interface{}) @@ -721,7 +733,11 @@ func expandKeyVaultCertificatePolicy(d *schema.ResourceData) keyvault.Certificat return policy } -func flattenKeyVaultCertificatePolicy(input *keyvault.CertificatePolicy) []interface{} { +func flattenKeyVaultCertificatePolicy(input *keyvault.CertificatePolicy, certData *[]byte) []interface{} { + if input == nil { + return []interface{}{} + } + policy := make(map[string]interface{}) if params := input.IssuerParameters; params != nil { @@ -800,6 +816,17 @@ func flattenKeyVaultCertificatePolicy(input *keyvault.CertificatePolicy) []inter } sanOutputs = append(sanOutputs, sanOutput) + } else if certData != nil && len(*certData) > 0 { + sanOutput := make(map[string]interface{}) + cert, err := x509.ParseCertificate(*certData) + if err != nil { + log.Printf("[DEBUG] Unable to read certificate data: %v", err) + } else { + sanOutput["emails"] = set.FromStringSlice(cert.EmailAddresses) + sanOutput["dns_names"] = set.FromStringSlice(cert.DNSNames) + sanOutput["upns"] = set.FromStringSlice([]string{}) + sanOutputs = append(sanOutputs, sanOutput) + } } certProps["key_usage"] = usages diff --git a/azurerm/internal/services/keyvault/key_vault_certificate_resource_test.go b/azurerm/internal/services/keyvault/key_vault_certificate_resource_test.go new file mode 100644 index 000000000000..cbd5fc11a33f --- /dev/null +++ b/azurerm/internal/services/keyvault/key_vault_certificate_resource_test.go @@ -0,0 +1,1040 @@ +package keyvault_test + +import ( + "context" + "fmt" + "log" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type KeyVaultCertificateResource 
struct { +} + +func TestAccKeyVaultCertificate_basicImportPFX(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate", "test") + r := KeyVaultCertificateResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicImportPFX(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("certificate_data").Exists(), + ), + }, + data.ImportStep("certificate"), + }) +} + +func TestAccKeyVaultCertificate_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate", "test") + r := KeyVaultCertificateResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicImportPFX(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("certificate_data").Exists(), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_key_vault_certificate"), + }, + }) +} + +func TestAccKeyVaultCertificate_disappears(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate", "test") + r := KeyVaultCertificateResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicGenerate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + testCheckKeyVaultCertificateDisappears(data.ResourceName), + ), + ExpectNonEmptyPlan: true, + }, + }) +} + +func TestAccKeyVaultCertificate_disappearsWhenParentKeyVaultDeleted(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate", "test") + r := KeyVaultCertificateResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicGenerate(data), + Check: resource.ComposeTestCheckFunc( + testCheckKeyVaultDisappears("azurerm_key_vault.test"), + ), + ExpectNonEmptyPlan: true, + }, + }) +} + +func TestAccKeyVaultCertificate_basicGenerate(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate", "test") + r := KeyVaultCertificateResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicGenerate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("secret_id").Exists(), + check.That(data.ResourceName).Key("certificate_data").Exists(), + check.That(data.ResourceName).Key("thumbprint").Exists(), + check.That(data.ResourceName).Key("certificate_attribute.0.created").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccKeyVaultCertificate_basicGenerateUnknownIssuer(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate", "test") + r := KeyVaultCertificateResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicGenerateUnknownIssuer(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccKeyVaultCertificate_softDeleteRecovery(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate", "test") + r := KeyVaultCertificateResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.softDeleteRecovery(data, false), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("secret_id").Exists(), + check.That(data.ResourceName).Key("certificate_data").Exists(), + ), + }, + { + Config: 
r.softDeleteRecovery(data, false), + Destroy: true, + }, + { + Config: r.softDeleteRecovery(data, true), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("secret_id").Exists(), + check.That(data.ResourceName).Key("certificate_data").Exists(), + ), + }, + }) +} + +func TestAccKeyVaultCertificate_basicGenerateSans(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate", "test") + r := KeyVaultCertificateResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicGenerateSans(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("certificate_data").Exists(), + check.That(data.ResourceName).Key("certificate_policy.0.x509_certificate_properties.0.subject_alternative_names.0.emails.#").HasValue("1"), + check.That(data.ResourceName).Key("certificate_policy.0.x509_certificate_properties.0.subject_alternative_names.0.dns_names.#").HasValue("1"), + check.That(data.ResourceName).Key("certificate_policy.0.x509_certificate_properties.0.subject_alternative_names.0.upns.#").HasValue("1"), + ), + }, + }) +} + +func TestAccKeyVaultCertificate_basicGenerateTags(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate", "test") + r := KeyVaultCertificateResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicGenerateTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("certificate_data").Exists(), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.hello").HasValue("world"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccKeyVaultCertificate_basicExtendedKeyUsage(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate", "test") + r := KeyVaultCertificateResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicExtendedKeyUsage(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("certificate_data").Exists(), + check.That(data.ResourceName).Key("certificate_policy.0.x509_certificate_properties.0.extended_key_usage.#").HasValue("3"), + check.That(data.ResourceName).Key("certificate_policy.0.x509_certificate_properties.0.extended_key_usage.0").HasValue("1.3.6.1.5.5.7.3.1"), + check.That(data.ResourceName).Key("certificate_policy.0.x509_certificate_properties.0.extended_key_usage.1").HasValue("1.3.6.1.5.5.7.3.2"), + check.That(data.ResourceName).Key("certificate_policy.0.x509_certificate_properties.0.extended_key_usage.2").HasValue("1.3.6.1.4.1.311.21.10"), + ), + }, + }) +} + +func TestAccKeyVaultCertificate_emptyExtendedKeyUsage(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate", "test") + r := KeyVaultCertificateResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.emptyExtendedKeyUsage(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("certificate_data").Exists(), + check.That(data.ResourceName).Key("certificate_policy.0.x509_certificate_properties.0.extended_key_usage.#").HasValue("0"), + ), + }, + }) +} + +func TestAccKeyVaultCertificate_withExternalAccessPolicy(t *testing.T) { + data := acceptance.BuildTestData(t, 
"azurerm_key_vault_certificate", "test") + r := KeyVaultCertificateResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withExternalAccessPolicy(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.withExternalAccessPolicyUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t KeyVaultCertificateResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + keyVaultClient := clients.KeyVault.VaultsClient + client := clients.KeyVault.ManagementClient + + id, err := azure.ParseKeyVaultChildID(state.ID) + if err != nil { + return nil, err + } + + keyVaultId, err := azure.GetKeyVaultIDFromBaseUrl(ctx, keyVaultClient, id.KeyVaultBaseUrl) + if err != nil || keyVaultId == nil { + return nil, fmt.Errorf("retrieving the Resource ID the Key Vault at URL %q: %s", id.KeyVaultBaseUrl, err) + } + + ok, err := azure.KeyVaultExists(ctx, keyVaultClient, *keyVaultId) + if err != nil || !ok { + return nil, fmt.Errorf("checking if key vault %q for Certificate %q in Vault at url %q exists: %v", *keyVaultId, id.Name, id.KeyVaultBaseUrl, err) + } + + cert, err := client.GetCertificate(ctx, id.KeyVaultBaseUrl, id.Name, "") + if err != nil { + return nil, fmt.Errorf("reading Key Vault Certificate: %+v", err) + } + + return utils.Bool(cert.ID != nil), nil +} + +func testCheckKeyVaultCertificateDisappears(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.ManagementClient + vaultClient := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + // Ensure we have enough information in state to look up in API + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + name := rs.Primary.Attributes["name"] + keyVaultId := rs.Primary.Attributes["key_vault_id"] + vaultBaseUrl, err := azure.GetKeyVaultBaseUrlFromID(ctx, vaultClient, keyVaultId) + if err != nil { + return fmt.Errorf("Error looking up Secret %q vault url from id %q: %+v", name, keyVaultId, err) + } + + ok, err = azure.KeyVaultExists(ctx, acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient, keyVaultId) + if err != nil { + return fmt.Errorf("Error checking if key vault %q for Certificate %q in Vault at url %q exists: %v", keyVaultId, name, vaultBaseUrl, err) + } + if !ok { + log.Printf("[DEBUG] Certificate %q Key Vault %q was not found in Key Vault at URI %q ", name, keyVaultId, vaultBaseUrl) + return nil + } + + resp, err := client.DeleteCertificate(ctx, vaultBaseUrl, name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return nil + } + + return fmt.Errorf("Bad: Delete on keyVaultManagementClient: %+v", err) + } + + return nil + } +} + +func (r KeyVaultCertificateResource) basicImportPFX(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +%s + +resource "azurerm_key_vault_certificate" "test" { + name = "acctestcert%s" + key_vault_id = azurerm_key_vault.test.id + + certificate { + contents = filebase64("testdata/keyvaultcert.pfx") + password = "" + } + + certificate_policy { + issuer_parameters { + name = "Self" + } + + key_properties { + exportable = true + 
key_size = 2048 + key_type = "RSA" + reuse_key = false + } + + secret_properties { + content_type = "application/x-pkcs12" + } + } +} +`, r.template(data), data.RandomString) +} + +func (r KeyVaultCertificateResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_key_vault_certificate" "import" { + name = azurerm_key_vault_certificate.test.name + key_vault_id = azurerm_key_vault.test.id + + certificate { + contents = filebase64("testdata/keyvaultcert.pfx") + password = "" + } + + certificate_policy { + issuer_parameters { + name = "Self" + } + + key_properties { + exportable = true + key_size = 2048 + key_type = "RSA" + reuse_key = false + } + + secret_properties { + content_type = "application/x-pkcs12" + } + } +} +`, r.basicImportPFX(data)) +} + +func (r KeyVaultCertificateResource) basicGenerate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +%s + +resource "azurerm_key_vault_certificate" "test" { + name = "acctestcert%s" + key_vault_id = azurerm_key_vault.test.id + + certificate_policy { + issuer_parameters { + name = "Self" + } + + key_properties { + exportable = true + key_size = 2048 + key_type = "RSA" + reuse_key = true + } + + lifetime_action { + action { + action_type = "AutoRenew" + } + + trigger { + days_before_expiry = 30 + } + } + + secret_properties { + content_type = "application/x-pkcs12" + } + + x509_certificate_properties { + key_usage = [ + "cRLSign", + "dataEncipherment", + "digitalSignature", + "keyAgreement", + "keyCertSign", + "keyEncipherment", + ] + + subject = "CN=hello-world" + validity_in_months = 12 + } + } +} +`, r.template(data), data.RandomString) +} + +func (r KeyVaultCertificateResource) basicGenerateUnknownIssuer(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +%s + +resource "azurerm_key_vault_certificate" "test" { + name = "acctestcert%s" + key_vault_id = azurerm_key_vault.test.id + + certificate_policy { + issuer_parameters { + name = "Unknown" + } + + key_properties { + exportable = true + key_size = 2048 + key_type = "RSA" + reuse_key = true + } + + lifetime_action { + action { + action_type = "EmailContacts" + } + + trigger { + days_before_expiry = 30 + } + } + + secret_properties { + content_type = "application/x-pkcs12" + } + + x509_certificate_properties { + key_usage = [ + "cRLSign", + "dataEncipherment", + "digitalSignature", + "keyAgreement", + "keyCertSign", + "keyEncipherment", + ] + + subject = "CN=hello-world" + validity_in_months = 12 + } + } +} +`, r.template(data), data.RandomString) +} + +func (r KeyVaultCertificateResource) basicGenerateSans(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +%s + +resource "azurerm_key_vault_certificate" "test" { + name = "acctestcert%s" + key_vault_id = azurerm_key_vault.test.id + + certificate_policy { + issuer_parameters { + name = "Self" + } + + key_properties { + exportable = true + key_size = 2048 + key_type = "RSA" + reuse_key = true + } + + lifetime_action { + action { + action_type = "AutoRenew" + } + + trigger { + days_before_expiry = 30 + } + } + + secret_properties { + content_type = "application/x-pkcs12" + } + + x509_certificate_properties { + key_usage = [ + "cRLSign", + "dataEncipherment", + "digitalSignature", + "keyAgreement", + "keyCertSign", + "keyEncipherment", + ] + + subject = "CN=hello-world" + + subject_alternative_names { + emails = ["mary@stu.co.uk"] + dns_names = 
["internal.contoso.com"] + upns = ["john@doe.com"] + } + + validity_in_months = 12 + } + } +} +`, r.template(data), data.RandomString) +} + +func (r KeyVaultCertificateResource) basicGenerateTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +%s + +resource "azurerm_key_vault_certificate" "test" { + name = "acctestcert%s" + key_vault_id = azurerm_key_vault.test.id + + certificate_policy { + issuer_parameters { + name = "Self" + } + + key_properties { + exportable = true + key_size = 2048 + key_type = "RSA" + reuse_key = true + } + + lifetime_action { + action { + action_type = "AutoRenew" + } + + trigger { + days_before_expiry = 30 + } + } + + secret_properties { + content_type = "application/x-pkcs12" + } + + x509_certificate_properties { + key_usage = [ + "cRLSign", + "dataEncipherment", + "digitalSignature", + "keyAgreement", + "keyCertSign", + "keyEncipherment", + ] + + subject = "CN=hello-world" + validity_in_months = 12 + } + } + + tags = { + "hello" = "world" + } +} +`, r.template(data), data.RandomString) +} + +func (r KeyVaultCertificateResource) basicExtendedKeyUsage(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +%s + +resource "azurerm_key_vault_certificate" "test" { + name = "acctestcert%s" + key_vault_id = azurerm_key_vault.test.id + + certificate_policy { + issuer_parameters { + name = "Self" + } + + key_properties { + exportable = true + key_size = 2048 + key_type = "RSA" + reuse_key = true + } + + lifetime_action { + action { + action_type = "AutoRenew" + } + + trigger { + days_before_expiry = 30 + } + } + + secret_properties { + content_type = "application/x-pkcs12" + } + + x509_certificate_properties { + extended_key_usage = [ + "1.3.6.1.5.5.7.3.1", # Server Authentication + "1.3.6.1.5.5.7.3.2", # Client Authentication + "1.3.6.1.4.1.311.21.10", # Application Policies + ] + + key_usage = [ + "cRLSign", + "dataEncipherment", + "digitalSignature", + "keyAgreement", + "keyCertSign", + "keyEncipherment", + ] + + subject = "CN=hello-world" + validity_in_months = 12 + } + } +} +`, r.template(data), data.RandomString) +} + +func (r KeyVaultCertificateResource) emptyExtendedKeyUsage(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +%s + +resource "azurerm_key_vault_certificate" "test" { + name = "acctestcert%s" + key_vault_id = azurerm_key_vault.test.id + + certificate_policy { + issuer_parameters { + name = "Self" + } + + key_properties { + exportable = true + key_size = 2048 + key_type = "RSA" + reuse_key = true + } + + lifetime_action { + action { + action_type = "AutoRenew" + } + + trigger { + days_before_expiry = 30 + } + } + + secret_properties { + content_type = "application/x-pkcs12" + } + + x509_certificate_properties { + extended_key_usage = [] + + key_usage = [ + "cRLSign", + "dataEncipherment", + "digitalSignature", + "keyAgreement", + "keyCertSign", + "keyEncipherment", + ] + + subject = "CN=hello-world" + validity_in_months = 12 + } + } +} +`, r.template(data), data.RandomString) +} + +func (r KeyVaultCertificateResource) softDeleteRecovery(data acceptance.TestData, purge bool) string { + return fmt.Sprintf(` +provider "azurerm" { + features { + key_vault { + purge_soft_delete_on_destroy = "%t" + recover_soft_deleted_key_vaults = true + } + } +} + +%s + +resource "azurerm_key_vault_certificate" "test" { + name = "acctestcert%s" + key_vault_id = azurerm_key_vault.test.id + + certificate_policy { + 
issuer_parameters { + name = "Self" + } + + key_properties { + exportable = true + key_size = 2048 + key_type = "RSA" + reuse_key = true + } + + lifetime_action { + action { + action_type = "AutoRenew" + } + + trigger { + days_before_expiry = 30 + } + } + + secret_properties { + content_type = "application/x-pkcs12" + } + + x509_certificate_properties { + key_usage = [ + "cRLSign", + "dataEncipherment", + "digitalSignature", + "keyAgreement", + "keyCertSign", + "keyEncipherment", + ] + + subject = "CN=hello-world" + validity_in_months = 12 + } + } +} +`, purge, r.template(data), data.RandomString) +} + +func (KeyVaultCertificateResource) withExternalAccessPolicy(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "acctestkeyvault%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "standard" + soft_delete_enabled = true + soft_delete_retention_days = 7 +} + +resource "azurerm_key_vault_access_policy" "test" { + key_vault_id = azurerm_key_vault.test.id + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + certificate_permissions = [ + "create", + "delete", + "get", + "purge", + "recover", + "update", + ] + + key_permissions = [ + "create", + ] + + secret_permissions = [ + "set", + ] + + storage_permissions = [ + "set", + ] +} + +resource "azurerm_key_vault_certificate" "test" { + name = "acctestcert%s" + key_vault_id = azurerm_key_vault.test.id + + certificate_policy { + issuer_parameters { + name = "Self" + } + + key_properties { + exportable = true + key_size = 2048 + key_type = "RSA" + reuse_key = true + } + + lifetime_action { + action { + action_type = "AutoRenew" + } + + trigger { + days_before_expiry = 30 + } + } + + secret_properties { + content_type = "application/x-pkcs12" + } + + x509_certificate_properties { + key_usage = [ + "cRLSign", + "dataEncipherment", + "digitalSignature", + "keyAgreement", + "keyCertSign", + "keyEncipherment", + ] + + subject = "CN=hello-world" + validity_in_months = 12 + } + } + depends_on = [azurerm_key_vault_access_policy.test] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) +} + +func (KeyVaultCertificateResource) withExternalAccessPolicyUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "acctestkeyvault%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "standard" + soft_delete_enabled = true + soft_delete_retention_days = 7 +} + +resource "azurerm_key_vault_access_policy" "test" { + key_vault_id = azurerm_key_vault.test.id + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + certificate_permissions = [ + "backup", + "create", + "delete", + "get", + "recover", + "purge", + "update", + ] + + key_permissions = [ + "create", + ] + + secret_permissions = [ + 
"set", + ] + + storage_permissions = [ + "set", + ] +} + +resource "azurerm_key_vault_certificate" "test" { + name = "acctestcert%s" + key_vault_id = azurerm_key_vault.test.id + + certificate_policy { + issuer_parameters { + name = "Self" + } + + key_properties { + exportable = true + key_size = 2048 + key_type = "RSA" + reuse_key = true + } + + lifetime_action { + action { + action_type = "AutoRenew" + } + + trigger { + days_before_expiry = 30 + } + } + + secret_properties { + content_type = "application/x-pkcs12" + } + + x509_certificate_properties { + key_usage = [ + "cRLSign", + "dataEncipherment", + "digitalSignature", + "keyAgreement", + "keyCertSign", + "keyEncipherment", + ] + + subject = "CN=hello-world" + validity_in_months = 12 + } + } + depends_on = [azurerm_key_vault_access_policy.test] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) +} + +func (KeyVaultCertificateResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +data "azurerm_client_config" "current" {} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "acctestkeyvault%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "standard" + soft_delete_enabled = true + soft_delete_retention_days = 7 + + access_policy { + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + + certificate_permissions = [ + "create", + "delete", + "get", + "import", + "purge", + "recover", + "update", + ] + + key_permissions = [ + "create", + ] + + secret_permissions = [ + "set", + ] + + storage_permissions = [ + "set", + ] + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} diff --git a/azurerm/internal/services/keyvault/key_vault_data_source.go b/azurerm/internal/services/keyvault/key_vault_data_source.go index 864b76f7af35..671369c99f2d 100644 --- a/azurerm/internal/services/keyvault/key_vault_data_source.go +++ b/azurerm/internal/services/keyvault/key_vault_data_source.go @@ -15,9 +15,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmKeyVault() *schema.Resource { +func dataSourceKeyVault() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmKeyVaultRead, + Read: dataSourceKeyVaultRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -157,7 +157,7 @@ func dataSourceArmKeyVault() *schema.Resource { } } -func dataSourceArmKeyVaultRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceKeyVaultRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).KeyVault.VaultsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/keyvault/key_vault_data_source_test.go b/azurerm/internal/services/keyvault/key_vault_data_source_test.go new file mode 100644 index 000000000000..c68d5739bacb --- /dev/null +++ b/azurerm/internal/services/keyvault/key_vault_data_source_test.go @@ -0,0 +1,137 @@ +package keyvault_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type KeyVaultDataSource struct { +} + +func TestAccDataSourceKeyVault_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_key_vault", "test") + r := KeyVaultDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("tenant_id").Exists(), + check.That(data.ResourceName).Key("sku_name").Exists(), + check.That(data.ResourceName).Key("access_policy.0.tenant_id").Exists(), + check.That(data.ResourceName).Key("access_policy.0.object_id").Exists(), + check.That(data.ResourceName).Key("access_policy.0.key_permissions.0").HasValue("create"), + check.That(data.ResourceName).Key("access_policy.0.secret_permissions.0").HasValue("set"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), + }, + }) +} + +func TestAccDataSourceKeyVault_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_key_vault", "test") + r := KeyVaultDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("tenant_id").Exists(), + check.That(data.ResourceName).Key("sku_name").Exists(), + check.That(data.ResourceName).Key("access_policy.0.tenant_id").Exists(), + check.That(data.ResourceName).Key("access_policy.0.object_id").Exists(), + check.That(data.ResourceName).Key("access_policy.0.key_permissions.0").HasValue("get"), + check.That(data.ResourceName).Key("access_policy.0.secret_permissions.0").HasValue("get"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.environment").HasValue("Production"), + ), + }, + }) +} + +func TestAccDataSourceKeyVault_networkAcls(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_key_vault", "test") + r := KeyVaultDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.networkAcls(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("tenant_id").Exists(), + check.That(data.ResourceName).Key("sku_name").Exists(), + check.That(data.ResourceName).Key("access_policy.0.tenant_id").Exists(), + check.That(data.ResourceName).Key("access_policy.0.object_id").Exists(), + check.That(data.ResourceName).Key("access_policy.0.key_permissions.0").HasValue("create"), + check.That(data.ResourceName).Key("access_policy.0.secret_permissions.0").HasValue("set"), + check.That(data.ResourceName).Key("network_acls.#").HasValue("1"), + check.That(data.ResourceName).Key("network_acls.0.default_action").HasValue("Allow"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), + }, + }) +} + +func TestAccDataSourceKeyVault_softDelete(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_key_vault", "test") + r := KeyVaultDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.enableSoftDelete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("soft_delete_enabled").HasValue("true"), + check.That(data.ResourceName).Key("purge_protection_enabled").HasValue("false"), + check.That(data.ResourceName).Key("sku_name").Exists(), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), + }, + }) +} + +func (KeyVaultDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_key_vault" "test" { + name = 
azurerm_key_vault.test.name + resource_group_name = azurerm_key_vault.test.resource_group_name +} +`, KeyVaultResource{}.basic(data)) +} + +func (KeyVaultDataSource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_key_vault" "test" { + name = azurerm_key_vault.test.name + resource_group_name = azurerm_key_vault.test.resource_group_name +} +`, KeyVaultResource{}.complete(data)) +} + +func (KeyVaultDataSource) networkAcls(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_key_vault" "test" { + name = azurerm_key_vault.test.name + resource_group_name = azurerm_key_vault.test.resource_group_name +} +`, KeyVaultResource{}.networkAclsUpdated(data)) +} + +func (KeyVaultDataSource) enableSoftDelete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_key_vault" "test" { + name = azurerm_key_vault.test.name + resource_group_name = azurerm_key_vault.test.resource_group_name +} +`, KeyVaultResource{}.softDelete(data, true)) +} diff --git a/azurerm/internal/services/keyvault/key_vault_key_data_source.go b/azurerm/internal/services/keyvault/key_vault_key_data_source.go index 3b1277b7d91f..8f8deb8a6033 100644 --- a/azurerm/internal/services/keyvault/key_vault_key_data_source.go +++ b/azurerm/internal/services/keyvault/key_vault_key_data_source.go @@ -12,9 +12,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmKeyVaultKey() *schema.Resource { +func dataSourceKeyVaultKey() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmKeyVaultKeyRead, + Read: dataSourceKeyVaultKeyRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -71,7 +71,7 @@ func dataSourceArmKeyVaultKey() *schema.Resource { } } -func dataSourceArmKeyVaultKeyRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceKeyVaultKeyRead(d *schema.ResourceData, meta interface{}) error { vaultClient := meta.(*clients.Client).KeyVault.VaultsClient client := meta.(*clients.Client).KeyVault.ManagementClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) diff --git a/azurerm/internal/services/keyvault/key_vault_key_data_source_test.go b/azurerm/internal/services/keyvault/key_vault_key_data_source_test.go new file mode 100644 index 000000000000..7e749f5d95c6 --- /dev/null +++ b/azurerm/internal/services/keyvault/key_vault_key_data_source_test.go @@ -0,0 +1,40 @@ +package keyvault_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type KeyVaultKeyDataSource struct { +} + +func TestAccDataSourceKeyVaultKey_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_key_vault_key", "test") + r := KeyVaultKeyDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("key_type").HasValue("RSA"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.hello").HasValue("world"), + ), + }, + }) +} + +func (KeyVaultKeyDataSource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_key_vault_key" "test" { + name = azurerm_key_vault_key.test.name + key_vault_id = azurerm_key_vault.test.id 
+} +`, KeyVaultKeyResource{}.complete(data)) +} diff --git a/azurerm/internal/services/keyvault/key_vault_key_resource.go b/azurerm/internal/services/keyvault/key_vault_key_resource.go index 690844ddad4f..2a0e508fb74e 100644 --- a/azurerm/internal/services/keyvault/key_vault_key_resource.go +++ b/azurerm/internal/services/keyvault/key_vault_key_resource.go @@ -1,12 +1,14 @@ package keyvault import ( + "context" "encoding/base64" "fmt" "log" "time" "github.com/Azure/azure-sdk-for-go/services/keyvault/2016-10-01/keyvault" + "github.com/Azure/go-autorest/autorest" "github.com/Azure/go-autorest/autorest/date" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" @@ -19,14 +21,14 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmKeyVaultKey() *schema.Resource { +func resourceKeyVaultKey() *schema.Resource { return &schema.Resource{ - Create: resourceArmKeyVaultKeyCreate, - Read: resourceArmKeyVaultKeyRead, - Update: resourceArmKeyVaultKeyUpdate, - Delete: resourceArmKeyVaultKeyDelete, + Create: resourceKeyVaultKeyCreate, + Read: resourceKeyVaultKeyRead, + Update: resourceKeyVaultKeyUpdate, + Delete: resourceKeyVaultKeyDelete, Importer: &schema.ResourceImporter{ - State: resourceArmKeyVaultChildResourceImporter, + State: nestedItemResourceImporter, }, Timeouts: &schema.ResourceTimeout{ @@ -152,7 +154,7 @@ func resourceArmKeyVaultKey() *schema.Resource { } } -func resourceArmKeyVaultKeyCreate(d *schema.ResourceData, meta interface{}) error { +func resourceKeyVaultKeyCreate(d *schema.ResourceData, meta interface{}) error { vaultClient := meta.(*clients.Client).KeyVault.VaultsClient client := meta.(*clients.Client).KeyVault.ManagementClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) @@ -255,10 +257,10 @@ func resourceArmKeyVaultKeyCreate(d *schema.ResourceData, meta interface{}) erro d.SetId(*read.Key.Kid) - return resourceArmKeyVaultKeyRead(d, meta) + return resourceKeyVaultKeyRead(d, meta) } -func resourceArmKeyVaultKeyUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceKeyVaultKeyUpdate(d *schema.ResourceData, meta interface{}) error { vaultClient := meta.(*clients.Client).KeyVault.VaultsClient client := meta.(*clients.Client).KeyVault.ManagementClient ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) @@ -314,10 +316,10 @@ func resourceArmKeyVaultKeyUpdate(d *schema.ResourceData, meta interface{}) erro return err } - return resourceArmKeyVaultKeyRead(d, meta) + return resourceKeyVaultKeyRead(d, meta) } -func resourceArmKeyVaultKeyRead(d *schema.ResourceData, meta interface{}) error { +func resourceKeyVaultKeyRead(d *schema.ResourceData, meta interface{}) error { keyVaultClient := meta.(*clients.Client).KeyVault.VaultsClient client := meta.(*clients.Client).KeyVault.ManagementClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) @@ -400,7 +402,7 @@ func resourceArmKeyVaultKeyRead(d *schema.ResourceData, meta interface{}) error return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmKeyVaultKeyDelete(d *schema.ResourceData, meta interface{}) error { +func resourceKeyVaultKeyDelete(d *schema.ResourceData, meta interface{}) error { keyVaultClient := meta.(*clients.Client).KeyVault.VaultsClient client := meta.(*clients.Client).KeyVault.ManagementClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) @@ -429,8 +431,45 @@ func resourceArmKeyVaultKeyDelete(d 
*schema.ResourceData, meta interface{}) erro return nil } - _, err = client.DeleteKey(ctx, id.KeyVaultBaseUrl, id.Name) - return err + shouldPurge := meta.(*clients.Client).Features.KeyVault.PurgeSoftDeleteOnDestroy + description := fmt.Sprintf("Key %q (Key Vault %q)", id.Name, id.KeyVaultBaseUrl) + deleter := deleteAndPurgeKey{ + client: client, + keyVaultUri: id.KeyVaultBaseUrl, + name: id.Name, + } + if err := deleteAndOptionallyPurge(ctx, description, shouldPurge, deleter); err != nil { + return err + } + + return nil +} + +var _ deleteAndPurgeNestedItem = deleteAndPurgeKey{} + +type deleteAndPurgeKey struct { + client *keyvault.BaseClient + keyVaultUri string + name string +} + +func (d deleteAndPurgeKey) DeleteNestedItem(ctx context.Context) (autorest.Response, error) { + resp, err := d.client.DeleteKey(ctx, d.keyVaultUri, d.name) + return resp.Response, err +} + +func (d deleteAndPurgeKey) NestedItemHasBeenDeleted(ctx context.Context) (autorest.Response, error) { + resp, err := d.client.GetKey(ctx, d.keyVaultUri, d.name, "") + return resp.Response, err +} + +func (d deleteAndPurgeKey) PurgeNestedItem(ctx context.Context) (autorest.Response, error) { + return d.client.PurgeDeletedKey(ctx, d.keyVaultUri, d.name) +} + +func (d deleteAndPurgeKey) NestedItemHasBeenPurged(ctx context.Context) (autorest.Response, error) { + resp, err := d.client.GetDeletedKey(ctx, d.keyVaultUri, d.name) + return resp.Response, err } func expandKeyVaultKeyOptions(d *schema.ResourceData) *[]keyvault.JSONWebKeyOperation { diff --git a/azurerm/internal/services/keyvault/key_vault_key_resource_test.go b/azurerm/internal/services/keyvault/key_vault_key_resource_test.go new file mode 100644 index 000000000000..a35343e8b532 --- /dev/null +++ b/azurerm/internal/services/keyvault/key_vault_key_resource_test.go @@ -0,0 +1,825 @@ +package keyvault_test + +import ( + "context" + "fmt" + "log" + "testing" + "time" + + "github.com/Azure/azure-sdk-for-go/services/keyvault/2016-10-01/keyvault" + "github.com/Azure/go-autorest/autorest/date" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type KeyVaultKeyResource struct { +} + +func TestAccKeyVaultKey_basicEC(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_key", "test") + r := KeyVaultKeyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicEC(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("key_size"), + }) +} + +func TestAccKeyVaultKey_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_key", "test") + r := KeyVaultKeyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicEC(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_key_vault_key"), + }, + }) +} + +func TestAccKeyVaultKey_basicECHSM(t *testing.T) { + data := 
acceptance.BuildTestData(t, "azurerm_key_vault_key", "test") + r := KeyVaultKeyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicECHSM(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + }) +} + +func TestAccKeyVaultKey_curveEC(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_key", "test") + r := KeyVaultKeyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.curveEC(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccKeyVaultKey_basicRSA(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_key", "test") + r := KeyVaultKeyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicRSA(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("key_size"), + }) +} + +func TestAccKeyVaultKey_basicRSAHSM(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_key", "test") + r := KeyVaultKeyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicRSAHSM(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("key_size"), + }) +} + +func TestAccKeyVaultKey_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_key", "test") + r := KeyVaultKeyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("not_before_date").HasValue("2020-01-01T01:02:03Z"), + check.That(data.ResourceName).Key("expiration_date").HasValue("2021-01-01T01:02:03Z"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.hello").HasValue("world"), + ), + }, + data.ImportStep("key_size"), + }) +} + +func TestAccKeyVaultKey_softDeleteRecovery(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_key", "test") + r := KeyVaultKeyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.softDeleteRecovery(data, false), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("not_before_date").HasValue("2020-01-01T01:02:03Z"), + check.That(data.ResourceName).Key("expiration_date").HasValue("2021-01-01T01:02:03Z"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.hello").HasValue("world"), + ), + }, + data.ImportStep("key_size"), + { + Config: r.softDeleteRecovery(data, false), + Destroy: true, + }, + { + Config: r.softDeleteRecovery(data, true), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("not_before_date").HasValue("2020-01-01T01:02:03Z"), + check.That(data.ResourceName).Key("expiration_date").HasValue("2021-01-01T01:02:03Z"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.hello").HasValue("world"), + ), + }, + }) +} + +func TestAccKeyVaultKey_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_key", "test") + r := KeyVaultKeyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicRSA(data), + Check: 
resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("key_opts.#").HasValue("6"), + check.That(data.ResourceName).Key("key_opts.0").HasValue("decrypt"), + ), + }, + { + Config: r.basicUpdated(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("key_opts.#").HasValue("5"), + check.That(data.ResourceName).Key("key_opts.0").HasValue("encrypt"), + ), + }, + }) +} + +func TestAccKeyVaultKey_updatedExternally(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_key", "test") + r := KeyVaultKeyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicEC(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + updateExpiryDateForKeyVaultKey(data.ResourceName, "2029-02-02T12:59:00Z"), + ), + ExpectNonEmptyPlan: true, + }, + { + Config: r.basicECUpdatedExternally(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.basicECUpdatedExternally(data), + PlanOnly: true, + }, + data.ImportStep("key_size"), + }) +} + +func TestAccKeyVaultKey_disappears(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_key", "test") + r := KeyVaultKeyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicEC(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + testCheckKeyVaultKeyDisappears(data.ResourceName), + ), + ExpectNonEmptyPlan: true, + }, + }) +} + +func TestAccKeyVaultKey_disappearsWhenParentKeyVaultDeleted(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_key", "test") + r := KeyVaultKeyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicEC(data), + Check: resource.ComposeTestCheckFunc( + testCheckKeyVaultDisappears("azurerm_key_vault.test"), + ), + ExpectNonEmptyPlan: true, + }, + }) +} + +func TestAccKeyVaultKey_withExternalAccessPolicy(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_key", "test") + r := KeyVaultKeyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withExternalAccessPolicy(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("key_size"), + { + Config: r.withExternalAccessPolicyUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("key_size"), + }) +} + +func (t KeyVaultKeyResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + client := clients.KeyVault.ManagementClient + keyVaultClient := clients.KeyVault.VaultsClient + + id, err := azure.ParseKeyVaultChildID(state.ID) + if err != nil { + return nil, err + } + + keyVaultId, err := azure.GetKeyVaultIDFromBaseUrl(ctx, keyVaultClient, id.KeyVaultBaseUrl) + if err != nil || keyVaultId == nil { + return nil, fmt.Errorf("retrieving the Resource ID the Key Vault at URL %q: %s", id.KeyVaultBaseUrl, err) + } + + ok, err := azure.KeyVaultExists(ctx, keyVaultClient, *keyVaultId) + if err != nil || !ok { + return nil, fmt.Errorf("checking if key vault %q for Certificate %q in Vault at url %q exists: %v", *keyVaultId, id.Name, id.KeyVaultBaseUrl, err) + } + + resp, err := client.GetKey(ctx, id.KeyVaultBaseUrl, id.Name, "") + if err != nil { + return nil, 
fmt.Errorf("retrieving Key Vault Key %q: %+v", state.ID, err) + } + + return utils.Bool(resp.Key != nil), nil +} + +func updateExpiryDateForKeyVaultKey(resourceName string, expiryDate string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.ManagementClient + vaultClient := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + // Ensure we have enough information in state to look up in API + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + name := rs.Primary.Attributes["name"] + keyVaultId := rs.Primary.Attributes["key_vault_id"] + vaultBaseUrl, err := azure.GetKeyVaultBaseUrlFromID(ctx, vaultClient, keyVaultId) + if err != nil { + return fmt.Errorf("Error looking up Secret %q vault url from id %q: %+v", name, keyVaultId, err) + } + + ok, err = azure.KeyVaultExists(ctx, acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient, keyVaultId) + if err != nil { + return fmt.Errorf("Error checking if key vault %q for Key %q in Vault at url %q exists: %v", keyVaultId, name, vaultBaseUrl, err) + } + if !ok { + log.Printf("[DEBUG] Key %q Key Vault %q was not found in Key Vault at URI %q ", name, keyVaultId, vaultBaseUrl) + return nil + } + + expirationDate, err := time.Parse(time.RFC3339, expiryDate) + if err != nil { + return err + } + expirationUnixTime := date.UnixTime(expirationDate) + update := keyvault.KeyUpdateParameters{ + KeyAttributes: &keyvault.KeyAttributes{ + Expires: &expirationUnixTime, + }, + } + if _, err = client.UpdateKey(ctx, vaultBaseUrl, name, "", update); err != nil { + return fmt.Errorf("updating secret: %+v", err) + } + + resp, err := client.GetKey(ctx, vaultBaseUrl, name, "") + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Bad: Key Vault Key %q (resource group: %q) does not exist", name, vaultBaseUrl) + } + + return fmt.Errorf("Bad: Get on keyVaultManagementClient: %+v", err) + } + + return nil + } +} + +func testCheckKeyVaultKeyDisappears(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.ManagementClient + vaultClient := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + // Ensure we have enough information in state to look up in API + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + + name := rs.Primary.Attributes["name"] + keyVaultId := rs.Primary.Attributes["key_vault_id"] + vaultBaseUrl, err := azure.GetKeyVaultBaseUrlFromID(ctx, vaultClient, keyVaultId) + if err != nil { + return fmt.Errorf("Error looking up Secret %q vault url from id %q: %+v", name, keyVaultId, err) + } + + ok, err = azure.KeyVaultExists(ctx, acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient, keyVaultId) + if err != nil { + return fmt.Errorf("Error checking if key vault %q for Key %q in Vault at url %q exists: %v", keyVaultId, name, vaultBaseUrl, err) + } + if !ok { + log.Printf("[DEBUG] Key %q Key Vault %q was not found in Key Vault at URI %q ", name, keyVaultId, vaultBaseUrl) + return nil + } + + resp, err := client.DeleteKey(ctx, vaultBaseUrl, name) + if err != nil { + if 
utils.ResponseWasNotFound(resp.Response) { + return nil + } + + return fmt.Errorf("Bad: Delete on keyVaultManagementClient: %+v", err) + } + + return nil + } +} + +func (r KeyVaultKeyResource) basicEC(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +%s + +resource "azurerm_key_vault_key" "test" { + name = "key-%s" + key_vault_id = azurerm_key_vault.test.id + key_type = "EC" + key_size = 2048 + + key_opts = [ + "sign", + "verify", + ] +} +`, r.template(data), data.RandomString) +} + +func (r KeyVaultKeyResource) basicECUpdatedExternally(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +%s + +resource "azurerm_key_vault_key" "test" { + name = "key-%s" + key_vault_id = azurerm_key_vault.test.id + key_type = "EC" + key_size = 2048 + expiration_date = "2029-02-02T12:59:00Z" + + key_opts = [ + "sign", + "verify", + ] + + tags = { + Rick = "Morty" + } +} +`, r.template(data), data.RandomString) +} + +func (r KeyVaultKeyResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_key_vault_key" "import" { + name = azurerm_key_vault_key.test.name + key_vault_id = azurerm_key_vault.test.id + key_type = "EC" + key_size = 2048 + + key_opts = [ + "sign", + "verify", + ] +} +`, r.basicEC(data)) +} + +func (r KeyVaultKeyResource) basicRSA(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +%s + +resource "azurerm_key_vault_key" "test" { + name = "key-%s" + key_vault_id = azurerm_key_vault.test.id + key_type = "RSA" + key_size = 2048 + + key_opts = [ + "decrypt", + "encrypt", + "sign", + "unwrapKey", + "verify", + "wrapKey", + ] +} +`, r.template(data), data.RandomString) +} + +func (r KeyVaultKeyResource) basicRSAHSM(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +%s + +resource "azurerm_key_vault_key" "test" { + name = "key-%s" + key_vault_id = azurerm_key_vault.test.id + key_type = "RSA-HSM" + key_size = 2048 + + key_opts = [ + "decrypt", + "encrypt", + "sign", + "unwrapKey", + "verify", + "wrapKey", + ] +} +`, r.template(data), data.RandomString) +} + +func (r KeyVaultKeyResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +%s + +resource "azurerm_key_vault_key" "test" { + name = "key-%s" + key_vault_id = azurerm_key_vault.test.id + key_type = "RSA" + key_size = 2048 + not_before_date = "2020-01-01T01:02:03Z" + expiration_date = "2021-01-01T01:02:03Z" + + key_opts = [ + "decrypt", + "encrypt", + "sign", + "unwrapKey", + "verify", + "wrapKey", + ] + + tags = { + "hello" = "world" + } +} +`, r.template(data), data.RandomString) +} + +func (r KeyVaultKeyResource) basicUpdated(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +%s + +resource "azurerm_key_vault_key" "test" { + name = "key-%s" + key_vault_id = azurerm_key_vault.test.id + key_type = "RSA" + key_size = 2048 + + key_opts = [ + "encrypt", + "sign", + "unwrapKey", + "verify", + "wrapKey", + ] +} +`, r.template(data), data.RandomString) +} + +func (r KeyVaultKeyResource) curveEC(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +%s + +resource "azurerm_key_vault_key" "test" { + name = "key-%s" + key_vault_id = azurerm_key_vault.test.id + key_type = "EC" + curve = "P-521" + + key_opts = [ + "sign", + "verify", + ] +} +`, r.template(data), 
data.RandomString) +} + +func (r KeyVaultKeyResource) basicECHSM(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +%s + +resource "azurerm_key_vault_key" "test" { + name = "key-%s" + key_vault_id = azurerm_key_vault.test.id + key_type = "EC-HSM" + curve = "P-521" + + key_opts = [ + "sign", + "verify", + ] +} +`, r.template(data), data.RandomString) +} + +func (r KeyVaultKeyResource) softDeleteRecovery(data acceptance.TestData, purge bool) string { + return fmt.Sprintf(` +provider "azurerm" { + features { + key_vault { + purge_soft_delete_on_destroy = "%t" + recover_soft_deleted_key_vaults = true + } + } +} + +%s + +resource "azurerm_key_vault_key" "test" { + name = "key-%s" + key_vault_id = azurerm_key_vault.test.id + key_type = "RSA" + key_size = 2048 + not_before_date = "2020-01-01T01:02:03Z" + expiration_date = "2021-01-01T01:02:03Z" + + key_opts = [ + "decrypt", + "encrypt", + "sign", + "unwrapKey", + "verify", + "wrapKey", + ] + + tags = { + "hello" = "world" + } +} +`, purge, r.template(data), data.RandomString) +} + +func (KeyVaultKeyResource) withExternalAccessPolicy(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "acctestkv-%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "premium" + soft_delete_enabled = true + soft_delete_retention_days = 7 + + tags = { + environment = "accTest" + } +} + +resource "azurerm_key_vault_access_policy" "test" { + key_vault_id = azurerm_key_vault.test.id + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + + key_permissions = [ + "create", + "delete", + "get", + "purge", + "recover", + "update", + ] + + secret_permissions = [ + "get", + "delete", + "set", + ] +} + +resource "azurerm_key_vault_key" "test" { + name = "key-%s" + key_vault_id = azurerm_key_vault.test.id + key_type = "EC" + key_size = 2048 + + key_opts = [ + "sign", + "verify", + ] + + depends_on = [azurerm_key_vault_access_policy.test] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) +} + +func (KeyVaultKeyResource) withExternalAccessPolicyUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "acctestkv-%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "premium" + soft_delete_enabled = true + soft_delete_retention_days = 7 + + tags = { + environment = "accTest" + } +} + +resource "azurerm_key_vault_access_policy" "test" { + key_vault_id = azurerm_key_vault.test.id + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + + key_permissions = [ + "create", + "delete", + "encrypt", + "get", + "purge", + "recover", + "update", + ] + + secret_permissions = [ + "get", + "delete", + "set", + ] +} + +resource "azurerm_key_vault_key" "test" { + name = 
"key-%s" + key_vault_id = azurerm_key_vault.test.id + key_type = "EC" + key_size = 2048 + + key_opts = [ + "sign", + "verify", + ] + + depends_on = [azurerm_key_vault_access_policy.test] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) +} + +func (KeyVaultKeyResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +data "azurerm_client_config" "current" {} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "acctestkv-%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "premium" + soft_delete_enabled = true + soft_delete_retention_days = 7 + + access_policy { + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + + key_permissions = [ + "create", + "delete", + "get", + "purge", + "recover", + "update", + ] + + secret_permissions = [ + "get", + "delete", + "set", + ] + } + + tags = { + environment = "Production" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} diff --git a/azurerm/internal/services/keyvault/key_vault_migration_resource.go b/azurerm/internal/services/keyvault/key_vault_migration_resource.go index 2c03e65a3432..1f8c5cfdf9d0 100644 --- a/azurerm/internal/services/keyvault/key_vault_migration_resource.go +++ b/azurerm/internal/services/keyvault/key_vault_migration_resource.go @@ -10,17 +10,17 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/terraform" ) -func resourceAzureRMKeyVaultMigrateState(v int, is *terraform.InstanceState, _ interface{}) (*terraform.InstanceState, error) { +func resourceKeyVaultMigrateState(v int, is *terraform.InstanceState, _ interface{}) (*terraform.InstanceState, error) { switch v { case 0: - log.Println("[INFO] Found AzureRM Key Vault State v0; migrating to v1") - return migrateAzureRMKeyVaultStateV0toV1(is) + log.Println("[INFO] Found Key Vault State v0; migrating to v1") + return migrateKeyVaultStateV0toV1(is) default: return is, fmt.Errorf("Unexpected schema version: %d", v) } } -func migrateAzureRMKeyVaultStateV0toV1(is *terraform.InstanceState) (*terraform.InstanceState, error) { +func migrateKeyVaultStateV0toV1(is *terraform.InstanceState) (*terraform.InstanceState, error) { if is.Empty() { log.Println("[DEBUG] Empty InstanceState; nothing to migrate.") return is, nil @@ -28,7 +28,7 @@ func migrateAzureRMKeyVaultStateV0toV1(is *terraform.InstanceState) (*terraform. log.Printf("[DEBUG] ARM Key Vault Attributes before Migration: %#v", is.Attributes) - if err := migrateAzureRMKeyVaultStateV0toV1AccessPolicies(is); err != nil { + if err := migrateKeyVaultStateV0toV1AccessPolicies(is); err != nil { return nil, err } @@ -37,8 +37,8 @@ func migrateAzureRMKeyVaultStateV0toV1(is *terraform.InstanceState) (*terraform. 
return is, nil } -func migrateAzureRMKeyVaultStateV0toV1AccessPolicies(is *terraform.InstanceState) error { - keyVaultSchema := resourceArmKeyVault().Schema +func migrateKeyVaultStateV0toV1AccessPolicies(is *terraform.InstanceState) error { + keyVaultSchema := resourceKeyVault().Schema reader := &schema.MapFieldReader{ Schema: keyVaultSchema, Map: schema.BasicMapReader(is.Attributes), diff --git a/azurerm/internal/services/keyvault/key_vault_migration_test_resource.go b/azurerm/internal/services/keyvault/key_vault_migration_test_resource.go index 34b92ff2b0b8..16ac8bfe0fb3 100644 --- a/azurerm/internal/services/keyvault/key_vault_migration_test_resource.go +++ b/azurerm/internal/services/keyvault/key_vault_migration_test_resource.go @@ -365,8 +365,7 @@ func TestKeyVaultMigrateState(t *testing.T) { ID: tc.ID, Attributes: tc.Attributes, } - is, err := resourceAzureRMKeyVaultMigrateState(tc.StateVersion, is, tc.Meta) - + is, err := resourceKeyVaultMigrateState(tc.StateVersion, is, tc.Meta) if err != nil { t.Fatalf("bad: %q, err: %+v", tn, err) } diff --git a/azurerm/internal/services/keyvault/key_vault_resource.go b/azurerm/internal/services/keyvault/key_vault_resource.go index 58ad77bb09d4..8a1677900e57 100644 --- a/azurerm/internal/services/keyvault/key_vault_resource.go +++ b/azurerm/internal/services/keyvault/key_vault_resource.go @@ -8,7 +8,7 @@ import ( "strings" "time" - dataPlaneKeyVault "github.com/Azure/azure-sdk-for-go/services/keyvault/2016-10-01/keyvault" + KeyVaultMgmt "github.com/Azure/azure-sdk-for-go/services/keyvault/2016-10-01/keyvault" "github.com/Azure/azure-sdk-for-go/services/keyvault/mgmt/2019-09-01/keyvault" "github.com/hashicorp/go-azure-helpers/response" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" @@ -34,18 +34,18 @@ var armKeyVaultSkuFamily = "A" var keyVaultResourceName = "azurerm_key_vault" -func resourceArmKeyVault() *schema.Resource { +func resourceKeyVault() *schema.Resource { return &schema.Resource{ - Create: resourceArmKeyVaultCreate, - Read: resourceArmKeyVaultRead, - Update: resourceArmKeyVaultUpdate, - Delete: resourceArmKeyVaultDelete, + Create: resourceKeyVaultCreate, + Read: resourceKeyVaultRead, + Update: resourceKeyVaultUpdate, + Delete: resourceKeyVaultDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, - MigrateState: resourceAzureRMKeyVaultMigrateState, + MigrateState: resourceKeyVaultMigrateState, SchemaVersion: 1, Timeouts: &schema.ResourceTimeout{ @@ -103,7 +103,7 @@ func resourceArmKeyVault() *schema.Resource { "application_id": { Type: schema.TypeString, Optional: true, - ValidateFunc: validation.IsUUID, + ValidateFunc: validate.IsUUIDOrEmpty, }, "certificate_permissions": azure.SchemaKeyVaultCertificatePermissions(), "key_permissions": azure.SchemaKeyVaultKeyPermissions(), @@ -220,7 +220,7 @@ func resourceArmKeyVault() *schema.Resource { } } -func resourceArmKeyVaultCreate(d *schema.ResourceData, meta interface{}) error { +func resourceKeyVaultCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).KeyVault.VaultsClient dataPlaneClient := meta.(*clients.Client).KeyVault.ManagementClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) @@ -374,7 +374,7 @@ func resourceArmKeyVaultCreate(d *schema.ResourceData, meta interface{}) error { } if v, ok := d.GetOk("contact"); ok { - contacts := dataPlaneKeyVault.Contacts{ + contacts := KeyVaultMgmt.Contacts{ ContactList: expandKeyVaultCertificateContactList(v.(*schema.Set).List()), } if 
read.Properties == nil || read.Properties.VaultURI == nil { @@ -385,12 +385,12 @@ func resourceArmKeyVaultCreate(d *schema.ResourceData, meta interface{}) error { } } - return resourceArmKeyVaultRead(d, meta) + return resourceKeyVaultRead(d, meta) } -func resourceArmKeyVaultUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceKeyVaultUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).KeyVault.VaultsClient - dataPlaneClient := meta.(*clients.Client).KeyVault.ManagementClient + managementClient := meta.(*clients.Client).KeyVault.ManagementClient ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -586,7 +586,7 @@ func resourceArmKeyVaultUpdate(d *schema.ResourceData, meta interface{}) error { } if d.HasChange("contact") { - contacts := dataPlaneKeyVault.Contacts{ + contacts := KeyVaultMgmt.Contacts{ ContactList: expandKeyVaultCertificateContactList(d.Get("contact").(*schema.Set).List()), } if existing.Properties == nil || existing.Properties.VaultURI == nil { @@ -594,9 +594,9 @@ func resourceArmKeyVaultUpdate(d *schema.ResourceData, meta interface{}) error { } var err error if len(*contacts.ContactList) == 0 { - _, err = dataPlaneClient.DeleteCertificateContacts(ctx, *existing.Properties.VaultURI) + _, err = managementClient.DeleteCertificateContacts(ctx, *existing.Properties.VaultURI) } else { - _, err = dataPlaneClient.SetCertificateContacts(ctx, *existing.Properties.VaultURI, contacts) + _, err = managementClient.SetCertificateContacts(ctx, *existing.Properties.VaultURI, contacts) } if err != nil { return fmt.Errorf("failed to set Contacts for Key Vault %q (Resource Group %q): %s", name, resourceGroup, err) @@ -605,12 +605,12 @@ func resourceArmKeyVaultUpdate(d *schema.ResourceData, meta interface{}) error { d.Partial(false) - return resourceArmKeyVaultRead(d, meta) + return resourceKeyVaultRead(d, meta) } -func resourceArmKeyVaultRead(d *schema.ResourceData, meta interface{}) error { +func resourceKeyVaultRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).KeyVault.VaultsClient - dataPlaneClient := meta.(*clients.Client).KeyVault.ManagementClient + managementClient := meta.(*clients.Client).KeyVault.ManagementClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -668,7 +668,7 @@ func resourceArmKeyVaultRead(d *schema.ResourceData, meta interface{}) error { return fmt.Errorf("Error setting `access_policy` for KeyVault %q: %+v", *resp.Name, err) } - if resp, err := dataPlaneClient.GetCertificateContacts(ctx, *props.VaultURI); err != nil { + if resp, err := managementClient.GetCertificateContacts(ctx, *props.VaultURI); err != nil { if !utils.ResponseWasForbidden(resp.Response) && !utils.ResponseWasNotFound(resp.Response) { return fmt.Errorf("retrieving `contact` for KeyVault: %+v", err) } @@ -677,12 +677,12 @@ func resourceArmKeyVaultRead(d *schema.ResourceData, meta interface{}) error { return fmt.Errorf("setting `contact` for KeyVault: %+v", err) } } } return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmKeyVaultDelete(d *schema.ResourceData, meta interface{}) error { +func resourceKeyVaultDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).KeyVault.VaultsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() @@ -795,7 +795,7 @@ func 
keyVaultRefreshFunc(vaultUri string) resource.StateRefreshFunc { return func() (interface{}, string, error) { log.Printf("[DEBUG] Checking to see if KeyVault %q is available..", vaultUri) - var PTransport = &http.Transport{Proxy: http.ProxyFromEnvironment} + PTransport := &http.Transport{Proxy: http.ProxyFromEnvironment} client := &http.Client{ Transport: PTransport, @@ -855,15 +857,15 @@ func expandKeyVaultNetworkAcls(input []interface{}) (*keyvault.NetworkRuleSet, [ return &ruleSet, subnetIds } -func expandKeyVaultCertificateContactList(input []interface{}) *[]dataPlaneKeyVault.Contact { - results := make([]dataPlaneKeyVault.Contact, 0) +func expandKeyVaultCertificateContactList(input []interface{}) *[]KeyVaultMgmt.Contact { + results := make([]KeyVaultMgmt.Contact, 0) if len(input) == 0 || input[0] == nil { return &results } for _, item := range input { v := item.(map[string]interface{}) - results = append(results, dataPlaneKeyVault.Contact{ + results = append(results, KeyVaultMgmt.Contact{ Name: utils.String(v["name"].(string)), EmailAddress: utils.String(v["email"].(string)), Phone: utils.String(v["phone"].(string)), @@ -917,7 +919,7 @@ func flattenKeyVaultNetworkAcls(input *keyvault.NetworkRuleSet) []interface{} { return []interface{}{output} } -func flattenKeyVaultCertificateContactList(input *[]dataPlaneKeyVault.Contact) []interface{} { +func flattenKeyVaultCertificateContactList(input *[]KeyVaultMgmt.Contact) []interface{} { results := make([]interface{}, 0) if input == nil { return results diff --git a/azurerm/internal/services/keyvault/key_vault_resource_test.go b/azurerm/internal/services/keyvault/key_vault_resource_test.go new file mode 100644 index 000000000000..27a2a33df66c --- /dev/null +++ b/azurerm/internal/services/keyvault/key_vault_resource_test.go @@ -0,0 +1,1205 @@ +package keyvault_test + +import ( + "context" + "fmt" + "regexp" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + + "github.com/hashicorp/go-azure-helpers/response" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type KeyVaultResource struct { +} + +func TestAccKeyVault_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") + r := KeyVaultResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("sku_name").HasValue("premium"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccKeyVault_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") + r := KeyVaultResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_key_vault"), + }, + }) +} + +func TestAccKeyVault_networkAcls(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") + r := KeyVaultResource{} + + 
data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.networkAcls(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.networkAclsUpdated(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccKeyVault_networkAclsAllowed(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") + r := KeyVaultResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.networkAclsAllowed(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccKeyVault_accessPolicyUpperLimit(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") + r := KeyVaultResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.accessPolicyUpperLimit(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + testCheckKeyVaultDisappears(data.ResourceName), + ), + ExpectNonEmptyPlan: true, + }, + }) +} + +func TestAccKeyVault_disappears(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") + r := KeyVaultResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + testCheckKeyVaultDisappears(data.ResourceName), + ), + ExpectNonEmptyPlan: true, + }, + }) +} + +func TestAccKeyVault_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") + r := KeyVaultResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("access_policy.0.application_id").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccKeyVault_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") + r := KeyVaultResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("access_policy.0.key_permissions.0").HasValue("create"), + check.That(data.ResourceName).Key("access_policy.0.secret_permissions.0").HasValue("set"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), + }, + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("access_policy.0.key_permissions.0").HasValue("get"), + check.That(data.ResourceName).Key("access_policy.0.secret_permissions.0").HasValue("get"), + check.That(data.ResourceName).Key("enabled_for_deployment").HasValue("true"), + check.That(data.ResourceName).Key("enabled_for_disk_encryption").HasValue("true"), + check.That(data.ResourceName).Key("enabled_for_template_deployment").HasValue("true"), + check.That(data.ResourceName).Key("enable_rbac_authorization").HasValue("true"), + check.That(data.ResourceName).Key("tags.environment").HasValue("Staging"), + ), + }, + { + Config: r.noAccessPolicyBlocks(data), + Check: resource.ComposeTestCheckFunc( + // There are no access_policy blocks in this configuration + // at all, which means to ignore any existing policies and + // so the one created in previous steps is still present. 
+ check.That(data.ResourceName).Key("access_policy.#").HasValue("1"), + ), + }, + { + Config: r.accessPolicyExplicitZero(data), + Check: resource.ComposeTestCheckFunc( + // This config explicitly sets access_policy = [], which + // means to delete any existing policies. + check.That(data.ResourceName).Key("access_policy.#").HasValue("0"), + ), + }, + }) +} + +func TestAccKeyVault_updateContacts(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") + r := KeyVaultResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.updateContacts(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccKeyVault_justCert(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") + r := KeyVaultResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.justCert(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("access_policy.0.certificate_permissions.0").HasValue("get"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccKeyVault_softDeleteEnabled(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") + r := KeyVaultResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.softDelete(data, true), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("soft_delete_enabled").HasValue("true"), + check.That(data.ResourceName).Key("purge_protection_enabled").HasValue("false"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccKeyVault_softDeleteViaUpdate(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") + r := KeyVaultResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.softDelete(data, false), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("soft_delete_enabled").HasValue("false"), + check.That(data.ResourceName).Key("purge_protection_enabled").HasValue("false"), + ), + }, + data.ImportStep(), + { + Config: r.softDelete(data, true), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("soft_delete_enabled").HasValue("true"), + check.That(data.ResourceName).Key("purge_protection_enabled").HasValue("false"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccKeyVault_softDeleteAttemptToDisable(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") + r := KeyVaultResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.softDelete(data, true), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("soft_delete_enabled").HasValue("true"), + check.That(data.ResourceName).Key("purge_protection_enabled").HasValue("false"), + ), + }, + data.ImportStep(), + { + Config: r.softDelete(data, false), + ExpectError: regexp.MustCompile("once Soft Delete has been Enabled it's not possible to disable it"), + }, + }) +} + +func 
TestAccKeyVault_softDeleteRecovery(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") + r := KeyVaultResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + // create it regularly + Config: r.softDelete(data, true), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("soft_delete_enabled").HasValue("true"), + check.That(data.ResourceName).Key("purge_protection_enabled").HasValue("false"), + ), + }, + data.ImportStep(), + { + // delete the key vault + Config: r.softDeleteAbsent(data), + }, + { + // attempting to re-create it requires recovery, which is enabled by default + Config: r.softDelete(data, true), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("soft_delete_enabled").HasValue("true"), + check.That(data.ResourceName).Key("purge_protection_enabled").HasValue("false"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccKeyVault_softDeleteRecoveryDisabled(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") + r := KeyVaultResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + // create it regularly + Config: r.softDeleteRecoveryDisabled(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("soft_delete_enabled").HasValue("true"), + check.That(data.ResourceName).Key("purge_protection_enabled").HasValue("false"), + ), + }, + data.ImportStep(), + { + // delete the key vault + Config: r.softDeleteAbsent(data), + }, + { + // attempting to re-create it requires recovery, which is disabled in this configuration, so this should error + Config: r.softDeleteRecoveryDisabled(data), + ExpectError: regexp.MustCompile("An existing soft-deleted Key Vault exists with the Name"), + }, + }) +} + +func TestAccKeyVault_purgeProtectionEnabled(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") + r := KeyVaultResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.purgeProtection(data, true), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("purge_protection_enabled").HasValue("true"), + check.That(data.ResourceName).Key("soft_delete_enabled").HasValue("true"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccKeyVault_purgeProtectionAndSoftDeleteEnabled(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") + r := KeyVaultResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.purgeProtectionAndSoftDelete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("purge_protection_enabled").HasValue("true"), + check.That(data.ResourceName).Key("soft_delete_enabled").HasValue("true"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccKeyVault_purgeProtectionViaUpdate(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") + r := KeyVaultResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.purgeProtection(data, false), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("purge_protection_enabled").HasValue("false"), + check.That(data.ResourceName).Key("soft_delete_enabled").HasValue("false"), + ), + }, + data.ImportStep(), + { + Config: 
r.purgeProtection(data, true), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("purge_protection_enabled").HasValue("true"), + check.That(data.ResourceName).Key("soft_delete_enabled").HasValue("true"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccKeyVault_purgeProtectionAttemptToDisable(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") + r := KeyVaultResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.purgeProtection(data, true), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("purge_protection_enabled").HasValue("true"), + check.That(data.ResourceName).Key("soft_delete_enabled").HasValue("true"), + ), + }, + data.ImportStep(), + { + Config: r.purgeProtection(data, false), + ExpectError: regexp.MustCompile("once Purge Protection has been Enabled it's not possible to disable it"), + }, + }) +} + +func TestAccKeyVault_deletePolicy(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") + r := KeyVaultResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.noPolicy(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("access_policy.#").HasValue("0"), + ), + }, + data.ImportStep(), + }) +} + +func (t KeyVaultResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + name := id.Path["vaults"] + + resp, err := clients.KeyVault.VaultsClient.Get(ctx, resourceGroup, name) + if err != nil { + return nil, fmt.Errorf("reading Key Vault (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func testCheckKeyVaultDisappears(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + // Ensure we have enough information in state to look up in API + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + + vaultName := rs.Primary.Attributes["name"] + resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] + if !hasResourceGroup { + return fmt.Errorf("Bad: no resource group found in state for vault: %s", vaultName) + } + + client := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + resp, err := client.Delete(ctx, resourceGroup, vaultName) + if err != nil { + if response.WasNotFound(resp.Response) { + return nil + } + + return fmt.Errorf("Bad: Delete on keyVaultClient: %+v", err) + } + + return nil + } +} + +func (KeyVaultResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "vault%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "premium" + 
soft_delete_enabled = true + soft_delete_retention_days = 7 + + access_policy { + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + + certificate_permissions = [ + "managecontacts", + ] + + key_permissions = [ + "create", + ] + + secret_permissions = [ + "set", + ] + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r KeyVaultResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_key_vault" "import" { + name = azurerm_key_vault.test.name + location = azurerm_key_vault.test.location + resource_group_name = azurerm_key_vault.test.resource_group_name + tenant_id = azurerm_key_vault.test.tenant_id + sku_name = "premium" + soft_delete_enabled = true + soft_delete_retention_days = 7 + + access_policy { + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + + key_permissions = [ + "create", + ] + + secret_permissions = [ + "set", + ] + } +} +`, r.basic(data)) +} + +func (KeyVaultResource) networkAclsTemplate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test_a" { + name = "acctestsubneta%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" + service_endpoints = ["Microsoft.KeyVault"] +} + +resource "azurerm_subnet" "test_b" { + name = "acctestsubnetb%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.4.0/24" + service_endpoints = ["Microsoft.KeyVault"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (KeyVaultResource) networkAcls(data acceptance.TestData) string { + template := KeyVaultResource{}.networkAclsTemplate(data) + return fmt.Sprintf(` +%s + +resource "azurerm_key_vault" "test" { + name = "vault%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "premium" + soft_delete_enabled = true + soft_delete_retention_days = 7 + + access_policy { + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + + key_permissions = [ + "create", + ] + + secret_permissions = [ + "set", + ] + } + + network_acls { + default_action = "Deny" + bypass = "None" + virtual_network_subnet_ids = [azurerm_subnet.test_a.id, azurerm_subnet.test_b.id] + } +} +`, template, data.RandomInteger) +} + +func (r KeyVaultResource) networkAclsUpdated(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_key_vault" "test" { + name = "vault%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "premium" + soft_delete_enabled = true + soft_delete_retention_days = 7 + + access_policy 
{ + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + + key_permissions = [ + "create", + ] + + secret_permissions = [ + "set", + ] + } + + network_acls { + default_action = "Allow" + bypass = "AzureServices" + ip_rules = ["123.0.0.102/32"] + virtual_network_subnet_ids = [azurerm_subnet.test_a.id] + } +} +`, r.networkAclsTemplate(data), data.RandomInteger) +} + +func (r KeyVaultResource) networkAclsAllowed(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_key_vault" "test" { + name = "vault%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "premium" + soft_delete_enabled = true + soft_delete_retention_days = 7 + + access_policy { + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + + key_permissions = [ + "create", + ] + + secret_permissions = [ + "set", + ] + } + + network_acls { + default_action = "Allow" + bypass = "AzureServices" + } +} +`, r.networkAclsTemplate(data), data.RandomInteger) +} + +func (KeyVaultResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "vault%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "premium" + soft_delete_enabled = true + soft_delete_retention_days = 7 + + access_policy { + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + + key_permissions = [ + "get", + ] + + secret_permissions = [ + "get", + ] + } + + enabled_for_deployment = true + enabled_for_disk_encryption = true + enabled_for_template_deployment = true + enable_rbac_authorization = true + + tags = { + environment = "Staging" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (KeyVaultResource) noAccessPolicyBlocks(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "vault%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "premium" + soft_delete_enabled = true + soft_delete_retention_days = 7 + + enabled_for_deployment = true + enabled_for_disk_encryption = true + enabled_for_template_deployment = true + enable_rbac_authorization = true + + tags = { + environment = "Staging" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (KeyVaultResource) accessPolicyExplicitZero(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "vault%d" + location = azurerm_resource_group.test.location + resource_group_name = 
azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "premium" + soft_delete_enabled = true + soft_delete_retention_days = 7 + + access_policy = [] + + enabled_for_deployment = true + enabled_for_disk_encryption = true + enabled_for_template_deployment = true + enable_rbac_authorization = true + + tags = { + environment = "Staging" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (KeyVaultResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "vault%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "premium" + soft_delete_enabled = true + soft_delete_retention_days = 7 + + access_policy { + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + application_id = data.azurerm_client_config.current.client_id + + certificate_permissions = [ + "get", + ] + + key_permissions = [ + "get", + ] + + secret_permissions = [ + "get", + ] + } + + tags = { + environment = "Production" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (KeyVaultResource) justCert(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "vault%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "premium" + soft_delete_enabled = true + soft_delete_retention_days = 7 + + access_policy { + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + + certificate_permissions = [ + "get", + ] + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (KeyVaultResource) accessPolicyUpperLimit(data acceptance.TestData) string { + var storageAccountConfigs string + var accessPoliciesConfigs string + + for i := 1; i <= 20; i++ { + storageAccountConfigs += testAccKeyVault_generateStorageAccountConfigs(i, data.RandomString) + accessPoliciesConfigs += testAccKeyVault_generateAccessPolicyConfigs(i) + } + + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "vault%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "premium" + soft_delete_enabled = true + soft_delete_retention_days = 7 + + %s +} + +%s +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, accessPoliciesConfigs, storageAccountConfigs) +} + +func testAccKeyVault_generateStorageAccountConfigs(accountNum int, rs string) string { + return fmt.Sprintf(` +resource "azurerm_storage_account" "test%d" { + name = "testsa%s%d" + resource_group_name = 
azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "GRS" + + identity { + type = "SystemAssigned" + } + + tags = { + environment = "testing" + } +} +`, accountNum, rs, accountNum) +} + +func testAccKeyVault_generateAccessPolicyConfigs(accountNum int) string { + // due to a weird terraform fmt issue where: + // "${azurerm_storage_account.test%d.identity.0.principal_id}" + // becomes + // "${azurerm_storage_account.test % d.identity.0.principal_id}" + // + // lets inject this separately so we can run terrafmt on this file + + oid := fmt.Sprintf("${azurerm_storage_account.test%d.identity.0.principal_id}", accountNum) + + return fmt.Sprintf(` +access_policy { + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = "%s" + + key_permissions = ["get", "create", "delete", "list", "restore", "recover", "unwrapkey", "wrapkey", "purge", "encrypt", "decrypt", "sign", "verify"] + secret_permissions = ["get"] +} +`, oid) +} + +func (KeyVaultResource) purgeProtection(data acceptance.TestData, enabled bool) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" {} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "vault%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "premium" + soft_delete_enabled = "%t" + purge_protection_enabled = "%t" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, enabled, enabled) +} + +func (KeyVaultResource) softDelete(data acceptance.TestData, enabled bool) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "vault%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "premium" + soft_delete_enabled = %t +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, enabled) +} + +func (KeyVaultResource) softDeleteAbsent(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features { + key_vault { + recover_soft_deleted_key_vaults = false + } + } +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (KeyVaultResource) softDeleteRecoveryDisabled(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features { + key_vault { + recover_soft_deleted_key_vaults = false + } + } +} + +data "azurerm_client_config" "current" {} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "vault%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "premium" + soft_delete_enabled = true + soft_delete_retention_days = 7 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (KeyVaultResource) 
purgeProtectionAndSoftDelete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" {} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "vault%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "premium" + soft_delete_enabled = true + soft_delete_retention_days = 7 + purge_protection_enabled = true +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (KeyVaultResource) noPolicy(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "vault%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "premium" + soft_delete_enabled = true + soft_delete_retention_days = 7 + + access_policy = [] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (KeyVaultResource) updateContacts(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-kv-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "vault%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "premium" + soft_delete_enabled = true + soft_delete_retention_days = 7 + + access_policy { + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + + certificate_permissions = [ + "managecontacts", + ] + + key_permissions = [ + "create", + ] + + secret_permissions = [ + "set", + ] + } + + contact { + email = "example@example.com" + name = "example" + phone = "01234567890" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/keyvault/key_vault_secret_data_source.go b/azurerm/internal/services/keyvault/key_vault_secret_data_source.go index 0f9c30503833..6513a6ad3fb7 100644 --- a/azurerm/internal/services/keyvault/key_vault_secret_data_source.go +++ b/azurerm/internal/services/keyvault/key_vault_secret_data_source.go @@ -12,9 +12,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmKeyVaultSecret() *schema.Resource { +func dataSourceKeyVaultSecret() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmKeyVaultSecretRead, + Read: dataSourceKeyVaultSecretRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -54,7 +54,7 @@ func dataSourceArmKeyVaultSecret() *schema.Resource { } } -func dataSourceArmKeyVaultSecretRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceKeyVaultSecretRead(d *schema.ResourceData, meta interface{}) error { vaultClient := meta.(*clients.Client).KeyVault.VaultsClient client := meta.(*clients.Client).KeyVault.ManagementClient ctx, cancel := 
timeouts.ForRead(meta.(*clients.Client).StopContext, d) diff --git a/azurerm/internal/services/keyvault/key_vault_secret_data_source_test.go b/azurerm/internal/services/keyvault/key_vault_secret_data_source_test.go new file mode 100644 index 000000000000..8e2abadf5bc3 --- /dev/null +++ b/azurerm/internal/services/keyvault/key_vault_secret_data_source_test.go @@ -0,0 +1,66 @@ +package keyvault_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type KeyVaultSecretDataSource struct { +} + +func TestAccDataSourceKeyVaultSecret_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_key_vault_secret", "test") + r := KeyVaultSecretDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("value").HasValue("rick-and-morty"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), + }, + }) +} + +func TestAccDataSourceKeyVaultSecret_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_key_vault_secret", "test") + r := KeyVaultSecretDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("value").HasValue(""), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.hello").HasValue("world"), + ), + }, + }) +} + +func (KeyVaultSecretDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_key_vault_secret" "test" { + name = azurerm_key_vault_secret.test.name + key_vault_id = azurerm_key_vault.test.id +} +`, KeyVaultSecretResource{}.basic(data)) +} + +func (KeyVaultSecretDataSource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_key_vault_secret" "test" { + name = azurerm_key_vault_secret.test.name + key_vault_id = azurerm_key_vault.test.id +} +`, KeyVaultSecretResource{}.complete(data)) +} diff --git a/azurerm/internal/services/keyvault/key_vault_secret_resource.go b/azurerm/internal/services/keyvault/key_vault_secret_resource.go index 536af3cef9f6..b30148340c7c 100644 --- a/azurerm/internal/services/keyvault/key_vault_secret_resource.go +++ b/azurerm/internal/services/keyvault/key_vault_secret_resource.go @@ -1,12 +1,13 @@ package keyvault import ( + "context" "fmt" "log" - "net/http" "time" "github.com/Azure/azure-sdk-for-go/services/keyvault/2016-10-01/keyvault" + "github.com/Azure/go-autorest/autorest" "github.com/Azure/go-autorest/autorest/date" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" @@ -19,14 +20,14 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmKeyVaultSecret() *schema.Resource { +func resourceKeyVaultSecret() *schema.Resource { return &schema.Resource{ - Create: resourceArmKeyVaultSecretCreate, - Read: resourceArmKeyVaultSecretRead, - Update: resourceArmKeyVaultSecretUpdate, - Delete: resourceArmKeyVaultSecretDelete, + Create: resourceKeyVaultSecretCreate, + Read: resourceKeyVaultSecretRead, + Update: resourceKeyVaultSecretUpdate, + Delete: resourceKeyVaultSecretDelete, Importer: &schema.ResourceImporter{ - State: 
resourceArmKeyVaultChildResourceImporter, + State: nestedItemResourceImporter, }, Timeouts: &schema.ResourceTimeout{ @@ -84,7 +85,7 @@ func resourceArmKeyVaultSecret() *schema.Resource { } } -func resourceArmKeyVaultSecretCreate(d *schema.ResourceData, meta interface{}) error { +func resourceKeyVaultSecretCreate(d *schema.ResourceData, meta interface{}) error { vaultClient := meta.(*clients.Client).KeyVault.VaultsClient client := meta.(*clients.Client).KeyVault.ManagementClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) @@ -178,10 +179,10 @@ func resourceArmKeyVaultSecretCreate(d *schema.ResourceData, meta interface{}) e d.SetId(*read.ID) - return resourceArmKeyVaultSecretRead(d, meta) + return resourceKeyVaultSecretRead(d, meta) } -func resourceArmKeyVaultSecretUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceKeyVaultSecretUpdate(d *schema.ResourceData, meta interface{}) error { keyVaultClient := meta.(*clients.Client).KeyVault.VaultsClient client := meta.(*clients.Client).KeyVault.ManagementClient ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) @@ -266,10 +267,10 @@ func resourceArmKeyVaultSecretUpdate(d *schema.ResourceData, meta interface{}) e // the ID is suffixed with the secret version d.SetId(*read.ID) - return resourceArmKeyVaultSecretRead(d, meta) + return resourceKeyVaultSecretRead(d, meta) } -func resourceArmKeyVaultSecretRead(d *schema.ResourceData, meta interface{}) error { +func resourceKeyVaultSecretRead(d *schema.ResourceData, meta interface{}) error { keyVaultClient := meta.(*clients.Client).KeyVault.VaultsClient client := meta.(*clients.Client).KeyVault.ManagementClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) @@ -335,7 +336,7 @@ func resourceArmKeyVaultSecretRead(d *schema.ResourceData, meta interface{}) err return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmKeyVaultSecretDelete(d *schema.ResourceData, meta interface{}) error { +func resourceKeyVaultSecretDelete(d *schema.ResourceData, meta interface{}) error { keyVaultClient := meta.(*clients.Client).KeyVault.VaultsClient client := meta.(*clients.Client).KeyVault.ManagementClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) @@ -364,29 +365,43 @@ func resourceArmKeyVaultSecretDelete(d *schema.ResourceData, meta interface{}) e return nil } - _, err = client.DeleteSecret(ctx, id.KeyVaultBaseUrl, id.Name) - return err + shouldPurge := meta.(*clients.Client).Features.KeyVault.PurgeSoftDeleteOnDestroy + description := fmt.Sprintf("Secret %q (Key Vault %q)", id.Name, id.KeyVaultBaseUrl) + deleter := deleteAndPurgeSecret{ + client: client, + keyVaultUri: id.KeyVaultBaseUrl, + name: id.Name, + } + if err := deleteAndOptionallyPurge(ctx, description, shouldPurge, deleter); err != nil { + return err + } + + return nil } -func keyVaultChildItemRefreshFunc(secretUri string) resource.StateRefreshFunc { - return func() (interface{}, string, error) { - log.Printf("[DEBUG] Checking to see if KeyVault Secret %q is available..", secretUri) +var _ deleteAndPurgeNestedItem = deleteAndPurgeSecret{} - var PTransport = &http.Transport{Proxy: http.ProxyFromEnvironment} +type deleteAndPurgeSecret struct { + client *keyvault.BaseClient + keyVaultUri string + name string +} - client := &http.Client{ - Transport: PTransport, - } +func (d deleteAndPurgeSecret) DeleteNestedItem(ctx context.Context) (autorest.Response, error) { + resp, err := d.client.DeleteSecret(ctx, d.keyVaultUri, d.name) + return 
resp.Response, err +} - conn, err := client.Get(secretUri) - if err != nil { - log.Printf("[DEBUG] Didn't find KeyVault secret at %q", secretUri) - return nil, "pending", fmt.Errorf("Error checking secret at %q: %s", secretUri, err) - } +func (d deleteAndPurgeSecret) NestedItemHasBeenDeleted(ctx context.Context) (autorest.Response, error) { + resp, err := d.client.GetSecret(ctx, d.keyVaultUri, d.name, "") + return resp.Response, err +} - defer conn.Body.Close() +func (d deleteAndPurgeSecret) PurgeNestedItem(ctx context.Context) (autorest.Response, error) { + return d.client.PurgeDeletedSecret(ctx, d.keyVaultUri, d.name) +} - log.Printf("[DEBUG] Found KeyVault Secret %q", secretUri) - return "available", "available", nil - } +func (d deleteAndPurgeSecret) NestedItemHasBeenPurged(ctx context.Context) (autorest.Response, error) { + resp, err := d.client.GetDeletedSecret(ctx, d.keyVaultUri, d.name) + return resp.Response, err } diff --git a/azurerm/internal/services/keyvault/key_vault_secret_resource_test.go b/azurerm/internal/services/keyvault/key_vault_secret_resource_test.go new file mode 100644 index 000000000000..5693aab06c0c --- /dev/null +++ b/azurerm/internal/services/keyvault/key_vault_secret_resource_test.go @@ -0,0 +1,567 @@ +package keyvault_test + +import ( + "context" + "fmt" + "log" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/keyvault/2016-10-01/keyvault" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type KeyVaultSecretResource struct { +} + +func TestAccKeyVaultSecret_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_secret", "test") + r := KeyVaultSecretResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("value").HasValue("rick-and-morty"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccKeyVaultSecret_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_secret", "test") + r := KeyVaultSecretResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("value").HasValue("rick-and-morty"), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_key_vault_secret"), + }, + }) +} + +func TestAccKeyVaultSecret_disappears(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_secret", "test") + r := KeyVaultSecretResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + testCheckKeyVaultSecretDisappears(data.ResourceName), + ), + ExpectNonEmptyPlan: true, + }, + }) +} + +func TestAccKeyVaultSecret_disappearsWhenParentKeyVaultDeleted(t *testing.T) { + data := acceptance.BuildTestData(t, 
"azurerm_key_vault_secret", "test") + r := KeyVaultSecretResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckKeyVaultDisappears("azurerm_key_vault.test"), + ), + ExpectNonEmptyPlan: true, + }, + }) +} + +func TestAccKeyVaultSecret_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_secret", "test") + r := KeyVaultSecretResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("not_before_date").HasValue("2019-01-01T01:02:03Z"), + check.That(data.ResourceName).Key("expiration_date").HasValue("2020-01-01T01:02:03Z"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.hello").HasValue("world"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccKeyVaultSecret_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_secret", "test") + r := KeyVaultSecretResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("value").HasValue("rick-and-morty"), + ), + }, + { + Config: r.basicUpdated(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("value").HasValue("szechuan"), + ), + }, + }) +} + +func TestAccKeyVaultSecret_updatingValueChangedExternally(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_secret", "test") + r := KeyVaultSecretResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("value").HasValue("rick-and-morty"), + updateKeyVaultSecretValue(data.ResourceName, "mad-scientist"), + ), + ExpectNonEmptyPlan: true, + }, + { + Config: r.updateTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.updateTags(data), + PlanOnly: true, + }, + data.ImportStep(), + }) +} + +func TestAccKeyVaultSecret_recovery(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_secret", "test") + r := KeyVaultSecretResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.softDeleteRecovery(data, false), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("value").HasValue("rick-and-morty"), + ), + }, + { + Config: r.softDeleteRecovery(data, false), + Destroy: true, + }, + { + // purge true here to make sure when we end the test there's no soft-deleted items left behind + Config: r.softDeleteRecovery(data, true), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("value").HasValue("rick-and-morty"), + ), + }, + }) +} + +func TestAccKeyVaultSecret_withExternalAccessPolicy(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_key_vault_secret", "test") + r := KeyVaultSecretResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withExternalAccessPolicy(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + 
data.ImportStep(), + { + Config: r.withExternalAccessPolicyUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t KeyVaultSecretResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + client := clients.KeyVault.ManagementClient + keyVaultClient := clients.KeyVault.VaultsClient + + id, err := azure.ParseKeyVaultChildID(state.ID) + if err != nil { + return nil, err + } + + keyVaultId, err := azure.GetKeyVaultIDFromBaseUrl(ctx, keyVaultClient, id.KeyVaultBaseUrl) + if err != nil || keyVaultId == nil { + return nil, fmt.Errorf("retrieving the Resource ID the Key Vault at URL %q: %s", id.KeyVaultBaseUrl, err) + } + + ok, err := azure.KeyVaultExists(ctx, keyVaultClient, *keyVaultId) + if err != nil || !ok { + return nil, fmt.Errorf("checking if key vault %q for Certificate %q in Vault at url %q exists: %v", *keyVaultId, id.Name, id.KeyVaultBaseUrl, err) + } + + // we always want to get the latest version + resp, err := client.GetSecret(ctx, id.KeyVaultBaseUrl, id.Name, "") + if err != nil { + return nil, fmt.Errorf("making Read request on Azure KeyVault Secret %s: %+v", id.Name, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func testCheckKeyVaultSecretDisappears(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.ManagementClient + vaultClient := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + // Ensure we have enough information in state to look up in API + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + name := rs.Primary.Attributes["name"] + keyVaultId := rs.Primary.Attributes["key_vault_id"] + vaultBaseUrl, err := azure.GetKeyVaultBaseUrlFromID(ctx, vaultClient, keyVaultId) + if err != nil { + return fmt.Errorf("Error looking up Secret %q vault url from id %q: %+v", name, keyVaultId, err) + } + + ok, err = azure.KeyVaultExists(ctx, acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient, keyVaultId) + if err != nil { + return fmt.Errorf("Error checking if key vault %q for Secret %q in Vault at url %q exists: %v", keyVaultId, name, vaultBaseUrl, err) + } + if !ok { + log.Printf("[DEBUG] Secret %q Key Vault %q was not found in Key Vault at URI %q ", name, keyVaultId, vaultBaseUrl) + return nil + } + + resp, err := client.DeleteSecret(ctx, vaultBaseUrl, name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return nil + } + + return fmt.Errorf("Bad: Delete on keyVaultManagementClient: %+v", err) + } + + return nil + } +} + +func updateKeyVaultSecretValue(resourceName, value string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.ManagementClient + vaultClient := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + // Ensure we have enough information in state to look up in API + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + name := rs.Primary.Attributes["name"] + keyVaultId := rs.Primary.Attributes["key_vault_id"] + vaultBaseUrl, err := azure.GetKeyVaultBaseUrlFromID(ctx, 
vaultClient, keyVaultId) + if err != nil { + return fmt.Errorf("Error looking up Secret %q vault url from id %q: %+v", name, keyVaultId, err) + } + + ok, err = azure.KeyVaultExists(ctx, acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient, keyVaultId) + if err != nil { + return fmt.Errorf("Error checking if key vault %q for Secret %q in Vault at url %q exists: %v", keyVaultId, name, vaultBaseUrl, err) + } + if !ok { + log.Printf("[DEBUG] Secret %q Key Vault %q was not found in Key Vault at URI %q ", name, keyVaultId, vaultBaseUrl) + return nil + } + + updated := keyvault.SecretSetParameters{ + Value: utils.String(value), + } + if _, err = client.SetSecret(ctx, vaultBaseUrl, name, updated); err != nil { + return fmt.Errorf("updating secret: %+v", err) + } + return nil + } +} + +func (r KeyVaultSecretResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +%s + +resource "azurerm_key_vault_secret" "test" { + name = "secret-%s" + value = "rick-and-morty" + key_vault_id = azurerm_key_vault.test.id +} +`, r.template(data), data.RandomString) +} + +func (r KeyVaultSecretResource) updateTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +%s + +resource "azurerm_key_vault_secret" "test" { + name = "secret-%s" + value = "mad-scientist" + key_vault_id = azurerm_key_vault.test.id + + tags = { + Rick = "Morty" + } +} +`, r.template(data), data.RandomString) +} + +func (r KeyVaultSecretResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_key_vault_secret" "import" { + name = azurerm_key_vault_secret.test.name + value = azurerm_key_vault_secret.test.value + key_vault_id = azurerm_key_vault_secret.test.key_vault_id +} +`, r.basic(data)) +} + +func (r KeyVaultSecretResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +%s + +resource "azurerm_key_vault_secret" "test" { + name = "secret-%s" + value = "" + key_vault_id = azurerm_key_vault.test.id + content_type = "application/xml" + not_before_date = "2019-01-01T01:02:03Z" + expiration_date = "2020-01-01T01:02:03Z" + + tags = { + "hello" = "world" + } +} +`, r.template(data), data.RandomString) +} + +func (r KeyVaultSecretResource) basicUpdated(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +%s + +resource "azurerm_key_vault_secret" "test" { + name = "secret-%s" + value = "szechuan" + key_vault_id = azurerm_key_vault.test.id +} +`, r.template(data), data.RandomString) +} + +func (r KeyVaultSecretResource) softDeleteRecovery(data acceptance.TestData, purge bool) string { + return fmt.Sprintf(` +provider "azurerm" { + features { + key_vault { + purge_soft_delete_on_destroy = "%t" + recover_soft_deleted_key_vaults = true + } + } +} + +%s + +resource "azurerm_key_vault_secret" "test" { + name = "secret-%s" + value = "rick-and-morty" + key_vault_id = azurerm_key_vault.test.id +} +`, purge, r.template(data), data.RandomString) +} + +func (KeyVaultSecretResource) withExternalAccessPolicy(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "acctestkv-%s" + location = azurerm_resource_group.test.location + resource_group_name = 
azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "premium" + soft_delete_enabled = true + soft_delete_retention_days = 7 + + tags = { + environment = "Production" + } +} + +resource "azurerm_key_vault_access_policy" "test" { + key_vault_id = azurerm_key_vault.test.id + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + key_permissions = [ + "create", + "get", + ] + secret_permissions = [ + "set", + "get", + "delete", + "purge", + "recover" + ] +} + +resource "azurerm_key_vault_secret" "test" { + name = "secret-%s" + value = "rick-and-morty" + key_vault_id = azurerm_key_vault.test.id + depends_on = [azurerm_key_vault_access_policy.test] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) +} + +func (KeyVaultSecretResource) withExternalAccessPolicyUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "acctestkv-%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "premium" + soft_delete_enabled = true + soft_delete_retention_days = 7 + + tags = { + environment = "Production" + } +} + +resource "azurerm_key_vault_access_policy" "test" { + key_vault_id = azurerm_key_vault.test.id + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + key_permissions = [ + "create", + "get", + ] + secret_permissions = [ + "set", + "get", + "delete", + "purge", + "recover" + ] +} + +resource "azurerm_key_vault_secret" "test" { + name = "secret-%s" + value = "rick-and-morty" + key_vault_id = azurerm_key_vault.test.id + depends_on = [azurerm_key_vault_access_policy.test] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) +} + +func (KeyVaultSecretResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +data "azurerm_client_config" "current" {} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "acctestkv-%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "premium" + soft_delete_enabled = true + soft_delete_retention_days = 7 + + access_policy { + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + + key_permissions = [ + "get", + ] + + secret_permissions = [ + "get", + "delete", + "purge", + "recover", + "set", + ] + } + + tags = { + environment = "Production" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} diff --git a/azurerm/internal/services/keyvault/parse/certificate_issuer.go b/azurerm/internal/services/keyvault/parse/certificate_issuer.go new file mode 100644 index 000000000000..60320016cc98 --- /dev/null +++ b/azurerm/internal/services/keyvault/parse/certificate_issuer.go @@ -0,0 +1,41 @@ +package parse + +import ( + "fmt" + "net/url" + "strings" +) + +type IssuerId struct { + KeyVaultBaseUrl string + Name string +} + +func IssuerID(id string) (*IssuerId, 
error) { + // example: https://example-keyvault.vault.azure.net/certificates/issuers/ExampleIssuer + idURL, err := url.ParseRequestURI(id) + if err != nil { + return nil, fmt.Errorf("Cannot parse Azure KeyVault Certificate Issuer Id: %s", err) + } + + path := idURL.Path + + path = strings.TrimPrefix(path, "/") + path = strings.TrimSuffix(path, "/") + + components := strings.Split(path, "/") + + if len(components) != 3 { + return nil, fmt.Errorf("Azure KeyVault Certificate Issuer Id should have 3 segments, got %d: '%s'", len(components), path) + } + if components[0] != "certificates" || components[1] != "issuers" { + return nil, fmt.Errorf("Key Vault Certificate Issuer ID path must begin with %q", "/certificates/issuers") + } + + issuerId := IssuerId{ + KeyVaultBaseUrl: fmt.Sprintf("%s://%s/", idURL.Scheme, idURL.Host), + Name: components[2], + } + + return &issuerId, nil +} diff --git a/azurerm/internal/services/keyvault/parse/key_vault_certificate_issuer.go b/azurerm/internal/services/keyvault/parse/key_vault_certificate_issuer.go deleted file mode 100644 index c11c0bc17d2a..000000000000 --- a/azurerm/internal/services/keyvault/parse/key_vault_certificate_issuer.go +++ /dev/null @@ -1,41 +0,0 @@ -package parse - -import ( - "fmt" - "net/url" - "strings" -) - -type KeyVaultCertificateIssuerID struct { - KeyVaultBaseUrl string - Name string -} - -func KeyVaultCertificateIssuerId(id string) (*KeyVaultCertificateIssuerID, error) { - // example: https://example-keyvault.vault.azure.net/certificates/issuers/ExampleIssuer - idURL, err := url.ParseRequestURI(id) - if err != nil { - return nil, fmt.Errorf("Cannot parse Azure KeyVault Certificate Issuer Id: %s", err) - } - - path := idURL.Path - - path = strings.TrimPrefix(path, "/") - path = strings.TrimSuffix(path, "/") - - components := strings.Split(path, "/") - - if len(components) != 3 { - return nil, fmt.Errorf("Azure KeyVault Certificate Issuer Id should have 3 segments, got %d: '%s'", len(components), path) - } - if components[0] != "certificates" || components[1] != "issuers" { - return nil, fmt.Errorf("Key Vault Certificate Issuer ID path must begin with %q", "/certificates/issuers") - } - - issuerId := KeyVaultCertificateIssuerID{ - KeyVaultBaseUrl: fmt.Sprintf("%s://%s/", idURL.Scheme, idURL.Host), - Name: components[2], - } - - return &issuerId, nil -} diff --git a/azurerm/internal/services/keyvault/parse/key_vault_id.go b/azurerm/internal/services/keyvault/parse/key_vault_id.go deleted file mode 100644 index 65944f189241..000000000000 --- a/azurerm/internal/services/keyvault/parse/key_vault_id.go +++ /dev/null @@ -1,31 +0,0 @@ -package parse - -import ( - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type KeyVaultId struct { - Name string - ResourceGroup string -} - -func KeyVaultID(input string) (*KeyVaultId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, err - } - - account := KeyVaultId{ - ResourceGroup: id.ResourceGroup, - } - - if account.Name, err = id.PopSegment("vaults"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &account, nil -} diff --git a/azurerm/internal/services/keyvault/parse/key_vault_id_test.go b/azurerm/internal/services/keyvault/parse/key_vault_id_test.go deleted file mode 100644 index 9c73991d5cf2..000000000000 --- a/azurerm/internal/services/keyvault/parse/key_vault_id_test.go +++ /dev/null @@ -1,73 +0,0 @@ -package parse - -import ( - "testing" 
-) - -func TestKeyVaultID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *KeyVaultId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Vaults Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.KeyVault/vaults/", - Expected: nil, - }, - { - Name: "Key Vault ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.KeyVault/vaults/vault1", - Expected: &KeyVaultId{ - Name: "vault1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.KeyVault/Vaults/vault1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := KeyVaultID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/keyvault/parse/vault.go b/azurerm/internal/services/keyvault/parse/vault.go new file mode 100644 index 000000000000..ab4fcfd14a9e --- /dev/null +++ b/azurerm/internal/services/keyvault/parse/vault.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type VaultId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewVaultID(subscriptionId, resourceGroup, name string) VaultId { + return VaultId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id VaultId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Vault", segmentsStr) +} + +func (id VaultId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.KeyVault/vaults/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// VaultID parses a Vault ID into an VaultId struct +func VaultID(input string) (*VaultId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := VaultId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("vaults"); err != nil { + return nil, 
err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/keyvault/parse/vault_test.go b/azurerm/internal/services/keyvault/parse/vault_test.go new file mode 100644 index 000000000000..ca7731844f83 --- /dev/null +++ b/azurerm/internal/services/keyvault/parse/vault_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = VaultId{} + +func TestVaultIDFormatter(t *testing.T) { + actual := NewVaultID("12345678-1234-9876-4563-123456789012", "resGroup1", "vault1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.KeyVault/vaults/vault1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestVaultID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *VaultId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.KeyVault/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.KeyVault/vaults/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.KeyVault/vaults/vault1", + Expected: &VaultId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "vault1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.KEYVAULT/VAULTS/VAULT1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := VaultID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/keyvault/registration.go b/azurerm/internal/services/keyvault/registration.go index c63af715474a..de4066c960af 100644 --- a/azurerm/internal/services/keyvault/registration.go +++ b/azurerm/internal/services/keyvault/registration.go @@ -21,23 +21,23 @@ func (r Registration) WebsiteCategories() []string { // SupportedDataSources returns the supported Data 
Sources supported by this Service func (r Registration) SupportedDataSources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_key_vault_access_policy": dataSourceArmKeyVaultAccessPolicy(), - "azurerm_key_vault_certificate": dataSourceArmKeyVaultCertificate(), - "azurerm_key_vault_certificate_issuer": dataSourceArmKeyVaultCertificateIssuer(), - "azurerm_key_vault_key": dataSourceArmKeyVaultKey(), - "azurerm_key_vault_secret": dataSourceArmKeyVaultSecret(), - "azurerm_key_vault": dataSourceArmKeyVault(), + "azurerm_key_vault_access_policy": dataSourceKeyVaultAccessPolicy(), + "azurerm_key_vault_certificate": dataSourceKeyVaultCertificate(), + "azurerm_key_vault_certificate_issuer": dataSourceKeyVaultCertificateIssuer(), + "azurerm_key_vault_key": dataSourceKeyVaultKey(), + "azurerm_key_vault_secret": dataSourceKeyVaultSecret(), + "azurerm_key_vault": dataSourceKeyVault(), } } // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_key_vault_access_policy": resourceArmKeyVaultAccessPolicy(), - "azurerm_key_vault_certificate": resourceArmKeyVaultCertificate(), - "azurerm_key_vault_certificate_issuer": resourceArmKeyVaultCertificateIssuer(), - "azurerm_key_vault_key": resourceArmKeyVaultKey(), - "azurerm_key_vault_secret": resourceArmKeyVaultSecret(), - "azurerm_key_vault": resourceArmKeyVault(), + "azurerm_key_vault_access_policy": resourceKeyVaultAccessPolicy(), + "azurerm_key_vault_certificate": resourceKeyVaultCertificate(), + "azurerm_key_vault_certificate_issuer": resourceKeyVaultCertificateIssuer(), + "azurerm_key_vault_key": resourceKeyVaultKey(), + "azurerm_key_vault_secret": resourceKeyVaultSecret(), + "azurerm_key_vault": resourceKeyVault(), } } diff --git a/azurerm/internal/services/keyvault/resourceids.go b/azurerm/internal/services/keyvault/resourceids.go new file mode 100644 index 000000000000..fdd0e5d636e5 --- /dev/null +++ b/azurerm/internal/services/keyvault/resourceids.go @@ -0,0 +1,3 @@ +package keyvault + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Vault -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.KeyVault/vaults/vault1 diff --git a/azurerm/internal/services/keyvault/tests/testdata/keyvaultcert.pfx b/azurerm/internal/services/keyvault/testdata/keyvaultcert.pfx similarity index 100% rename from azurerm/internal/services/keyvault/tests/testdata/keyvaultcert.pfx rename to azurerm/internal/services/keyvault/testdata/keyvaultcert.pfx diff --git a/azurerm/internal/services/keyvault/tests/key_vault_access_policy_data_source_test.go b/azurerm/internal/services/keyvault/tests/key_vault_access_policy_data_source_test.go deleted file mode 100644 index 3cb57a24ec93..000000000000 --- a/azurerm/internal/services/keyvault/tests/key_vault_access_policy_data_source_test.go +++ /dev/null @@ -1,147 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMKeyVaultAccessPolicy_key(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_key_vault_access_policy", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - 
Config: testAccDataSourceKeyVaultAccessPolicy("Key Management"), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "key_permissions.#", "9"), - resource.TestCheckNoResourceAttr(data.ResourceName, "secret_permissions"), - resource.TestCheckNoResourceAttr(data.ResourceName, "certificate_permissions"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMKeyVaultAccessPolicy_secret(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_key_vault_access_policy", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceKeyVaultAccessPolicy("Secret Management"), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckNoResourceAttr(data.ResourceName, "key_permissions"), - resource.TestCheckResourceAttr(data.ResourceName, "secret_permissions.#", "7"), - resource.TestCheckNoResourceAttr(data.ResourceName, "certificate_permissions"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMKeyVaultAccessPolicy_certificate(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_key_vault_access_policy", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceKeyVaultAccessPolicy("Certificate Management"), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckNoResourceAttr(data.ResourceName, "key_permissions"), - resource.TestCheckNoResourceAttr(data.ResourceName, "secret_permissions"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate_permissions.#", "12"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMKeyVaultAccessPolicy_keySecret(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_key_vault_access_policy", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceKeyVaultAccessPolicy("Key & Secret Management"), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "key_permissions.#", "9"), - resource.TestCheckResourceAttr(data.ResourceName, "secret_permissions.#", "7"), - resource.TestCheckNoResourceAttr(data.ResourceName, "certificate_permissions"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMKeyVaultAccessPolicy_keyCertificate(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_key_vault_access_policy", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceKeyVaultAccessPolicy("Key & Certificate Management"), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "key_permissions.#", "9"), - resource.TestCheckNoResourceAttr(data.ResourceName, "secret_permissions"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate_permissions.#", "12"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMKeyVaultAccessPolicy_secretCertificate(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_key_vault_access_policy", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: 
[]resource.TestStep{ - { - Config: testAccDataSourceKeyVaultAccessPolicy("Secret & Certificate Management"), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckNoResourceAttr(data.ResourceName, "key_permissions"), - resource.TestCheckResourceAttr(data.ResourceName, "secret_permissions.#", "7"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate_permissions.#", "12"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMKeyVaultAccessPolicy_keySecretCertificate(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_key_vault_access_policy", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceKeyVaultAccessPolicy("Key, Secret, & Certificate Management"), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "key_permissions.#", "9"), - resource.TestCheckResourceAttr(data.ResourceName, "secret_permissions.#", "7"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate_permissions.#", "12"), - ), - }, - }, - }) -} - -func testAccDataSourceKeyVaultAccessPolicy(name string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_key_vault_access_policy" "test" { - name = "%s" -} -`, name) -} diff --git a/azurerm/internal/services/keyvault/tests/key_vault_access_policy_resource_test.go b/azurerm/internal/services/keyvault/tests/key_vault_access_policy_resource_test.go deleted file mode 100644 index 2cb6f1ac2e4d..000000000000 --- a/azurerm/internal/services/keyvault/tests/key_vault_access_policy_resource_test.go +++ /dev/null @@ -1,399 +0,0 @@ -package tests - -import ( - "fmt" - "regexp" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/keyvault" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMKeyVaultAccessPolicy_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_access_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultAccessPolicy_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultAccessPolicyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "key_permissions.0", "get"), - resource.TestCheckResourceAttr(data.ResourceName, "secret_permissions.0", "get"), - resource.TestCheckResourceAttr(data.ResourceName, "secret_permissions.1", "set"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMKeyVaultAccessPolicy_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_access_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultDestroy, - Steps: []resource.TestStep{ - { - 
Config: testAccAzureRMKeyVaultAccessPolicy_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultAccessPolicyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "key_permissions.0", "get"), - resource.TestCheckResourceAttr(data.ResourceName, "secret_permissions.0", "get"), - resource.TestCheckResourceAttr(data.ResourceName, "secret_permissions.1", "set"), - ), - }, - { - Config: testAccAzureRMKeyVaultAccessPolicy_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_key_vault_access_policy"), - }, - }, - }) -} - -func TestAccAzureRMKeyVaultAccessPolicy_multiple(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_access_policy", "test_with_application_id") - resourceName2 := "azurerm_key_vault_access_policy.test_no_application_id" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultAccessPolicy_multiple(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultAccessPolicyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "key_permissions.0", "create"), - resource.TestCheckResourceAttr(data.ResourceName, "key_permissions.1", "get"), - resource.TestCheckResourceAttr(data.ResourceName, "secret_permissions.0", "get"), - resource.TestCheckResourceAttr(data.ResourceName, "secret_permissions.1", "delete"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate_permissions.0", "create"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate_permissions.1", "delete"), - - testCheckAzureRMKeyVaultAccessPolicyExists(resourceName2), - resource.TestCheckResourceAttr(resourceName2, "key_permissions.0", "list"), - resource.TestCheckResourceAttr(resourceName2, "key_permissions.1", "encrypt"), - resource.TestCheckResourceAttr(resourceName2, "secret_permissions.0", "list"), - resource.TestCheckResourceAttr(resourceName2, "secret_permissions.1", "delete"), - resource.TestCheckResourceAttr(resourceName2, "certificate_permissions.0", "list"), - resource.TestCheckResourceAttr(resourceName2, "certificate_permissions.1", "delete"), - ), - }, - data.ImportStep(), - { - ResourceName: resourceName2, - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func TestAccAzureRMKeyVaultAccessPolicy_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_access_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultAccessPolicy_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultAccessPolicyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "key_permissions.0", "get"), - resource.TestCheckResourceAttr(data.ResourceName, "secret_permissions.0", "get"), - resource.TestCheckResourceAttr(data.ResourceName, "secret_permissions.1", "set"), - ), - }, - { - Config: testAccAzureRMKeyVaultAccessPolicy_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultAccessPolicyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "key_permissions.0", "list"), - resource.TestCheckResourceAttr(data.ResourceName, "key_permissions.1", 
"encrypt"), - ), - }, - }, - }) -} - -func TestAccAzureRMKeyVaultAccessPolicy_nonExistentVault(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_access_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultAccessPolicy_nonExistentVault(data), - ExpectNonEmptyPlan: true, - ExpectError: regexp.MustCompile(`Error retrieving Key Vault`), - }, - }, - }) -} - -func testCheckAzureRMKeyVaultAccessPolicyExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := azure.ParseAzureResourceID(rs.Primary.ID) - - if err != nil { - return err - } - resGroup := id.ResourceGroup - vaultName := id.Path["vaults"] - - objectId := rs.Primary.Attributes["object_id"] - applicationId := rs.Primary.Attributes["application_id"] - - resp, err := client.Get(ctx, resGroup, vaultName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Key Vault %q (resource group: %q) does not exist", vaultName, resGroup) - } - - return fmt.Errorf("Bad: Get on keyVaultClient: %+v", err) - } - - policy, err := keyvault.FindKeyVaultAccessPolicy(resp.Properties.AccessPolicies, objectId, applicationId) - if err != nil { - return fmt.Errorf("Error finding Key Vault Access Policy %q : %+v", vaultName, err) - } - if policy == nil { - return fmt.Errorf("Bad: Key Vault Policy %q (resource group: %q, object_id: %s) does not exist", vaultName, resGroup, objectId) - } - - return nil - } -} - -func testAccAzureRMKeyVaultAccessPolicy_basic(data acceptance.TestData) string { - template := testAccAzureRMKeyVaultAccessPolicy_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_key_vault_access_policy" "test" { - key_vault_id = azurerm_key_vault.test.id - - key_permissions = [ - "get", - ] - - secret_permissions = [ - "get", - "set", - ] - - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id -} -`, template) -} - -func testAccAzureRMKeyVaultAccessPolicy_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMKeyVaultAccessPolicy_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_key_vault_access_policy" "import" { - key_vault_id = azurerm_key_vault.test.id - tenant_id = azurerm_key_vault_access_policy.test.tenant_id - object_id = azurerm_key_vault_access_policy.test.object_id - - key_permissions = [ - "get", - ] - - secret_permissions = [ - "get", - "set", - ] -} -`, template) -} - -func testAccAzureRMKeyVaultAccessPolicy_multiple(data acceptance.TestData) string { - template := testAccAzureRMKeyVaultAccessPolicy_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_key_vault_access_policy" "test_with_application_id" { - key_vault_id = azurerm_key_vault.test.id - - key_permissions = [ - "create", - "get", - ] - - secret_permissions = [ - "get", - "delete", - ] - - certificate_permissions = [ - "create", - "delete", - ] - - application_id = data.azurerm_client_config.current.client_id 
- tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id -} - -resource "azurerm_key_vault_access_policy" "test_no_application_id" { - key_vault_id = azurerm_key_vault.test.id - - key_permissions = [ - "list", - "encrypt", - ] - - secret_permissions = [ - "list", - "delete", - ] - - certificate_permissions = [ - "list", - "delete", - ] - - storage_permissions = [ - "backup", - "delete", - "deletesas", - "get", - "getsas", - "list", - "listsas", - "purge", - "recover", - "regeneratekey", - "restore", - "set", - "setsas", - "update", - ] - - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id -} -`, template) -} - -func testAccAzureRMKeyVaultAccessPolicy_update(data acceptance.TestData) string { - template := testAccAzureRMKeyVaultAccessPolicy_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_key_vault_access_policy" "test" { - key_vault_id = azurerm_key_vault.test.id - - key_permissions = [ - "list", - "encrypt", - ] - - secret_permissions = [] - - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id -} -`, template) -} - -func testAccAzureRMKeyVaultAccessPolicy_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkv-%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "premium" - - tags = { - environment = "Production" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func testAccAzureRMKeyVaultAccessPolicy_nonExistentVault(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkv-%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "standard" - - tags = { - environment = "Production" - } -} - -resource "azurerm_key_vault_access_policy" "test" { - # Must appear to be URL, but not actually exist - appending a string works - key_vault_id = "${azurerm_key_vault.test.id}NOPE" - - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - key_permissions = [ - "get", - ] - - secret_permissions = [ - "get", - ] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} diff --git a/azurerm/internal/services/keyvault/tests/key_vault_certificate_data_source_test.go b/azurerm/internal/services/keyvault/tests/key_vault_certificate_data_source_test.go deleted file mode 100644 index 6a7fc7199fdb..000000000000 --- a/azurerm/internal/services/keyvault/tests/key_vault_certificate_data_source_test.go +++ /dev/null @@ -1,79 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func 
TestAccDataSourceAzureRMKeyVaultCertificate_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_key_vault_certificate", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMKeyVaultCertificate_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "certificate_data"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate_policy.0.key_properties.0.key_size", "2048"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate_policy.0.key_properties.0.key_type", "RSA"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMKeyVaultCertificate_generated(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_key_vault_certificate", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMKeyVaultCertificate_generated(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "certificate_data"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate_policy.0.issuer_parameters.0.name", "Self"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate_policy.0.key_properties.0.exportable", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate_policy.0.key_properties.0.key_size", "2048"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate_policy.0.key_properties.0.key_type", "RSA"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate_policy.0.key_properties.0.reuse_key", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate_policy.0.lifetime_action.0.action.0.action_type", "AutoRenew"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate_policy.0.lifetime_action.0.trigger.0.days_before_expiry", "30"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate_policy.0.secret_properties.0.content_type", "application/x-pkcs12"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate_policy.0.x509_certificate_properties.0.subject", "CN=hello-world"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate_policy.0.x509_certificate_properties.0.validity_in_months", "12"), - ), - }, - }, - }) -} - -func testAccDataSourceAzureRMKeyVaultCertificate_basic(data acceptance.TestData) string { - template := testAccAzureRMKeyVaultCertificate_basicImportPFX(data) - return fmt.Sprintf(` -%s - -data "azurerm_key_vault_certificate" "test" { - name = azurerm_key_vault_certificate.test.name - key_vault_id = azurerm_key_vault.test.id -} -`, template) -} - -func testAccDataSourceAzureRMKeyVaultCertificate_generated(data acceptance.TestData) string { - template := testAccAzureRMKeyVaultCertificate_basicGenerate(data) - return fmt.Sprintf(` -%s - -data "azurerm_key_vault_certificate" "test" { - name = azurerm_key_vault_certificate.test.name - key_vault_id = azurerm_key_vault.test.id -} -`, template) -} diff --git a/azurerm/internal/services/keyvault/tests/key_vault_certificate_issuer_data_source_test.go b/azurerm/internal/services/keyvault/tests/key_vault_certificate_issuer_data_source_test.go deleted file mode 100644 index dfdf0005c17c..000000000000 --- 
a/azurerm/internal/services/keyvault/tests/key_vault_certificate_issuer_data_source_test.go +++ /dev/null @@ -1,44 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMKeyVaultCertificateIssuer_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_key_vault_certificate_issuer", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMKeyVaultCertificateIssuer_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "account_id", "test-account"), - resource.TestCheckResourceAttr(data.ResourceName, "provider_name", "DigiCert"), - resource.TestCheckResourceAttr(data.ResourceName, "org_id", "accTestOrg"), - resource.TestCheckResourceAttr(data.ResourceName, "admin.0.email_address", "admin@contoso.com"), - resource.TestCheckResourceAttr(data.ResourceName, "admin.0.first_name", "First"), - resource.TestCheckResourceAttr(data.ResourceName, "admin.0.last_name", "Last"), - resource.TestCheckResourceAttr(data.ResourceName, "admin.0.phone", "01234567890"), - ), - }, - }, - }) -} - -func testAccDataSourceAzureRMKeyVaultCertificateIssuer_basic(data acceptance.TestData) string { - template := testAccAzureRMKeyVaultCertificateIssuer_complete(data) - return fmt.Sprintf(` -%s - -data "azurerm_key_vault_certificate_issuer" "test" { - name = azurerm_key_vault_certificate_issuer.test.name - key_vault_id = azurerm_key_vault.test.id -} -`, template) -} diff --git a/azurerm/internal/services/keyvault/tests/key_vault_certificate_issuer_resource_test.go b/azurerm/internal/services/keyvault/tests/key_vault_certificate_issuer_resource_test.go deleted file mode 100644 index b40d649fd753..000000000000 --- a/azurerm/internal/services/keyvault/tests/key_vault_certificate_issuer_resource_test.go +++ /dev/null @@ -1,392 +0,0 @@ -package tests - -import ( - "fmt" - "log" - "testing" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMKeyVaultCertificateIssuer_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate_issuer", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultCertificateIssuerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultCertificateIssuer_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultCertificateIssuerExists(data.ResourceName), - ), - }, - data.ImportStep("password"), - }, - }) -} - -func TestAccAzureRMKeyVaultCertificateIssuer_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate_issuer", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: 
acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultCertificateIssuerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultCertificateIssuer_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultCertificateIssuerExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMKeyVaultCertificateIssuer_requiresImport), - }, - }) -} - -func TestAccAzureRMKeyVaultCertificateIssuer_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate_issuer", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultCertificateIssuerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultCertificateIssuer_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultCertificateIssuerExists(data.ResourceName), - ), - }, - data.ImportStep("password"), - }, - }) -} - -func TestAccAzureRMKeyVaultCertificateIssuer_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate_issuer", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultCertificateIssuerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultCertificateIssuer_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultCertificateIssuerExists(data.ResourceName), - ), - }, - data.ImportStep("password"), - { - Config: testAccAzureRMKeyVaultCertificateIssuer_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultCertificateIssuerExists(data.ResourceName), - ), - }, - data.ImportStep("password"), - }, - }) -} - -func TestAccAzureRMKeyVaultCertificateIssuer_disappears(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate_issuer", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultCertificateIssuerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultCertificateIssuer_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultCertificateIssuerExists(data.ResourceName), - testCheckAzureRMKeyVaultCertificateIssuerDisappears(data.ResourceName), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func TestAccAzureRMKeyVaultCertificateIssuer_disappearsWhenParentKeyVaultDeleted(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate_issuer", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultCertificateIssuerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultCertificateIssuer_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultCertificateIssuerExists("azurerm_key_vault_certificate_issuer.test"), - testCheckAzureRMKeyVaultDisappears("azurerm_key_vault.test"), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func testCheckAzureRMKeyVaultCertificateIssuerDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.ManagementClient - vaultClient := 
acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_key_vault_certificate_issuer" { - continue - } - - name := rs.Primary.Attributes["name"] - keyVaultId := rs.Primary.Attributes["key_vault_id"] - vaultBaseUrl, err := azure.GetKeyVaultBaseUrlFromID(ctx, vaultClient, keyVaultId) - if err != nil { - // deleted, this is fine. - return nil - } - - ok, err := azure.KeyVaultExists(ctx, acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient, keyVaultId) - if err != nil { - return fmt.Errorf("failed to check if key vault %q for Certificate Issuer %q in Vault at url %q exists: %v", keyVaultId, name, vaultBaseUrl, err) - } - if !ok { - log.Printf("[DEBUG] Certificate Issuer %q Key Vault %q was not found in Key Vault at URI %q ", name, keyVaultId, vaultBaseUrl) - return nil - } - - // get the latest version - resp, err := client.GetCertificateIssuer(ctx, vaultBaseUrl, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - return fmt.Errorf("Bad: Get on keyVault certificate issuer: %+v", err) - } - - return fmt.Errorf("Key Vault Certificate Issuer still exists:\n%#v", resp) - } - - return nil -} - -func testCheckAzureRMKeyVaultCertificateIssuerExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.ManagementClient - vaultClient := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - keyVaultId := rs.Primary.Attributes["key_vault_id"] - vaultBaseUrl, err := azure.GetKeyVaultBaseUrlFromID(ctx, vaultClient, keyVaultId) - if err != nil { - return fmt.Errorf("failed to look up Secret %q vault url from id %q: %+v", name, keyVaultId, err) - } - - ok, err = azure.KeyVaultExists(ctx, acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient, keyVaultId) - if err != nil { - return fmt.Errorf("failed to check if key vault %q for Certificate Issuer %q in Vault at url %q exists: %v", keyVaultId, name, vaultBaseUrl, err) - } - if !ok { - log.Printf("[DEBUG] Certificate Issuer %q Key Vault %q was not found in Key Vault at URI %q ", name, keyVaultId, vaultBaseUrl) - return nil - } - - resp, err := client.GetCertificateIssuer(ctx, vaultBaseUrl, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Key Vault Certificate Issuer %q (resource group: %q) does not exist", name, vaultBaseUrl) - } - - return fmt.Errorf("Bad: Get on keyVault certificate issuer: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMKeyVaultCertificateIssuerDisappears(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.ManagementClient - vaultClient := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return 
fmt.Errorf("Not found: %s", resourceName) - } - name := rs.Primary.Attributes["name"] - keyVaultId := rs.Primary.Attributes["key_vault_id"] - vaultBaseUrl, err := azure.GetKeyVaultBaseUrlFromID(ctx, vaultClient, keyVaultId) - if err != nil { - return fmt.Errorf("failed to look up base URI from id %q: %+v", keyVaultId, err) - } - - ok, err = azure.KeyVaultExists(ctx, acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient, keyVaultId) - if err != nil { - return fmt.Errorf("failed to check if key vault %q for Certificate Issuer %q in Vault at url %q exists: %v", keyVaultId, name, vaultBaseUrl, err) - } - if !ok { - log.Printf("[DEBUG] Certificate Issuer %q Key Vault %q was not found in Key Vault at URI %q ", name, keyVaultId, vaultBaseUrl) - return nil - } - - resp, err := client.DeleteCertificateIssuer(ctx, vaultBaseUrl, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return fmt.Errorf("Bad: Delete on keyVaultManagementClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMKeyVaultCertificateIssuer_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkv-%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "standard" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - certificate_permissions = [ - "delete", - "import", - "get", - "manageissuers", - "setissuers", - ] - - key_permissions = [ - "create", - ] - - secret_permissions = [ - "set", - ] - } -} - -resource "azurerm_key_vault_certificate_issuer" "test" { - name = "acctestKVCI-%d" - key_vault_id = azurerm_key_vault.test.id - provider_name = "OneCertV2-PrivateCA" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) -} - -func testAccAzureRMKeyVaultCertificateIssuer_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMKeyVaultCertificateIssuer_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_key_vault_certificate_issuer" "import" { - name = azurerm_key_vault_certificate_issuer.test.name - key_vault_id = azurerm_key_vault_certificate_issuer.test.key_vault_id - org_id = azurerm_key_vault_certificate_issuer.test.org_id - account_id = azurerm_key_vault_certificate_issuer.test.account_id - password = "test" - provider_name = azurerm_key_vault_certificate_issuer.test.provider_name -} - -`, template) -} - -func testAccAzureRMKeyVaultCertificateIssuer_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkv-%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "standard" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - certificate_permissions = [ - "delete", - "import", - "get", - 
"manageissuers", - "setissuers", - ] - - key_permissions = [ - "create", - ] - - secret_permissions = [ - "set", - ] - } -} - -resource "azurerm_key_vault_certificate_issuer" "test" { - name = "acctestKVCI-%d" - key_vault_id = azurerm_key_vault.test.id - account_id = "test-account" - password = "test" - provider_name = "DigiCert" - - org_id = "accTestOrg" - admin { - email_address = "admin@contoso.com" - first_name = "First" - last_name = "Last" - phone = "01234567890" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) -} diff --git a/azurerm/internal/services/keyvault/tests/key_vault_certificate_resource_test.go b/azurerm/internal/services/keyvault/tests/key_vault_certificate_resource_test.go deleted file mode 100644 index 9a1e4b13fd3b..000000000000 --- a/azurerm/internal/services/keyvault/tests/key_vault_certificate_resource_test.go +++ /dev/null @@ -1,1174 +0,0 @@ -package tests - -import ( - "fmt" - "log" - "testing" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMKeyVaultCertificate_basicImportPFX(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultCertificateDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultCertificate_basicImportPFX(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultCertificateExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "certificate_data"), - ), - }, - data.ImportStep("certificate"), - }, - }) -} - -func TestAccAzureRMKeyVaultCertificate_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultCertificateDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultCertificate_basicImportPFX(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultCertificateExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "certificate_data"), - ), - }, - { - Config: testAccAzureRMKeyVaultCertificate_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_key_vault_certificate"), - }, - }, - }) -} - -func TestAccAzureRMKeyVaultCertificate_disappears(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultCertificateDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultCertificate_basicGenerate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultCertificateExists(data.ResourceName), - testCheckAzureRMKeyVaultCertificateDisappears(data.ResourceName), 
- ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func TestAccAzureRMKeyVaultCertificate_disappearsWhenParentKeyVaultDeleted(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultCertificateDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultCertificate_basicGenerate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultCertificateExists("azurerm_key_vault_certificate.test"), - testCheckAzureRMKeyVaultDisappears("azurerm_key_vault.test"), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func TestAccAzureRMKeyVaultCertificate_basicGenerate(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultCertificateDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultCertificate_basicGenerate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultCertificateExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "secret_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "certificate_data"), - resource.TestCheckResourceAttrSet(data.ResourceName, "thumbprint"), - resource.TestCheckResourceAttrSet(data.ResourceName, "certificate_attribute.0.created"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMKeyVaultCertificate_basicGenerateUnknownIssuer(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultCertificateDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultCertificate_basicGenerateUnknownIssuer(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultCertificateExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMKeyVaultCertificate_softDeleteRecovery(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultCertificateDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultCertificate_softDeleteRecovery(data, false), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultCertificateExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "secret_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "certificate_data"), - ), - }, - { - Config: testAccAzureRMKeyVaultCertificate_softDeleteRecovery(data, false), - Destroy: true, - }, - { - Config: testAccAzureRMKeyVaultCertificate_softDeleteRecovery(data, true), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultCertificateExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "secret_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "certificate_data"), - ), - }, - }, - }) -} - -func TestAccAzureRMKeyVaultCertificate_basicGenerateSans(t *testing.T) { - data := 
acceptance.BuildTestData(t, "azurerm_key_vault_certificate", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultCertificateDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultCertificate_basicGenerateSans(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultCertificateExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "certificate_data"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate_policy.0.x509_certificate_properties.0.subject_alternative_names.0.emails.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate_policy.0.x509_certificate_properties.0.subject_alternative_names.0.dns_names.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate_policy.0.x509_certificate_properties.0.subject_alternative_names.0.upns.#", "1"), - ), - }, - }, - }) -} - -func TestAccAzureRMKeyVaultCertificate_basicGenerateTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultCertificateDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultCertificate_basicGenerateTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultCertificateExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "certificate_data"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.hello", "world"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMKeyVaultCertificate_basicExtendedKeyUsage(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultCertificateDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultCertificate_basicExtendedKeyUsage(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultCertificateExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "certificate_data"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate_policy.0.x509_certificate_properties.0.extended_key_usage.#", "3"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate_policy.0.x509_certificate_properties.0.extended_key_usage.0", "1.3.6.1.5.5.7.3.1"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate_policy.0.x509_certificate_properties.0.extended_key_usage.1", "1.3.6.1.5.5.7.3.2"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate_policy.0.x509_certificate_properties.0.extended_key_usage.2", "1.3.6.1.4.1.311.21.10"), - ), - }, - }, - }) -} - -func TestAccAzureRMKeyVaultCertificate_emptyExtendedKeyUsage(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_certificate", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultCertificateDestroy, - Steps: []resource.TestStep{ - { - Config: 
testAccAzureRMKeyVaultCertificate_emptyExtendedKeyUsage(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultCertificateExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "certificate_data"), - resource.TestCheckResourceAttr(data.ResourceName, "certificate_policy.0.x509_certificate_properties.0.extended_key_usage.#", "0"), - ), - }, - }, - }) -} - -func testCheckAzureRMKeyVaultCertificateDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.ManagementClient - vaultClient := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_key_vault_certificate" { - continue - } - - name := rs.Primary.Attributes["name"] - keyVaultId := rs.Primary.Attributes["key_vault_id"] - vaultBaseUrl, err := azure.GetKeyVaultBaseUrlFromID(ctx, vaultClient, keyVaultId) - if err != nil { - // deleted, this is fine. - return nil - } - - ok, err := azure.KeyVaultExists(ctx, acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient, keyVaultId) - if err != nil { - return fmt.Errorf("Error checking if key vault %q for Certificate %q in Vault at url %q exists: %v", keyVaultId, name, vaultBaseUrl, err) - } - if !ok { - log.Printf("[DEBUG] Certificate %q Key Vault %q was not found in Key Vault at URI %q ", name, keyVaultId, vaultBaseUrl) - return nil - } - - // get the latest version - resp, err := client.GetCertificate(ctx, vaultBaseUrl, name, "") - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - return fmt.Errorf("Bad: Get on keyVault certificate: %+v", err) - } - - return fmt.Errorf("Key Vault Certificate still exists:\n%#v", resp) - } - - return nil -} - -func testCheckAzureRMKeyVaultCertificateExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.ManagementClient - vaultClient := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - keyVaultId := rs.Primary.Attributes["key_vault_id"] - vaultBaseUrl, err := azure.GetKeyVaultBaseUrlFromID(ctx, vaultClient, keyVaultId) - if err != nil { - return fmt.Errorf("Error looking up Secret %q vault url from id %q: %+v", name, keyVaultId, err) - } - - ok, err = azure.KeyVaultExists(ctx, acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient, keyVaultId) - if err != nil { - return fmt.Errorf("Error checking if key vault %q for Certificate %q in Vault at url %q exists: %v", keyVaultId, name, vaultBaseUrl, err) - } - if !ok { - log.Printf("[DEBUG] Certificate %q Key Vault %q was not found in Key Vault at URI %q ", name, keyVaultId, vaultBaseUrl) - return nil - } - - resp, err := client.GetCertificate(ctx, vaultBaseUrl, name, "") - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Key Vault Certificate %q (resource group: %q) does not exist", name, vaultBaseUrl) - } - - return fmt.Errorf("Bad: Get on keyVault certificate: %+v", err) - } - - return nil - } -} - -func 
testCheckAzureRMKeyVaultCertificateDisappears(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.ManagementClient - vaultClient := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - name := rs.Primary.Attributes["name"] - keyVaultId := rs.Primary.Attributes["key_vault_id"] - vaultBaseUrl, err := azure.GetKeyVaultBaseUrlFromID(ctx, vaultClient, keyVaultId) - if err != nil { - return fmt.Errorf("Error looking up Secret %q vault url from id %q: %+v", name, keyVaultId, err) - } - - ok, err = azure.KeyVaultExists(ctx, acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient, keyVaultId) - if err != nil { - return fmt.Errorf("Error checking if key vault %q for Certificate %q in Vault at url %q exists: %v", keyVaultId, name, vaultBaseUrl, err) - } - if !ok { - log.Printf("[DEBUG] Certificate %q Key Vault %q was not found in Key Vault at URI %q ", name, keyVaultId, vaultBaseUrl) - return nil - } - - resp, err := client.DeleteCertificate(ctx, vaultBaseUrl, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return fmt.Errorf("Bad: Delete on keyVaultManagementClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMKeyVaultCertificate_basicImportPFX(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkeyvault%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "standard" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - certificate_permissions = [ - "delete", - "import", - "get", - ] - - key_permissions = [ - "create", - ] - - secret_permissions = [ - "set", - ] - } -} - -resource "azurerm_key_vault_certificate" "test" { - name = "acctestcert%s" - key_vault_id = azurerm_key_vault.test.id - - certificate { - contents = filebase64("testdata/keyvaultcert.pfx") - password = "" - } - - certificate_policy { - issuer_parameters { - name = "Self" - } - - key_properties { - exportable = true - key_size = 2048 - key_type = "RSA" - reuse_key = false - } - - secret_properties { - content_type = "application/x-pkcs12" - } - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) -} - -func testAccAzureRMKeyVaultCertificate_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMKeyVaultCertificate_basicImportPFX(data) - return fmt.Sprintf(` -%s - -resource "azurerm_key_vault_certificate" "import" { - name = azurerm_key_vault_certificate.test.name - key_vault_id = azurerm_key_vault.test.id - - certificate { - contents = filebase64("testdata/keyvaultcert.pfx") - password = "" - } - - certificate_policy { - issuer_parameters { - name = "Self" - } - - key_properties { - exportable = true - key_size = 2048 - key_type = "RSA" - reuse_key = false - } - - 
secret_properties { - content_type = "application/x-pkcs12" - } - } -} -`, template) -} - -func testAccAzureRMKeyVaultCertificate_basicGenerate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkeyvault%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "standard" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - certificate_permissions = [ - "create", - "delete", - "get", - "update", - ] - - key_permissions = [ - "create", - ] - - secret_permissions = [ - "set", - ] - - storage_permissions = [ - "set", - ] - } -} - -resource "azurerm_key_vault_certificate" "test" { - name = "acctestcert%s" - key_vault_id = azurerm_key_vault.test.id - - certificate_policy { - issuer_parameters { - name = "Self" - } - - key_properties { - exportable = true - key_size = 2048 - key_type = "RSA" - reuse_key = true - } - - lifetime_action { - action { - action_type = "AutoRenew" - } - - trigger { - days_before_expiry = 30 - } - } - - secret_properties { - content_type = "application/x-pkcs12" - } - - x509_certificate_properties { - key_usage = [ - "cRLSign", - "dataEncipherment", - "digitalSignature", - "keyAgreement", - "keyCertSign", - "keyEncipherment", - ] - - subject = "CN=hello-world" - validity_in_months = 12 - } - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) -} - -func testAccAzureRMKeyVaultCertificate_basicGenerateUnknownIssuer(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkeyvault%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "standard" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - certificate_permissions = [ - "create", - "delete", - "get", - "update", - ] - - key_permissions = [ - "create", - ] - - secret_permissions = [ - "set", - ] - - storage_permissions = [ - "set", - ] - } -} - -resource "azurerm_key_vault_certificate" "test" { - name = "acctestcert%s" - key_vault_id = azurerm_key_vault.test.id - - certificate_policy { - issuer_parameters { - name = "Unknown" - } - - key_properties { - exportable = true - key_size = 2048 - key_type = "RSA" - reuse_key = true - } - - lifetime_action { - action { - action_type = "EmailContacts" - } - - trigger { - days_before_expiry = 30 - } - } - - secret_properties { - content_type = "application/x-pkcs12" - } - - x509_certificate_properties { - key_usage = [ - "cRLSign", - "dataEncipherment", - "digitalSignature", - "keyAgreement", - "keyCertSign", - "keyEncipherment", - ] - - subject = "CN=hello-world" - validity_in_months = 12 - } - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) -} - -func 
testAccAzureRMKeyVaultCertificate_basicGenerateSans(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkeyvault%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "standard" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - certificate_permissions = [ - "create", - "delete", - "get", - "update", - ] - - key_permissions = [ - "create", - ] - - secret_permissions = [ - "set", - ] - - storage_permissions = [ - "set", - ] - } -} - -resource "azurerm_key_vault_certificate" "test" { - name = "acctestcert%s" - key_vault_id = azurerm_key_vault.test.id - - certificate_policy { - issuer_parameters { - name = "Self" - } - - key_properties { - exportable = true - key_size = 2048 - key_type = "RSA" - reuse_key = true - } - - lifetime_action { - action { - action_type = "AutoRenew" - } - - trigger { - days_before_expiry = 30 - } - } - - secret_properties { - content_type = "application/x-pkcs12" - } - - x509_certificate_properties { - key_usage = [ - "cRLSign", - "dataEncipherment", - "digitalSignature", - "keyAgreement", - "keyCertSign", - "keyEncipherment", - ] - - subject = "CN=hello-world" - - subject_alternative_names { - emails = ["mary@stu.co.uk"] - dns_names = ["internal.contoso.com"] - upns = ["john@doe.com"] - } - - validity_in_months = 12 - } - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) -} - -func testAccAzureRMKeyVaultCertificate_basicGenerateTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkeyvault%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "standard" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - certificate_permissions = [ - "create", - "delete", - "get", - "update", - ] - - key_permissions = [ - "create", - ] - - secret_permissions = [ - "set", - ] - } -} - -resource "azurerm_key_vault_certificate" "test" { - name = "acctestcert%s" - key_vault_id = azurerm_key_vault.test.id - - certificate_policy { - issuer_parameters { - name = "Self" - } - - key_properties { - exportable = true - key_size = 2048 - key_type = "RSA" - reuse_key = true - } - - lifetime_action { - action { - action_type = "AutoRenew" - } - - trigger { - days_before_expiry = 30 - } - } - - secret_properties { - content_type = "application/x-pkcs12" - } - - x509_certificate_properties { - key_usage = [ - "cRLSign", - "dataEncipherment", - "digitalSignature", - "keyAgreement", - "keyCertSign", - "keyEncipherment", - ] - - subject = "CN=hello-world" - validity_in_months = 12 - } - } - - tags = { - "hello" = "world" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) -} - -func 
testAccAzureRMKeyVaultCertificate_basicExtendedKeyUsage(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkeyvault%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "standard" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - certificate_permissions = [ - "create", - "delete", - "get", - "update", - ] - - key_permissions = [ - "create", - ] - - secret_permissions = [ - "set", - ] - - storage_permissions = [ - "set", - ] - } -} - -resource "azurerm_key_vault_certificate" "test" { - name = "acctestcert%s" - key_vault_id = azurerm_key_vault.test.id - - certificate_policy { - issuer_parameters { - name = "Self" - } - - key_properties { - exportable = true - key_size = 2048 - key_type = "RSA" - reuse_key = true - } - - lifetime_action { - action { - action_type = "AutoRenew" - } - - trigger { - days_before_expiry = 30 - } - } - - secret_properties { - content_type = "application/x-pkcs12" - } - - x509_certificate_properties { - extended_key_usage = [ - "1.3.6.1.5.5.7.3.1", # Server Authentication - "1.3.6.1.5.5.7.3.2", # Client Authentication - "1.3.6.1.4.1.311.21.10", # Application Policies - ] - - key_usage = [ - "cRLSign", - "dataEncipherment", - "digitalSignature", - "keyAgreement", - "keyCertSign", - "keyEncipherment", - ] - - subject = "CN=hello-world" - validity_in_months = 12 - } - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) -} - -func testAccAzureRMKeyVaultCertificate_emptyExtendedKeyUsage(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkeyvault%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "standard" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - certificate_permissions = [ - "create", - "delete", - "get", - "update", - ] - - key_permissions = [ - "create", - ] - - secret_permissions = [ - "set", - ] - - storage_permissions = [ - "set", - ] - } -} - -resource "azurerm_key_vault_certificate" "test" { - name = "acctestcert%s" - key_vault_id = azurerm_key_vault.test.id - - certificate_policy { - issuer_parameters { - name = "Self" - } - - key_properties { - exportable = true - key_size = 2048 - key_type = "RSA" - reuse_key = true - } - - lifetime_action { - action { - action_type = "AutoRenew" - } - - trigger { - days_before_expiry = 30 - } - } - - secret_properties { - content_type = "application/x-pkcs12" - } - - x509_certificate_properties { - extended_key_usage = [] - - key_usage = [ - "cRLSign", - "dataEncipherment", - "digitalSignature", - "keyAgreement", - "keyCertSign", - "keyEncipherment", - ] - - subject = "CN=hello-world" - validity_in_months = 12 - } - } -} -`, data.RandomInteger, 
data.Locations.Primary, data.RandomString, data.RandomString) -} - -func testAccAzureRMKeyVaultCertificate_softDeleteRecovery(data acceptance.TestData, purge bool) string { - return fmt.Sprintf(` -provider "azurerm" { - features { - key_vault { - purge_soft_delete_on_destroy = "%t" - recover_soft_deleted_key_vaults = true - } - } -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-kvc-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkeyvault%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - soft_delete_enabled = true - - sku_name = "standard" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - certificate_permissions = [ - "create", - "delete", - "get", - "recover", - "update", - ] - - key_permissions = [ - "create", - ] - - secret_permissions = [ - "set", - ] - - storage_permissions = [ - "set", - ] - } -} - -resource "azurerm_key_vault_certificate" "test" { - name = "acctestcert%s" - key_vault_id = azurerm_key_vault.test.id - - certificate_policy { - issuer_parameters { - name = "Self" - } - - key_properties { - exportable = true - key_size = 2048 - key_type = "RSA" - reuse_key = true - } - - lifetime_action { - action { - action_type = "AutoRenew" - } - - trigger { - days_before_expiry = 30 - } - } - - secret_properties { - content_type = "application/x-pkcs12" - } - - x509_certificate_properties { - key_usage = [ - "cRLSign", - "dataEncipherment", - "digitalSignature", - "keyAgreement", - "keyCertSign", - "keyEncipherment", - ] - - subject = "CN=hello-world" - validity_in_months = 12 - } - } -} -`, purge, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) -} diff --git a/azurerm/internal/services/keyvault/tests/key_vault_data_source_test.go b/azurerm/internal/services/keyvault/tests/key_vault_data_source_test.go deleted file mode 100644 index 600e592c86a7..000000000000 --- a/azurerm/internal/services/keyvault/tests/key_vault_data_source_test.go +++ /dev/null @@ -1,157 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMKeyVault_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_key_vault", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMKeyVault_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "tenant_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "sku_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "access_policy.0.tenant_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "access_policy.0.object_id"), - resource.TestCheckResourceAttr(data.ResourceName, "access_policy.0.key_permissions.0", "create"), - resource.TestCheckResourceAttr(data.ResourceName, "access_policy.0.secret_permissions.0", "set"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - }, - 
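// The data-source tests in this file build on the resource configs defined in
// key_vault_resource_test.go: each config helper (for example
// testAccDataSourceAzureRMKeyVault_basic further down) wraps the matching
// azurerm_key_vault resource config with a data "azurerm_key_vault" block, and
// the checks assert that attributes such as tenant_id, sku_name and the
// access_policy permissions are read back through the data source.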
}) -} - -func TestAccDataSourceAzureRMKeyVault_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_key_vault", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMKeyVault_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "tenant_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "sku_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "access_policy.0.tenant_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "access_policy.0.object_id"), - resource.TestCheckResourceAttr(data.ResourceName, "access_policy.0.key_permissions.0", "get"), - resource.TestCheckResourceAttr(data.ResourceName, "access_policy.0.secret_permissions.0", "get"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.environment", "Production"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMKeyVault_networkAcls(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_key_vault", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMKeyVault_networkAcls(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "tenant_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "sku_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "access_policy.0.tenant_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "access_policy.0.object_id"), - resource.TestCheckResourceAttr(data.ResourceName, "access_policy.0.key_permissions.0", "create"), - resource.TestCheckResourceAttr(data.ResourceName, "access_policy.0.secret_permissions.0", "set"), - resource.TestCheckResourceAttr(data.ResourceName, "network_acls.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "network_acls.0.default_action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMKeyVault_softDelete(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_key_vault", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMKeyVault_enableSoftDelete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "soft_delete_enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "purge_protection_enabled", "false"), - resource.TestCheckResourceAttrSet(data.ResourceName, "sku_name"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - }, - }) -} - -func testAccDataSourceAzureRMKeyVault_basic(data acceptance.TestData) string { - r := testAccAzureRMKeyVault_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_key_vault" "test" { - name = 
azurerm_key_vault.test.name - resource_group_name = azurerm_key_vault.test.resource_group_name -} -`, r) -} - -func testAccDataSourceAzureRMKeyVault_complete(data acceptance.TestData) string { - r := testAccAzureRMKeyVault_complete(data) - return fmt.Sprintf(` -%s - -data "azurerm_key_vault" "test" { - name = azurerm_key_vault.test.name - resource_group_name = azurerm_key_vault.test.resource_group_name -} -`, r) -} - -func testAccDataSourceAzureRMKeyVault_networkAcls(data acceptance.TestData) string { - r := testAccAzureRMKeyVault_networkAclsUpdated(data) - return fmt.Sprintf(` -%s - -data "azurerm_key_vault" "test" { - name = azurerm_key_vault.test.name - resource_group_name = azurerm_key_vault.test.resource_group_name -} -`, r) -} - -func testAccDataSourceAzureRMKeyVault_enableSoftDelete(data acceptance.TestData) string { - r := testAccAzureRMKeyVault_softDelete(data, true) - return fmt.Sprintf(` -%s - -data "azurerm_key_vault" "test" { - name = azurerm_key_vault.test.name - resource_group_name = azurerm_key_vault.test.resource_group_name -} -`, r) -} diff --git a/azurerm/internal/services/keyvault/tests/key_vault_key_data_source_test.go b/azurerm/internal/services/keyvault/tests/key_vault_key_data_source_test.go deleted file mode 100644 index 9ed685fdccd1..000000000000 --- a/azurerm/internal/services/keyvault/tests/key_vault_key_data_source_test.go +++ /dev/null @@ -1,40 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMKeyVaultKey_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_key_vault_key", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceKeyVaultKey_complete(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "key_type", "RSA"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.hello", "world"), - ), - }, - }, - }) -} - -func testAccDataSourceKeyVaultKey_complete(data acceptance.TestData) string { - t := testAccAzureRMKeyVaultKey_complete(data) - return fmt.Sprintf(` -%s - -data "azurerm_key_vault_key" "test" { - name = azurerm_key_vault_key.test.name - key_vault_id = azurerm_key_vault.test.id -} -`, t) -} diff --git a/azurerm/internal/services/keyvault/tests/key_vault_key_resource_test.go b/azurerm/internal/services/keyvault/tests/key_vault_key_resource_test.go deleted file mode 100644 index 9c50e1164aa5..000000000000 --- a/azurerm/internal/services/keyvault/tests/key_vault_key_resource_test.go +++ /dev/null @@ -1,1058 +0,0 @@ -package tests - -import ( - "fmt" - "log" - "testing" - "time" - - "github.com/Azure/azure-sdk-for-go/services/keyvault/2016-10-01/keyvault" - "github.com/Azure/go-autorest/autorest/date" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func 
TestAccAzureRMKeyVaultKey_basicEC(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_key", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultKeyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultKey_basicEC(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultKeyExists(data.ResourceName), - ), - }, - data.ImportStep("key_size"), - }, - }) -} - -func TestAccAzureRMKeyVaultKey_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_key", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultKeyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultKey_basicEC(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultKeyExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMKeyVaultKey_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_key_vault_key"), - }, - }, - }) -} - -func TestAccAzureRMKeyVaultKey_basicECHSM(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_key", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultKeyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultKey_basicECHSM(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultKeyExists(data.ResourceName), - ), - }, - }, - }) -} - -func TestAccAzureRMKeyVaultKey_curveEC(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_key", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultKeyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultKey_curveEC(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultKeyExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMKeyVaultKey_basicRSA(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_key", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultKeyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultKey_basicRSA(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultKeyExists(data.ResourceName), - ), - }, - data.ImportStep("key_size"), - }, - }) -} - -func TestAccAzureRMKeyVaultKey_basicRSAHSM(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_key", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultKeyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultKey_basicRSAHSM(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultKeyExists(data.ResourceName), - ), - }, - data.ImportStep("key_size"), - }, - }) -} - -func TestAccAzureRMKeyVaultKey_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_key", "test") - - 
resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultKeyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultKey_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultKeyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "not_before_date", "2020-01-01T01:02:03Z"), - resource.TestCheckResourceAttr(data.ResourceName, "expiration_date", "2021-01-01T01:02:03Z"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.hello", "world"), - ), - }, - data.ImportStep("key_size"), - }, - }) -} - -func TestAccAzureRMKeyVaultKey_softDeleteRecovery(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_key", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultKeyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultKey_softDeleteRecovery(data, false), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultKeyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "not_before_date", "2020-01-01T01:02:03Z"), - resource.TestCheckResourceAttr(data.ResourceName, "expiration_date", "2021-01-01T01:02:03Z"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.hello", "world"), - ), - }, - data.ImportStep("key_size"), - { - Config: testAccAzureRMKeyVaultKey_softDeleteRecovery(data, false), - Destroy: true, - }, - { - Config: testAccAzureRMKeyVaultKey_softDeleteRecovery(data, true), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultKeyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "not_before_date", "2020-01-01T01:02:03Z"), - resource.TestCheckResourceAttr(data.ResourceName, "expiration_date", "2021-01-01T01:02:03Z"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.hello", "world"), - ), - }, - }, - }) -} - -func TestAccAzureRMKeyVaultKey_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_key", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultKeyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultKey_basicRSA(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultKeyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "key_opts.#", "6"), - resource.TestCheckResourceAttr(data.ResourceName, "key_opts.0", "decrypt"), - ), - }, - { - Config: testAccAzureRMKeyVaultKey_basicUpdated(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultKeyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "key_opts.#", "5"), - resource.TestCheckResourceAttr(data.ResourceName, "key_opts.0", "encrypt"), - ), - }, - }, - }) -} - -func TestAccAzureRMKeyVaultKey_updatedExternally(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_key", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: 
acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultKeyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultKey_basicEC(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultKeyExists(data.ResourceName), - updateExpiryDateForKeyVaultKey(data.ResourceName, "2029-02-02T12:59:00Z"), - ), - ExpectNonEmptyPlan: true, - }, - { - Config: testAccAzureRMKeyVaultKey_basicECUpdatedExternally(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultKeyExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMKeyVaultKey_basicECUpdatedExternally(data), - PlanOnly: true, - }, - data.ImportStep("key_size"), - }, - }) -} - -func TestAccAzureRMKeyVaultKey_disappears(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_key", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultKeyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultKey_basicEC(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultKeyExists(data.ResourceName), - testCheckAzureRMKeyVaultKeyDisappears(data.ResourceName), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func TestAccAzureRMKeyVaultKey_disappearsWhenParentKeyVaultDeleted(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_key", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultKeyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultKey_basicEC(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultKeyExists("azurerm_key_vault_key.test"), - testCheckAzureRMKeyVaultDisappears("azurerm_key_vault.test"), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func testCheckAzureRMKeyVaultKeyDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.ManagementClient - vaultClient := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_key_vault_key" { - continue - } - - name := rs.Primary.Attributes["name"] - keyVaultId := rs.Primary.Attributes["key_vault_id"] - vaultBaseUrl, err := azure.GetKeyVaultBaseUrlFromID(ctx, vaultClient, keyVaultId) - if err != nil { - // key vault's been deleted - return nil - } - - ok, err := azure.KeyVaultExists(ctx, acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient, keyVaultId) - if err != nil { - return fmt.Errorf("Error checking if key vault %q for Secret %q in Vault at url %q exists: %v", keyVaultId, name, vaultBaseUrl, err) - } - if !ok { - log.Printf("[DEBUG] Secret %q Key Vault %q was not found in Key Vault at URI %q ", name, keyVaultId, vaultBaseUrl) - return nil - } - - // get the latest version - resp, err := client.GetKey(ctx, vaultBaseUrl, name, "") - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - return err - } - - return fmt.Errorf("Key Vault Key still exists:\n%#v", resp) - } - - return nil -} - -func testCheckAzureRMKeyVaultKeyExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := 
acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.ManagementClient - vaultClient := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - name := rs.Primary.Attributes["name"] - keyVaultId := rs.Primary.Attributes["key_vault_id"] - vaultBaseUrl, err := azure.GetKeyVaultBaseUrlFromID(ctx, vaultClient, keyVaultId) - if err != nil { - return fmt.Errorf("Error looking up Secret %q vault url from id %q: %+v", name, keyVaultId, err) - } - - ok, err = azure.KeyVaultExists(ctx, acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient, keyVaultId) - if err != nil { - return fmt.Errorf("Error checking if key vault %q for Key %q in Vault at url %q exists: %v", keyVaultId, name, vaultBaseUrl, err) - } - if !ok { - log.Printf("[DEBUG] Key %q Key Vault %q was not found in Key Vault at URI %q ", name, keyVaultId, vaultBaseUrl) - return nil - } - - resp, err := client.GetKey(ctx, vaultBaseUrl, name, "") - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Key Vault Key %q (resource group: %q) does not exist", name, vaultBaseUrl) - } - - return fmt.Errorf("Bad: Get on keyVaultManagementClient: %+v", err) - } - - return nil - } -} - -func updateExpiryDateForKeyVaultKey(resourceName string, expiryDate string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.ManagementClient - vaultClient := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - name := rs.Primary.Attributes["name"] - keyVaultId := rs.Primary.Attributes["key_vault_id"] - vaultBaseUrl, err := azure.GetKeyVaultBaseUrlFromID(ctx, vaultClient, keyVaultId) - if err != nil { - return fmt.Errorf("Error looking up Secret %q vault url from id %q: %+v", name, keyVaultId, err) - } - - ok, err = azure.KeyVaultExists(ctx, acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient, keyVaultId) - if err != nil { - return fmt.Errorf("Error checking if key vault %q for Key %q in Vault at url %q exists: %v", keyVaultId, name, vaultBaseUrl, err) - } - if !ok { - log.Printf("[DEBUG] Key %q Key Vault %q was not found in Key Vault at URI %q ", name, keyVaultId, vaultBaseUrl) - return nil - } - - expirationDate, err := time.Parse(time.RFC3339, expiryDate) - if err != nil { - return err - } - expirationUnixTime := date.UnixTime(expirationDate) - update := keyvault.KeyUpdateParameters{ - KeyAttributes: &keyvault.KeyAttributes{ - Expires: &expirationUnixTime, - }, - } - if _, err = client.UpdateKey(ctx, vaultBaseUrl, name, "", update); err != nil { - return fmt.Errorf("updating secret: %+v", err) - } - - resp, err := client.GetKey(ctx, vaultBaseUrl, name, "") - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Key Vault Key %q (resource group: %q) does not exist", name, vaultBaseUrl) - } - - return fmt.Errorf("Bad: Get on keyVaultManagementClient: %+v", err) - } - - return nil - } -} - -func 
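// updateExpiryDateForKeyVaultKey (above) pushes a new expiry onto the key
// directly through the Key Vault SDK (KeyUpdateParameters with a
// date.UnixTime value), simulating an out-of-band change; the first step of
// TestAccAzureRMKeyVaultKey_updatedExternally therefore expects a non-empty
// plan, and the later steps apply a config that sets the same expiration_date
// and check that the plan then comes back empty.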
testCheckAzureRMKeyVaultKeyDisappears(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.ManagementClient - vaultClient := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - keyVaultId := rs.Primary.Attributes["key_vault_id"] - vaultBaseUrl, err := azure.GetKeyVaultBaseUrlFromID(ctx, vaultClient, keyVaultId) - if err != nil { - return fmt.Errorf("Error looking up Secret %q vault url from id %q: %+v", name, keyVaultId, err) - } - - ok, err = azure.KeyVaultExists(ctx, acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient, keyVaultId) - if err != nil { - return fmt.Errorf("Error checking if key vault %q for Key %q in Vault at url %q exists: %v", keyVaultId, name, vaultBaseUrl, err) - } - if !ok { - log.Printf("[DEBUG] Key %q Key Vault %q was not found in Key Vault at URI %q ", name, keyVaultId, vaultBaseUrl) - return nil - } - - resp, err := client.DeleteKey(ctx, vaultBaseUrl, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return fmt.Errorf("Bad: Delete on keyVaultManagementClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMKeyVaultKey_basicEC(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkv-%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "premium" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - key_permissions = [ - "create", - "delete", - "get", - "update", - ] - - secret_permissions = [ - "get", - "delete", - "set", - ] - } - - tags = { - environment = "Production" - } -} - -resource "azurerm_key_vault_key" "test" { - name = "key-%s" - key_vault_id = azurerm_key_vault.test.id - key_type = "EC" - key_size = 2048 - - key_opts = [ - "sign", - "verify", - ] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) -} - -func testAccAzureRMKeyVaultKey_basicECUpdatedExternally(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkv-%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "premium" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - key_permissions = [ - "create", - "delete", - "get", - "update", - ] - - secret_permissions = [ - "get", - "delete", - "set", - ] - } - - tags = { - environment = "Production" - } -} - -resource 
"azurerm_key_vault_key" "test" { - name = "key-%s" - key_vault_id = azurerm_key_vault.test.id - key_type = "EC" - key_size = 2048 - expiration_date = "2029-02-02T12:59:00Z" - - key_opts = [ - "sign", - "verify", - ] - - tags = { - Rick = "Morty" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) -} - -func testAccAzureRMKeyVaultKey_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMKeyVaultKey_basicEC(data) - return fmt.Sprintf(` -%s - -resource "azurerm_key_vault_key" "import" { - name = azurerm_key_vault_key.test.name - key_vault_id = azurerm_key_vault.test.id - key_type = "EC" - key_size = 2048 - - key_opts = [ - "sign", - "verify", - ] -} -`, template) -} - -func testAccAzureRMKeyVaultKey_basicRSA(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkv-%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "premium" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - key_permissions = [ - "create", - "delete", - "get", - "update", - ] - - secret_permissions = [ - "get", - "delete", - "set", - ] - } - - tags = { - environment = "Production" - } -} - -resource "azurerm_key_vault_key" "test" { - name = "key-%s" - key_vault_id = azurerm_key_vault.test.id - key_type = "RSA" - key_size = 2048 - - key_opts = [ - "decrypt", - "encrypt", - "sign", - "unwrapKey", - "verify", - "wrapKey", - ] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) -} - -func testAccAzureRMKeyVaultKey_basicRSAHSM(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkv-%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "premium" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - key_permissions = [ - "create", - "delete", - "get", - ] - - secret_permissions = [ - "get", - "delete", - "set", - ] - } - - tags = { - environment = "Production" - } -} - -resource "azurerm_key_vault_key" "test" { - name = "key-%s" - key_vault_id = azurerm_key_vault.test.id - key_type = "RSA-HSM" - key_size = 2048 - - key_opts = [ - "decrypt", - "encrypt", - "sign", - "unwrapKey", - "verify", - "wrapKey", - ] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) -} - -func testAccAzureRMKeyVaultKey_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkv-%s" - location = azurerm_resource_group.test.location - resource_group_name = 
azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "premium" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - key_permissions = [ - "create", - "delete", - "get", - ] - - secret_permissions = [ - "get", - "delete", - "set", - ] - } - - tags = { - environment = "Production" - } -} - -resource "azurerm_key_vault_key" "test" { - name = "key-%s" - key_vault_id = azurerm_key_vault.test.id - key_type = "RSA" - key_size = 2048 - not_before_date = "2020-01-01T01:02:03Z" - expiration_date = "2021-01-01T01:02:03Z" - - key_opts = [ - "decrypt", - "encrypt", - "sign", - "unwrapKey", - "verify", - "wrapKey", - ] - - tags = { - "hello" = "world" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) -} - -func testAccAzureRMKeyVaultKey_basicUpdated(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkv-%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "premium" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - key_permissions = [ - "create", - "delete", - "get", - "update", - ] - - secret_permissions = [ - "get", - "delete", - "set", - ] - } - - tags = { - environment = "Production" - } -} - -resource "azurerm_key_vault_key" "test" { - name = "key-%s" - key_vault_id = azurerm_key_vault.test.id - key_type = "RSA" - key_size = 2048 - - key_opts = [ - "encrypt", - "sign", - "unwrapKey", - "verify", - "wrapKey", - ] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) -} - -func testAccAzureRMKeyVaultKey_curveEC(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkv-%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "premium" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - key_permissions = [ - "create", - "delete", - "get", - ] - - secret_permissions = [ - "get", - "delete", - "set", - ] - } - - tags = { - environment = "Production" - } -} - -resource "azurerm_key_vault_key" "test" { - name = "key-%s" - key_vault_id = azurerm_key_vault.test.id - key_type = "EC" - curve = "P-521" - - key_opts = [ - "sign", - "verify", - ] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) -} - -func testAccAzureRMKeyVaultKey_basicECHSM(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkv-%s" - location = 
azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "premium" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - key_permissions = [ - "create", - "delete", - "get", - ] - - secret_permissions = [ - "get", - "delete", - "set", - ] - } - - tags = { - environment = "Production" - } -} - -resource "azurerm_key_vault_key" "test" { - name = "key-%s" - key_vault_id = azurerm_key_vault.test.id - key_type = "EC-HSM" - curve = "P-521" - - key_opts = [ - "sign", - "verify", - ] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) -} - -func testAccAzureRMKeyVaultKey_softDeleteRecovery(data acceptance.TestData, purge bool) string { - return fmt.Sprintf(` -provider "azurerm" { - features { - key_vault { - purge_soft_delete_on_destroy = "%t" - recover_soft_deleted_key_vaults = true - } - } -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-kvk-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkv-%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - soft_delete_enabled = true - - sku_name = "premium" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - key_permissions = [ - "create", - "recover", - "delete", - "get", - ] - - secret_permissions = [ - "get", - "delete", - "set", - ] - } - - tags = { - environment = "Production" - } -} - -resource "azurerm_key_vault_key" "test" { - name = "key-%s" - key_vault_id = azurerm_key_vault.test.id - key_type = "RSA" - key_size = 2048 - not_before_date = "2020-01-01T01:02:03Z" - expiration_date = "2021-01-01T01:02:03Z" - - key_opts = [ - "decrypt", - "encrypt", - "sign", - "unwrapKey", - "verify", - "wrapKey", - ] - - tags = { - "hello" = "world" - } -} -`, purge, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) -} diff --git a/azurerm/internal/services/keyvault/tests/key_vault_resource_test.go b/azurerm/internal/services/keyvault/tests/key_vault_resource_test.go deleted file mode 100644 index a6fa611ac54c..000000000000 --- a/azurerm/internal/services/keyvault/tests/key_vault_resource_test.go +++ /dev/null @@ -1,1308 +0,0 @@ -package tests - -import ( - "fmt" - "regexp" - "testing" - - "github.com/hashicorp/go-azure-helpers/response" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMKeyVault_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVault_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - 
resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "premium"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMKeyVault_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVault_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMKeyVault_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_key_vault"), - }, - }, - }) -} - -func TestAccAzureRMKeyVault_networkAcls(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVault_networkAcls(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMKeyVault_networkAclsUpdated(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMKeyVault_networkAclsAllowed(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVault_networkAclsAllowed(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMKeyVault_accessPolicyUpperLimit(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVault_accessPolicyUpperLimit(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - testCheckAzureRMKeyVaultDisappears(data.ResourceName), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func TestAccAzureRMKeyVault_disappears(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVault_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - testCheckAzureRMKeyVaultDisappears(data.ResourceName), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func TestAccAzureRMKeyVault_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: 
testCheckAzureRMKeyVaultDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVault_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "access_policy.0.application_id"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMKeyVault_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVault_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "access_policy.0.key_permissions.0", "create"), - resource.TestCheckResourceAttr(data.ResourceName, "access_policy.0.secret_permissions.0", "set"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - { - Config: testAccAzureRMKeyVault_update(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "access_policy.0.key_permissions.0", "get"), - resource.TestCheckResourceAttr(data.ResourceName, "access_policy.0.secret_permissions.0", "get"), - resource.TestCheckResourceAttr(data.ResourceName, "enabled_for_deployment", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "enabled_for_disk_encryption", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "enabled_for_template_deployment", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "enable_rbac_authorization", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.environment", "Staging"), - ), - }, - { - Config: testAccAzureRMKeyVault_noAccessPolicyBlocks(data), - Check: resource.ComposeTestCheckFunc( - // There are no access_policy blocks in this configuration - // at all, which means to ignore any existing policies and - // so the one created in previous steps is still present. - resource.TestCheckResourceAttr(data.ResourceName, "access_policy.#", "1"), - ), - }, - { - Config: testAccAzureRMKeyVault_accessPolicyExplicitZero(data), - Check: resource.ComposeTestCheckFunc( - // This config explicitly sets access_policy = [], which - // means to delete any existing policies. 
- resource.TestCheckResourceAttr(data.ResourceName, "access_policy.#", "0"), - ), - }, - }, - }) -} - -func TestAccAzureRMKeyVault_updateContacts(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVault_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMKeyVault_updateContacts(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMKeyVault_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMKeyVault_justCert(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVault_justCert(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "access_policy.0.certificate_permissions.0", "get"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMKeyVault_softDeleteEnabled(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVault_softDelete(data, true), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "soft_delete_enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "purge_protection_enabled", "false"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMKeyVault_softDeleteViaUpdate(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVault_softDelete(data, false), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "soft_delete_enabled", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "purge_protection_enabled", "false"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMKeyVault_softDelete(data, true), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "soft_delete_enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "purge_protection_enabled", "false"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMKeyVault_softDeleteAttemptToDisable(t *testing.T) { - data := acceptance.BuildTestData(t, 
"azurerm_key_vault", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVault_softDelete(data, true), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "soft_delete_enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "purge_protection_enabled", "false"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMKeyVault_softDelete(data, false), - ExpectError: regexp.MustCompile("once Soft Delete has been Enabled it's not possible to disable it"), - }, - }, - }) -} - -func TestAccAzureRMKeyVault_softDeleteRecovery(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultDestroy, - Steps: []resource.TestStep{ - { - // create it regularly - Config: testAccAzureRMKeyVault_softDelete(data, true), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "soft_delete_enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "purge_protection_enabled", "false"), - ), - }, - data.ImportStep(), - { - // delete the key vault - Config: testAccAzureRMKeyVault_softDeleteAbsent(data), - }, - { - // attempting to re-create it requires recovery, which is enabled by default - Config: testAccAzureRMKeyVault_softDelete(data, true), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "soft_delete_enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "purge_protection_enabled", "false"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMKeyVault_softDeleteRecoveryDisabled(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultDestroy, - Steps: []resource.TestStep{ - { - // create it regularly - Config: testAccAzureRMKeyVault_softDeleteRecoveryDisabled(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "soft_delete_enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "purge_protection_enabled", "false"), - ), - }, - data.ImportStep(), - { - // delete the key vault - Config: testAccAzureRMKeyVault_softDeleteAbsent(data), - }, - { - // attempting to re-create it requires recovery, which is enabled by default - Config: testAccAzureRMKeyVault_softDeleteRecoveryDisabled(data), - ExpectError: regexp.MustCompile("An existing soft-deleted Key Vault exists with the Name"), - }, - }, - }) -} - -func TestAccAzureRMKeyVault_purgeProtectionEnabled(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultDestroy, - Steps: []resource.TestStep{ - { - Config: 
testAccAzureRMKeyVault_purgeProtection(data, true), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "purge_protection_enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "soft_delete_enabled", "true"), // API rejects false if purge protection is enabled - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMKeyVault_purgeProtectionAndSoftDeleteEnabled(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVault_purgeProtectionAndSoftDelete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "purge_protection_enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "soft_delete_enabled", "true"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMKeyVault_purgeProtectionViaUpdate(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVault_purgeProtection(data, false), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "purge_protection_enabled", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "soft_delete_enabled", "false"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMKeyVault_purgeProtection(data, true), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "purge_protection_enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "soft_delete_enabled", "true"), // API rejects false if purge protection is enabled - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMKeyVault_purgeProtectionAttemptToDisable(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVault_purgeProtection(data, true), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "purge_protection_enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "soft_delete_enabled", "true"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMKeyVault_purgeProtection(data, false), - ExpectError: regexp.MustCompile("once Purge Protection has been Enabled it's not possible to disable it"), - }, - }, - }) -} - -func TestAccAzureRMKeyVault_deletePolicy(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultDestroy, - 
Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVault_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMKeyVault_noPolicy(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "access_policy.#", "0"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMKeyVaultDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_key_vault" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - return err - } - - return fmt.Errorf("Key Vault still exists:\n%#v", resp.Properties) - } - - return nil -} - -func testCheckAzureRMKeyVaultExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - vaultName := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for vault: %s", vaultName) - } - - client := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - resp, err := client.Get(ctx, resourceGroup, vaultName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Vault %q (resource group: %q) does not exist", vaultName, resourceGroup) - } - - return fmt.Errorf("Bad: Get on keyVaultClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMKeyVaultDisappears(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - vaultName := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for vault: %s", vaultName) - } - - client := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - resp, err := client.Delete(ctx, resourceGroup, vaultName) - if err != nil { - if response.WasNotFound(resp.Response) { - return nil - } - - return fmt.Errorf("Bad: Delete on keyVaultClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMKeyVault_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "vault%d" - location = azurerm_resource_group.test.location - resource_group_name = 
azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "premium" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - certificate_permissions = [ - "ManageContacts", - ] - - key_permissions = [ - "create", - ] - - secret_permissions = [ - "set", - ] - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMKeyVault_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMKeyVault_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_key_vault" "import" { - name = azurerm_key_vault.test.name - location = azurerm_key_vault.test.location - resource_group_name = azurerm_key_vault.test.resource_group_name - tenant_id = azurerm_key_vault.test.tenant_id - - sku_name = "premium" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - key_permissions = [ - "create", - ] - - secret_permissions = [ - "set", - ] - } -} -`, template) -} - -func testAccAzureRMKeyVault_networkAclsTemplate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctestvirtnet%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test_a" { - name = "acctestsubneta%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.2.0/24" - service_endpoints = ["Microsoft.KeyVault"] -} - -resource "azurerm_subnet" "test_b" { - name = "acctestsubnetb%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.4.0/24" - service_endpoints = ["Microsoft.KeyVault"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMKeyVault_networkAcls(data acceptance.TestData) string { - template := testAccAzureRMKeyVault_networkAclsTemplate(data) - return fmt.Sprintf(` -%s - -resource "azurerm_key_vault" "test" { - name = "vault%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "premium" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - key_permissions = [ - "create", - ] - - secret_permissions = [ - "set", - ] - } - - network_acls { - default_action = "Deny" - bypass = "None" - virtual_network_subnet_ids = [azurerm_subnet.test_a.id, azurerm_subnet.test_b.id] - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMKeyVault_networkAclsUpdated(data acceptance.TestData) string { - template := testAccAzureRMKeyVault_networkAclsTemplate(data) - return fmt.Sprintf(` -%s - -resource "azurerm_key_vault" "test" { - name = "vault%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "premium" - - access_policy 
{ - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - key_permissions = [ - "create", - ] - - secret_permissions = [ - "set", - ] - } - - network_acls { - default_action = "Allow" - bypass = "AzureServices" - ip_rules = ["123.0.0.102/32"] - virtual_network_subnet_ids = [azurerm_subnet.test_a.id] - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMKeyVault_networkAclsAllowed(data acceptance.TestData) string { - template := testAccAzureRMKeyVault_networkAclsTemplate(data) - return fmt.Sprintf(` -%s - -resource "azurerm_key_vault" "test" { - name = "vault%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "premium" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - key_permissions = [ - "create", - ] - - secret_permissions = [ - "set", - ] - } - - network_acls { - default_action = "Allow" - bypass = "AzureServices" - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMKeyVault_update(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "vault%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "premium" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - key_permissions = [ - "get", - ] - - secret_permissions = [ - "get", - ] - } - - enabled_for_deployment = true - enabled_for_disk_encryption = true - enabled_for_template_deployment = true - enable_rbac_authorization = true - - tags = { - environment = "Staging" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMKeyVault_noAccessPolicyBlocks(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "vault%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "premium" - - enabled_for_deployment = true - enabled_for_disk_encryption = true - enabled_for_template_deployment = true - enable_rbac_authorization = true - - tags = { - environment = "Staging" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMKeyVault_accessPolicyExplicitZero(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "vault%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "premium" - - access_policy = [] - - 
enabled_for_deployment = true - enabled_for_disk_encryption = true - enabled_for_template_deployment = true - enable_rbac_authorization = true - - tags = { - environment = "Staging" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMKeyVault_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "vault%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "premium" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - application_id = data.azurerm_client_config.current.client_id - - certificate_permissions = [ - "get", - ] - - key_permissions = [ - "get", - ] - - secret_permissions = [ - "get", - ] - } - - tags = { - environment = "Production" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMKeyVault_justCert(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "vault%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "premium" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - certificate_permissions = [ - "get", - ] - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMKeyVault_accessPolicyUpperLimit(data acceptance.TestData) string { - var storageAccountConfigs string - var accessPoliciesConfigs string - - for i := 1; i <= 20; i++ { - storageAccountConfigs += testAccAzureRMKeyVault_generateStorageAccountConfigs(i, data.RandomString) - accessPoliciesConfigs += testAccAzureRMKeyVault_generateAccessPolicyConfigs(i) - } - - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "vault%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "premium" - %s -} - -%s -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, accessPoliciesConfigs, storageAccountConfigs) -} - -func testAccAzureRMKeyVault_generateStorageAccountConfigs(accountNum int, rs string) string { - return fmt.Sprintf(` -resource "azurerm_storage_account" "test%d" { - name = "testsa%s%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "GRS" - - identity { - type = "SystemAssigned" - } - - tags = { - environment = "testing" - } -} -`, accountNum, rs, accountNum) -} - -func testAccAzureRMKeyVault_generateAccessPolicyConfigs(accountNum int) string { - 
// due to a weird terraform fmt issue where: - // "${azurerm_storage_account.test%d.identity.0.principal_id}" - // becomes - // "${azurerm_storage_account.test % d.identity.0.principal_id}" - // - // lets inject this separately so we can run terrafmt on this file - - oid := fmt.Sprintf("${azurerm_storage_account.test%d.identity.0.principal_id}", accountNum) - - return fmt.Sprintf(` -access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = "%s" - - key_permissions = ["get", "create", "delete", "list", "restore", "recover", "unwrapkey", "wrapkey", "purge", "encrypt", "decrypt", "sign", "verify"] - secret_permissions = ["get"] -} -`, oid) -} - -func testAccAzureRMKeyVault_purgeProtection(data acceptance.TestData, enabled bool) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" {} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "vault%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - sku_name = "premium" - soft_delete_enabled = "%t" - purge_protection_enabled = "%t" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, enabled, enabled) -} - -func testAccAzureRMKeyVault_softDelete(data acceptance.TestData, enabled bool) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "vault%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - sku_name = "premium" - soft_delete_enabled = %t -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, enabled) -} - -func testAccAzureRMKeyVault_softDeleteAbsent(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features { - key_vault { - recover_soft_deleted_key_vaults = false - } - } -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} -`, data.RandomInteger, data.Locations.Primary) -} - -func testAccAzureRMKeyVault_softDeleteRecoveryDisabled(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features { - key_vault { - recover_soft_deleted_key_vaults = false - } - } -} - -data "azurerm_client_config" "current" {} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "vault%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - sku_name = "premium" - soft_delete_enabled = true - soft_delete_retention_days = 7 -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMKeyVault_purgeProtectionAndSoftDelete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" {} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "vault%d" - location = 
azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - sku_name = "premium" - soft_delete_enabled = true - soft_delete_retention_days = 7 - purge_protection_enabled = true - -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMKeyVault_noPolicy(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "vault%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "premium" - - access_policy = [] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMKeyVault_updateContacts(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-kv-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "vault%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "premium" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - certificate_permissions = [ - "ManageContacts", - ] - - key_permissions = [ - "create", - ] - - secret_permissions = [ - "set", - ] - } - - contact { - email = "example@example.com" - name = "example" - phone = "01234567890" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/keyvault/tests/key_vault_secret_data_source_test.go b/azurerm/internal/services/keyvault/tests/key_vault_secret_data_source_test.go deleted file mode 100644 index 45f3a5c0434b..000000000000 --- a/azurerm/internal/services/keyvault/tests/key_vault_secret_data_source_test.go +++ /dev/null @@ -1,70 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMKeyVaultSecret_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_key_vault_secret", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceKeyVaultSecret_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "value", "rick-and-morty"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMKeyVaultSecret_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_key_vault_secret", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceKeyVaultSecret_complete(data), - Check: resource.ComposeTestCheckFunc( - 
resource.TestCheckResourceAttr(data.ResourceName, "value", ""), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.hello", "world"), - ), - }, - }, - }) -} - -func testAccDataSourceKeyVaultSecret_basic(data acceptance.TestData) string { - r := testAccAzureRMKeyVaultSecret_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_key_vault_secret" "test" { - name = azurerm_key_vault_secret.test.name - key_vault_id = azurerm_key_vault.test.id -} -`, r) -} - -func testAccDataSourceKeyVaultSecret_complete(data acceptance.TestData) string { - r := testAccAzureRMKeyVaultSecret_complete(data) - return fmt.Sprintf(` -%s - -data "azurerm_key_vault_secret" "test" { - name = azurerm_key_vault_secret.test.name - key_vault_id = azurerm_key_vault.test.id -} -`, r) -} diff --git a/azurerm/internal/services/keyvault/tests/key_vault_secret_resource_test.go b/azurerm/internal/services/keyvault/tests/key_vault_secret_resource_test.go deleted file mode 100644 index 8937203785c2..000000000000 --- a/azurerm/internal/services/keyvault/tests/key_vault_secret_resource_test.go +++ /dev/null @@ -1,651 +0,0 @@ -package tests - -import ( - "fmt" - "log" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/keyvault/2016-10-01/keyvault" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMKeyVaultSecret_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_secret", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultSecretDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultSecret_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultSecretExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "value", "rick-and-morty"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMKeyVaultSecret_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_secret", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultSecretDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultSecret_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultSecretExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "value", "rick-and-morty"), - ), - }, - { - Config: testAccAzureRMKeyVaultSecret_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_key_vault_secret"), - }, - }, - }) -} - -func TestAccAzureRMKeyVaultSecret_disappears(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_secret", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultSecretDestroy, - Steps: []resource.TestStep{ - { - Config: 
testAccAzureRMKeyVaultSecret_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultSecretExists(data.ResourceName), - testCheckAzureRMKeyVaultSecretDisappears(data.ResourceName), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func TestAccAzureRMKeyVaultSecret_disappearsWhenParentKeyVaultDeleted(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_secret", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultSecretDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultSecret_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultSecretExists("azurerm_key_vault_secret.test"), - testCheckAzureRMKeyVaultDisappears("azurerm_key_vault.test"), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func TestAccAzureRMKeyVaultSecret_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_secret", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultSecretDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultSecret_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultSecretExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "not_before_date", "2019-01-01T01:02:03Z"), - resource.TestCheckResourceAttr(data.ResourceName, "expiration_date", "2020-01-01T01:02:03Z"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.hello", "world"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMKeyVaultSecret_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_secret", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultSecretDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultSecret_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultSecretExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "value", "rick-and-morty"), - ), - }, - { - Config: testAccAzureRMKeyVaultSecret_basicUpdated(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultSecretExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "value", "szechuan"), - ), - }, - }, - }) -} - -func TestAccAzureRMKeyVaultSecret_updatingValueChangedExternally(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_secret", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultSecretDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultSecret_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultSecretExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "value", "rick-and-morty"), - updateKeyVaultSecretValue(data.ResourceName, "mad-scientist"), - ), - ExpectNonEmptyPlan: true, - }, - { - Config: testAccAzureRMKeyVaultSecret_updateTags(data), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMKeyVaultSecretExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMKeyVaultSecret_updateTags(data), - PlanOnly: true, - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMKeyVaultSecret_recovery(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_key_vault_secret", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKeyVaultSecretDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKeyVaultSecret_softDeleteRecovery(data, false), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultSecretExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "value", "rick-and-morty"), - ), - }, - { - Config: testAccAzureRMKeyVaultSecret_softDeleteRecovery(data, false), - Destroy: true, - }, - { - // purge true here to make sure when we end the test there's no soft-deleted items left behind - Config: testAccAzureRMKeyVaultSecret_softDeleteRecovery(data, true), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKeyVaultSecretExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "value", "rick-and-morty"), - ), - }, - }, - }) -} - -func testCheckAzureRMKeyVaultSecretDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.ManagementClient - vaultClient := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_key_vault_secret" { - continue - } - - name := rs.Primary.Attributes["name"] - keyVaultId := rs.Primary.Attributes["key_vault_id"] - vaultBaseUrl, err := azure.GetKeyVaultBaseUrlFromID(ctx, vaultClient, keyVaultId) - if err != nil { - // key vault's been deleted - return nil - } - - ok, err := azure.KeyVaultExists(ctx, acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient, keyVaultId) - if err != nil { - return fmt.Errorf("Error checking if key vault %q for Secret %q in Vault at url %q exists: %v", keyVaultId, name, vaultBaseUrl, err) - } - if !ok { - log.Printf("[DEBUG] Secret %q Key Vault %q was not found in Key Vault at URI %q ", name, keyVaultId, vaultBaseUrl) - return nil - } - - // get the latest version - resp, err := client.GetSecret(ctx, vaultBaseUrl, name, "") - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - return err - } - - return fmt.Errorf("Key Vault Secret still exists:\n%#v", resp) - } - - return nil -} - -func testCheckAzureRMKeyVaultSecretExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.ManagementClient - vaultClient := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - name := rs.Primary.Attributes["name"] - keyVaultId := rs.Primary.Attributes["key_vault_id"] - vaultBaseUrl, err := azure.GetKeyVaultBaseUrlFromID(ctx, vaultClient, keyVaultId) - if err != nil { - return fmt.Errorf("Error looking up Secret %q vault url from id %q: %+v", name, keyVaultId, err) - } - - ok, err = 
azure.KeyVaultExists(ctx, acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient, keyVaultId) - if err != nil { - return fmt.Errorf("Error checking if key vault %q for Secret %q in Vault at url %q exists: %v", keyVaultId, name, vaultBaseUrl, err) - } - if !ok { - log.Printf("[DEBUG] Secret %q Key Vault %q was not found in Key Vault at URI %q ", name, keyVaultId, vaultBaseUrl) - return nil - } - - resp, err := client.GetSecret(ctx, vaultBaseUrl, name, "") - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Key Vault Secret %q (resource group: %q) does not exist", name, vaultBaseUrl) - } - - return fmt.Errorf("Bad: Get on keyVaultManagementClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMKeyVaultSecretDisappears(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.ManagementClient - vaultClient := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - name := rs.Primary.Attributes["name"] - keyVaultId := rs.Primary.Attributes["key_vault_id"] - vaultBaseUrl, err := azure.GetKeyVaultBaseUrlFromID(ctx, vaultClient, keyVaultId) - if err != nil { - return fmt.Errorf("Error looking up Secret %q vault url from id %q: %+v", name, keyVaultId, err) - } - - ok, err = azure.KeyVaultExists(ctx, acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient, keyVaultId) - if err != nil { - return fmt.Errorf("Error checking if key vault %q for Secret %q in Vault at url %q exists: %v", keyVaultId, name, vaultBaseUrl, err) - } - if !ok { - log.Printf("[DEBUG] Secret %q Key Vault %q was not found in Key Vault at URI %q ", name, keyVaultId, vaultBaseUrl) - return nil - } - - resp, err := client.DeleteSecret(ctx, vaultBaseUrl, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return fmt.Errorf("Bad: Delete on keyVaultManagementClient: %+v", err) - } - - return nil - } -} - -func updateKeyVaultSecretValue(resourceName, value string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.ManagementClient - vaultClient := acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - name := rs.Primary.Attributes["name"] - keyVaultId := rs.Primary.Attributes["key_vault_id"] - vaultBaseUrl, err := azure.GetKeyVaultBaseUrlFromID(ctx, vaultClient, keyVaultId) - if err != nil { - return fmt.Errorf("Error looking up Secret %q vault url from id %q: %+v", name, keyVaultId, err) - } - - ok, err = azure.KeyVaultExists(ctx, acceptance.AzureProvider.Meta().(*clients.Client).KeyVault.VaultsClient, keyVaultId) - if err != nil { - return fmt.Errorf("Error checking if key vault %q for Secret %q in Vault at url %q exists: %v", keyVaultId, name, vaultBaseUrl, err) - } - if !ok { - log.Printf("[DEBUG] Secret %q Key Vault %q was not found in Key Vault at URI %q ", name, keyVaultId, 
vaultBaseUrl) - return nil - } - - updated := keyvault.SecretSetParameters{ - Value: utils.String(value), - } - if _, err = client.SetSecret(ctx, vaultBaseUrl, name, updated); err != nil { - return fmt.Errorf("updating secret: %+v", err) - } - return nil - } -} - -func testAccAzureRMKeyVaultSecret_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkv-%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "premium" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - key_permissions = [ - "get", - ] - - secret_permissions = [ - "get", - "delete", - "set", - ] - } - - tags = { - environment = "Production" - } -} - -resource "azurerm_key_vault_secret" "test" { - name = "secret-%s" - value = "rick-and-morty" - key_vault_id = azurerm_key_vault.test.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) -} - -func testAccAzureRMKeyVaultSecret_updateTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkv-%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "premium" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - key_permissions = [ - "get", - ] - - secret_permissions = [ - "get", - "delete", - "set", - ] - } - - tags = { - environment = "Production" - } -} - -resource "azurerm_key_vault_secret" "test" { - name = "secret-%s" - value = "mad-scientist" - key_vault_id = azurerm_key_vault.test.id - - tags = { - Rick = "Morty" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) -} - -func testAccAzureRMKeyVaultSecret_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMKeyVaultSecret_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_key_vault_secret" "import" { - name = azurerm_key_vault_secret.test.name - value = azurerm_key_vault_secret.test.value - key_vault_id = azurerm_key_vault_secret.test.key_vault_id -} -`, template) -} - -func testAccAzureRMKeyVaultSecret_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkv-%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "premium" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - key_permissions = [ - "create", - ] - - secret_permissions = [ - "get", - 
"delete", - "set", - ] - } - - tags = { - environment = "Production" - } -} - -resource "azurerm_key_vault_secret" "test" { - name = "secret-%s" - value = "" - key_vault_id = azurerm_key_vault.test.id - content_type = "application/xml" - not_before_date = "2019-01-01T01:02:03Z" - expiration_date = "2020-01-01T01:02:03Z" - - tags = { - "hello" = "world" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) -} - -func testAccAzureRMKeyVaultSecret_basicUpdated(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkv-%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "premium" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - key_permissions = [ - "create", - ] - - secret_permissions = [ - "get", - "delete", - "set", - ] - } - - tags = { - environment = "Production" - } -} - -resource "azurerm_key_vault_secret" "test" { - name = "secret-%s" - value = "szechuan" - key_vault_id = azurerm_key_vault.test.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) -} - -func testAccAzureRMKeyVaultSecret_softDeleteRecovery(data acceptance.TestData, purge bool) string { - return fmt.Sprintf(` -provider "azurerm" { - features { - key_vault { - purge_soft_delete_on_destroy = "%t" - recover_soft_deleted_key_vaults = true - } - } -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-kvs-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkv-%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - soft_delete_enabled = true - - sku_name = "standard" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - key_permissions = [ - "get", - ] - - secret_permissions = [ - "get", - "recover", - "delete", - "set", - ] - } - - tags = { - environment = "Production" - } -} - -resource "azurerm_key_vault_secret" "test" { - name = "secret-%s" - value = "rick-and-morty" - key_vault_id = azurerm_key_vault.test.id -} -`, purge, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) -} diff --git a/azurerm/internal/services/keyvault/validate/key_vault_certificate_issuer.go b/azurerm/internal/services/keyvault/validate/key_vault_certificate_issuer_name.go similarity index 100% rename from azurerm/internal/services/keyvault/validate/key_vault_certificate_issuer.go rename to azurerm/internal/services/keyvault/validate/key_vault_certificate_issuer_name.go diff --git a/azurerm/internal/services/keyvault/validate/key_vault_child_id.go b/azurerm/internal/services/keyvault/validate/key_vault_child_id.go new file mode 100644 index 000000000000..e136bd6d2d3a --- /dev/null +++ b/azurerm/internal/services/keyvault/validate/key_vault_child_id.go @@ -0,0 +1,21 @@ +package validate + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +func 
KeyVaultChildID(i interface{}, k string) (warnings []string, errors []error) { + v, ok := i.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) + return warnings, errors + } + + if _, err := azure.ParseKeyVaultChildID(v); err != nil { + errors = append(errors, fmt.Errorf("can not parse %q as a Key Vault Child resource id: %v", k, err)) + } + + return warnings, errors +} diff --git a/azurerm/internal/services/keyvault/validate/key_vault_id.go b/azurerm/internal/services/keyvault/validate/key_vault_id.go index 5b365e71ce47..0789ff383b8c 100644 --- a/azurerm/internal/services/keyvault/validate/key_vault_id.go +++ b/azurerm/internal/services/keyvault/validate/key_vault_id.go @@ -13,7 +13,7 @@ func KeyVaultID(i interface{}, k string) (warnings []string, errors []error) { return } - if _, err := parse.KeyVaultID(v); err != nil { + if _, err := parse.VaultID(v); err != nil { errors = append(errors, fmt.Errorf("Can not parse %q as a resource id: %v", k, err)) return } diff --git a/azurerm/internal/services/keyvault/validate/name.go b/azurerm/internal/services/keyvault/validate/key_vault_name.go similarity index 100% rename from azurerm/internal/services/keyvault/validate/name.go rename to azurerm/internal/services/keyvault/validate/key_vault_name.go diff --git a/azurerm/internal/services/keyvault/validate/name_test.go b/azurerm/internal/services/keyvault/validate/key_vault_name_test.go similarity index 100% rename from azurerm/internal/services/keyvault/validate/name_test.go rename to azurerm/internal/services/keyvault/validate/key_vault_name_test.go diff --git a/azurerm/internal/services/keyvault/validate/uuid.go b/azurerm/internal/services/keyvault/validate/uuid.go new file mode 100644 index 000000000000..d04b88fec581 --- /dev/null +++ b/azurerm/internal/services/keyvault/validate/uuid.go @@ -0,0 +1,26 @@ +package validate + +import ( + "fmt" + + "github.com/hashicorp/go-uuid" +) + +// IsUUIDOrEmpty is a ValidateFunc that ensures a string can be parsed as UUID or is empty +func IsUUIDOrEmpty(i interface{}, k string) (warnings []string, errors []error) { + v, ok := i.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) + return + } + + if v == "" { + return + } + + if _, err := uuid.ParseUUID(v); err != nil { + errors = append(errors, fmt.Errorf("expected %q to be a valid UUID, got %v", k, v)) + } + + return warnings, errors +} diff --git a/azurerm/internal/services/keyvault/validate/vault_id.go b/azurerm/internal/services/keyvault/validate/vault_id.go new file mode 100644 index 000000000000..f1db254ee1b6 --- /dev/null +++ b/azurerm/internal/services/keyvault/validate/vault_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/keyvault/parse" +) + +func VaultID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.VaultID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/keyvault/validate/vault_id_test.go b/azurerm/internal/services/keyvault/validate/vault_id_test.go new file mode 100644 index 000000000000..8739d943517c --- /dev/null +++ b/azurerm/internal/services/keyvault/validate/vault_id_test.go @@ -0,0 +1,76 
@@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestVaultID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.KeyVault/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.KeyVault/vaults/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.KeyVault/vaults/vault1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.KEYVAULT/VAULTS/VAULT1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := VaultID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/kusto/client/client.go b/azurerm/internal/services/kusto/client/client.go index f57ee316556d..2e15e3debdcc 100644 --- a/azurerm/internal/services/kusto/client/client.go +++ b/azurerm/internal/services/kusto/client/client.go @@ -1,7 +1,7 @@ package client import ( - "github.com/Azure/azure-sdk-for-go/services/kusto/mgmt/2020-02-15/kusto" + "github.com/Azure/azure-sdk-for-go/services/kusto/mgmt/2020-09-18/kusto" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/common" ) diff --git a/azurerm/internal/services/kusto/identity.go b/azurerm/internal/services/kusto/identity.go index de7813ca2e32..9eefde9a5048 100644 --- a/azurerm/internal/services/kusto/identity.go +++ b/azurerm/internal/services/kusto/identity.go @@ -1,7 +1,7 @@ package kusto import ( - "github.com/Azure/azure-sdk-for-go/services/kusto/mgmt/2020-02-15/kusto" + "github.com/Azure/azure-sdk-for-go/services/kusto/mgmt/2020-09-18/kusto" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" diff --git a/azurerm/internal/services/kusto/kusto_attached_database_configuration.go b/azurerm/internal/services/kusto/kusto_attached_database_configuration.go deleted file mode 100644 index edb5ec3ffb99..000000000000 --- a/azurerm/internal/services/kusto/kusto_attached_database_configuration.go +++ /dev/null @@ -1,220 +0,0 @@ -package kusto - -import ( - "fmt" - "log" - "time" - - "github.com/Azure/azure-sdk-for-go/services/kusto/mgmt/2020-02-15/kusto" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/kusto/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmKustoAttachedDatabaseConfiguration() *schema.Resource { - return &schema.Resource{ - Create: resourceArmKustoAttachedDatabaseConfigurationCreateUpdate, - Read: resourceArmKustoAttachedDatabaseConfigurationRead, - Update: resourceArmKustoAttachedDatabaseConfigurationCreateUpdate, - Delete: resourceArmKustoAttachedDatabaseConfigurationDelete, - - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(60 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(60 * time.Minute), - Delete: schema.DefaultTimeout(60 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validateAzureRMKustoDataConnectionName, - }, - - "resource_group_name": azure.SchemaResourceGroupName(), - - "location": azure.SchemaLocation(), - - "cluster_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validateAzureRMKustoClusterName, - }, - - "database_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.Any(validateAzureRMKustoDatabaseName, validation.StringInSlice([]string{"*"}, false)), - }, - - "cluster_resource_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateResourceID, - }, - - "attached_database_names": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - }, - - "default_principal_modification_kind": { - Type: schema.TypeString, - Optional: true, - Default: kusto.DefaultPrincipalsModificationKindNone, - ValidateFunc: validation.StringInSlice([]string{ - string(kusto.DefaultPrincipalsModificationKindNone), - string(kusto.DefaultPrincipalsModificationKindReplace), - string(kusto.DefaultPrincipalsModificationKindUnion), - }, false), - }, - }, - } -} - -func resourceArmKustoAttachedDatabaseConfigurationCreateUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Kusto.AttachedDatabaseConfigurationsClient - ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - - log.Printf("[INFO] preparing arguments for Azure Kusto Attached Database Configuration creation.") - - name := d.Get("name").(string) - resourceGroup := d.Get("resource_group_name").(string) - clusterName := d.Get("cluster_name").(string) - - if d.IsNewResource() { - resp, err := client.Get(ctx, resourceGroup, clusterName, name) - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Error checking for presence of existing Kusto Attached Database Configuration %q (Resource Group %q, Cluster %q): %s", name, resourceGroup, clusterName, err) - } - } - - if resp.ID != nil && *resp.ID != "" { - return tf.ImportAsExistsError("azurerm_kusto_attached_database_configuration", *resp.ID) - } - } - - location := azure.NormalizeLocation(d.Get("location").(string)) - - configurationProperties := expandKustoAttachedDatabaseConfigurationProperties(d) - - configurationRequest := 
kusto.AttachedDatabaseConfiguration{ - Location: &location, - AttachedDatabaseConfigurationProperties: configurationProperties, - } - - future, err := client.CreateOrUpdate(ctx, resourceGroup, clusterName, name, configurationRequest) - if err != nil { - return fmt.Errorf("Error creating or updating Kusto Attached Database Configuration %q (Resource Group %q, Cluster %q): %+v", name, resourceGroup, clusterName, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for completion of Kusto Attached Database Configuration %q (Resource Group %q, Cluster %q): %+v", name, resourceGroup, clusterName, err) - } - - configuration, err := client.Get(ctx, resourceGroup, clusterName, name) - if err != nil { - return fmt.Errorf("Error retrieving Kusto Attached Database Configuration %q (Resource Group %q, Cluster %q): %+v", name, resourceGroup, clusterName, err) - } - - d.SetId(*configuration.ID) - - return resourceArmKustoAttachedDatabaseConfigurationRead(d, meta) -} - -func resourceArmKustoAttachedDatabaseConfigurationRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Kusto.AttachedDatabaseConfigurationsClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.KustoAttachedDatabaseConfigurationID(d.Id()) - if err != nil { - return err - } - - configuration, err := client.Get(ctx, id.ResourceGroup, id.Cluster, id.Name) - - if err != nil { - if utils.ResponseWasNotFound(configuration.Response) { - d.SetId("") - return nil - } - return fmt.Errorf("Error retrieving Kusto Attached Database Configuration %q (Resource Group %q, Cluster %q): %+v", id.Name, id.ResourceGroup, id.Cluster, err) - } - - d.Set("name", id.Name) - d.Set("resource_group_name", id.ResourceGroup) - d.Set("cluster_name", id.Cluster) - - if location := configuration.Location; location != nil { - d.Set("location", azure.NormalizeLocation(*location)) - } - - if props := configuration.AttachedDatabaseConfigurationProperties; props != nil { - d.Set("cluster_resource_id", props.ClusterResourceID) - d.Set("database_name", props.DatabaseName) - d.Set("default_principal_modification_kind", props.DefaultPrincipalsModificationKind) - d.Set("attached_database_names", props.AttachedDatabaseNames) - } - - return nil -} - -func resourceArmKustoAttachedDatabaseConfigurationDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Kusto.AttachedDatabaseConfigurationsClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.KustoAttachedDatabaseConfigurationID(d.Id()) - if err != nil { - return err - } - - future, err := client.Delete(ctx, id.ResourceGroup, id.Cluster, id.Name) - if err != nil { - return fmt.Errorf("Error deleting Kusto Attached Database Configuration %q (Resource Group %q, Cluster %q): %+v", id.Name, id.ResourceGroup, id.Cluster, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for deletion of Kusto Attached Database Configuration %q (Resource Group %q, Cluster %q): %+v", id.Name, id.ResourceGroup, id.Cluster, err) - } - - return nil -} - -func expandKustoAttachedDatabaseConfigurationProperties(d *schema.ResourceData) *kusto.AttachedDatabaseConfigurationProperties { - AttachedDatabaseConfigurationProperties := &kusto.AttachedDatabaseConfigurationProperties{} - - if clusterResourceID, ok := 
d.GetOk("cluster_resource_id"); ok { - AttachedDatabaseConfigurationProperties.ClusterResourceID = utils.String(clusterResourceID.(string)) - } - - if databaseName, ok := d.GetOk("database_name"); ok { - AttachedDatabaseConfigurationProperties.DatabaseName = utils.String(databaseName.(string)) - } - - if defaultPrincipalModificationKind, ok := d.GetOk("default_principal_modification_kind"); ok { - AttachedDatabaseConfigurationProperties.DefaultPrincipalsModificationKind = kusto.DefaultPrincipalsModificationKind(defaultPrincipalModificationKind.(string)) - } - - return AttachedDatabaseConfigurationProperties -} diff --git a/azurerm/internal/services/kusto/kusto_attached_database_configuration_resource.go b/azurerm/internal/services/kusto/kusto_attached_database_configuration_resource.go new file mode 100644 index 000000000000..45657030faea --- /dev/null +++ b/azurerm/internal/services/kusto/kusto_attached_database_configuration_resource.go @@ -0,0 +1,219 @@ +package kusto + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/kusto/mgmt/2020-09-18/kusto" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/kusto/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceKustoAttachedDatabaseConfiguration() *schema.Resource { + return &schema.Resource{ + Create: resourceKustoAttachedDatabaseConfigurationCreateUpdate, + Read: resourceKustoAttachedDatabaseConfigurationRead, + Update: resourceKustoAttachedDatabaseConfigurationCreateUpdate, + Delete: resourceKustoAttachedDatabaseConfigurationDelete, + + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(60 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(60 * time.Minute), + Delete: schema.DefaultTimeout(60 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validateAzureRMKustoDataConnectionName, + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "location": azure.SchemaLocation(), + + "cluster_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validateAzureRMKustoClusterName, + }, + + "database_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.Any(validateAzureRMKustoDatabaseName, validation.StringInSlice([]string{"*"}, false)), + }, + + "cluster_resource_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateResourceID, + }, + + "attached_database_names": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + + "default_principal_modification_kind": { + Type: schema.TypeString, + Optional: true, + Default: kusto.DefaultPrincipalsModificationKindNone, + ValidateFunc: validation.StringInSlice([]string{ + 
string(kusto.DefaultPrincipalsModificationKindNone), + string(kusto.DefaultPrincipalsModificationKindReplace), + string(kusto.DefaultPrincipalsModificationKindUnion), + }, false), + }, + }, + } +} + +func resourceKustoAttachedDatabaseConfigurationCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Kusto.AttachedDatabaseConfigurationsClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + log.Printf("[INFO] preparing arguments for Azure Kusto Attached Database Configuration creation.") + + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + clusterName := d.Get("cluster_name").(string) + + if d.IsNewResource() { + resp, err := client.Get(ctx, resourceGroup, clusterName, name) + if err != nil { + if !utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Error checking for presence of existing Kusto Attached Database Configuration %q (Resource Group %q, Cluster %q): %s", name, resourceGroup, clusterName, err) + } + } + + if resp.ID != nil && *resp.ID != "" { + return tf.ImportAsExistsError("azurerm_kusto_attached_database_configuration", *resp.ID) + } + } + + location := azure.NormalizeLocation(d.Get("location").(string)) + + configurationProperties := expandKustoAttachedDatabaseConfigurationProperties(d) + + configurationRequest := kusto.AttachedDatabaseConfiguration{ + Location: &location, + AttachedDatabaseConfigurationProperties: configurationProperties, + } + + future, err := client.CreateOrUpdate(ctx, resourceGroup, clusterName, name, configurationRequest) + if err != nil { + return fmt.Errorf("Error creating or updating Kusto Attached Database Configuration %q (Resource Group %q, Cluster %q): %+v", name, resourceGroup, clusterName, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error waiting for completion of Kusto Attached Database Configuration %q (Resource Group %q, Cluster %q): %+v", name, resourceGroup, clusterName, err) + } + + configuration, err := client.Get(ctx, resourceGroup, clusterName, name) + if err != nil { + return fmt.Errorf("Error retrieving Kusto Attached Database Configuration %q (Resource Group %q, Cluster %q): %+v", name, resourceGroup, clusterName, err) + } + + d.SetId(*configuration.ID) + + return resourceKustoAttachedDatabaseConfigurationRead(d, meta) +} + +func resourceKustoAttachedDatabaseConfigurationRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Kusto.AttachedDatabaseConfigurationsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.AttachedDatabaseConfigurationID(d.Id()) + if err != nil { + return err + } + + configuration, err := client.Get(ctx, id.ResourceGroup, id.ClusterName, id.Name) + if err != nil { + if utils.ResponseWasNotFound(configuration.Response) { + d.SetId("") + return nil + } + return fmt.Errorf("Error retrieving Kusto Attached Database Configuration %q (Resource Group %q, Cluster %q): %+v", id.Name, id.ResourceGroup, id.ClusterName, err) + } + + d.Set("name", id.Name) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("cluster_name", id.ClusterName) + + if location := configuration.Location; location != nil { + d.Set("location", azure.NormalizeLocation(*location)) + } + + if props := configuration.AttachedDatabaseConfigurationProperties; props != nil { + d.Set("cluster_resource_id", props.ClusterResourceID) + 
d.Set("database_name", props.DatabaseName) + d.Set("default_principal_modification_kind", props.DefaultPrincipalsModificationKind) + d.Set("attached_database_names", props.AttachedDatabaseNames) + } + + return nil +} + +func resourceKustoAttachedDatabaseConfigurationDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Kusto.AttachedDatabaseConfigurationsClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.AttachedDatabaseConfigurationID(d.Id()) + if err != nil { + return err + } + + future, err := client.Delete(ctx, id.ResourceGroup, id.ClusterName, id.Name) + if err != nil { + return fmt.Errorf("Error deleting Kusto Attached Database Configuration %q (Resource Group %q, Cluster %q): %+v", id.Name, id.ResourceGroup, id.ClusterName, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error waiting for deletion of Kusto Attached Database Configuration %q (Resource Group %q, Cluster %q): %+v", id.Name, id.ResourceGroup, id.ClusterName, err) + } + + return nil +} + +func expandKustoAttachedDatabaseConfigurationProperties(d *schema.ResourceData) *kusto.AttachedDatabaseConfigurationProperties { + AttachedDatabaseConfigurationProperties := &kusto.AttachedDatabaseConfigurationProperties{} + + if clusterResourceID, ok := d.GetOk("cluster_resource_id"); ok { + AttachedDatabaseConfigurationProperties.ClusterResourceID = utils.String(clusterResourceID.(string)) + } + + if databaseName, ok := d.GetOk("database_name"); ok { + AttachedDatabaseConfigurationProperties.DatabaseName = utils.String(databaseName.(string)) + } + + if defaultPrincipalModificationKind, ok := d.GetOk("default_principal_modification_kind"); ok { + AttachedDatabaseConfigurationProperties.DefaultPrincipalsModificationKind = kusto.DefaultPrincipalsModificationKind(defaultPrincipalModificationKind.(string)) + } + + return AttachedDatabaseConfigurationProperties +} diff --git a/azurerm/internal/services/kusto/kusto_attached_database_configuration_resource_test.go b/azurerm/internal/services/kusto/kusto_attached_database_configuration_resource_test.go new file mode 100644 index 000000000000..d5bc3e67e865 --- /dev/null +++ b/azurerm/internal/services/kusto/kusto_attached_database_configuration_resource_test.go @@ -0,0 +1,98 @@ +package kusto_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/kusto/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type KustoAttachedDatabaseConfigurationResource struct { +} + +func TestAccKustoAttachedDatabaseConfiguration_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_kusto_attached_database_configuration", "test") + r := KustoAttachedDatabaseConfigurationResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (KustoAttachedDatabaseConfigurationResource) 
Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.AttachedDatabaseConfigurationID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Kusto.AttachedDatabaseConfigurationsClient.Get(ctx, id.ResourceGroup, id.ClusterName, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving %s: %v", id.String(), err) + } + + return utils.Bool(resp.AttachedDatabaseConfigurationProperties != nil), nil +} + +func (KustoAttachedDatabaseConfigurationResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "rg" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_kusto_cluster" "cluster1" { + name = "acctestkc1%s" + location = azurerm_resource_group.rg.location + resource_group_name = azurerm_resource_group.rg.name + + sku { + name = "Dev(No SLA)_Standard_D11_v2" + capacity = 1 + } +} + +resource "azurerm_kusto_cluster" "cluster2" { + name = "acctestkc2%s" + location = azurerm_resource_group.rg.location + resource_group_name = azurerm_resource_group.rg.name + + sku { + name = "Dev(No SLA)_Standard_D11_v2" + capacity = 1 + } +} + +resource "azurerm_kusto_database" "followed_database" { + name = "acctestkd-%d" + resource_group_name = azurerm_resource_group.rg.name + location = azurerm_resource_group.rg.location + cluster_name = azurerm_kusto_cluster.cluster1.name +} + +resource "azurerm_kusto_attached_database_configuration" "configuration1" { + name = "acctestka-%d" + resource_group_name = azurerm_resource_group.rg.name + location = azurerm_resource_group.rg.location + cluster_name = azurerm_kusto_cluster.cluster1.name + cluster_resource_id = azurerm_kusto_cluster.cluster2.id + database_name = "*" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/kusto/kusto_cluster_customer_managed_key_resource.go b/azurerm/internal/services/kusto/kusto_cluster_customer_managed_key_resource.go index 7488d72a006d..99d60aa214e4 100644 --- a/azurerm/internal/services/kusto/kusto_cluster_customer_managed_key_resource.go +++ b/azurerm/internal/services/kusto/kusto_cluster_customer_managed_key_resource.go @@ -5,7 +5,7 @@ import ( "log" "time" - "github.com/Azure/azure-sdk-for-go/services/kusto/mgmt/2020-02-15/kusto" + "github.com/Azure/azure-sdk-for-go/services/kusto/mgmt/2020-09-18/kusto" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" @@ -19,12 +19,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmKustoClusterCustomerManagedKey() *schema.Resource { +func resourceKustoClusterCustomerManagedKey() *schema.Resource { return &schema.Resource{ - Create: resourceArmKustoClusterCustomerManagedKeyCreateUpdate, - Read: resourceArmKustoClusterCustomerManagedKeyRead, - Update: resourceArmKustoClusterCustomerManagedKeyCreateUpdate, - Delete: resourceArmKustoClusterCustomerManagedKeyDelete, + Create: resourceKustoClusterCustomerManagedKeyCreateUpdate, + Read: resourceKustoClusterCustomerManagedKeyRead, + Update: resourceKustoClusterCustomerManagedKeyCreateUpdate, + Delete: resourceKustoClusterCustomerManagedKeyDelete, // TODO: this needs a custom ID validating importer Importer: &schema.ResourceImporter{ @@ 
-67,14 +67,14 @@ func resourceArmKustoClusterCustomerManagedKey() *schema.Resource { } } -func resourceArmKustoClusterCustomerManagedKeyCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceKustoClusterCustomerManagedKeyCreateUpdate(d *schema.ResourceData, meta interface{}) error { clusterClient := meta.(*clients.Client).Kusto.ClustersClient vaultsClient := meta.(*clients.Client).KeyVault.VaultsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() clusterIDRaw := d.Get("cluster_id").(string) - clusterID, err := parse.KustoClusterID(clusterIDRaw) + clusterID, err := parse.ClusterID(clusterIDRaw) if err != nil { return err } @@ -102,7 +102,7 @@ func resourceArmKustoClusterCustomerManagedKeyCreateUpdate(d *schema.ResourceDat } keyVaultIDRaw := d.Get("key_vault_id").(string) - keyVaultID, err := keyVaultParse.KeyVaultID(keyVaultIDRaw) + keyVaultID, err := keyVaultParse.VaultID(keyVaultIDRaw) if err != nil { return err } @@ -153,16 +153,16 @@ func resourceArmKustoClusterCustomerManagedKeyCreateUpdate(d *schema.ResourceDat d.SetId(resourceID) - return resourceArmKustoClusterCustomerManagedKeyRead(d, meta) + return resourceKustoClusterCustomerManagedKeyRead(d, meta) } -func resourceArmKustoClusterCustomerManagedKeyRead(d *schema.ResourceData, meta interface{}) error { +func resourceKustoClusterCustomerManagedKeyRead(d *schema.ResourceData, meta interface{}) error { clusterClient := meta.(*clients.Client).Kusto.ClustersClient vaultsClient := meta.(*clients.Client).KeyVault.VaultsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - clusterID, err := parse.KustoClusterID(d.Id()) + clusterID, err := parse.ClusterID(d.Id()) if err != nil { return err } @@ -221,12 +221,12 @@ func resourceArmKustoClusterCustomerManagedKeyRead(d *schema.ResourceData, meta return nil } -func resourceArmKustoClusterCustomerManagedKeyDelete(d *schema.ResourceData, meta interface{}) error { +func resourceKustoClusterCustomerManagedKeyDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Kusto.ClustersClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - clusterID, err := parse.KustoClusterID(d.Id()) + clusterID, err := parse.ClusterID(d.Id()) if err != nil { return err } diff --git a/azurerm/internal/services/kusto/kusto_cluster_customer_managed_key_test.go b/azurerm/internal/services/kusto/kusto_cluster_customer_managed_key_test.go new file mode 100644 index 000000000000..026bfe84ade6 --- /dev/null +++ b/azurerm/internal/services/kusto/kusto_cluster_customer_managed_key_test.go @@ -0,0 +1,270 @@ +package kusto_test + +import ( + "context" + "fmt" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/kusto/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" +) + +type KustoClusterCustomerManagedKeyResource struct { +} + +func TestAccKustoClusterCustomerManagedKey_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_kusto_cluster_customer_managed_key", "test") 
+ r := KustoClusterCustomerManagedKeyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("key_vault_id").Exists(), + check.That(data.ResourceName).Key("key_name").Exists(), + check.That(data.ResourceName).Key("key_version").Exists(), + ), + }, + data.ImportStep(), + { + // Delete the encryption settings resource and verify it is gone + Config: r.template(data), + Check: resource.ComposeTestCheckFunc( + // Then ensure the encryption settings on the Kusto cluster + // have been reverted to their default state + testCheckKustoClusterExistsWithoutCustomerManagedKey("azurerm_kusto_cluster.test"), + ), + }, + }) +} + +func TestAccKustoClusterCustomerManagedKey_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_kusto_cluster_customer_managed_key", "test") + r := KustoClusterCustomerManagedKeyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("key_vault_id").Exists(), + check.That(data.ResourceName).Key("key_name").Exists(), + check.That(data.ResourceName).Key("key_version").Exists(), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccKustoClusterCustomerManagedKey_updateKey(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_kusto_cluster_customer_managed_key", "test") + r := KustoClusterCustomerManagedKeyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("key_vault_id").Exists(), + check.That(data.ResourceName).Key("key_name").Exists(), + check.That(data.ResourceName).Key("key_version").Exists(), + ), + }, + data.ImportStep(), + { + Config: r.updated(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (KustoClusterCustomerManagedKeyResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ClusterID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Kusto.ClustersClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving %s: %v", id.String(), err) + } + + if resp.ClusterProperties == nil || resp.ClusterProperties.KeyVaultProperties == nil { + return nil, fmt.Errorf("properties nil for %s", id.String()) + } + + return utils.Bool(true), nil +} + +func testCheckKustoClusterExistsWithoutCustomerManagedKey(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).Kusto.ClustersClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + // Ensure we have enough information in state to look up in API + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + + id, err := parse.ClusterID(rs.Primary.ID) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Bad: Kusto Cluster %q (resource group: %q) does not exist", id.Name, 
id.ResourceGroup) + } + + return fmt.Errorf("Bad: Get on kustoClustersClient: %+v", err) + } + + if props := resp.ClusterProperties; props != nil { + if encryption := props.KeyVaultProperties; encryption != nil { + return fmt.Errorf("Kusto Cluster encryption properties still found: %s", resourceName) + } + } + + return nil + } +} + +func (KustoClusterCustomerManagedKeyResource) basic(data acceptance.TestData) string { + template := KustoClusterCustomerManagedKeyResource{}.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_kusto_cluster_customer_managed_key" "test" { + cluster_id = azurerm_kusto_cluster.test.id + key_vault_id = azurerm_key_vault.test.id + key_name = azurerm_key_vault_key.first.name + key_version = azurerm_key_vault_key.first.version +} +`, template) +} + +func (KustoClusterCustomerManagedKeyResource) requiresImport(data acceptance.TestData) string { + template := KustoClusterCustomerManagedKeyResource{}.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_kusto_cluster_customer_managed_key" "import" { + cluster_id = azurerm_kusto_cluster_customer_managed_key.test.cluster_id + key_vault_id = azurerm_kusto_cluster_customer_managed_key.test.key_vault_id + key_name = azurerm_kusto_cluster_customer_managed_key.test.key_name + key_version = azurerm_kusto_cluster_customer_managed_key.test.key_version +} +`, template) +} + +func (KustoClusterCustomerManagedKeyResource) updated(data acceptance.TestData) string { + template := KustoClusterCustomerManagedKeyResource{}.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_key_vault_key" "second" { + name = "second" + key_vault_id = azurerm_key_vault.test.id + key_type = "RSA" + key_size = 2048 + key_opts = ["decrypt", "encrypt", "sign", "unwrapKey", "verify", "wrapKey"] + + depends_on = [ + azurerm_key_vault_access_policy.client, + azurerm_key_vault_access_policy.cluster, + ] +} + +resource "azurerm_kusto_cluster_customer_managed_key" "test" { + cluster_id = azurerm_kusto_cluster.test.id + key_vault_id = azurerm_key_vault.test.id + key_name = azurerm_key_vault_key.second.name + key_version = azurerm_key_vault_key.second.version +} +`, template) +} + +func (KustoClusterCustomerManagedKeyResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features { + key_vault { + purge_soft_delete_on_destroy = false + } + } +} + +data "azurerm_client_config" "current" {} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "acctestkv%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "standard" + soft_delete_enabled = true + purge_protection_enabled = true +} + +resource "azurerm_key_vault_access_policy" "cluster" { + key_vault_id = azurerm_key_vault.test.id + tenant_id = azurerm_kusto_cluster.test.identity.0.tenant_id + object_id = azurerm_kusto_cluster.test.identity.0.principal_id + + key_permissions = ["get", "unwrapkey", "wrapkey"] +} + +resource "azurerm_key_vault_access_policy" "client" { + key_vault_id = azurerm_key_vault.test.id + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + + key_permissions = ["get", "list", "create", "delete", "recover"] +} + +resource "azurerm_key_vault_key" "first" { + name = "test" + key_vault_id = azurerm_key_vault.test.id + key_type = "RSA" + key_size = 2048 
+ key_opts = ["decrypt", "encrypt", "sign", "unwrapKey", "verify", "wrapKey"] + + depends_on = [ + azurerm_key_vault_access_policy.client, + azurerm_key_vault_access_policy.cluster, + ] +} + +resource "azurerm_kusto_cluster" "test" { + name = "acctestkc%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku { + name = "Dev(No SLA)_Standard_D11_v2" + capacity = 1 + } + + identity { + type = "SystemAssigned" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) +} diff --git a/azurerm/internal/services/kusto/kusto_cluster_data_source.go b/azurerm/internal/services/kusto/kusto_cluster_data_source.go index 1a8d4fd389c4..f3dbce3acbcf 100644 --- a/azurerm/internal/services/kusto/kusto_cluster_data_source.go +++ b/azurerm/internal/services/kusto/kusto_cluster_data_source.go @@ -12,9 +12,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmKustoCluster() *schema.Resource { +func dataSourceKustoCluster() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmKustoClusterRead, + Read: dataSourceKustoClusterRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -46,7 +46,7 @@ func dataSourceArmKustoCluster() *schema.Resource { } } -func dataSourceArmKustoClusterRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceKustoClusterRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Kusto.ClustersClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/kusto/kusto_cluster_data_source_test.go b/azurerm/internal/services/kusto/kusto_cluster_data_source_test.go new file mode 100644 index 000000000000..99e141c56ec7 --- /dev/null +++ b/azurerm/internal/services/kusto/kusto_cluster_data_source_test.go @@ -0,0 +1,36 @@ +package kusto_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +func TestAccKustoClusterDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_kusto_cluster", "test") + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: testAccDataSourceKustoCluster_basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(KustoClusterResource{}), + resource.TestCheckResourceAttrSet(data.ResourceName, "uri"), + resource.TestCheckResourceAttrSet(data.ResourceName, "data_ingestion_uri"), + ), + }, + }) +} + +func testAccDataSourceKustoCluster_basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_kusto_cluster" "test" { + name = azurerm_kusto_cluster.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, KustoClusterResource{}.basic(data)) +} diff --git a/azurerm/internal/services/kusto/kusto_cluster_principal_assignment_resource.go b/azurerm/internal/services/kusto/kusto_cluster_principal_assignment_resource.go index 43f0ffb3ca81..764fbf7dd4a6 100644 --- a/azurerm/internal/services/kusto/kusto_cluster_principal_assignment_resource.go +++ b/azurerm/internal/services/kusto/kusto_cluster_principal_assignment_resource.go @@ -6,7 +6,7 @@ import ( "regexp" "time" - 
"github.com/Azure/azure-sdk-for-go/services/kusto/mgmt/2020-02-15/kusto" + "github.com/Azure/azure-sdk-for-go/services/kusto/mgmt/2020-09-18/kusto" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" @@ -17,11 +17,11 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmKustoClusterPrincipalAssignment() *schema.Resource { +func resourceKustoClusterPrincipalAssignment() *schema.Resource { return &schema.Resource{ - Create: resourceArmKustoClusterPrincipalAssignmentCreateUpdate, - Read: resourceArmKustoClusterPrincipalAssignmentRead, - Delete: resourceArmKustoClusterPrincipalAssignmentDelete, + Create: resourceKustoClusterPrincipalAssignmentCreateUpdate, + Read: resourceKustoClusterPrincipalAssignmentRead, + Delete: resourceKustoClusterPrincipalAssignmentDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -99,7 +99,7 @@ func resourceArmKustoClusterPrincipalAssignment() *schema.Resource { } } -func resourceArmKustoClusterPrincipalAssignmentCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceKustoClusterPrincipalAssignmentCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Kusto.ClusterPrincipalAssignmentsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -157,31 +157,31 @@ func resourceArmKustoClusterPrincipalAssignmentCreateUpdate(d *schema.ResourceDa d.SetId(*resp.ID) - return resourceArmKustoClusterPrincipalAssignmentRead(d, meta) + return resourceKustoClusterPrincipalAssignmentRead(d, meta) } -func resourceArmKustoClusterPrincipalAssignmentRead(d *schema.ResourceData, meta interface{}) error { +func resourceKustoClusterPrincipalAssignmentRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Kusto.ClusterPrincipalAssignmentsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.KustoClusterPrincipalAssignmentID(d.Id()) + id, err := parse.ClusterPrincipalAssignmentID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.Cluster, id.Name) + resp, err := client.Get(ctx, id.ResourceGroup, id.ClusterName, id.PrincipalAssignmentName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("Error retrieving Kusto Cluster Principal Assignment %q (Resource Group %q, Cluster %q): %+v", id.Name, id.ResourceGroup, id.Cluster, err) + return fmt.Errorf("Error retrieving Kusto Cluster Principal Assignment %q (Resource Group %q, Cluster %q): %+v", id.PrincipalAssignmentName, id.ResourceGroup, id.ClusterName, err) } d.Set("resource_group_name", id.ResourceGroup) - d.Set("cluster_name", id.Cluster) - d.Set("name", id.Name) + d.Set("cluster_name", id.ClusterName) + d.Set("name", id.PrincipalAssignmentName) tenantID := "" if resp.TenantID != nil { @@ -216,23 +216,23 @@ func resourceArmKustoClusterPrincipalAssignmentRead(d *schema.ResourceData, meta return nil } -func resourceArmKustoClusterPrincipalAssignmentDelete(d *schema.ResourceData, meta interface{}) error { +func resourceKustoClusterPrincipalAssignmentDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Kusto.ClusterPrincipalAssignmentsClient ctx, cancel := 
timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.KustoClusterPrincipalAssignmentID(d.Id()) + id, err := parse.ClusterPrincipalAssignmentID(d.Id()) if err != nil { return err } - future, err := client.Delete(ctx, id.ResourceGroup, id.Cluster, id.Name) + future, err := client.Delete(ctx, id.ResourceGroup, id.ClusterName, id.PrincipalAssignmentName) if err != nil { - return fmt.Errorf("Error deleting Kusto Cluster Principal Assignment %q (Resource Group %q, Cluster %q): %+v", id.Name, id.ResourceGroup, id.Cluster, err) + return fmt.Errorf("Error deleting Kusto Cluster Principal Assignment %q (Resource Group %q, Cluster %q): %+v", id.PrincipalAssignmentName, id.ResourceGroup, id.ClusterName, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for deletion of Kusto Cluster Principal Assignment %q (Resource Group %q, Cluster %q): %+v", id.Name, id.ResourceGroup, id.Cluster, err) + return fmt.Errorf("Error waiting for deletion of Kusto Cluster Principal Assignment %q (Resource Group %q, Cluster %q): %+v", id.PrincipalAssignmentName, id.ResourceGroup, id.ClusterName, err) } return nil diff --git a/azurerm/internal/services/kusto/kusto_cluster_principal_assignment_resource_test.go b/azurerm/internal/services/kusto/kusto_cluster_principal_assignment_resource_test.go new file mode 100644 index 000000000000..1dae693c2c6e --- /dev/null +++ b/azurerm/internal/services/kusto/kusto_cluster_principal_assignment_resource_test.go @@ -0,0 +1,84 @@ +package kusto_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/kusto/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type KustoClusterPrincipalAssignmentResource struct { +} + +func TestAccKustoClusterPrincipalAssignment_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_kusto_cluster_principal_assignment", "test") + r := KustoClusterPrincipalAssignmentResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (KustoClusterPrincipalAssignmentResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ClusterPrincipalAssignmentID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Kusto.ClusterPrincipalAssignmentsClient.Get(ctx, id.ResourceGroup, id.ClusterName, id.PrincipalAssignmentName) + if err != nil { + return nil, fmt.Errorf("retrieving %s: %v", id.String(), err) + } + + return utils.Bool(resp.ClusterPrincipalProperties != nil), nil +} + +func (KustoClusterPrincipalAssignmentResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" {} + +resource "azurerm_resource_group" "rg" { + name = "acctestRG-kusto-%d" + location = "%s" +} + +resource "azurerm_kusto_cluster" "test" { + 
name                = "acctestkc%s"
+  location            = azurerm_resource_group.rg.location
+  resource_group_name = azurerm_resource_group.rg.name
+
+  sku {
+    name     = "Dev(No SLA)_Standard_D11_v2"
+    capacity = 1
+  }
+}
+
+resource "azurerm_kusto_cluster_principal_assignment" "test" {
+  name                = "acctestkdpa%d"
+  resource_group_name = azurerm_resource_group.rg.name
+  cluster_name        = azurerm_kusto_cluster.test.name
+
+  tenant_id      = data.azurerm_client_config.current.tenant_id
+  principal_id   = data.azurerm_client_config.current.client_id
+  principal_type = "App"
+  role           = "AllDatabasesViewer"
+}
+`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger)
+}
diff --git a/azurerm/internal/services/kusto/kusto_cluster_resource.go b/azurerm/internal/services/kusto/kusto_cluster_resource.go
index a0537a7500ba..c27e9bcc31cc 100644
--- a/azurerm/internal/services/kusto/kusto_cluster_resource.go
+++ b/azurerm/internal/services/kusto/kusto_cluster_resource.go
@@ -7,7 +7,7 @@ import (
 	"strings"
 	"time"
 
-	"github.com/Azure/azure-sdk-for-go/services/kusto/mgmt/2020-02-15/kusto"
+	"github.com/Azure/azure-sdk-for-go/services/kusto/mgmt/2020-09-18/kusto"
 	"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
 	"github.com/hashicorp/terraform-plugin-sdk/helper/validation"
 	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure"
@@ -19,12 +19,12 @@ import (
 	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
 )
 
-func resourceArmKustoCluster() *schema.Resource {
+func resourceKustoCluster() *schema.Resource {
 	return &schema.Resource{
-		Create: resourceArmKustoClusterCreateUpdate,
-		Read:   resourceArmKustoClusterRead,
-		Update: resourceArmKustoClusterCreateUpdate,
-		Delete: resourceArmKustoClusterDelete,
+		Create: resourceKustoClusterCreateUpdate,
+		Read:   resourceKustoClusterRead,
+		Update: resourceKustoClusterCreateUpdate,
+		Delete: resourceKustoClusterDelete,
 
 		Importer: &schema.ResourceImporter{
 			State: schema.ImportStatePassthrough,
@@ -178,6 +178,17 @@ func resourceArmKustoCluster() *schema.Resource {
 				},
 			},
 
+			"engine": {
+				Type:     schema.TypeString,
+				Optional: true,
+				ForceNew: true,
+				Default:  string(kusto.V2),
+				ValidateFunc: validation.StringInSlice([]string{
+					string(kusto.V2),
+					string(kusto.V3),
+				}, false),
+			},
+
 			"uri": {
 				Type:     schema.TypeString,
 				Computed: true,
@@ -195,7 +206,7 @@ func resourceArmKustoCluster() *schema.Resource {
 	}
 }
 
-func resourceArmKustoClusterCreateUpdate(d *schema.ResourceData, meta interface{}) error {
+func resourceKustoClusterCreateUpdate(d *schema.ResourceData, meta interface{}) error {
 	client := meta.(*clients.Client).Kusto.ClustersClient
 	ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d)
 	defer cancel()
@@ -230,10 +241,13 @@ func resourceArmKustoClusterCreateUpdate(d *schema.ResourceData, meta interface{
 	optimizedAutoScale := expandOptimizedAutoScale(d.Get("optimized_auto_scale").([]interface{}))
 
 	if optimizedAutoScale != nil && *optimizedAutoScale.IsEnabled {
-		// if Capacity has not been set use min instances
-		if *sku.Capacity == 0 {
+		// Ensure that the requested Capacity is always between min and max to support updating to non-overlapping autoscale ranges
+		if *sku.Capacity < *optimizedAutoScale.Minimum {
 			sku.Capacity = utils.Int32(*optimizedAutoScale.Minimum)
 		}
+		if *sku.Capacity > *optimizedAutoScale.Maximum {
+			sku.Capacity = utils.Int32(*optimizedAutoScale.Maximum)
+		}
 
 		// Capacity must be set for the initial creation when using OptimizedAutoScaling but cannot be updated
 		if d.HasChange("sku.0.capacity")
&& !d.IsNewResource() { @@ -245,11 +259,14 @@ func resourceArmKustoClusterCreateUpdate(d *schema.ResourceData, meta interface{ } } + engine := kusto.EngineType(d.Get("engine").(string)) + clusterProperties := kusto.ClusterProperties{ OptimizedAutoscale: optimizedAutoScale, EnableDiskEncryption: utils.Bool(d.Get("enable_disk_encryption").(bool)), EnableStreamingIngest: utils.Bool(d.Get("enable_streaming_ingest").(bool)), EnablePurge: utils.Bool(d.Get("enable_purge").(bool)), + EngineType: engine, } if v, ok := d.GetOk("virtual_network_configuration"); ok { @@ -339,21 +356,20 @@ func resourceArmKustoClusterCreateUpdate(d *schema.ResourceData, meta interface{ } } - return resourceArmKustoClusterRead(d, meta) + return resourceKustoClusterRead(d, meta) } -func resourceArmKustoClusterRead(d *schema.ResourceData, meta interface{}) error { +func resourceKustoClusterRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Kusto.ClustersClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.KustoClusterID(d.Id()) + id, err := parse.ClusterID(d.Id()) if err != nil { return err } clusterResponse, err := client.Get(ctx, id.ResourceGroup, id.Name) - if err != nil { if utils.ResponseWasNotFound(clusterResponse.Response) { d.SetId("") @@ -393,17 +409,18 @@ func resourceArmKustoClusterRead(d *schema.ResourceData, meta interface{}) error d.Set("language_extensions", flattenKustoClusterLanguageExtensions(clusterProperties.LanguageExtensions)) d.Set("uri", clusterProperties.URI) d.Set("data_ingestion_uri", clusterProperties.DataIngestionURI) + d.Set("engine", clusterProperties.EngineType) } return tags.FlattenAndSet(d, clusterResponse.Tags) } -func resourceArmKustoClusterDelete(d *schema.ResourceData, meta interface{}) error { +func resourceKustoClusterDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Kusto.ClustersClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.KustoClusterID(d.Id()) + id, err := parse.ClusterID(d.Id()) if err != nil { return err } diff --git a/azurerm/internal/services/kusto/kusto_cluster_resource_test.go b/azurerm/internal/services/kusto/kusto_cluster_resource_test.go new file mode 100644 index 000000000000..2b18b0962b12 --- /dev/null +++ b/azurerm/internal/services/kusto/kusto_cluster_resource_test.go @@ -0,0 +1,669 @@ +package kusto_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/kusto/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type KustoClusterResource struct { +} + +func TestAccKustoCluster_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_kusto_cluster", "test") + r := KustoClusterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + 
data.ImportStep(), + }) +} + +func TestAccKustoCluster_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_kusto_cluster", "test") + r := KustoClusterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("enable_disk_encryption").HasValue("false"), + check.That(data.ResourceName).Key("enable_streaming_ingest").HasValue("false"), + check.That(data.ResourceName).Key("enable_purge").HasValue("false"), + ), + }, + data.ImportStep(), + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("enable_disk_encryption").HasValue("true"), + check.That(data.ResourceName).Key("enable_streaming_ingest").HasValue("true"), + check.That(data.ResourceName).Key("enable_purge").HasValue("true"), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("enable_disk_encryption").HasValue("false"), + check.That(data.ResourceName).Key("enable_streaming_ingest").HasValue("false"), + check.That(data.ResourceName).Key("enable_purge").HasValue("false"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccKustoCluster_withTags(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_kusto_cluster", "test") + r := KustoClusterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.label").HasValue("test"), + ), + }, + { + Config: r.withTagsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("2"), + check.That(data.ResourceName).Key("tags.label").HasValue("test1"), + check.That(data.ResourceName).Key("tags.ENV").HasValue("prod"), + ), + }, + }) +} + +func TestAccKustoCluster_sku(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_kusto_cluster", "test") + r := KustoClusterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("sku.0.name").HasValue("Dev(No SLA)_Standard_D11_v2"), + check.That(data.ResourceName).Key("sku.0.capacity").HasValue("1"), + ), + }, + { + Config: r.skuUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("sku.0.name").HasValue("Standard_D11_v2"), + check.That(data.ResourceName).Key("sku.0.capacity").HasValue("2"), + ), + }, + }) +} + +func TestAccKustoCluster_zones(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_kusto_cluster", "test") + r := KustoClusterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withZones(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("zones.#").HasValue("1"), + check.That(data.ResourceName).Key("zones.0").HasValue("1"), + ), + }, + }) +} + +func TestAccKustoCluster_identitySystemAssigned(t *testing.T) { + data := acceptance.BuildTestData(t, 
"azurerm_kusto_cluster", "test") + r := KustoClusterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.identitySystemAssigned(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("identity.0.type").HasValue("SystemAssigned"), + check.That(data.ResourceName).Key("identity.0.identity_ids.#").HasValue("0"), + resource.TestMatchResourceAttr(data.ResourceName, "identity.0.principal_id", validate.UUIDRegExp), + ), + }, + data.ImportStep(), + }) +} + +func TestAccKustoCluster_vnet(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_kusto_cluster", "test") + r := KustoClusterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.vnet(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("virtual_network_configuration.#").HasValue("1"), + check.That(data.ResourceName).Key("virtual_network_configuration.0.subnet_id").Exists(), + check.That(data.ResourceName).Key("virtual_network_configuration.0.engine_public_ip_id").Exists(), + check.That(data.ResourceName).Key("virtual_network_configuration.0.data_management_public_ip_id").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccKustoCluster_languageExtensions(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_kusto_cluster", "test") + r := KustoClusterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.languageExtensions(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("language_extensions.#").HasValue("2"), + check.That(data.ResourceName).Key("language_extensions.0").HasValue("PYTHON"), + check.That(data.ResourceName).Key("language_extensions.1").HasValue("R"), + ), + }, + data.ImportStep(), + { + Config: r.languageExtensionsRemove(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("language_extensions.#").HasValue("1"), + check.That(data.ResourceName).Key("language_extensions.0").HasValue("R"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccKustoCluster_optimizedAutoScale(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_kusto_cluster", "test") + r := KustoClusterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.optimizedAutoScale(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("optimized_auto_scale.#").HasValue("1"), + check.That(data.ResourceName).Key("optimized_auto_scale.0.minimum_instances").HasValue("2"), + check.That(data.ResourceName).Key("optimized_auto_scale.0.maximum_instances").HasValue("3"), + ), + }, + data.ImportStep(), + { + Config: r.optimizedAutoScaleUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("optimized_auto_scale.#").HasValue("1"), + check.That(data.ResourceName).Key("optimized_auto_scale.0.minimum_instances").HasValue("3"), + check.That(data.ResourceName).Key("optimized_auto_scale.0.maximum_instances").HasValue("4"), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccKustoCluster_engineV3(t *testing.T) { + data := 
acceptance.BuildTestData(t, "azurerm_kusto_cluster", "test") + r := KustoClusterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.engineV3(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (KustoClusterResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ClusterID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Kusto.ClustersClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving %s: %v", id.String(), err) + } + + return utils.Bool(resp.ClusterProperties != nil), nil +} + +func (KustoClusterResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_kusto_cluster" "test" { + name = "acctestkc%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku { + name = "Dev(No SLA)_Standard_D11_v2" + capacity = 1 + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (KustoClusterResource) withTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_kusto_cluster" "test" { + name = "acctestkc%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku { + name = "Dev(No SLA)_Standard_D11_v2" + capacity = 1 + } + + tags = { + label = "test" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (KustoClusterResource) withTagsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_kusto_cluster" "test" { + name = "acctestkc%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku { + name = "Dev(No SLA)_Standard_D11_v2" + capacity = 1 + } + + tags = { + label = "test1" + ENV = "prod" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (KustoClusterResource) skuUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_kusto_cluster" "test" { + name = "acctestkc%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku { + name = "Standard_D11_v2" + capacity = 2 + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (KustoClusterResource) withZones(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_kusto_cluster" "test" { + name = "acctestkc%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku { + name = "Dev(No SLA)_Standard_D11_v2" + capacity = 1 + } + + zones = ["1"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) 
+} + +func (KustoClusterResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_kusto_cluster" "test" { + name = "acctestkc%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + enable_disk_encryption = true + enable_streaming_ingest = true + enable_purge = true + + sku { + name = "Dev(No SLA)_Standard_D11_v2" + capacity = 1 + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (KustoClusterResource) identitySystemAssigned(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_kusto_cluster" "test" { + name = "acctestkc%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku { + name = "Dev(No SLA)_Standard_D11_v2" + capacity = 1 + } + + identity { + type = "SystemAssigned" + } +} + `, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (KustoClusterResource) languageExtensions(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_kusto_cluster" "test" { + name = "acctestkc%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku { + name = "Dev(No SLA)_Standard_D11_v2" + capacity = 1 + } + + language_extensions = ["PYTHON", "R"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (KustoClusterResource) languageExtensionsRemove(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_kusto_cluster" "test" { + name = "acctestkc%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku { + name = "Dev(No SLA)_Standard_D11_v2" + capacity = 1 + } + + language_extensions = ["R"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (KustoClusterResource) optimizedAutoScale(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_kusto_cluster" "test" { + name = "acctestkc%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku { + name = "Standard_D11_v2" + } + + optimized_auto_scale { + minimum_instances = 2 + maximum_instances = 3 + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (KustoClusterResource) optimizedAutoScaleUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_kusto_cluster" "test" { + name = "acctestkc%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku { + name = "Standard_D11_v2" + } + + optimized_auto_scale { + minimum_instances = 3 + 
maximum_instances = 4 + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (KustoClusterResource) vnet(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestkc%s-vnet" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctestkc%s-subnet" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefixes = ["10.0.1.0/24"] +} + +resource "azurerm_network_security_group" "test" { + name = "acctestkc%s-nsg" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_network_security_rule" "test_allow_management_inbound" { + name = "AllowAzureDataExplorerManagement" + priority = 100 + direction = "Inbound" + access = "Allow" + protocol = "Tcp" + source_port_range = "*" + destination_port_range = "443" + source_address_prefix = "AzureDataExplorerManagement" + destination_address_prefix = "VirtualNetwork" + resource_group_name = azurerm_resource_group.test.name + network_security_group_name = azurerm_network_security_group.test.name +} + +resource "azurerm_subnet_network_security_group_association" "test" { + subnet_id = azurerm_subnet.test.id + network_security_group_id = azurerm_network_security_group.test.id +} + +resource "azurerm_public_ip" "engine_pip" { + name = "acctestkc%s-engine-pip" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + allocation_method = "Static" +} + +resource "azurerm_public_ip" "management_pip" { + name = "acctestkc%s-management-pip" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Basic" + allocation_method = "Static" +} + +resource "azurerm_kusto_cluster" "test" { + name = "acctestkc%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku { + name = "Dev(No SLA)_Standard_D11_v2" + capacity = 1 + } + + virtual_network_configuration { + subnet_id = azurerm_subnet.test.id + engine_public_ip_id = azurerm_public_ip.engine_pip.id + data_management_public_ip_id = azurerm_public_ip.management_pip.id + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString, data.RandomString, data.RandomString, data.RandomString, data.RandomString) +} + +func (KustoClusterResource) engineV3(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_kusto_cluster" "test" { + name = "acctestkc%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku { + name = "Dev(No SLA)_Standard_D11_v2" + capacity = 1 + } + engine = "V3" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} diff --git a/azurerm/internal/services/kusto/kusto_database_principal_assignment_resource.go b/azurerm/internal/services/kusto/kusto_database_principal_assignment_resource.go index bb547b88d9d4..96d1bf65d921 100644 --- 
a/azurerm/internal/services/kusto/kusto_database_principal_assignment_resource.go +++ b/azurerm/internal/services/kusto/kusto_database_principal_assignment_resource.go @@ -6,7 +6,7 @@ import ( "regexp" "time" - "github.com/Azure/azure-sdk-for-go/services/kusto/mgmt/2020-02-15/kusto" + "github.com/Azure/azure-sdk-for-go/services/kusto/mgmt/2020-09-18/kusto" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" @@ -17,11 +17,11 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmKustoDatabasePrincipalAssignment() *schema.Resource { +func resourceKustoDatabasePrincipalAssignment() *schema.Resource { return &schema.Resource{ - Create: resourceArmKustoDatabasePrincipalAssignmentCreate, - Read: resourceArmKustoDatabasePrincipalAssignmentRead, - Delete: resourceArmKustoDatabasePrincipalAssignmentDelete, + Create: resourceKustoDatabasePrincipalAssignmentCreate, + Read: resourceKustoDatabasePrincipalAssignmentRead, + Delete: resourceKustoDatabasePrincipalAssignmentDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -109,7 +109,7 @@ func resourceArmKustoDatabasePrincipalAssignment() *schema.Resource { } } -func resourceArmKustoDatabasePrincipalAssignmentCreate(d *schema.ResourceData, meta interface{}) error { +func resourceKustoDatabasePrincipalAssignmentCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Kusto.DatabasePrincipalAssignmentsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -166,32 +166,32 @@ func resourceArmKustoDatabasePrincipalAssignmentCreate(d *schema.ResourceData, m d.SetId(*resp.ID) - return resourceArmKustoDatabasePrincipalAssignmentRead(d, meta) + return resourceKustoDatabasePrincipalAssignmentRead(d, meta) } -func resourceArmKustoDatabasePrincipalAssignmentRead(d *schema.ResourceData, meta interface{}) error { +func resourceKustoDatabasePrincipalAssignmentRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Kusto.DatabasePrincipalAssignmentsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.KustoDatabasePrincipalAssignmentID(d.Id()) + id, err := parse.DatabasePrincipalAssignmentID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.Cluster, id.Database, id.Name) + resp, err := client.Get(ctx, id.ResourceGroup, id.ClusterName, id.DatabaseName, id.PrincipalAssignmentName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("failed to retrieve Kusto Database Principal Assignment %q (Resource Group %q, Cluster %q, Database %q): %+v", id.Name, id.ResourceGroup, id.Cluster, id.Database, err) + return fmt.Errorf("failed to retrieve Kusto Database Principal Assignment %q (Resource Group %q, Cluster %q, Database %q): %+v", id.PrincipalAssignmentName, id.ResourceGroup, id.ClusterName, id.DatabaseName, err) } d.Set("resource_group_name", id.ResourceGroup) - d.Set("cluster_name", id.Cluster) - d.Set("database_name", id.Database) - d.Set("name", id.Name) + d.Set("cluster_name", id.ClusterName) + d.Set("database_name", id.DatabaseName) + d.Set("name", id.PrincipalAssignmentName) tenantID := "" if resp.TenantID != nil { @@ -226,23 +226,23 @@ func 
resourceArmKustoDatabasePrincipalAssignmentRead(d *schema.ResourceData, met return nil } -func resourceArmKustoDatabasePrincipalAssignmentDelete(d *schema.ResourceData, meta interface{}) error { +func resourceKustoDatabasePrincipalAssignmentDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Kusto.DatabasePrincipalAssignmentsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.KustoDatabasePrincipalAssignmentID(d.Id()) + id, err := parse.DatabasePrincipalAssignmentID(d.Id()) if err != nil { return err } - future, err := client.Delete(ctx, id.ResourceGroup, id.Cluster, id.Database, id.Name) + future, err := client.Delete(ctx, id.ResourceGroup, id.ClusterName, id.DatabaseName, id.PrincipalAssignmentName) if err != nil { - return fmt.Errorf("Error deleting Kusto Database Principal Assignment %q (Resource Group %q, Cluster %q, Database %q): %+v", id.Name, id.ResourceGroup, id.Cluster, id.Database, err) + return fmt.Errorf("Error deleting Kusto Database Principal Assignment %q (Resource Group %q, Cluster %q, Database %q): %+v", id.PrincipalAssignmentName, id.ResourceGroup, id.ClusterName, id.DatabaseName, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for deletion of Kusto Database Principal Assignment %q (Resource Group %q, Cluster %q, Database %q): %+v", id.Name, id.ResourceGroup, id.Cluster, id.Database, err) + return fmt.Errorf("Error waiting for deletion of Kusto Database Principal Assignment %q (Resource Group %q, Cluster %q, Database %q): %+v", id.PrincipalAssignmentName, id.ResourceGroup, id.ClusterName, id.DatabaseName, err) } return nil diff --git a/azurerm/internal/services/kusto/kusto_database_principal_assignment_resource_test.go b/azurerm/internal/services/kusto/kusto_database_principal_assignment_resource_test.go new file mode 100644 index 000000000000..e454cff4e6a0 --- /dev/null +++ b/azurerm/internal/services/kusto/kusto_database_principal_assignment_resource_test.go @@ -0,0 +1,126 @@ +package kusto_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/kusto/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type KustoDatabasePrincipalAssignmentResource struct { +} + +func TestAccKustoDatabasePrincipalAssignment_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_kusto_database_principal_assignment", "test") + r := KustoDatabasePrincipalAssignmentResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccKustoDatabasePrincipalAssignment_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_kusto_database_principal_assignment", "test") + r := KustoDatabasePrincipalAssignmentResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + 
check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (KustoDatabasePrincipalAssignmentResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.DatabasePrincipalAssignmentID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Kusto.DatabasePrincipalAssignmentsClient.Get(ctx, id.ResourceGroup, id.ClusterName, id.DatabaseName, id.PrincipalAssignmentName) + if err != nil { + return nil, fmt.Errorf("retrieving %s: %v", id.String(), err) + } + + return utils.Bool(resp.DatabasePrincipalProperties != nil), nil +} + +func (KustoDatabasePrincipalAssignmentResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" {} + +resource "azurerm_resource_group" "rg" { + name = "acctestRG-kusto-%d" + location = "%s" +} + +resource "azurerm_kusto_cluster" "test" { + name = "acctestkc%s" + location = azurerm_resource_group.rg.location + resource_group_name = azurerm_resource_group.rg.name + + sku { + name = "Dev(No SLA)_Standard_D11_v2" + capacity = 1 + } +} + +resource "azurerm_kusto_database" "test" { + name = "acctestkd-%d" + resource_group_name = azurerm_resource_group.rg.name + location = azurerm_resource_group.rg.location + cluster_name = azurerm_kusto_cluster.test.name +} + +resource "azurerm_kusto_database_principal_assignment" "test" { + name = "acctestkdpa%d" + resource_group_name = azurerm_resource_group.rg.name + cluster_name = azurerm_kusto_cluster.test.name + database_name = azurerm_kusto_database.test.name + + tenant_id = data.azurerm_client_config.current.tenant_id + principal_id = data.azurerm_client_config.current.client_id + principal_type = "App" + role = "Viewer" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger) +} + +func (KustoDatabasePrincipalAssignmentResource) requiresImport(data acceptance.TestData) string { + template := KustoDatabasePrincipalAssignmentResource{}.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_kusto_database_principal_assignment" "import" { + name = azurerm_kusto_database_principal_assignment.test.name + resource_group_name = azurerm_kusto_database_principal_assignment.test.resource_group_name + cluster_name = azurerm_kusto_database_principal_assignment.test.cluster_name + database_name = azurerm_kusto_database_principal_assignment.test.database_name + + tenant_id = azurerm_kusto_database_principal_assignment.test.tenant_id + principal_id = azurerm_kusto_database_principal_assignment.test.principal_id + principal_type = azurerm_kusto_database_principal_assignment.test.principal_type + role = azurerm_kusto_database_principal_assignment.test.role +} +`, template) +} diff --git a/azurerm/internal/services/kusto/kusto_database_principal_resource.go b/azurerm/internal/services/kusto/kusto_database_principal_resource.go index 0a00d4a8854c..9d458804b55b 100644 --- a/azurerm/internal/services/kusto/kusto_database_principal_resource.go +++ b/azurerm/internal/services/kusto/kusto_database_principal_resource.go @@ -6,7 +6,7 @@ import ( "strings" "time" - "github.com/Azure/azure-sdk-for-go/services/kusto/mgmt/2020-02-15/kusto" + "github.com/Azure/azure-sdk-for-go/services/kusto/mgmt/2020-09-18/kusto" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" @@ -17,11 +17,11 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmKustoDatabasePrincipal() *schema.Resource { +func resourceKustoDatabasePrincipal() *schema.Resource { return &schema.Resource{ - Create: resourceArmKustoDatabasePrincipalCreate, - Read: resourceArmKustoDatabasePrincipalRead, - Delete: resourceArmKustoDatabasePrincipalDelete, + Create: resourceKustoDatabasePrincipalCreate, + Read: resourceKustoDatabasePrincipalRead, + Delete: resourceKustoDatabasePrincipalDelete, DeprecationMessage: "This resource has been superseded by `azurerm_kusto_database_principal_assignment` to reflects changes in the API/SDK and will be removed in version 3.0 of the provider.", @@ -118,7 +118,7 @@ func resourceArmKustoDatabasePrincipal() *schema.Resource { } } -func resourceArmKustoDatabasePrincipalCreate(d *schema.ResourceData, meta interface{}) error { +func resourceKustoDatabasePrincipalCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Kusto.DatabasesClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -207,32 +207,32 @@ func resourceArmKustoDatabasePrincipalCreate(d *schema.ResourceData, meta interf d.SetId(resourceID) - return resourceArmKustoDatabasePrincipalRead(d, meta) + return resourceKustoDatabasePrincipalRead(d, meta) } -func resourceArmKustoDatabasePrincipalRead(d *schema.ResourceData, meta interface{}) error { +func resourceKustoDatabasePrincipalRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Kusto.DatabasesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.KustoDatabasePrincipalID(d.Id()) + id, err := parse.DatabasePrincipalID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.Cluster, id.Database) + resp, err := client.Get(ctx, id.ResourceGroup, id.ClusterName, id.DatabaseName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("Error retrieving Kusto Database %q (Resource Group %q, Cluster %q): %+v", id.Database, id.ResourceGroup, id.Cluster, err) + return fmt.Errorf("Error retrieving Kusto Database %q (Resource Group %q, Cluster %q): %+v", id.DatabaseName, id.ResourceGroup, id.ClusterName, err) } - databasePrincipals, err := client.ListPrincipals(ctx, id.ResourceGroup, id.Cluster, id.Database) + databasePrincipals, err := client.ListPrincipals(ctx, id.ResourceGroup, id.ClusterName, id.DatabaseName) if err != nil { if !utils.ResponseWasNotFound(databasePrincipals.Response) { - return fmt.Errorf("Error checking for presence of existing Kusto Database Principals %q (Resource Group %q, Cluster %q): %s", id, id.ResourceGroup, id.Cluster, err) + return fmt.Errorf("Error checking for presence of existing Kusto Database Principals %q (Resource Group %q, Cluster %q): %s", id, id.ResourceGroup, id.ClusterName, err) } } @@ -241,7 +241,7 @@ func resourceArmKustoDatabasePrincipalRead(d *schema.ResourceData, meta interfac if principals := databasePrincipals.Value; principals != nil { for _, currPrincipal := range *principals { // kusto database principals are unique when looked at with fqn and role - if string(currPrincipal.Role) == id.Role && currPrincipal.Fqn != nil && *currPrincipal.Fqn == id.Name { + if string(currPrincipal.Role) == id.RoleName && currPrincipal.Fqn != nil && 
*currPrincipal.Fqn == id.FQNName { principal = currPrincipal found = true break @@ -256,8 +256,8 @@ func resourceArmKustoDatabasePrincipalRead(d *schema.ResourceData, meta interfac } d.Set("resource_group_name", id.ResourceGroup) - d.Set("cluster_name", id.Cluster) - d.Set("database_name", id.Database) + d.Set("cluster_name", id.ClusterName) + d.Set("database_name", id.DatabaseName) d.Set("role", string(principal.Role)) d.Set("type", string(principal.Type)) @@ -274,13 +274,13 @@ func resourceArmKustoDatabasePrincipalRead(d *schema.ResourceData, meta interfac d.Set("name", principal.Name) } - splitFQN := strings.Split(id.Name, "=") + splitFQN := strings.Split(id.FQNName, "=") if len(splitFQN) != 2 { - return fmt.Errorf("Expected `fqn` to be in the format aadtype=objectid:clientid but got: %q", id.Name) + return fmt.Errorf("Expected `fqn` to be in the format aadtype=objectid:clientid but got: %q", id.FQNName) } splitIDs := strings.Split(splitFQN[1], ";") if len(splitIDs) != 2 { - return fmt.Errorf("Expected `fqn` to be in the format aadtype=objectid:clientid but got: %q", id.Name) + return fmt.Errorf("Expected `fqn` to be in the format aadtype=objectid:clientid but got: %q", id.FQNName) } d.Set("object_id", splitIDs[0]) d.Set("client_id", splitIDs[1]) @@ -288,19 +288,19 @@ func resourceArmKustoDatabasePrincipalRead(d *schema.ResourceData, meta interfac return nil } -func resourceArmKustoDatabasePrincipalDelete(d *schema.ResourceData, meta interface{}) error { +func resourceKustoDatabasePrincipalDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Kusto.DatabasesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.KustoDatabasePrincipalID(d.Id()) + id, err := parse.DatabasePrincipalID(d.Id()) if err != nil { return err } kustoPrincipal := kusto.DatabasePrincipal{ - Role: kusto.DatabasePrincipalRole(id.Role), - Fqn: utils.String(id.Name), + Role: kusto.DatabasePrincipalRole(id.RoleName), + Fqn: utils.String(id.FQNName), Type: kusto.DatabasePrincipalType(d.Get("type").(string)), // These three must be specified or the api returns `The request is invalid.` // For more info: https://github.com/Azure/azure-sdk-for-go/issues/6547 @@ -314,8 +314,8 @@ func resourceArmKustoDatabasePrincipalDelete(d *schema.ResourceData, meta interf Value: &principals, } - if _, err = client.RemovePrincipals(ctx, id.ResourceGroup, id.Cluster, id.Database, request); err != nil { - return fmt.Errorf("Error deleting Kusto Database Principal %q (Resource Group %q, Cluster %q, Database %q): %+v", id, id.ResourceGroup, id.Cluster, id.Database, err) + if _, err = client.RemovePrincipals(ctx, id.ResourceGroup, id.ClusterName, id.DatabaseName, request); err != nil { + return fmt.Errorf("Error deleting Kusto Database Principal %q (Resource Group %q, Cluster %q, Database %q): %+v", id, id.ResourceGroup, id.ClusterName, id.DatabaseName, err) } return nil diff --git a/azurerm/internal/services/kusto/kusto_database_principal_resource_test.go b/azurerm/internal/services/kusto/kusto_database_principal_resource_test.go new file mode 100644 index 000000000000..db41c9762e54 --- /dev/null +++ b/azurerm/internal/services/kusto/kusto_database_principal_resource_test.go @@ -0,0 +1,107 @@ +package kusto_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/kusto/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type KustoDatabasePrincipalResource struct { +} + +func TestAccKustoDatabasePrincipal_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_kusto_database_principal", "test") + r := KustoDatabasePrincipalResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (KustoDatabasePrincipalResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + client := clients.Kusto.DatabasesClient + id, err := parse.DatabasePrincipalID(state.ID) + if err != nil { + return nil, err + } + + if _, err = client.Get(ctx, id.ResourceGroup, id.ClusterName, id.DatabaseName); err != nil { + return nil, fmt.Errorf("retrieving %s: %v", id.String(), err) + } + + databasePrincipals, err := client.ListPrincipals(ctx, id.ResourceGroup, id.ClusterName, id.DatabaseName) + if err != nil { + if !utils.ResponseWasNotFound(databasePrincipals.Response) { + return nil, fmt.Errorf("retrieving principals for %s: %v", id.String(), err) + } + } + + if principals := databasePrincipals.Value; principals != nil { + for _, currPrincipal := range *principals { + // kusto database principals are unique when looked at with fqn and role + if string(currPrincipal.Role) == id.RoleName && currPrincipal.Fqn != nil && *currPrincipal.Fqn == id.FQNName { + return utils.Bool(true), nil + } + } + } + return utils.Bool(false), nil +} + +func (KustoDatabasePrincipalResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" {} + + +resource "azurerm_resource_group" "rg" { + name = "acctestRG-kusto-%d" + location = "%s" +} + +resource "azurerm_kusto_cluster" "cluster" { + name = "acctestkc%s" + location = azurerm_resource_group.rg.location + resource_group_name = azurerm_resource_group.rg.name + + sku { + name = "Dev(No SLA)_Standard_D11_v2" + capacity = 1 + } +} + +resource "azurerm_kusto_database" "test" { + name = "acctestkd-%d" + resource_group_name = azurerm_resource_group.rg.name + location = azurerm_resource_group.rg.location + cluster_name = azurerm_kusto_cluster.cluster.name +} + +resource "azurerm_kusto_database_principal" "test" { + resource_group_name = azurerm_resource_group.rg.name + cluster_name = azurerm_kusto_cluster.cluster.name + database_name = azurerm_kusto_database.test.name + + role = "Viewer" + type = "App" + client_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.client_id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) +} diff --git a/azurerm/internal/services/kusto/kusto_database_resource.go b/azurerm/internal/services/kusto/kusto_database_resource.go index 16ce74612122..04b4997c681d 100644 --- a/azurerm/internal/services/kusto/kusto_database_resource.go +++ b/azurerm/internal/services/kusto/kusto_database_resource.go @@ -6,7 +6,7 @@ import ( "regexp" "time" - 
"github.com/Azure/azure-sdk-for-go/services/kusto/mgmt/2020-02-15/kusto" + "github.com/Azure/azure-sdk-for-go/services/kusto/mgmt/2020-09-18/kusto" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" @@ -17,12 +17,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmKustoDatabase() *schema.Resource { +func resourceKustoDatabase() *schema.Resource { return &schema.Resource{ - Create: resourceArmKustoDatabaseCreateUpdate, - Read: resourceArmKustoDatabaseRead, - Update: resourceArmKustoDatabaseCreateUpdate, - Delete: resourceArmKustoDatabaseDelete, + Create: resourceKustoDatabaseCreateUpdate, + Read: resourceKustoDatabaseRead, + Update: resourceKustoDatabaseCreateUpdate, + Delete: resourceKustoDatabaseDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -74,7 +74,7 @@ func resourceArmKustoDatabase() *schema.Resource { } } -func resourceArmKustoDatabaseCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceKustoDatabaseCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Kusto.DatabasesClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -142,40 +142,40 @@ func resourceArmKustoDatabaseCreateUpdate(d *schema.ResourceData, meta interface d.SetId(*database.ID) - return resourceArmKustoDatabaseRead(d, meta) + return resourceKustoDatabaseRead(d, meta) } -func resourceArmKustoDatabaseRead(d *schema.ResourceData, meta interface{}) error { +func resourceKustoDatabaseRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Kusto.DatabasesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.KustoDatabaseID(d.Id()) + id, err := parse.DatabaseID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.Cluster, id.Name) + resp, err := client.Get(ctx, id.ResourceGroup, id.ClusterName, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("Error retrieving Kusto Database %q (Resource Group %q, Cluster %q): %+v", id.Name, id.ResourceGroup, id.Cluster, err) + return fmt.Errorf("Error retrieving Kusto Database %q (Resource Group %q, Cluster %q): %+v", id.Name, id.ResourceGroup, id.ClusterName, err) } if resp.Value == nil { - return fmt.Errorf("Error retrieving Kusto Database %q (Resource Group %q, Cluster %q): Invalid resource response", id.Name, id.ResourceGroup, id.Cluster) + return fmt.Errorf("Error retrieving Kusto Database %q (Resource Group %q, Cluster %q): Invalid resource response", id.Name, id.ResourceGroup, id.ClusterName) } database, ok := resp.Value.AsReadWriteDatabase() if !ok { - return fmt.Errorf("Existing resource is not a Read/Write Database (Resource Group %q, Cluster %q): %q", id.ResourceGroup, id.Cluster, id.Name) + return fmt.Errorf("Existing resource is not a Read/Write Database (Resource Group %q, Cluster %q): %q", id.ResourceGroup, id.ClusterName, id.Name) } d.Set("name", id.Name) d.Set("resource_group_name", id.ResourceGroup) - d.Set("cluster_name", id.Cluster) + d.Set("cluster_name", id.ClusterName) if location := database.Location; location != nil { d.Set("location", azure.NormalizeLocation(*location)) @@ -193,27 +193,23 @@ func 
resourceArmKustoDatabaseRead(d *schema.ResourceData, meta interface{}) erro return nil } -func resourceArmKustoDatabaseDelete(d *schema.ResourceData, meta interface{}) error { +func resourceKustoDatabaseDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Kusto.DatabasesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.DatabaseID(d.Id()) if err != nil { return err } - resGroup := id.ResourceGroup - clusterName := id.Path["Clusters"] - name := id.Path["Databases"] - - future, err := client.Delete(ctx, resGroup, clusterName, name) + future, err := client.Delete(ctx, id.ResourceGroup, id.ClusterName, id.Name) if err != nil { - return fmt.Errorf("Error deleting Kusto Database %q (Resource Group %q, Cluster %q): %+v", name, resGroup, clusterName, err) + return fmt.Errorf("deleting %s: %+v", id, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for deletion of Kusto Database %q (Resource Group %q, Cluster %q): %+v", name, resGroup, clusterName, err) + return fmt.Errorf("waiting for deletion of %s: %+v", id, err) } return nil diff --git a/azurerm/internal/services/kusto/kusto_database_resource_test.go b/azurerm/internal/services/kusto/kusto_database_resource_test.go new file mode 100644 index 000000000000..bdab01940502 --- /dev/null +++ b/azurerm/internal/services/kusto/kusto_database_resource_test.go @@ -0,0 +1,259 @@ +package kusto_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/kusto/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type KustoDatabaseResource struct { +} + +func TestAccKustoDatabase_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_kusto_database", "test") + r := KustoDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccKustoDatabase_softDeletePeriod(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_kusto_database", "test") + r := KustoDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.softDeletePeriod(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("soft_delete_period").HasValue("P7D"), + ), + }, + { + Config: r.softDeletePeriodUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("soft_delete_period").HasValue("P31D"), + ), + }, + }) +} + +func TestAccKustoDatabase_hotCachePeriod(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_kusto_database", "test") + r := KustoDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.hotCachePeriod(data), + Check: resource.ComposeTestCheckFunc( + 
check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("hot_cache_period").HasValue("P7D"), + ), + }, + { + Config: r.hotCachePeriodUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("hot_cache_period").HasValue("P14DT12H"), + ), + }, + }) +} + +func (KustoDatabaseResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "rg" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_kusto_cluster" "cluster" { + name = "acctestkc%s" + location = azurerm_resource_group.rg.location + resource_group_name = azurerm_resource_group.rg.name + + sku { + name = "Dev(No SLA)_Standard_D11_v2" + capacity = 1 + } +} + +resource "azurerm_kusto_database" "test" { + name = "acctestkd-%d" + resource_group_name = azurerm_resource_group.rg.name + location = azurerm_resource_group.rg.location + cluster_name = azurerm_kusto_cluster.cluster.name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) +} + +func (KustoDatabaseResource) softDeletePeriod(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "rg" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_kusto_cluster" "cluster" { + name = "acctestkc%s" + location = azurerm_resource_group.rg.location + resource_group_name = azurerm_resource_group.rg.name + + sku { + name = "Dev(No SLA)_Standard_D11_v2" + capacity = 1 + } +} + +resource "azurerm_kusto_database" "test" { + name = "acctestkd-%d" + resource_group_name = azurerm_resource_group.rg.name + location = azurerm_resource_group.rg.location + cluster_name = azurerm_kusto_cluster.cluster.name + + soft_delete_period = "P7D" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) +} + +func (KustoDatabaseResource) softDeletePeriodUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "rg" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_kusto_cluster" "cluster" { + name = "acctestkc%s" + location = azurerm_resource_group.rg.location + resource_group_name = azurerm_resource_group.rg.name + + sku { + name = "Dev(No SLA)_Standard_D11_v2" + capacity = 1 + } +} + +resource "azurerm_kusto_database" "test" { + name = "acctestkd-%d" + resource_group_name = azurerm_resource_group.rg.name + location = azurerm_resource_group.rg.location + cluster_name = azurerm_kusto_cluster.cluster.name + + soft_delete_period = "P31D" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) +} + +func (KustoDatabaseResource) hotCachePeriod(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "rg" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_kusto_cluster" "cluster" { + name = "acctestkc%s" + location = azurerm_resource_group.rg.location + resource_group_name = azurerm_resource_group.rg.name + + sku { + name = "Dev(No SLA)_Standard_D11_v2" + capacity = 1 + } +} + +resource "azurerm_kusto_database" "test" { + name = "acctestkd-%d" + resource_group_name = azurerm_resource_group.rg.name + location = azurerm_resource_group.rg.location + cluster_name = azurerm_kusto_cluster.cluster.name + + hot_cache_period = "P7D" +} +`, 
data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) +} + +func (KustoDatabaseResource) hotCachePeriodUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "rg" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_kusto_cluster" "cluster" { + name = "acctestkc%s" + location = azurerm_resource_group.rg.location + resource_group_name = azurerm_resource_group.rg.name + + sku { + name = "Dev(No SLA)_Standard_D11_v2" + capacity = 1 + } +} + +resource "azurerm_kusto_database" "test" { + name = "acctestkd-%d" + resource_group_name = azurerm_resource_group.rg.name + location = azurerm_resource_group.rg.location + cluster_name = azurerm_kusto_cluster.cluster.name + + hot_cache_period = "P14DT12H" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) +} + +func (KustoDatabaseResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.DatabaseID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Kusto.DatabasesClient.Get(ctx, id.ResourceGroup, id.ClusterName, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving %s: %v", id.String(), err) + } + + value, ok := resp.Value.AsReadWriteDatabase() + if !ok { + return nil, fmt.Errorf("%s is not a ReadWriteDatabase", id.String()) + } + + return utils.Bool(value.ReadWriteDatabaseProperties != nil), nil +} diff --git a/azurerm/internal/services/kusto/kusto_eventhub_data_connection_resource.go b/azurerm/internal/services/kusto/kusto_eventhub_data_connection_resource.go index 0737b046fb88..9aa1837c0aa6 100644 --- a/azurerm/internal/services/kusto/kusto_eventhub_data_connection_resource.go +++ b/azurerm/internal/services/kusto/kusto_eventhub_data_connection_resource.go @@ -6,7 +6,7 @@ import ( "regexp" "time" - "github.com/Azure/azure-sdk-for-go/services/kusto/mgmt/2020-02-15/kusto" + "github.com/Azure/azure-sdk-for-go/services/kusto/mgmt/2020-09-18/kusto" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" @@ -17,12 +17,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmKustoEventHubDataConnection() *schema.Resource { +func resourceKustoEventHubDataConnection() *schema.Resource { return &schema.Resource{ - Create: resourceArmKustoEventHubDataConnectionCreateUpdate, - Read: resourceArmKustoEventHubDataConnectionRead, - Update: resourceArmKustoEventHubDataConnectionCreateUpdate, - Delete: resourceArmKustoEventHubDataConnectionDelete, + Create: resourceKustoEventHubDataConnectionCreateUpdate, + Read: resourceKustoEventHubDataConnectionRead, + Update: resourceKustoEventHubDataConnectionCreateUpdate, + Delete: resourceKustoEventHubDataConnectionDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -55,6 +55,17 @@ func resourceArmKustoEventHubDataConnection() *schema.Resource { ValidateFunc: validateAzureRMKustoClusterName, }, + "compression": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Default: kusto.CompressionNone, + ValidateFunc: validation.StringInSlice([]string{ + string(kusto.CompressionGZip), + string(kusto.CompressionNone), + }, false), + }, + "database_name": { Type: schema.TypeString, Required: true, @@ -120,7 +131,7 @@ func 
resourceArmKustoEventHubDataConnection() *schema.Resource { } } -func resourceArmKustoEventHubDataConnectionCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceKustoEventHubDataConnectionCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Kusto.DataConnectionsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() @@ -180,33 +191,32 @@ func resourceArmKustoEventHubDataConnectionCreateUpdate(d *schema.ResourceData, d.SetId(*dataConnection.ID) } - return resourceArmKustoEventHubDataConnectionRead(d, meta) + return resourceKustoEventHubDataConnectionRead(d, meta) } -func resourceArmKustoEventHubDataConnectionRead(d *schema.ResourceData, meta interface{}) error { +func resourceKustoEventHubDataConnectionRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Kusto.DataConnectionsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.KustoEventHubDataConnectionID(d.Id()) + id, err := parse.DataConnectionID(d.Id()) if err != nil { return err } - connectionModel, err := client.Get(ctx, id.ResourceGroup, id.Cluster, id.Database, id.Name) - + connectionModel, err := client.Get(ctx, id.ResourceGroup, id.ClusterName, id.DatabaseName, id.Name) if err != nil { if utils.ResponseWasNotFound(connectionModel.Response) { d.SetId("") return nil } - return fmt.Errorf("Error retrieving Kusto Event Hub Data Connection %q (Resource Group %q, Cluster %q, Database %q): %+v", id.Name, id.ResourceGroup, id.Cluster, id.Database, err) + return fmt.Errorf("Error retrieving Kusto Event Hub Data Connection %q (Resource Group %q, Cluster %q, Database %q): %+v", id.Name, id.ResourceGroup, id.ClusterName, id.DatabaseName, err) } d.Set("name", id.Name) d.Set("resource_group_name", id.ResourceGroup) - d.Set("cluster_name", id.Cluster) - d.Set("database_name", id.Database) + d.Set("cluster_name", id.ClusterName) + d.Set("database_name", id.DatabaseName) if dataConnection, ok := connectionModel.Value.(kusto.EventHubDataConnection); ok { if location := dataConnection.Location; location != nil { @@ -219,29 +229,30 @@ func resourceArmKustoEventHubDataConnectionRead(d *schema.ResourceData, meta int d.Set("table_name", props.TableName) d.Set("mapping_rule_name", props.MappingRuleName) d.Set("data_format", props.DataFormat) + d.Set("compression", props.Compression) } } return nil } -func resourceArmKustoEventHubDataConnectionDelete(d *schema.ResourceData, meta interface{}) error { +func resourceKustoEventHubDataConnectionDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Kusto.DataConnectionsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.KustoEventHubDataConnectionID(d.Id()) + id, err := parse.DataConnectionID(d.Id()) if err != nil { return err } - future, err := client.Delete(ctx, id.ResourceGroup, id.Cluster, id.Database, id.Name) + future, err := client.Delete(ctx, id.ResourceGroup, id.ClusterName, id.DatabaseName, id.Name) if err != nil { - return fmt.Errorf("Error deleting Kusto Event Hub Data Connection %q (Resource Group %q, Cluster %q, Database %q): %+v", id.Name, id.ResourceGroup, id.Cluster, id.Database, err) + return fmt.Errorf("Error deleting Kusto Event Hub Data Connection %q (Resource Group %q, Cluster %q, Database %q): %+v", id.Name, id.ResourceGroup, id.ClusterName, id.DatabaseName, err) } if err = 
future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for deletion of Kusto Event Hub Data Connection %q (Resource Group %q, Cluster %q, Database %q): %+v", id.Name, id.ResourceGroup, id.Cluster, id.Database, err) + return fmt.Errorf("Error waiting for deletion of Kusto Event Hub Data Connection %q (Resource Group %q, Cluster %q, Database %q): %+v", id.Name, id.ResourceGroup, id.ClusterName, id.DatabaseName, err) } return nil @@ -306,5 +317,9 @@ func expandKustoEventHubDataConnectionProperties(d *schema.ResourceData) *kusto. eventHubConnectionProperties.DataFormat = kusto.EventHubDataFormat(df.(string)) } + if compression, ok := d.GetOk("compression"); ok { + eventHubConnectionProperties.Compression = kusto.Compression(compression.(string)) + } + return eventHubConnectionProperties } diff --git a/azurerm/internal/services/kusto/kusto_eventhub_data_connection_resource_test.go b/azurerm/internal/services/kusto/kusto_eventhub_data_connection_resource_test.go new file mode 100644 index 000000000000..d67bcd5feca7 --- /dev/null +++ b/azurerm/internal/services/kusto/kusto_eventhub_data_connection_resource_test.go @@ -0,0 +1,116 @@ +package kusto_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/kusto/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type KustoEventHubDataConnectionResource struct { +} + +func TestAccKustoEventHubDataConnection_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_kusto_eventhub_data_connection", "test") + r := KustoEventHubDataConnectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (KustoEventHubDataConnectionResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_kusto_cluster" "test" { + name = "acctestkc%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku { + name = "Dev(No SLA)_Standard_D11_v2" + capacity = 1 + } +} + +resource "azurerm_kusto_database" "test" { + name = "acctestkd-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_name = azurerm_kusto_cluster.test.name +} + +resource "azurerm_eventhub_namespace" "test" { + name = "acctesteventhubnamespace-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" +} + +resource "azurerm_eventhub" "test" { + name = "acctesteventhub-%d" + namespace_name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + partition_count = 1 + message_retention = 1 +} + +resource "azurerm_eventhub_consumer_group" "test" { + name = "acctesteventhubcg-%d" + namespace_name = 
azurerm_eventhub_namespace.test.name + eventhub_name = azurerm_eventhub.test.name + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_kusto_eventhub_data_connection" "test" { + name = "acctestkedc-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + cluster_name = azurerm_kusto_cluster.test.name + database_name = azurerm_kusto_database.test.name + + eventhub_id = azurerm_eventhub.test.id + consumer_group = azurerm_eventhub_consumer_group.test.name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (KustoEventHubDataConnectionResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.DataConnectionID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Kusto.DataConnectionsClient.Get(ctx, id.ResourceGroup, id.ClusterName, id.DatabaseName, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving %s: %v", id.String(), err) + } + + value, ok := resp.Value.AsEventHubDataConnection() + if !ok { + return nil, fmt.Errorf("%s is not an EventHubDataConnection", id.String()) + } + + return utils.Bool(value.EventHubConnectionProperties != nil), nil +} diff --git a/azurerm/internal/services/kusto/parse/attached_database_configuration.go b/azurerm/internal/services/kusto/parse/attached_database_configuration.go new file mode 100644 index 000000000000..6740eeb298b1 --- /dev/null +++ b/azurerm/internal/services/kusto/parse/attached_database_configuration.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type AttachedDatabaseConfigurationId struct { + SubscriptionId string + ResourceGroup string + ClusterName string + Name string +} + +func NewAttachedDatabaseConfigurationID(subscriptionId, resourceGroup, clusterName, name string) AttachedDatabaseConfigurationId { + return AttachedDatabaseConfigurationId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ClusterName: clusterName, + Name: name, + } +} + +func (id AttachedDatabaseConfigurationId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Cluster Name %q", id.ClusterName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Attached Database Configuration", segmentsStr) +} + +func (id AttachedDatabaseConfigurationId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Kusto/Clusters/%s/AttachedDatabaseConfigurations/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ClusterName, id.Name) +} + +// AttachedDatabaseConfigurationID parses a AttachedDatabaseConfiguration ID into an AttachedDatabaseConfigurationId struct +func AttachedDatabaseConfigurationID(input string) (*AttachedDatabaseConfigurationId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := AttachedDatabaseConfigurationId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if 
resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ClusterName, err = id.PopSegment("Clusters"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("AttachedDatabaseConfigurations"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/kusto/parse/attached_database_configuration_test.go b/azurerm/internal/services/kusto/parse/attached_database_configuration_test.go new file mode 100644 index 000000000000..90651c5ab78a --- /dev/null +++ b/azurerm/internal/services/kusto/parse/attached_database_configuration_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = AttachedDatabaseConfigurationId{} + +func TestAttachedDatabaseConfigurationIDFormatter(t *testing.T) { + actual := NewAttachedDatabaseConfigurationID("12345678-1234-9876-4563-123456789012", "resGroup1", "cluster1", "config1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/AttachedDatabaseConfigurations/config1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestAttachedDatabaseConfigurationID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *AttachedDatabaseConfigurationId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/", + Error: true, + }, + + { + // missing value for ClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/AttachedDatabaseConfigurations/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/AttachedDatabaseConfigurations/config1", + Expected: &AttachedDatabaseConfigurationId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + ClusterName: "cluster1", + Name: "config1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.KUSTO/CLUSTERS/CLUSTER1/ATTACHEDDATABASECONFIGURATIONS/CONFIG1", + Error: true, + }, + } + + for _, v := range testData { + 
t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := AttachedDatabaseConfigurationID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ClusterName != v.Expected.ClusterName { + t.Fatalf("Expected %q but got %q for ClusterName", v.Expected.ClusterName, actual.ClusterName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/kusto/parse/cluster.go b/azurerm/internal/services/kusto/parse/cluster.go new file mode 100644 index 000000000000..2b61fd9f2b7e --- /dev/null +++ b/azurerm/internal/services/kusto/parse/cluster.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ClusterId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewClusterID(subscriptionId, resourceGroup, name string) ClusterId { + return ClusterId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id ClusterId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Cluster", segmentsStr) +} + +func (id ClusterId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Kusto/Clusters/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// ClusterID parses a Cluster ID into an ClusterId struct +func ClusterID(input string) (*ClusterId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ClusterId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("Clusters"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/kusto/parse/cluster_principal_assignment.go b/azurerm/internal/services/kusto/parse/cluster_principal_assignment.go new file mode 100644 index 000000000000..0d86b9624739 --- /dev/null +++ b/azurerm/internal/services/kusto/parse/cluster_principal_assignment.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ClusterPrincipalAssignmentId struct { + SubscriptionId string + ResourceGroup string + ClusterName string + PrincipalAssignmentName string +} + 
+func NewClusterPrincipalAssignmentID(subscriptionId, resourceGroup, clusterName, principalAssignmentName string) ClusterPrincipalAssignmentId { + return ClusterPrincipalAssignmentId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ClusterName: clusterName, + PrincipalAssignmentName: principalAssignmentName, + } +} + +func (id ClusterPrincipalAssignmentId) String() string { + segments := []string{ + fmt.Sprintf("Principal Assignment Name %q", id.PrincipalAssignmentName), + fmt.Sprintf("Cluster Name %q", id.ClusterName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Cluster Principal Assignment", segmentsStr) +} + +func (id ClusterPrincipalAssignmentId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Kusto/Clusters/%s/PrincipalAssignments/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ClusterName, id.PrincipalAssignmentName) +} + +// ClusterPrincipalAssignmentID parses a ClusterPrincipalAssignment ID into an ClusterPrincipalAssignmentId struct +func ClusterPrincipalAssignmentID(input string) (*ClusterPrincipalAssignmentId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ClusterPrincipalAssignmentId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ClusterName, err = id.PopSegment("Clusters"); err != nil { + return nil, err + } + if resourceId.PrincipalAssignmentName, err = id.PopSegment("PrincipalAssignments"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/kusto/parse/cluster_principal_assignment_test.go b/azurerm/internal/services/kusto/parse/cluster_principal_assignment_test.go new file mode 100644 index 000000000000..1a0cf3f5d361 --- /dev/null +++ b/azurerm/internal/services/kusto/parse/cluster_principal_assignment_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ClusterPrincipalAssignmentId{} + +func TestClusterPrincipalAssignmentIDFormatter(t *testing.T) { + actual := NewClusterPrincipalAssignmentID("12345678-1234-9876-4563-123456789012", "resGroup1", "cluster1", "assignment1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/PrincipalAssignments/assignment1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestClusterPrincipalAssignmentID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ClusterPrincipalAssignmentId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: 
true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/", + Error: true, + }, + + { + // missing value for ClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/", + Error: true, + }, + + { + // missing PrincipalAssignmentName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/", + Error: true, + }, + + { + // missing value for PrincipalAssignmentName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/PrincipalAssignments/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/PrincipalAssignments/assignment1", + Expected: &ClusterPrincipalAssignmentId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + ClusterName: "cluster1", + PrincipalAssignmentName: "assignment1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.KUSTO/CLUSTERS/CLUSTER1/PRINCIPALASSIGNMENTS/ASSIGNMENT1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ClusterPrincipalAssignmentID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ClusterName != v.Expected.ClusterName { + t.Fatalf("Expected %q but got %q for ClusterName", v.Expected.ClusterName, actual.ClusterName) + } + if actual.PrincipalAssignmentName != v.Expected.PrincipalAssignmentName { + t.Fatalf("Expected %q but got %q for PrincipalAssignmentName", v.Expected.PrincipalAssignmentName, actual.PrincipalAssignmentName) + } + } +} diff --git a/azurerm/internal/services/kusto/parse/cluster_test.go b/azurerm/internal/services/kusto/parse/cluster_test.go new file mode 100644 index 000000000000..1185cb02e221 --- /dev/null +++ b/azurerm/internal/services/kusto/parse/cluster_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ClusterId{} + +func TestClusterIDFormatter(t *testing.T) { + actual := NewClusterID("12345678-1234-9876-4563-123456789012", "resGroup1", "cluster1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestClusterID(t *testing.T) { + testData := []struct { + Input string + Error bool + 
Expected *ClusterId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1", + Expected: &ClusterId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "cluster1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.KUSTO/CLUSTERS/CLUSTER1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ClusterID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/kusto/parse/data_connection.go b/azurerm/internal/services/kusto/parse/data_connection.go new file mode 100644 index 000000000000..f15eb1d35842 --- /dev/null +++ b/azurerm/internal/services/kusto/parse/data_connection.go @@ -0,0 +1,81 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type DataConnectionId struct { + SubscriptionId string + ResourceGroup string + ClusterName string + DatabaseName string + Name string +} + +func NewDataConnectionID(subscriptionId, resourceGroup, clusterName, databaseName, name string) DataConnectionId { + return DataConnectionId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ClusterName: clusterName, + DatabaseName: databaseName, + Name: name, + } +} + +func (id DataConnectionId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Database Name %q", id.DatabaseName), + fmt.Sprintf("Cluster Name %q", id.ClusterName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Data Connection", segmentsStr) +} + +func (id DataConnectionId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Kusto/Clusters/%s/Databases/%s/DataConnections/%s" + return 
fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ClusterName, id.DatabaseName, id.Name) +} + +// DataConnectionID parses a DataConnection ID into an DataConnectionId struct +func DataConnectionID(input string) (*DataConnectionId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := DataConnectionId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ClusterName, err = id.PopSegment("Clusters"); err != nil { + return nil, err + } + if resourceId.DatabaseName, err = id.PopSegment("Databases"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("DataConnections"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/kusto/parse/data_connection_test.go b/azurerm/internal/services/kusto/parse/data_connection_test.go new file mode 100644 index 000000000000..011b410601ab --- /dev/null +++ b/azurerm/internal/services/kusto/parse/data_connection_test.go @@ -0,0 +1,144 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = DataConnectionId{} + +func TestDataConnectionIDFormatter(t *testing.T) { + actual := NewDataConnectionID("12345678-1234-9876-4563-123456789012", "resGroup1", "cluster1", "database1", "connection1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/DataConnections/connection1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestDataConnectionID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *DataConnectionId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/", + Error: true, + }, + + { + // missing value for ClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/", + Error: true, + }, + + { + // missing DatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/", + Error: true, + }, + + { + // missing value for DatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/", + Error: true, + }, + + { + // missing Name + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/DataConnections/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/DataConnections/connection1", + Expected: &DataConnectionId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + ClusterName: "cluster1", + DatabaseName: "database1", + Name: "connection1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.KUSTO/CLUSTERS/CLUSTER1/DATABASES/DATABASE1/DATACONNECTIONS/CONNECTION1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := DataConnectionID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ClusterName != v.Expected.ClusterName { + t.Fatalf("Expected %q but got %q for ClusterName", v.Expected.ClusterName, actual.ClusterName) + } + if actual.DatabaseName != v.Expected.DatabaseName { + t.Fatalf("Expected %q but got %q for DatabaseName", v.Expected.DatabaseName, actual.DatabaseName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/kusto/parse/database.go b/azurerm/internal/services/kusto/parse/database.go new file mode 100644 index 000000000000..a9040ade4283 --- /dev/null +++ b/azurerm/internal/services/kusto/parse/database.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type DatabaseId struct { + SubscriptionId string + ResourceGroup string + ClusterName string + Name string +} + +func NewDatabaseID(subscriptionId, resourceGroup, clusterName, name string) DatabaseId { + return DatabaseId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ClusterName: clusterName, + Name: name, + } +} + +func (id DatabaseId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Cluster Name %q", id.ClusterName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Database", segmentsStr) +} + +func (id DatabaseId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Kusto/Clusters/%s/Databases/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ClusterName, id.Name) +} + +// DatabaseID parses a Database ID into an DatabaseId struct +func DatabaseID(input 
string) (*DatabaseId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := DatabaseId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ClusterName, err = id.PopSegment("Clusters"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("Databases"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/kusto/parse/database_principal.go b/azurerm/internal/services/kusto/parse/database_principal.go new file mode 100644 index 000000000000..b76efaf687d5 --- /dev/null +++ b/azurerm/internal/services/kusto/parse/database_principal.go @@ -0,0 +1,87 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type DatabasePrincipalId struct { + SubscriptionId string + ResourceGroup string + ClusterName string + DatabaseName string + RoleName string + FQNName string +} + +func NewDatabasePrincipalID(subscriptionId, resourceGroup, clusterName, databaseName, roleName, fQNName string) DatabasePrincipalId { + return DatabasePrincipalId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ClusterName: clusterName, + DatabaseName: databaseName, + RoleName: roleName, + FQNName: fQNName, + } +} + +func (id DatabasePrincipalId) String() string { + segments := []string{ + fmt.Sprintf("F Q N Name %q", id.FQNName), + fmt.Sprintf("Role Name %q", id.RoleName), + fmt.Sprintf("Database Name %q", id.DatabaseName), + fmt.Sprintf("Cluster Name %q", id.ClusterName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Database Principal", segmentsStr) +} + +func (id DatabasePrincipalId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Kusto/Clusters/%s/Databases/%s/Role/%s/FQN/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ClusterName, id.DatabaseName, id.RoleName, id.FQNName) +} + +// DatabasePrincipalID parses a DatabasePrincipal ID into an DatabasePrincipalId struct +func DatabasePrincipalID(input string) (*DatabasePrincipalId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := DatabasePrincipalId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ClusterName, err = id.PopSegment("Clusters"); err != nil { + return nil, err + } + if resourceId.DatabaseName, err = id.PopSegment("Databases"); err != nil { + return nil, err + } + if resourceId.RoleName, err = id.PopSegment("Role"); err != nil { + return nil, err + } + if resourceId.FQNName, err = id.PopSegment("FQN"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + 
return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/kusto/parse/database_principal_assignment.go b/azurerm/internal/services/kusto/parse/database_principal_assignment.go new file mode 100644 index 000000000000..dbda522361aa --- /dev/null +++ b/azurerm/internal/services/kusto/parse/database_principal_assignment.go @@ -0,0 +1,81 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type DatabasePrincipalAssignmentId struct { + SubscriptionId string + ResourceGroup string + ClusterName string + DatabaseName string + PrincipalAssignmentName string +} + +func NewDatabasePrincipalAssignmentID(subscriptionId, resourceGroup, clusterName, databaseName, principalAssignmentName string) DatabasePrincipalAssignmentId { + return DatabasePrincipalAssignmentId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ClusterName: clusterName, + DatabaseName: databaseName, + PrincipalAssignmentName: principalAssignmentName, + } +} + +func (id DatabasePrincipalAssignmentId) String() string { + segments := []string{ + fmt.Sprintf("Principal Assignment Name %q", id.PrincipalAssignmentName), + fmt.Sprintf("Database Name %q", id.DatabaseName), + fmt.Sprintf("Cluster Name %q", id.ClusterName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Database Principal Assignment", segmentsStr) +} + +func (id DatabasePrincipalAssignmentId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Kusto/Clusters/%s/Databases/%s/PrincipalAssignments/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ClusterName, id.DatabaseName, id.PrincipalAssignmentName) +} + +// DatabasePrincipalAssignmentID parses a DatabasePrincipalAssignment ID into an DatabasePrincipalAssignmentId struct +func DatabasePrincipalAssignmentID(input string) (*DatabasePrincipalAssignmentId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := DatabasePrincipalAssignmentId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ClusterName, err = id.PopSegment("Clusters"); err != nil { + return nil, err + } + if resourceId.DatabaseName, err = id.PopSegment("Databases"); err != nil { + return nil, err + } + if resourceId.PrincipalAssignmentName, err = id.PopSegment("PrincipalAssignments"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/kusto/parse/database_principal_assignment_test.go b/azurerm/internal/services/kusto/parse/database_principal_assignment_test.go new file mode 100644 index 000000000000..9408c758aabd --- /dev/null +++ b/azurerm/internal/services/kusto/parse/database_principal_assignment_test.go @@ -0,0 +1,144 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = DatabasePrincipalAssignmentId{} + +func TestDatabasePrincipalAssignmentIDFormatter(t *testing.T) { + actual := NewDatabasePrincipalAssignmentID("12345678-1234-9876-4563-123456789012", "resGroup1", "cluster1", "database1", "assignment1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/PrincipalAssignments/assignment1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestDatabasePrincipalAssignmentID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *DatabasePrincipalAssignmentId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/", + Error: true, + }, + + { + // missing value for ClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/", + Error: true, + }, + + { + // missing DatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/", + Error: true, + }, + + { + // missing value for DatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/", + Error: true, + }, + + { + // missing PrincipalAssignmentName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/", + Error: true, + }, + + { + // missing value for PrincipalAssignmentName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/PrincipalAssignments/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/PrincipalAssignments/assignment1", + Expected: &DatabasePrincipalAssignmentId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + ClusterName: "cluster1", + DatabaseName: "database1", + PrincipalAssignmentName: "assignment1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.KUSTO/CLUSTERS/CLUSTER1/DATABASES/DATABASE1/PRINCIPALASSIGNMENTS/ASSIGNMENT1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := DatabasePrincipalAssignmentID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but 
got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ClusterName != v.Expected.ClusterName { + t.Fatalf("Expected %q but got %q for ClusterName", v.Expected.ClusterName, actual.ClusterName) + } + if actual.DatabaseName != v.Expected.DatabaseName { + t.Fatalf("Expected %q but got %q for DatabaseName", v.Expected.DatabaseName, actual.DatabaseName) + } + if actual.PrincipalAssignmentName != v.Expected.PrincipalAssignmentName { + t.Fatalf("Expected %q but got %q for PrincipalAssignmentName", v.Expected.PrincipalAssignmentName, actual.PrincipalAssignmentName) + } + } +} diff --git a/azurerm/internal/services/kusto/parse/database_principal_test.go b/azurerm/internal/services/kusto/parse/database_principal_test.go new file mode 100644 index 000000000000..1c1f96e4121d --- /dev/null +++ b/azurerm/internal/services/kusto/parse/database_principal_test.go @@ -0,0 +1,160 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = DatabasePrincipalId{} + +func TestDatabasePrincipalIDFormatter(t *testing.T) { + actual := NewDatabasePrincipalID("12345678-1234-9876-4563-123456789012", "resGroup1", "cluster1", "database1", "Viewer", "aaduser=11111111-1111-1111-1111-111111111111;22222222-2222-2222-2222-222222222222").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/Role/Viewer/FQN/aaduser=11111111-1111-1111-1111-111111111111;22222222-2222-2222-2222-222222222222" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestDatabasePrincipalID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *DatabasePrincipalId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/", + Error: true, + }, + + { + // missing value for ClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/", + Error: true, + }, + + { + // missing DatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/", + Error: true, + }, + + { + // missing value for DatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/", + Error: true, + }, + + { + // missing RoleName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/", + Error: true, + }, + + { + // missing value for 
RoleName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/Role/", + Error: true, + }, + + { + // missing FQNName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/Role/Viewer/", + Error: true, + }, + + { + // missing value for FQNName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/Role/Viewer/FQN/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/Role/Viewer/FQN/aaduser=11111111-1111-1111-1111-111111111111;22222222-2222-2222-2222-222222222222", + Expected: &DatabasePrincipalId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + ClusterName: "cluster1", + DatabaseName: "database1", + RoleName: "Viewer", + FQNName: "aaduser=11111111-1111-1111-1111-111111111111;22222222-2222-2222-2222-222222222222", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.KUSTO/CLUSTERS/CLUSTER1/DATABASES/DATABASE1/ROLE/VIEWER/FQN/AADUSER=11111111-1111-1111-1111-111111111111;22222222-2222-2222-2222-222222222222", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := DatabasePrincipalID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ClusterName != v.Expected.ClusterName { + t.Fatalf("Expected %q but got %q for ClusterName", v.Expected.ClusterName, actual.ClusterName) + } + if actual.DatabaseName != v.Expected.DatabaseName { + t.Fatalf("Expected %q but got %q for DatabaseName", v.Expected.DatabaseName, actual.DatabaseName) + } + if actual.RoleName != v.Expected.RoleName { + t.Fatalf("Expected %q but got %q for RoleName", v.Expected.RoleName, actual.RoleName) + } + if actual.FQNName != v.Expected.FQNName { + t.Fatalf("Expected %q but got %q for FQNName", v.Expected.FQNName, actual.FQNName) + } + } +} diff --git a/azurerm/internal/services/kusto/parse/database_test.go b/azurerm/internal/services/kusto/parse/database_test.go new file mode 100644 index 000000000000..2f6d16ffb535 --- /dev/null +++ b/azurerm/internal/services/kusto/parse/database_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = DatabaseId{} + +func TestDatabaseIDFormatter(t *testing.T) { + actual := NewDatabaseID("12345678-1234-9876-4563-123456789012", "resGroup1", "cluster1", "database1").ID() + expected := 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestDatabaseID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *DatabaseId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/", + Error: true, + }, + + { + // missing value for ClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1", + Expected: &DatabaseId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + ClusterName: "cluster1", + Name: "database1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.KUSTO/CLUSTERS/CLUSTER1/DATABASES/DATABASE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := DatabaseID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ClusterName != v.Expected.ClusterName { + t.Fatalf("Expected %q but got %q for ClusterName", v.Expected.ClusterName, actual.ClusterName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/kusto/parse/kusto_attached_database_configuration.go b/azurerm/internal/services/kusto/parse/kusto_attached_database_configuration.go deleted file mode 100644 index abcf8fe0978b..000000000000 --- a/azurerm/internal/services/kusto/parse/kusto_attached_database_configuration.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type KustoAttachedDatabaseConfigurationId struct { - ResourceGroup string - Cluster 
string - Name string -} - -func KustoAttachedDatabaseConfigurationID(input string) (*KustoAttachedDatabaseConfigurationId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Kusto Attached Database Configuration ID %q: %+v", input, err) - } - - configuration := KustoAttachedDatabaseConfigurationId{ - ResourceGroup: id.ResourceGroup, - } - - if configuration.Cluster, err = id.PopSegment("Clusters"); err != nil { - return nil, err - } - - if configuration.Name, err = id.PopSegment("AttachedDatabaseConfigurations"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &configuration, nil -} diff --git a/azurerm/internal/services/kusto/parse/kusto_attached_database_configuration_test.go b/azurerm/internal/services/kusto/parse/kusto_attached_database_configuration_test.go deleted file mode 100644 index c7c52b251d00..000000000000 --- a/azurerm/internal/services/kusto/parse/kusto_attached_database_configuration_test.go +++ /dev/null @@ -1,54 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestKustoAttachedDatabaseConfigurationId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *KustoAttachedDatabaseConfigurationId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Cluster", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Kusto/AttachedDatabaseConfigurations/configuration1", - Expected: nil, - }, - { - Name: "Kusto Attached Database Configuration ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Kusto/Clusters/cluster1/AttachedDatabaseConfigurations/configuration1", - Expected: &KustoAttachedDatabaseConfigurationId{ - Name: "configuration1", - Cluster: "cluster1", - ResourceGroup: "group1", - }, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := KustoAttachedDatabaseConfigurationID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/kusto/parse/kusto_cluster.go b/azurerm/internal/services/kusto/parse/kusto_cluster.go deleted file mode 100644 index b52c7b6d0542..000000000000 --- a/azurerm/internal/services/kusto/parse/kusto_cluster.go +++ /dev/null @@ -1,33 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type KustoClusterId struct { - ResourceGroup string - Name string -} - -func KustoClusterID(input string) (*KustoClusterId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Kusto Cluster ID %q: %+v", input, err) - } - - cluster := KustoClusterId{ - ResourceGroup: id.ResourceGroup, - } - - if cluster.Name, err = id.PopSegment("Clusters"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &cluster, nil -} diff --git 
a/azurerm/internal/services/kusto/parse/kusto_cluster_principal_assignment.go b/azurerm/internal/services/kusto/parse/kusto_cluster_principal_assignment.go deleted file mode 100644 index 594e61510840..000000000000 --- a/azurerm/internal/services/kusto/parse/kusto_cluster_principal_assignment.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type KustoClusterPrincipalAssignmentId struct { - ResourceGroup string - Cluster string - Name string -} - -func KustoClusterPrincipalAssignmentID(input string) (*KustoClusterPrincipalAssignmentId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Kusto Cluster Principal ID %q: %+v", input, err) - } - - principal := KustoClusterPrincipalAssignmentId{ - ResourceGroup: id.ResourceGroup, - } - - if principal.Cluster, err = id.PopSegment("Clusters"); err != nil { - return nil, err - } - - if principal.Name, err = id.PopSegment("PrincipalAssignments"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &principal, nil -} diff --git a/azurerm/internal/services/kusto/parse/kusto_cluster_principal_assignment_test.go b/azurerm/internal/services/kusto/parse/kusto_cluster_principal_assignment_test.go deleted file mode 100644 index 0f2bd16df5ca..000000000000 --- a/azurerm/internal/services/kusto/parse/kusto_cluster_principal_assignment_test.go +++ /dev/null @@ -1,63 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestKustoClusterPrincipalAssignmentId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *KustoClusterPrincipalAssignmentId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "Missing Cluster", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Kusto/PrincipalAssignments/assignment1", - Expected: nil, - }, - { - Name: "Missing PrincipalAssignment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Kusto/Clusters/cluster1/", - Expected: nil, - }, - { - Name: "Cluster Principal Assignment ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Kusto/Clusters/cluster1/PrincipalAssignments/assignment1", - Expected: &KustoClusterPrincipalAssignmentId{ - Name: "assignment1", - Cluster: "cluster1", - ResourceGroup: "group1", - }, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := KustoClusterPrincipalAssignmentID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.Cluster != v.Expected.Cluster { - t.Fatalf("Expected %q but got %q for Cluster", v.Expected.Cluster, actual.Cluster) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/kusto/parse/kusto_cluster_test.go b/azurerm/internal/services/kusto/parse/kusto_cluster_test.go deleted file mode 100644 index 71942e1aa348..000000000000 --- a/azurerm/internal/services/kusto/parse/kusto_cluster_test.go +++ /dev/null @@ -1,53 
+0,0 @@ -package parse - -import ( - "testing" -) - -func TestKustoClusterId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *KustoClusterId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "Missing Cluster", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Kusto/", - Expected: nil, - }, - { - Name: "Cluster ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Kusto/Clusters/cluster1", - Expected: &KustoClusterId{ - Name: "cluster1", - ResourceGroup: "group1", - }, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := KustoClusterID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/kusto/parse/kusto_database.go b/azurerm/internal/services/kusto/parse/kusto_database.go deleted file mode 100644 index 4cd6ad80db94..000000000000 --- a/azurerm/internal/services/kusto/parse/kusto_database.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type KustoDatabaseId struct { - ResourceGroup string - Cluster string - Name string -} - -func KustoDatabaseID(input string) (*KustoDatabaseId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Kusto Database ID %q: %+v", input, err) - } - - database := KustoDatabaseId{ - ResourceGroup: id.ResourceGroup, - } - - if database.Cluster, err = id.PopSegment("Clusters"); err != nil { - return nil, err - } - - if database.Name, err = id.PopSegment("Databases"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &database, nil -} diff --git a/azurerm/internal/services/kusto/parse/kusto_database_principal.go b/azurerm/internal/services/kusto/parse/kusto_database_principal.go deleted file mode 100644 index b4209829006f..000000000000 --- a/azurerm/internal/services/kusto/parse/kusto_database_principal.go +++ /dev/null @@ -1,48 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type KustoDatabasePrincipalId struct { - ResourceGroup string - Cluster string - Database string - Role string - Name string -} - -func KustoDatabasePrincipalID(input string) (*KustoDatabasePrincipalId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Kusto Database Principal ID %q: %+v", input, err) - } - - principal := KustoDatabasePrincipalId{ - ResourceGroup: id.ResourceGroup, - } - - if principal.Cluster, err = id.PopSegment("Clusters"); err != nil { - return nil, err - } - - if principal.Database, err = id.PopSegment("Databases"); err != nil { - return nil, err - } - - if principal.Role, err = id.PopSegment("Role"); err != nil { - return nil, err - } - - if principal.Name, err = id.PopSegment("FQN"); err != nil { - return nil, err - } - - if err := 
id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &principal, nil -} diff --git a/azurerm/internal/services/kusto/parse/kusto_database_principal_assignment.go b/azurerm/internal/services/kusto/parse/kusto_database_principal_assignment.go deleted file mode 100644 index a3d063a3c11e..000000000000 --- a/azurerm/internal/services/kusto/parse/kusto_database_principal_assignment.go +++ /dev/null @@ -1,43 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type KustoDatabasePrincipalAssignmentId struct { - ResourceGroup string - Cluster string - Database string - Name string -} - -func KustoDatabasePrincipalAssignmentID(input string) (*KustoDatabasePrincipalAssignmentId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Kusto Database Principal ID %q: %+v", input, err) - } - - principal := KustoDatabasePrincipalAssignmentId{ - ResourceGroup: id.ResourceGroup, - } - - if principal.Cluster, err = id.PopSegment("Clusters"); err != nil { - return nil, err - } - - if principal.Database, err = id.PopSegment("Databases"); err != nil { - return nil, err - } - - if principal.Name, err = id.PopSegment("PrincipalAssignments"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &principal, nil -} diff --git a/azurerm/internal/services/kusto/parse/kusto_database_principal_assignment_test.go b/azurerm/internal/services/kusto/parse/kusto_database_principal_assignment_test.go deleted file mode 100644 index 85b3589986d2..000000000000 --- a/azurerm/internal/services/kusto/parse/kusto_database_principal_assignment_test.go +++ /dev/null @@ -1,68 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestKustoDatabasePrincipalAssignmentId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *KustoDatabasePrincipalAssignmentId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "Missing Cluster", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Kusto/Databases/database1/PrincipalAssignments/assignment1", - Expected: nil, - }, - { - Name: "Missing Database", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Kusto/Clusters/cluster1/PrincipalAssignments/assignment1", - Expected: nil, - }, - { - Name: "Database Principal Assignment ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/PrincipalAssignments/assignment1", - Expected: &KustoDatabasePrincipalAssignmentId{ - Name: "assignment1", - Database: "database1", - Cluster: "cluster1", - ResourceGroup: "group1", - }, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := KustoDatabasePrincipalAssignmentID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.Database != v.Expected.Database { - t.Fatalf("Expected %q but got %q for Database", v.Expected.Database, actual.Database) - } - - if actual.Cluster != v.Expected.Cluster { - t.Fatalf("Expected %q but got %q for Cluster", v.Expected.Cluster, 
actual.Cluster) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/kusto/parse/kusto_database_principal_test.go b/azurerm/internal/services/kusto/parse/kusto_database_principal_test.go deleted file mode 100644 index 05545c0ac856..000000000000 --- a/azurerm/internal/services/kusto/parse/kusto_database_principal_test.go +++ /dev/null @@ -1,73 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestKustoDatabasePrincipalId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *KustoDatabasePrincipalId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "Missing FQN", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Kusto/Databases/database1", - Expected: nil, - }, - { - Name: "Missing Role", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/FQN/aaduser=;", - Expected: nil, - }, - { - Name: "Database Principal ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/Role/Viewer/FQN/aaduser=00000000-0000-0000-0000-000000000000;00000000-0000-0000-0000-000000000000", - Expected: &KustoDatabasePrincipalId{ - Name: "aaduser=00000000-0000-0000-0000-000000000000;00000000-0000-0000-0000-000000000000", - Role: "Viewer", - Database: "database1", - Cluster: "cluster1", - ResourceGroup: "group1", - }, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := KustoDatabasePrincipalID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.Role != v.Expected.Role { - t.Fatalf("Expected %q but got %q for Role", v.Expected.Role, actual.Role) - } - - if actual.Database != v.Expected.Database { - t.Fatalf("Expected %q but got %q for Database", v.Expected.Database, actual.Database) - } - - if actual.Cluster != v.Expected.Cluster { - t.Fatalf("Expected %q but got %q for Cluster", v.Expected.Cluster, actual.Cluster) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/kusto/parse/kusto_database_test.go b/azurerm/internal/services/kusto/parse/kusto_database_test.go deleted file mode 100644 index 2c2f3469b8e9..000000000000 --- a/azurerm/internal/services/kusto/parse/kusto_database_test.go +++ /dev/null @@ -1,63 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestKustoDatabaseId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *KustoDatabaseId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "Missing Cluster", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Kusto/Databases/database1", - Expected: nil, - }, - { - Name: "Missing Database", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Kusto/Clusters/cluster1", - Expected: nil, - }, - { - Name: "Database ID", - Input: 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1", - Expected: &KustoDatabaseId{ - Name: "database1", - Cluster: "cluster1", - ResourceGroup: "group1", - }, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := KustoDatabaseID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.Cluster != v.Expected.Cluster { - t.Fatalf("Expected %q but got %q for Cluster", v.Expected.Cluster, actual.Cluster) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/kusto/parse/kusto_eventhub_data_connection.go b/azurerm/internal/services/kusto/parse/kusto_eventhub_data_connection.go deleted file mode 100644 index 6121c643fc9a..000000000000 --- a/azurerm/internal/services/kusto/parse/kusto_eventhub_data_connection.go +++ /dev/null @@ -1,43 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type KustoEventHubDataConnectionId struct { - ResourceGroup string - Cluster string - Database string - Name string -} - -func KustoEventHubDataConnectionID(input string) (*KustoEventHubDataConnectionId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Kusto Event Hub Data Connection ID %q: %+v", input, err) - } - - dataConnection := KustoEventHubDataConnectionId{ - ResourceGroup: id.ResourceGroup, - } - - if dataConnection.Cluster, err = id.PopSegment("Clusters"); err != nil { - return nil, err - } - - if dataConnection.Database, err = id.PopSegment("Databases"); err != nil { - return nil, err - } - - if dataConnection.Name, err = id.PopSegment("DataConnections"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &dataConnection, nil -} diff --git a/azurerm/internal/services/kusto/parse/kusto_eventhub_data_connection_test.go b/azurerm/internal/services/kusto/parse/kusto_eventhub_data_connection_test.go deleted file mode 100644 index bffcb94c599b..000000000000 --- a/azurerm/internal/services/kusto/parse/kusto_eventhub_data_connection_test.go +++ /dev/null @@ -1,73 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestKustoEventHubDataConnectionId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *KustoEventHubDataConnectionId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "Missing Cluster", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Kusto/Databases/database1", - Expected: nil, - }, - { - Name: "Missing Database", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Kusto/Clusters/cluster1", - Expected: nil, - }, - { - Name: "Missing Data Connection", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1", - Expected: nil, - }, - { - Name: "Data Connection ID", - Input: 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/DataConnections/connection1", - Expected: &KustoEventHubDataConnectionId{ - Name: "connection1", - Database: "database1", - Cluster: "cluster1", - ResourceGroup: "group1", - }, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := KustoEventHubDataConnectionID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.Database != v.Expected.Database { - t.Fatalf("Expected %q but got %q for Database", v.Expected.Database, actual.Database) - } - - if actual.Cluster != v.Expected.Cluster { - t.Fatalf("Expected %q but got %q for Cluster", v.Expected.Cluster, actual.Cluster) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/kusto/registration.go b/azurerm/internal/services/kusto/registration.go index ff958b3c6a25..4c56dc6924b9 100644 --- a/azurerm/internal/services/kusto/registration.go +++ b/azurerm/internal/services/kusto/registration.go @@ -21,20 +21,20 @@ func (r Registration) WebsiteCategories() []string { // SupportedDataSources returns the supported Data Sources supported by this Service func (r Registration) SupportedDataSources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_kusto_cluster": dataSourceArmKustoCluster(), + "azurerm_kusto_cluster": dataSourceKustoCluster(), } } // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_kusto_cluster": resourceArmKustoCluster(), - "azurerm_kusto_cluster_customer_managed_key": resourceArmKustoClusterCustomerManagedKey(), - "azurerm_kusto_cluster_principal_assignment": resourceArmKustoClusterPrincipalAssignment(), - "azurerm_kusto_database": resourceArmKustoDatabase(), - "azurerm_kusto_database_principal": resourceArmKustoDatabasePrincipal(), - "azurerm_kusto_database_principal_assignment": resourceArmKustoDatabasePrincipalAssignment(), - "azurerm_kusto_eventhub_data_connection": resourceArmKustoEventHubDataConnection(), - "azurerm_kusto_attached_database_configuration": resourceArmKustoAttachedDatabaseConfiguration(), + "azurerm_kusto_cluster": resourceKustoCluster(), + "azurerm_kusto_cluster_customer_managed_key": resourceKustoClusterCustomerManagedKey(), + "azurerm_kusto_cluster_principal_assignment": resourceKustoClusterPrincipalAssignment(), + "azurerm_kusto_database": resourceKustoDatabase(), + "azurerm_kusto_database_principal": resourceKustoDatabasePrincipal(), + "azurerm_kusto_database_principal_assignment": resourceKustoDatabasePrincipalAssignment(), + "azurerm_kusto_eventhub_data_connection": resourceKustoEventHubDataConnection(), + "azurerm_kusto_attached_database_configuration": resourceKustoAttachedDatabaseConfiguration(), } } diff --git a/azurerm/internal/services/kusto/resourceids.go b/azurerm/internal/services/kusto/resourceids.go new file mode 100644 index 000000000000..12c62d72e8a6 --- /dev/null +++ b/azurerm/internal/services/kusto/resourceids.go @@ -0,0 +1,9 @@ +package kusto + +//go:generate go run 
../../tools/generator-resource-id/main.go -path=./ -name=AttachedDatabaseConfiguration -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/AttachedDatabaseConfigurations/config1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Cluster -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=ClusterPrincipalAssignment -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/PrincipalAssignments/assignment1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Database -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=DatabasePrincipal -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/Role/Viewer/FQN/aaduser=11111111-1111-1111-1111-111111111111;22222222-2222-2222-2222-222222222222 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=DatabasePrincipalAssignment -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/PrincipalAssignments/assignment1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=DataConnection -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/DataConnections/connection1 diff --git a/azurerm/internal/services/kusto/tests/kusto_attached_database_configuration_resource_test.go b/azurerm/internal/services/kusto/tests/kusto_attached_database_configuration_resource_test.go deleted file mode 100644 index 1ea469646761..000000000000 --- a/azurerm/internal/services/kusto/tests/kusto_attached_database_configuration_resource_test.go +++ /dev/null @@ -1,145 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMKustoAttachedDatabaseConfiguration_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_kusto_attached_database_configuration", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKustoAttachedDatabaseConfigurationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKustoAttachedDatabaseConfiguration_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoAttachedDatabaseConfigurationExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testAccAzureRMKustoAttachedDatabaseConfiguration_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "rg" { - name = "acctestRG-%d" - 
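// NOTE (reviewer sketch, not part of the diff): the //go:generate directives added in
// resourceids.go above drive the resource-ID generator for each Kusto ID, and the
// hand-written parsers deleted earlier in this patch appear to be superseded by the
// generated equivalents (the generated files themselves are not shown in this hunk;
// running `go generate ./...` over the service package would produce them). The sketch
// below only illustrates the conventional shape such a generated ID takes — a struct,
// a constructor, a formatter and a parser — for the Cluster ID used in the directive.
// The package name and the naive segment-based parse are assumptions for illustration;
// the real generated code validates segments via the shared resource-ID helpers.
package kusto_sketch // hypothetical package, illustration only

import (
	"fmt"
	"strings"
)

// ClusterId mirrors the segments encoded in the directive's example ID.
type ClusterId struct {
	SubscriptionId string
	ResourceGroup  string
	Name           string
}

// NewClusterID builds an ID value from its components.
func NewClusterID(subscriptionId, resourceGroup, name string) ClusterId {
	return ClusterId{
		SubscriptionId: subscriptionId,
		ResourceGroup:  resourceGroup,
		Name:           name,
	}
}

// ID renders the Azure resource ID for this cluster.
func (id ClusterId) ID() string {
	fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Kusto/Clusters/%s"
	return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name)
}

// ClusterID parses a Cluster ID back into its components.
func ClusterID(input string) (*ClusterId, error) {
	segments := strings.Split(strings.Trim(input, "/"), "/")
	if len(segments) != 8 ||
		!strings.EqualFold(segments[0], "subscriptions") ||
		!strings.EqualFold(segments[2], "resourceGroups") ||
		!strings.EqualFold(segments[4], "providers") ||
		segments[5] != "Microsoft.Kusto" ||
		segments[6] != "Clusters" {
		return nil, fmt.Errorf("parsing %q as a Kusto Cluster ID", input)
	}
	return &ClusterId{SubscriptionId: segments[1], ResourceGroup: segments[3], Name: segments[7]}, nil
}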
location = "%s" -} - -resource "azurerm_kusto_cluster" "cluster1" { - name = "acctestkc1%s" - location = azurerm_resource_group.rg.location - resource_group_name = azurerm_resource_group.rg.name - - sku { - name = "Dev(No SLA)_Standard_D11_v2" - capacity = 1 - } -} - -resource "azurerm_kusto_cluster" "cluster2" { - name = "acctestkc2%s" - location = azurerm_resource_group.rg.location - resource_group_name = azurerm_resource_group.rg.name - - sku { - name = "Dev(No SLA)_Standard_D11_v2" - capacity = 1 - } -} - -resource "azurerm_kusto_database" "followed_database" { - name = "acctestkd-%d" - resource_group_name = azurerm_resource_group.rg.name - location = azurerm_resource_group.rg.location - cluster_name = azurerm_kusto_cluster.cluster1.name -} - -resource "azurerm_kusto_attached_database_configuration" "configuration1" { - name = "acctestka-%d" - resource_group_name = azurerm_resource_group.rg.name - location = azurerm_resource_group.rg.location - cluster_name = azurerm_kusto_cluster.cluster1.name - cluster_resource_id = azurerm_kusto_cluster.cluster2.id - database_name = "*" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString, data.RandomInteger, data.RandomInteger) -} - -func testCheckAzureRMKustoAttachedDatabaseConfigurationDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Kusto.AttachedDatabaseConfigurationsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_kusto_attached_database_configuration" { - continue - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - clusterName := rs.Primary.Attributes["cluster_name"] - name := rs.Primary.Attributes["name"] - - resp, err := client.Get(ctx, resourceGroup, clusterName, name) - - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - return err - } - - return nil - } - - return nil -} - -func testCheckAzureRMKustoAttachedDatabaseConfigurationExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Kusto.AttachedDatabaseConfigurationsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - configurationName := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Kusto Attached Database Configuration: %s", configurationName) - } - - clusterName, hasClusterName := rs.Primary.Attributes["cluster_name"] - if !hasClusterName { - return fmt.Errorf("Bad: no resource group found in state for Kusto Attached Database Configuration: %s", configurationName) - } - - resp, err := client.Get(ctx, resourceGroup, clusterName, configurationName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Kusto Attached Database Configuration %q (resource group: %q, cluster: %q) does not exist", configurationName, resourceGroup, clusterName) - } - - return fmt.Errorf("Bad: Get on AttachedDatabaseConfigurationsClient: %+v", err) - } - - return nil - } -} diff --git a/azurerm/internal/services/kusto/tests/kusto_cluster_customer_managed_key_test.go 
b/azurerm/internal/services/kusto/tests/kusto_cluster_customer_managed_key_test.go deleted file mode 100644 index e78cd21a8228..000000000000 --- a/azurerm/internal/services/kusto/tests/kusto_cluster_customer_managed_key_test.go +++ /dev/null @@ -1,294 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/kusto/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" -) - -func TestAccAzureRMKustoClusterCustomerManagedKey_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_kusto_cluster_customer_managed_key", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKustoClusterDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKustoClusterCustomerManagedKey_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoClusterWithCustomerManagedKeyExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "key_vault_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "key_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "key_version"), - ), - }, - data.ImportStep(), - { - // Delete the encryption settings resource and verify it is gone - Config: testAccAzureRMKustoClusterCustomerManagedKey_template(data), - Check: resource.ComposeTestCheckFunc( - // Then ensure the encryption settings on the Kusto cluster - // have been reverted to their default state - testCheckAzureRMKustoClusterExistsWithoutCustomerManagedKey("azurerm_kusto_cluster.test"), - ), - }, - }, - }) -} - -func TestAccAzureRMKustoClusterCustomerManagedKey_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_kusto_cluster_customer_managed_key", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKustoClusterDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKustoClusterCustomerManagedKey_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoClusterWithCustomerManagedKeyExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "key_vault_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "key_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "key_version"), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMKustoClusterCustomerManagedKey_requiresImport), - }, - }) -} - -func TestAccAzureRMKustoClusterCustomerManagedKey_updateKey(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_kusto_cluster_customer_managed_key", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKustoClusterDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKustoClusterCustomerManagedKey_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoClusterWithCustomerManagedKeyExists(data.ResourceName), - 
resource.TestCheckResourceAttrSet(data.ResourceName, "key_vault_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "key_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "key_version"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMKustoClusterCustomerManagedKey_updated(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoClusterWithCustomerManagedKeyExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMKustoClusterWithCustomerManagedKeyExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Kusto.ClustersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.KustoClusterID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.Name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Kusto Cluster %q (resource group: %q) does not exist", id.Name, id.ResourceGroup) - } - - return fmt.Errorf("Bad: Get on kustoClustersClient: %+v", err) - } - - if props := resp.ClusterProperties; props != nil { - if encryption := props.KeyVaultProperties; encryption == nil { - return fmt.Errorf("Kusto Cluster encryption properties not found: %s", resourceName) - } - } - - return nil - } -} - -func testCheckAzureRMKustoClusterExistsWithoutCustomerManagedKey(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Kusto.ClustersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.KustoClusterID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.Name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Kusto Cluster %q (resource group: %q) does not exist", id.Name, id.ResourceGroup) - } - - return fmt.Errorf("Bad: Get on kustoClustersClient: %+v", err) - } - - if props := resp.ClusterProperties; props != nil { - if encryption := props.KeyVaultProperties; encryption != nil { - return fmt.Errorf("Kusto Cluster encryption properties still found: %s", resourceName) - } - } - - return nil - } -} - -func testAccAzureRMKustoClusterCustomerManagedKey_basic(data acceptance.TestData) string { - template := testAccAzureRMKustoClusterCustomerManagedKey_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_kusto_cluster_customer_managed_key" "test" { - cluster_id = azurerm_kusto_cluster.test.id - key_vault_id = azurerm_key_vault.test.id - key_name = azurerm_key_vault_key.first.name - key_version = azurerm_key_vault_key.first.version -} -`, template) -} - -func testAccAzureRMKustoClusterCustomerManagedKey_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMKustoClusterCustomerManagedKey_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_kusto_cluster_customer_managed_key" "import" { - cluster_id = azurerm_kusto_cluster_customer_managed_key.test.cluster_id - key_vault_id 
= azurerm_kusto_cluster_customer_managed_key.test.key_vault_id - key_name = azurerm_kusto_cluster_customer_managed_key.test.key_name - key_version = azurerm_kusto_cluster_customer_managed_key.test.key_version -} -`, template) -} - -func testAccAzureRMKustoClusterCustomerManagedKey_updated(data acceptance.TestData) string { - template := testAccAzureRMKustoClusterCustomerManagedKey_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_key_vault_key" "second" { - name = "second" - key_vault_id = azurerm_key_vault.test.id - key_type = "RSA" - key_size = 2048 - key_opts = ["decrypt", "encrypt", "sign", "unwrapKey", "verify", "wrapKey"] - - depends_on = [ - azurerm_key_vault_access_policy.client, - azurerm_key_vault_access_policy.cluster, - ] -} - -resource "azurerm_kusto_cluster_customer_managed_key" "test" { - cluster_id = azurerm_kusto_cluster.test.id - key_vault_id = azurerm_key_vault.test.id - key_name = azurerm_key_vault_key.second.name - key_version = azurerm_key_vault_key.second.version -} -`, template) -} - -func testAccAzureRMKustoClusterCustomerManagedKey_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features { - key_vault { - purge_soft_delete_on_destroy = false - } - } -} - -data "azurerm_client_config" "current" {} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkv%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - sku_name = "standard" - soft_delete_enabled = true - purge_protection_enabled = true -} - -resource "azurerm_key_vault_access_policy" "cluster" { - key_vault_id = azurerm_key_vault.test.id - tenant_id = azurerm_kusto_cluster.test.identity.0.tenant_id - object_id = azurerm_kusto_cluster.test.identity.0.principal_id - - key_permissions = ["get", "unwrapkey", "wrapkey"] -} - -resource "azurerm_key_vault_access_policy" "client" { - key_vault_id = azurerm_key_vault.test.id - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - key_permissions = ["get", "list", "create", "delete", "recover"] -} - -resource "azurerm_key_vault_key" "first" { - name = "test" - key_vault_id = azurerm_key_vault.test.id - key_type = "RSA" - key_size = 2048 - key_opts = ["decrypt", "encrypt", "sign", "unwrapKey", "verify", "wrapKey"] - - depends_on = [ - azurerm_key_vault_access_policy.client, - azurerm_key_vault_access_policy.cluster, - ] -} - -resource "azurerm_kusto_cluster" "test" { - name = "acctestkc%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku { - name = "Dev(No SLA)_Standard_D11_v2" - capacity = 1 - } - - identity { - type = "SystemAssigned" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) -} diff --git a/azurerm/internal/services/kusto/tests/kusto_cluster_data_source_test.go b/azurerm/internal/services/kusto/tests/kusto_cluster_data_source_test.go deleted file mode 100644 index cfc84acb348d..000000000000 --- a/azurerm/internal/services/kusto/tests/kusto_cluster_data_source_test.go +++ /dev/null @@ -1,41 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func 
TestAccDataSourceAzureRMKustoCluster_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_kusto_cluster", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKustoClusterDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMKustoCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoClusterExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "uri"), - resource.TestCheckResourceAttrSet(data.ResourceName, "data_ingestion_uri"), - ), - }, - }, - }) -} - -func testAccDataSourceAzureRMKustoCluster_basic(data acceptance.TestData) string { - template := testAccAzureRMKustoCluster_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_kusto_cluster" "test" { - name = azurerm_kusto_cluster.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, template) -} diff --git a/azurerm/internal/services/kusto/tests/kusto_cluster_principal_assignment_resource_test.go b/azurerm/internal/services/kusto/tests/kusto_cluster_principal_assignment_resource_test.go deleted file mode 100644 index d716ea614618..000000000000 --- a/azurerm/internal/services/kusto/tests/kusto_cluster_principal_assignment_resource_test.go +++ /dev/null @@ -1,131 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMKustoClusterPrincipalAssignment_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_kusto_cluster_principal_assignment", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKustoClusterPrincipalAssignmentDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKustoClusterPrincipalAssignment_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoClusterPrincipalAssignmentExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMKustoClusterPrincipalAssignmentDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Kusto.ClusterPrincipalAssignmentsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_kusto_cluster_principal_assignment" { - continue - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - clusterName := rs.Primary.Attributes["cluster_name"] - name := rs.Primary.Attributes["name"] - - resp, err := client.Get(ctx, resourceGroup, clusterName, name) - - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - return err - } - - return nil - } - - return nil -} - -func testCheckAzureRMKustoClusterPrincipalAssignmentExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Kusto.ClusterPrincipalAssignmentsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we 
have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Kusto Cluster Principal Assignment: %s", name) - } - - clusterName, hasClusterName := rs.Primary.Attributes["cluster_name"] - if !hasClusterName { - return fmt.Errorf("Bad: no cluster found in state for Kusto Cluster Principal Assignment: %s", name) - } - - resp, err := client.Get(ctx, resourceGroup, clusterName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Kusto Cluster Principal Assignment %q (Resource Group %q, Cluster %q) does not exist", name, resourceGroup, clusterName) - } - - return fmt.Errorf("Bad: Get on ClusterPrincipalAssignmentsClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMKustoClusterPrincipalAssignment_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" {} - -resource "azurerm_resource_group" "rg" { - name = "acctestRG-kusto-%d" - location = "%s" -} - -resource "azurerm_kusto_cluster" "test" { - name = "acctestkc%s" - location = azurerm_resource_group.rg.location - resource_group_name = azurerm_resource_group.rg.name - - sku { - name = "Dev(No SLA)_Standard_D11_v2" - capacity = 1 - } -} - -resource "azurerm_kusto_cluster_principal_assignment" "test" { - name = "acctestkdpa%d" - resource_group_name = azurerm_resource_group.rg.name - cluster_name = azurerm_kusto_cluster.test.name - - tenant_id = data.azurerm_client_config.current.tenant_id - principal_id = data.azurerm_client_config.current.client_id - principal_type = "App" - role = "AllDatabasesViewer" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) -} diff --git a/azurerm/internal/services/kusto/tests/kusto_cluster_resource_test.go b/azurerm/internal/services/kusto/tests/kusto_cluster_resource_test.go deleted file mode 100644 index 78d8b741e8b1..000000000000 --- a/azurerm/internal/services/kusto/tests/kusto_cluster_resource_test.go +++ /dev/null @@ -1,702 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMKustoCluster_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_kusto_cluster", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKustoClusterDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKustoCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoClusterExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMKustoCluster_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_kusto_cluster", "test") - - 
resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKustoClusterDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKustoCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoClusterExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "enable_disk_encryption", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "enable_streaming_ingest", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "enable_purge", "false"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMKustoCluster_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoClusterExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "enable_disk_encryption", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "enable_streaming_ingest", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "enable_purge", "true"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMKustoCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoClusterExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "enable_disk_encryption", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "enable_streaming_ingest", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "enable_purge", "false"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMKustoCluster_withTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_kusto_cluster", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKustoClusterDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKustoCluster_withTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoClusterExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.label", "test"), - ), - }, - { - Config: testAccAzureRMKustoCluster_withTagsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoClusterExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.label", "test1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.ENV", "prod"), - ), - }, - }, - }) -} - -func TestAccAzureRMKustoCluster_sku(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_kusto_cluster", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKustoClusterDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKustoCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoClusterExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku.0.name", "Dev(No SLA)_Standard_D11_v2"), - resource.TestCheckResourceAttr(data.ResourceName, "sku.0.capacity", "1"), - ), - }, - { - Config: testAccAzureRMKustoCluster_skuUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoClusterExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku.0.name", "Standard_D11_v2"), - 
resource.TestCheckResourceAttr(data.ResourceName, "sku.0.capacity", "2"), - ), - }, - }, - }) -} - -func TestAccAzureRMKustoCluster_zones(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_kusto_cluster", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKustoClusterDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKustoCluster_withZones(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoClusterExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "zones.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "zones.0", "1"), - ), - }, - }, - }) -} - -func TestAccAzureRMKustoCluster_identitySystemAssigned(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_kusto_cluster", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKustoClusterDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKustoCluster_identitySystemAssigned(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoClusterExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "identity.0.type", "SystemAssigned"), - resource.TestCheckResourceAttr(data.ResourceName, "identity.0.identity_ids.#", "0"), - resource.TestMatchResourceAttr(data.ResourceName, "identity.0.principal_id", validate.UUIDRegExp), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMKustoCluster_vnet(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_kusto_cluster", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKustoClusterDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKustoCluster_vnet(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoClusterExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "virtual_network_configuration.#", "1"), - resource.TestCheckResourceAttrSet(data.ResourceName, "virtual_network_configuration.0.subnet_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "virtual_network_configuration.0.engine_public_ip_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "virtual_network_configuration.0.data_management_public_ip_id"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMKustoCluster_languageExtensions(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_kusto_cluster", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKustoClusterDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKustoCluster_languageExtensions(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoClusterExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "language_extensions.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "language_extensions.0", "PYTHON"), - resource.TestCheckResourceAttr(data.ResourceName, "language_extensions.1", "R"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMKustoCluster_languageExtensionsRemove(data), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMKustoClusterExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "language_extensions.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "language_extensions.0", "R"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMKustoCluster_optimizedAutoScale(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_kusto_cluster", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKustoClusterDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKustoCluster_optimizedAutoScale(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoClusterExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "optimized_auto_scale.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "optimized_auto_scale.0.minimum_instances", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "optimized_auto_scale.0.maximum_instances", "3"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMKustoCluster_optimizedAutoScaleUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoClusterExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "optimized_auto_scale.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "optimized_auto_scale.0.minimum_instances", "3"), - resource.TestCheckResourceAttr(data.ResourceName, "optimized_auto_scale.0.maximum_instances", "4"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMKustoCluster_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoClusterExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testAccAzureRMKustoCluster_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_kusto_cluster" "test" { - name = "acctestkc%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku { - name = "Dev(No SLA)_Standard_D11_v2" - capacity = 1 - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func testAccAzureRMKustoCluster_withTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_kusto_cluster" "test" { - name = "acctestkc%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku { - name = "Dev(No SLA)_Standard_D11_v2" - capacity = 1 - } - - tags = { - label = "test" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func testAccAzureRMKustoCluster_withTagsUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_kusto_cluster" "test" { - name = "acctestkc%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku { - name = "Dev(No SLA)_Standard_D11_v2" - capacity = 1 - } - - tags = { - label = "test1" - ENV = "prod" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func 
testAccAzureRMKustoCluster_skuUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_kusto_cluster" "test" { - name = "acctestkc%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku { - name = "Standard_D11_v2" - capacity = 2 - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func testAccAzureRMKustoCluster_withZones(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_kusto_cluster" "test" { - name = "acctestkc%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku { - name = "Dev(No SLA)_Standard_D11_v2" - capacity = 1 - } - - zones = ["1"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func testAccAzureRMKustoCluster_update(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_kusto_cluster" "test" { - name = "acctestkc%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - enable_disk_encryption = true - enable_streaming_ingest = true - enable_purge = true - - sku { - name = "Dev(No SLA)_Standard_D11_v2" - capacity = 1 - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func testAccAzureRMKustoCluster_identitySystemAssigned(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_kusto_cluster" "test" { - name = "acctestkc%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku { - name = "Dev(No SLA)_Standard_D11_v2" - capacity = 1 - } - - identity { - type = "SystemAssigned" - } -} - `, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func testAccAzureRMKustoCluster_languageExtensions(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_kusto_cluster" "test" { - name = "acctestkc%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku { - name = "Dev(No SLA)_Standard_D11_v2" - capacity = 1 - } - - language_extensions = ["PYTHON", "R"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func testAccAzureRMKustoCluster_languageExtensionsRemove(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_kusto_cluster" "test" { - name = "acctestkc%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku { - name = "Dev(No SLA)_Standard_D11_v2" - capacity = 1 - } - - language_extensions = ["R"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func 
testAccAzureRMKustoCluster_optimizedAutoScale(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_kusto_cluster" "test" { - name = "acctestkc%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku { - name = "Standard_D11_v2" - } - - optimized_auto_scale { - minimum_instances = 2 - maximum_instances = 3 - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func testAccAzureRMKustoCluster_optimizedAutoScaleUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_kusto_cluster" "test" { - name = "acctestkc%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku { - name = "Standard_D11_v2" - } - - optimized_auto_scale { - minimum_instances = 3 - maximum_instances = 4 - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func testAccAzureRMKustoCluster_vnet(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctestkc%s-vnet" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctestkc%s-subnet" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefixes = ["10.0.1.0/24"] -} - -resource "azurerm_network_security_group" "test" { - name = "acctestkc%s-nsg" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_network_security_rule" "test_allow_management_inbound" { - name = "AllowAzureDataExplorerManagement" - priority = 100 - direction = "Inbound" - access = "Allow" - protocol = "Tcp" - source_port_range = "*" - destination_port_range = "443" - source_address_prefix = "AzureDataExplorerManagement" - destination_address_prefix = "VirtualNetwork" - resource_group_name = azurerm_resource_group.test.name - network_security_group_name = azurerm_network_security_group.test.name -} - -resource "azurerm_subnet_network_security_group_association" "test" { - subnet_id = azurerm_subnet.test.id - network_security_group_id = azurerm_network_security_group.test.id -} - -resource "azurerm_public_ip" "engine_pip" { - name = "acctestkc%s-engine-pip" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - allocation_method = "Static" -} - -resource "azurerm_public_ip" "management_pip" { - name = "acctestkc%s-management-pip" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Basic" - allocation_method = "Static" -} - -resource "azurerm_kusto_cluster" "test" { - name = "acctestkc%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku { - name = "Dev(No SLA)_Standard_D11_v2" - capacity = 1 - } - - virtual_network_configuration { - subnet_id = 
azurerm_subnet.test.id - engine_public_ip_id = azurerm_public_ip.engine_pip.id - data_management_public_ip_id = azurerm_public_ip.management_pip.id - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString, data.RandomString, data.RandomString, data.RandomString, data.RandomString) -} - -func testCheckAzureRMKustoClusterDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Kusto.ClustersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_kusto_cluster" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, name) - - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - return err - } - - return nil - } - - return nil -} - -func testCheckAzureRMKustoClusterExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Kusto.ClustersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - kustoCluster := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Kusto Cluster: %s", kustoCluster) - } - - resp, err := client.Get(ctx, resourceGroup, kustoCluster) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Kusto Cluster %q (resource group: %q) does not exist", kustoCluster, resourceGroup) - } - - return fmt.Errorf("Bad: Get on ClustersClient: %+v", err) - } - - return nil - } -} diff --git a/azurerm/internal/services/kusto/tests/kusto_database_principal_assignment_resource_test.go b/azurerm/internal/services/kusto/tests/kusto_database_principal_assignment_resource_test.go deleted file mode 100644 index 783eceb75649..000000000000 --- a/azurerm/internal/services/kusto/tests/kusto_database_principal_assignment_resource_test.go +++ /dev/null @@ -1,183 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMKustoDatabasePrincipalAssignment_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_kusto_database_principal_assignment", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKustoDatabasePrincipalAssignmentDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKustoDatabasePrincipalAssignment_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoDatabasePrincipalAssignmentExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMKustoDatabasePrincipalAssignment_requiresImport(t *testing.T) { - 
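// NOTE (reviewer sketch, not part of the diff): the CheckDestroy helpers being removed
// in these test files (testCheckAzureRMKustoClusterDestroy just above, and the analogous
// helpers for the other Kusto resources) return nil both when the Get call reports 404
// and when it succeeds, so they only ever fail on an unexpected API error — never when a
// resource actually survives `terraform destroy`. The helper below sketches a stricter
// variant; the stillExists callback is a stand-in for the real SDK Get call and is an
// assumption, not code from this change.
package kusto_sketch // hypothetical package, illustration only

import (
	"fmt"

	"github.com/hashicorp/terraform-plugin-sdk/terraform"
)

// checkDestroyed walks the state and fails when any tracked resource of the given
// type can still be fetched from the API after destroy.
func checkDestroyed(s *terraform.State, resourceType string, stillExists func(attributes map[string]string) (bool, error)) error {
	for _, rs := range s.RootModule().Resources {
		if rs.Type != resourceType {
			continue
		}

		found, err := stillExists(rs.Primary.Attributes)
		if err != nil {
			return err
		}
		if found {
			return fmt.Errorf("%s %q still exists after destroy", resourceType, rs.Primary.Attributes["name"])
		}
	}

	return nil
}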
data := acceptance.BuildTestData(t, "azurerm_kusto_database_principal_assignment", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKustoDatabasePrincipalAssignmentDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKustoDatabasePrincipalAssignment_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoDatabasePrincipalAssignmentExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMKustoDatabasePrincipalAssignment_requiresImport), - }, - }) -} - -func testCheckAzureRMKustoDatabasePrincipalAssignmentDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Kusto.DatabasePrincipalAssignmentsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_kusto_database_principal_assignment" { - continue - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - clusterName := rs.Primary.Attributes["cluster_name"] - databaseName := rs.Primary.Attributes["database_name"] - name := rs.Primary.Attributes["name"] - - resp, err := client.Get(ctx, resourceGroup, clusterName, databaseName, name) - - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - return err - } - - return nil - } - - return nil -} - -func testCheckAzureRMKustoDatabasePrincipalAssignmentExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Kusto.DatabasePrincipalAssignmentsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Kusto Database Principal Assignment: %s", name) - } - - clusterName, hasClusterName := rs.Primary.Attributes["cluster_name"] - if !hasClusterName { - return fmt.Errorf("Bad: no cluster found in state for Kusto Database Principal Assignment: %s", name) - } - - databaseName, hasDatabaseName := rs.Primary.Attributes["database_name"] - if !hasDatabaseName { - return fmt.Errorf("Bad: no database found in state for Kusto Database Principal Assignment: %s", name) - } - - resp, err := client.Get(ctx, resourceGroup, clusterName, databaseName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Kusto Database Principal Assignment %q (Resource Group %q, Cluster %q, Database %q) does not exist", name, resourceGroup, clusterName, databaseName) - } - - return fmt.Errorf("Bad: Get on DatabasePrincipalAssignmentsClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMKustoDatabasePrincipalAssignment_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" {} - -resource "azurerm_resource_group" "rg" { - name = "acctestRG-kusto-%d" - location = "%s" -} - -resource "azurerm_kusto_cluster" "test" { - name = "acctestkc%s" - location = azurerm_resource_group.rg.location - resource_group_name = 
azurerm_resource_group.rg.name - - sku { - name = "Dev(No SLA)_Standard_D11_v2" - capacity = 1 - } -} - -resource "azurerm_kusto_database" "test" { - name = "acctestkd-%d" - resource_group_name = azurerm_resource_group.rg.name - location = azurerm_resource_group.rg.location - cluster_name = azurerm_kusto_cluster.test.name -} - -resource "azurerm_kusto_database_principal_assignment" "test" { - name = "acctestkdpa%d" - resource_group_name = azurerm_resource_group.rg.name - cluster_name = azurerm_kusto_cluster.test.name - database_name = azurerm_kusto_database.test.name - - tenant_id = data.azurerm_client_config.current.tenant_id - principal_id = data.azurerm_client_config.current.client_id - principal_type = "App" - role = "Viewer" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMKustoDatabasePrincipalAssignment_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMKustoDatabasePrincipalAssignment_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_kusto_database_principal_assignment" "import" { - name = azurerm_kusto_database_principal_assignment.test.name - resource_group_name = azurerm_kusto_database_principal_assignment.test.resource_group_name - cluster_name = azurerm_kusto_database_principal_assignment.test.cluster_name - database_name = azurerm_kusto_database_principal_assignment.test.database_name - - tenant_id = azurerm_kusto_database_principal_assignment.test.tenant_id - principal_id = azurerm_kusto_database_principal_assignment.test.principal_id - principal_type = azurerm_kusto_database_principal_assignment.test.principal_type - role = azurerm_kusto_database_principal_assignment.test.role -} -`, template) -} diff --git a/azurerm/internal/services/kusto/tests/kusto_database_principal_resource_test.go b/azurerm/internal/services/kusto/tests/kusto_database_principal_resource_test.go deleted file mode 100644 index 01bf647c1c5e..000000000000 --- a/azurerm/internal/services/kusto/tests/kusto_database_principal_resource_test.go +++ /dev/null @@ -1,173 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMKustoDatabasePrincipal_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_kusto_database_principal", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKustoDatabasePrincipalDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKustoDatabasePrincipal_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoDatabasePrincipalExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMKustoDatabasePrincipalDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Kusto.DatabasesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_kusto_database_principal" { - continue - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] 
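// Editor's note (illustrative sketch): data.RequiresImportErrorStep above asserts that
// creating a second "import" resource over an existing assignment fails with the
// provider's standard import-as-exists error. The guard that produces that error lives in
// the resource's create function and, abbreviated, follows the shape sketched below; the
// client, variables and message are placeholders drawn from the same pattern the
// Lighthouse resource later in this diff uses.
//
//	existing, err := client.Get(ctx, resourceGroup, clusterName, databaseName, name)
//	if err != nil {
//		if !utils.ResponseWasNotFound(existing.Response) {
//			return fmt.Errorf("checking for existing Kusto Database Principal Assignment %q: %+v", name, err)
//		}
//	}
//	if existing.ID != nil && *existing.ID != "" {
//		return tf.ImportAsExistsError("azurerm_kusto_database_principal_assignment", *existing.ID)
//	}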
- clusterName := rs.Primary.Attributes["cluster_name"] - databaseName := rs.Primary.Attributes["database_name"] - role := rs.Primary.Attributes["role"] - fqn := rs.Primary.Attributes["fully_qualified_name"] - resp, err := client.ListPrincipals(ctx, resourceGroup, clusterName, databaseName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - return err - } - - found := false - if principals := resp.Value; principals != nil { - for _, currPrincipal := range *principals { - // kusto database principals are unique when looked at with fqn and role - if string(currPrincipal.Role) == role && currPrincipal.Fqn != nil && *currPrincipal.Fqn == fqn { - found = true - break - } - } - } - if found { - return fmt.Errorf("Kusto Database Principal %q still exists", fqn) - } - - return nil - } - - return nil -} - -func testCheckAzureRMKustoDatabasePrincipalExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Kusto.DatabasesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - role := rs.Primary.Attributes["role"] - fqn := rs.Primary.Attributes["fully_qualified_name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Kusto Database Principal: %s", fqn) - } - - clusterName, hasClusterName := rs.Primary.Attributes["cluster_name"] - if !hasClusterName { - return fmt.Errorf("Bad: no cluster name found in state for Kusto Database Principal: %s", fqn) - } - - databaseName, hasDatabaseName := rs.Primary.Attributes["database_name"] - if !hasDatabaseName { - return fmt.Errorf("Bad: no database name found in state for Kusto Database Principal: %s", fqn) - } - - resp, err := client.ListPrincipals(ctx, resourceGroup, clusterName, databaseName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Kusto Database %q (resource group: %q, cluster: %q) does not exist", fqn, resourceGroup, clusterName) - } - - return fmt.Errorf("Bad: Get on DatabasesClient: %+v", err) - } - - found := false - if principals := resp.Value; principals != nil { - for _, currPrincipal := range *principals { - // kusto database principals are unique when looked at with fqn and role - if string(currPrincipal.Role) == role && currPrincipal.Fqn != nil && *currPrincipal.Fqn == fqn { - found = true - break - } - } - } - if !found { - return fmt.Errorf("Unable to find Kusto Database Principal %q", fqn) - } - - return nil - } -} - -func testAccAzureRMKustoDatabasePrincipal_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" {} - - -resource "azurerm_resource_group" "rg" { - name = "acctestRG-kusto-%d" - location = "%s" -} - -resource "azurerm_kusto_cluster" "cluster" { - name = "acctestkc%s" - location = azurerm_resource_group.rg.location - resource_group_name = azurerm_resource_group.rg.name - - sku { - name = "Dev(No SLA)_Standard_D11_v2" - capacity = 1 - } -} - -resource "azurerm_kusto_database" "test" { - name = "acctestkd-%d" - resource_group_name = azurerm_resource_group.rg.name - location = azurerm_resource_group.rg.location - cluster_name = 
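// Editor's note (illustrative sketch): ListPrincipals returns every principal attached to
// the database, so the checks above treat the (role, fully qualified name) pair as the
// identity of a principal. The hypothetical predicate below captures that comparison,
// including the nil-guard on the FQN pointer used in the loops above.
func kustoPrincipalMatches(gotRole string, gotFqn *string, wantRole, wantFqn string) bool {
	// A principal with no FQN can never match; both role and FQN have to line up exactly.
	return gotFqn != nil && *gotFqn == wantFqn && gotRole == wantRole
}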
azurerm_kusto_cluster.cluster.name -} - -resource "azurerm_kusto_database_principal" "test" { - resource_group_name = azurerm_resource_group.rg.name - cluster_name = azurerm_kusto_cluster.cluster.name - database_name = azurerm_kusto_database.test.name - - role = "Viewer" - type = "App" - client_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.client_id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) -} diff --git a/azurerm/internal/services/kusto/tests/kusto_database_resource_test.go b/azurerm/internal/services/kusto/tests/kusto_database_resource_test.go deleted file mode 100644 index 99768244227e..000000000000 --- a/azurerm/internal/services/kusto/tests/kusto_database_resource_test.go +++ /dev/null @@ -1,309 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMKustoDatabase_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_kusto_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKustoDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKustoDatabase_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoDatabaseExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMKustoDatabase_softDeletePeriod(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_kusto_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKustoDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKustoDatabase_softDeletePeriod(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "soft_delete_period", "P7D"), - ), - }, - { - Config: testAccAzureRMKustoDatabase_softDeletePeriodUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "soft_delete_period", "P31D"), - ), - }, - }, - }) -} - -func TestAccAzureRMKustoDatabase_hotCachePeriod(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_kusto_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKustoDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKustoDatabase_hotCachePeriod(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "hot_cache_period", "P7D"), - ), - }, - { - Config: testAccAzureRMKustoDatabase_hotCachePeriodUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "hot_cache_period", "P14DT12H"), 
- ), - }, - }, - }) -} - -func testAccAzureRMKustoDatabase_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "rg" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_kusto_cluster" "cluster" { - name = "acctestkc%s" - location = azurerm_resource_group.rg.location - resource_group_name = azurerm_resource_group.rg.name - - sku { - name = "Dev(No SLA)_Standard_D11_v2" - capacity = 1 - } -} - -resource "azurerm_kusto_database" "test" { - name = "acctestkd-%d" - resource_group_name = azurerm_resource_group.rg.name - location = azurerm_resource_group.rg.location - cluster_name = azurerm_kusto_cluster.cluster.name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) -} - -func testAccAzureRMKustoDatabase_softDeletePeriod(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "rg" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_kusto_cluster" "cluster" { - name = "acctestkc%s" - location = azurerm_resource_group.rg.location - resource_group_name = azurerm_resource_group.rg.name - - sku { - name = "Dev(No SLA)_Standard_D11_v2" - capacity = 1 - } -} - -resource "azurerm_kusto_database" "test" { - name = "acctestkd-%d" - resource_group_name = azurerm_resource_group.rg.name - location = azurerm_resource_group.rg.location - cluster_name = azurerm_kusto_cluster.cluster.name - - soft_delete_period = "P7D" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) -} - -func testAccAzureRMKustoDatabase_softDeletePeriodUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "rg" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_kusto_cluster" "cluster" { - name = "acctestkc%s" - location = azurerm_resource_group.rg.location - resource_group_name = azurerm_resource_group.rg.name - - sku { - name = "Dev(No SLA)_Standard_D11_v2" - capacity = 1 - } -} - -resource "azurerm_kusto_database" "test" { - name = "acctestkd-%d" - resource_group_name = azurerm_resource_group.rg.name - location = azurerm_resource_group.rg.location - cluster_name = azurerm_kusto_cluster.cluster.name - - soft_delete_period = "P31D" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) -} - -func testAccAzureRMKustoDatabase_hotCachePeriod(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "rg" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_kusto_cluster" "cluster" { - name = "acctestkc%s" - location = azurerm_resource_group.rg.location - resource_group_name = azurerm_resource_group.rg.name - - sku { - name = "Dev(No SLA)_Standard_D11_v2" - capacity = 1 - } -} - -resource "azurerm_kusto_database" "test" { - name = "acctestkd-%d" - resource_group_name = azurerm_resource_group.rg.name - location = azurerm_resource_group.rg.location - cluster_name = azurerm_kusto_cluster.cluster.name - - hot_cache_period = "P7D" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) -} - -func testAccAzureRMKustoDatabase_hotCachePeriodUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "rg" { - name = "acctestRG-%d" - location = 
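// Editor's note (illustrative, not part of the original tests): soft_delete_period and
// hot_cache_period take ISO 8601 durations, which is why the update steps above move
// between values such as "P7D", "P31D" and "P14DT12H". The hypothetical table below just
// spells out what those literals mean.
var exampleKustoRetentionPeriods = map[string]string{
	"P7D":      "seven days",
	"P31D":     "thirty-one days",
	"P14DT12H": "fourteen days and twelve hours",
}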
"%s" -} - -resource "azurerm_kusto_cluster" "cluster" { - name = "acctestkc%s" - location = azurerm_resource_group.rg.location - resource_group_name = azurerm_resource_group.rg.name - - sku { - name = "Dev(No SLA)_Standard_D11_v2" - capacity = 1 - } -} - -resource "azurerm_kusto_database" "test" { - name = "acctestkd-%d" - resource_group_name = azurerm_resource_group.rg.name - location = azurerm_resource_group.rg.location - cluster_name = azurerm_kusto_cluster.cluster.name - - hot_cache_period = "P14DT12H" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) -} - -func testCheckAzureRMKustoDatabaseDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Kusto.DatabasesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_kusto_database" { - continue - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - clusterName := rs.Primary.Attributes["cluster_name"] - name := rs.Primary.Attributes["name"] - - resp, err := client.Get(ctx, resourceGroup, clusterName, name) - - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - return err - } - - return nil - } - - return nil -} - -func testCheckAzureRMKustoDatabaseExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Kusto.DatabasesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - kustoDatabase := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Kusto Database: %s", kustoDatabase) - } - - clusterName, hasClusterName := rs.Primary.Attributes["cluster_name"] - if !hasClusterName { - return fmt.Errorf("Bad: no resource group found in state for Kusto Database: %s", kustoDatabase) - } - - resp, err := client.Get(ctx, resourceGroup, clusterName, kustoDatabase) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Kusto Database %q (resource group: %q, cluster: %q) does not exist", kustoDatabase, resourceGroup, clusterName) - } - - return fmt.Errorf("Bad: Get on DatabasesClient: %+v", err) - } - - return nil - } -} diff --git a/azurerm/internal/services/kusto/tests/kusto_eventhub_data_connection_resource_test.go b/azurerm/internal/services/kusto/tests/kusto_eventhub_data_connection_resource_test.go deleted file mode 100644 index bee728cc7eaf..000000000000 --- a/azurerm/internal/services/kusto/tests/kusto_eventhub_data_connection_resource_test.go +++ /dev/null @@ -1,164 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMKustoEventHubDataConnection_basic(t *testing.T) { - data := acceptance.BuildTestData(t, 
"azurerm_kusto_eventhub_data_connection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMKustoEventHubDataConnectionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMKustoEventHubDataConnection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMKustoEventHubDataConnectionExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testAccAzureRMKustoEventHubDataConnection_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_kusto_cluster" "test" { - name = "acctestkc%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku { - name = "Dev(No SLA)_Standard_D11_v2" - capacity = 1 - } -} - -resource "azurerm_kusto_database" "test" { - name = "acctestkd-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_name = azurerm_kusto_cluster.test.name -} - -resource "azurerm_eventhub_namespace" "test" { - name = "acctesteventhubnamespace-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" -} - -resource "azurerm_eventhub" "test" { - name = "acctesteventhub-%d" - namespace_name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - partition_count = 1 - message_retention = 1 -} - -resource "azurerm_eventhub_consumer_group" "test" { - name = "acctesteventhubcg-%d" - namespace_name = azurerm_eventhub_namespace.test.name - eventhub_name = azurerm_eventhub.test.name - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_kusto_eventhub_data_connection" "test" { - name = "acctestkedc-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - cluster_name = azurerm_kusto_cluster.test.name - database_name = azurerm_kusto_database.test.name - - eventhub_id = azurerm_eventhub.test.id - consumer_group = azurerm_eventhub_consumer_group.test.name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testCheckAzureRMKustoEventHubDataConnectionDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Kusto.DataConnectionsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_kusto_eventhub_data_connection" { - continue - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - clusterName := rs.Primary.Attributes["cluster_name"] - databaseName := rs.Primary.Attributes["database_name"] - name := rs.Primary.Attributes["name"] - - resp, err := client.Get(ctx, resourceGroup, clusterName, databaseName, name) - - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - return err - } - - return nil - } - - return nil -} - -func testCheckAzureRMKustoEventHubDataConnectionExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Kusto.DataConnectionsClient - ctx 
:= acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Kusto EventHub Data Connection: %s", name) - } - - clusterName, hasClusterName := rs.Primary.Attributes["cluster_name"] - if !hasClusterName { - return fmt.Errorf("Bad: no resource group found in state for Kusto EventHub Data Connection: %s", name) - } - - databaseName, hasDatabaseName := rs.Primary.Attributes["database_name"] - if !hasDatabaseName { - return fmt.Errorf("Bad: no resource group found in state for Kusto EventHub Data Connection: %s", name) - } - - resp, err := client.Get(ctx, resourceGroup, clusterName, databaseName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Kusto EventHub Data Connection %q (resource group: %q, cluster: %q, database: %q) does not exist", name, resourceGroup, clusterName, databaseName) - } - - return fmt.Errorf("Bad: Get on DataConnectionsClient: %+v", err) - } - - return nil - } -} diff --git a/azurerm/internal/services/kusto/validate/attached_database_configuration_id.go b/azurerm/internal/services/kusto/validate/attached_database_configuration_id.go new file mode 100644 index 000000000000..b4e80e4c519f --- /dev/null +++ b/azurerm/internal/services/kusto/validate/attached_database_configuration_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/kusto/parse" +) + +func AttachedDatabaseConfigurationID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.AttachedDatabaseConfigurationID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/kusto/validate/attached_database_configuration_id_test.go b/azurerm/internal/services/kusto/validate/attached_database_configuration_id_test.go new file mode 100644 index 000000000000..00e5e3d4bfbc --- /dev/null +++ b/azurerm/internal/services/kusto/validate/attached_database_configuration_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestAttachedDatabaseConfigurationID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/", + Valid: false, + }, + + { + // missing value for 
ClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/AttachedDatabaseConfigurations/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/AttachedDatabaseConfigurations/config1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.KUSTO/CLUSTERS/CLUSTER1/ATTACHEDDATABASECONFIGURATIONS/CONFIG1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := AttachedDatabaseConfigurationID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/kusto/validate/cluster_id.go b/azurerm/internal/services/kusto/validate/cluster_id.go new file mode 100644 index 000000000000..ea7e39ceff10 --- /dev/null +++ b/azurerm/internal/services/kusto/validate/cluster_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/kusto/parse" +) + +func ClusterID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ClusterID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/kusto/validate/cluster_id_test.go b/azurerm/internal/services/kusto/validate/cluster_id_test.go new file mode 100644 index 000000000000..6302927253ae --- /dev/null +++ b/azurerm/internal/services/kusto/validate/cluster_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestClusterID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1", + Valid: true, + }, 
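// Editor's note (illustrative sketch, not part of the generated files): each generated
// validator satisfies the terraform-plugin-sdk schema.SchemaValidateFunc signature
// (func(interface{}, string) ([]string, []error)), so it can be attached directly to a
// string attribute. The fragment below assumes an import of
// github.com/hashicorp/terraform-plugin-sdk/helper/schema and uses a made-up field.
var exampleClusterIDField = &schema.Schema{
	Type:         schema.TypeString,
	Required:     true,
	ForceNew:     true,
	ValidateFunc: ClusterID, // the generated validator defined above
}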
+ + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.KUSTO/CLUSTERS/CLUSTER1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ClusterID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/kusto/validate/cluster_principal_assignment_id.go b/azurerm/internal/services/kusto/validate/cluster_principal_assignment_id.go new file mode 100644 index 000000000000..5ae26b1551cd --- /dev/null +++ b/azurerm/internal/services/kusto/validate/cluster_principal_assignment_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/kusto/parse" +) + +func ClusterPrincipalAssignmentID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ClusterPrincipalAssignmentID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/kusto/validate/cluster_principal_assignment_id_test.go b/azurerm/internal/services/kusto/validate/cluster_principal_assignment_id_test.go new file mode 100644 index 000000000000..4999f7fdb71f --- /dev/null +++ b/azurerm/internal/services/kusto/validate/cluster_principal_assignment_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestClusterPrincipalAssignmentID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/", + Valid: false, + }, + + { + // missing value for ClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/", + Valid: false, + }, + + { + // missing PrincipalAssignmentName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/", + Valid: false, + }, + + { + // missing value for PrincipalAssignmentName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/PrincipalAssignments/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/PrincipalAssignments/assignment1", + Valid: true, + }, + + { + // upper-cased + Input: 
"/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.KUSTO/CLUSTERS/CLUSTER1/PRINCIPALASSIGNMENTS/ASSIGNMENT1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ClusterPrincipalAssignmentID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/kusto/validate/data_connection_id.go b/azurerm/internal/services/kusto/validate/data_connection_id.go new file mode 100644 index 000000000000..96c2b9114e86 --- /dev/null +++ b/azurerm/internal/services/kusto/validate/data_connection_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/kusto/parse" +) + +func DataConnectionID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.DataConnectionID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/kusto/validate/data_connection_id_test.go b/azurerm/internal/services/kusto/validate/data_connection_id_test.go new file mode 100644 index 000000000000..80e4450df405 --- /dev/null +++ b/azurerm/internal/services/kusto/validate/data_connection_id_test.go @@ -0,0 +1,100 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestDataConnectionID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/", + Valid: false, + }, + + { + // missing value for ClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/", + Valid: false, + }, + + { + // missing DatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/", + Valid: false, + }, + + { + // missing value for DatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/DataConnections/", + Valid: false, + }, + + { + // valid + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/DataConnections/connection1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.KUSTO/CLUSTERS/CLUSTER1/DATABASES/DATABASE1/DATACONNECTIONS/CONNECTION1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := DataConnectionID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/kusto/validate/database_id.go b/azurerm/internal/services/kusto/validate/database_id.go new file mode 100644 index 000000000000..fbba0d08a1e4 --- /dev/null +++ b/azurerm/internal/services/kusto/validate/database_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/kusto/parse" +) + +func DatabaseID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.DatabaseID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/kusto/validate/database_id_test.go b/azurerm/internal/services/kusto/validate/database_id_test.go new file mode 100644 index 000000000000..afe2846c883e --- /dev/null +++ b/azurerm/internal/services/kusto/validate/database_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestDatabaseID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/", + Valid: false, + }, + + { + // missing value for ClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1", + Valid: true, + }, + + { + // upper-cased + Input: 
"/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.KUSTO/CLUSTERS/CLUSTER1/DATABASES/DATABASE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := DatabaseID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/kusto/validate/database_principal_assignment_id.go b/azurerm/internal/services/kusto/validate/database_principal_assignment_id.go new file mode 100644 index 000000000000..ca5dc12dde67 --- /dev/null +++ b/azurerm/internal/services/kusto/validate/database_principal_assignment_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/kusto/parse" +) + +func DatabasePrincipalAssignmentID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.DatabasePrincipalAssignmentID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/kusto/validate/database_principal_assignment_id_test.go b/azurerm/internal/services/kusto/validate/database_principal_assignment_id_test.go new file mode 100644 index 000000000000..bf5881430629 --- /dev/null +++ b/azurerm/internal/services/kusto/validate/database_principal_assignment_id_test.go @@ -0,0 +1,100 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestDatabasePrincipalAssignmentID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/", + Valid: false, + }, + + { + // missing value for ClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/", + Valid: false, + }, + + { + // missing DatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/", + Valid: false, + }, + + { + // missing value for DatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/", + Valid: false, + }, + + { + // missing PrincipalAssignmentName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/", + Valid: false, + }, + + { + // missing value for PrincipalAssignmentName + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/PrincipalAssignments/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/PrincipalAssignments/assignment1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.KUSTO/CLUSTERS/CLUSTER1/DATABASES/DATABASE1/PRINCIPALASSIGNMENTS/ASSIGNMENT1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := DatabasePrincipalAssignmentID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/kusto/validate/database_principal_id.go b/azurerm/internal/services/kusto/validate/database_principal_id.go new file mode 100644 index 000000000000..d4254d2622c8 --- /dev/null +++ b/azurerm/internal/services/kusto/validate/database_principal_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/kusto/parse" +) + +func DatabasePrincipalID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.DatabasePrincipalID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/kusto/validate/database_principal_id_test.go b/azurerm/internal/services/kusto/validate/database_principal_id_test.go new file mode 100644 index 000000000000..e32b94614a2d --- /dev/null +++ b/azurerm/internal/services/kusto/validate/database_principal_id_test.go @@ -0,0 +1,112 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestDatabasePrincipalID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/", + Valid: false, + }, + + { + // missing value for ClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/", + Valid: false, + }, + + { + // missing DatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/", + Valid: false, + }, + + { + // missing value for DatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/", + Valid: false, + }, + + { + 
// missing RoleName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/", + Valid: false, + }, + + { + // missing value for RoleName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/Role/", + Valid: false, + }, + + { + // missing FQNName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/Role/Viewer/", + Valid: false, + }, + + { + // missing value for FQNName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/Role/Viewer/FQN/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1/Role/Viewer/FQN/aaduser=11111111-1111-1111-1111-111111111111;22222222-2222-2222-2222-222222222222", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.KUSTO/CLUSTERS/CLUSTER1/DATABASES/DATABASE1/ROLE/VIEWER/FQN/AADUSER=11111111-1111-1111-1111-111111111111;22222222-2222-2222-2222-222222222222", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := DatabasePrincipalID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/lighthouse/lighthouse_assignment_resource.go b/azurerm/internal/services/lighthouse/lighthouse_assignment_resource.go new file mode 100644 index 000000000000..52afa4da7671 --- /dev/null +++ b/azurerm/internal/services/lighthouse/lighthouse_assignment_resource.go @@ -0,0 +1,191 @@ +package lighthouse + +import ( + "context" + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/managedservices/mgmt/2019-06-01/managedservices" + "github.com/hashicorp/go-uuid" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/lighthouse/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/lighthouse/validate" + resourceValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/resource/validate" + subscriptionValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/subscription/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceLighthouseAssignment() *schema.Resource { + return &schema.Resource{ + Create: resourceLighthouseAssignmentCreate, + Read: resourceLighthouseAssignmentRead, + Delete: resourceLighthouseAssignmentDelete, + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + + Timeouts: &schema.ResourceTimeout{ + Create: 
schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Computed: true, + ValidateFunc: validation.IsUUID, + }, + + "lighthouse_definition_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.LighthouseDefinitionID, + }, + + "scope": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.Any(subscriptionValidate.SubscriptionID, resourceValidate.ResourceGroupID), + }, + }, + } +} + +func resourceLighthouseAssignmentCreate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Lighthouse.AssignmentsClient + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + lighthouseAssignmentName := d.Get("name").(string) + if lighthouseAssignmentName == "" { + uuid, err := uuid.GenerateUUID() + if err != nil { + return fmt.Errorf("Error generating UUID for Lighthouse Assignment: %+v", err) + } + + lighthouseAssignmentName = uuid + } + + scope := d.Get("scope").(string) + + existing, err := client.Get(ctx, scope, lighthouseAssignmentName, utils.Bool(false)) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("Error checking for presence of existing Lighthouse Assignment %q (Scope %q): %+v", lighthouseAssignmentName, scope, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_lighthouse_assignment", *existing.ID) + } + + parameters := managedservices.RegistrationAssignment{ + Properties: &managedservices.RegistrationAssignmentProperties{ + RegistrationDefinitionID: utils.String(d.Get("lighthouse_definition_id").(string)), + }, + } + + if _, err := client.CreateOrUpdate(ctx, scope, lighthouseAssignmentName, parameters); err != nil { + return fmt.Errorf("creating Lighthouse Assignment %q (Scope %q): %+v", lighthouseAssignmentName, scope, err) + } + + read, err := client.Get(ctx, scope, lighthouseAssignmentName, utils.Bool(false)) + if err != nil { + return fmt.Errorf("retrieving Lighthouse Assessment %q (Scope %q): %+v", lighthouseAssignmentName, scope, err) + } + + if read.ID == nil || *read.ID == "" { + return fmt.Errorf("ID was nil or empty for Lighthouse Assignment %q ID (scope %q) ID", lighthouseAssignmentName, scope) + } + + d.SetId(*read.ID) + + return resourceLighthouseAssignmentRead(d, meta) +} + +func resourceLighthouseAssignmentRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Lighthouse.AssignmentsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.LighthouseAssignmentID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.Scope, id.Name, utils.Bool(false)) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[WARN] Lighthouse Assignment %q was not found (Scope %q)", id.Name, id.Scope) + d.SetId("") + return nil + } + + return fmt.Errorf("Error making Read request on Lighthouse Assignment %q (Scope %q): %+v", id.Name, id.Scope, err) + } + + d.Set("name", resp.Name) + d.Set("scope", id.Scope) + + if props := resp.Properties; props != nil { + d.Set("lighthouse_definition_id", props.RegistrationDefinitionID) + } + + return nil +} + +func resourceLighthouseAssignmentDelete(d *schema.ResourceData, meta 
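// Editor's note (illustrative sketch): when "name" is left empty, the create function
// above falls back to a freshly generated UUID before checking for an existing
// assignment. The standalone helper below mirrors that behaviour; it assumes the same
// github.com/hashicorp/go-uuid package this file imports and is not part of the resource.
func defaultLighthouseAssignmentName(configured string) (string, error) {
	if configured != "" {
		return configured, nil // caller supplied an explicit name
	}
	return uuid.GenerateUUID() // otherwise generate one, as the create path does
}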
interface{}) error { + client := meta.(*clients.Client).Lighthouse.AssignmentsClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.LighthouseAssignmentID(d.Id()) + if err != nil { + return err + } + + _, err = client.Delete(ctx, id.Scope, id.Name) + if err != nil { + return fmt.Errorf("Error deleting Lighthouse Assignment %q at Scope %q: %+v", id.Name, id.Scope, err) + } + + stateConf := &resource.StateChangeConf{ + Pending: []string{"Deleting"}, + Target: []string{"Deleted"}, + Refresh: lighthouseAssignmentDeleteRefreshFunc(ctx, client, id.Scope, id.Name), + MinTimeout: 15 * time.Second, + Timeout: d.Timeout(schema.TimeoutDelete), + } + + if _, err := stateConf.WaitForState(); err != nil { + return fmt.Errorf("Error waiting for Lighthouse Assignment %q (Scope %q) to be deleted: %s", id.Name, id.Scope, err) + } + + return nil +} + +func lighthouseAssignmentDeleteRefreshFunc(ctx context.Context, client *managedservices.RegistrationAssignmentsClient, scope string, lighthouseAssignmentName string) resource.StateRefreshFunc { + return func() (interface{}, string, error) { + expandLighthouseDefinition := true + res, err := client.Get(ctx, scope, lighthouseAssignmentName, &expandLighthouseDefinition) + if err != nil { + if utils.ResponseWasNotFound(res.Response) { + return res, "Deleted", nil + } + return nil, "Error", fmt.Errorf("Error issuing read request in lighthouseAssignmentDeleteRefreshFunc to Lighthouse Assignment %q (Scope %q): %s", lighthouseAssignmentName, scope, err) + } + + return res, "Deleting", nil + } +} diff --git a/azurerm/internal/services/lighthouse/lighthouse_assignment_resource_test.go b/azurerm/internal/services/lighthouse/lighthouse_assignment_resource_test.go new file mode 100644 index 000000000000..a309a4b6b218 --- /dev/null +++ b/azurerm/internal/services/lighthouse/lighthouse_assignment_resource_test.go @@ -0,0 +1,170 @@ +package lighthouse_test + +import ( + "context" + "fmt" + "os" + "testing" + + "github.com/google/uuid" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/lighthouse/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type LighthouseAssignmentResource struct { +} + +func TestAccLighthouseAssignment_basic(t *testing.T) { + // Multiple tenants are needed to test this resource. + // Second tenant ID needs to be set as a environment variable ARM_TENANT_ID_ALT. + // ObjectId for user, usergroup or service principal from second Tenant needs to be set as a environment variable ARM_PRINCIPAL_ID_ALT_TENANT. 
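// Editor's note (illustrative sketch): because these tests need a second tenant, a common
// pattern is to skip rather than fail when the extra environment variables are not
// configured. The helper below is hypothetical (the tests in this change read the
// variables directly) and relies only on the os and testing packages this file imports.
func skipIfAltTenantUnset(t *testing.T) (secondTenantID, principalID string) {
	secondTenantID = os.Getenv("ARM_TENANT_ID_ALT")
	principalID = os.Getenv("ARM_PRINCIPAL_ID_ALT_TENANT")
	if secondTenantID == "" || principalID == "" {
		t.Skip("skipping: ARM_TENANT_ID_ALT and ARM_PRINCIPAL_ID_ALT_TENANT must be set")
	}
	return secondTenantID, principalID
}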
+ secondTenantID := os.Getenv("ARM_TENANT_ID_ALT") + principalID := os.Getenv("ARM_PRINCIPAL_ID_ALT_TENANT") + data := acceptance.BuildTestData(t, "azurerm_lighthouse_assignment", "test") + r := LighthouseAssignmentResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(uuid.New().String(), secondTenantID, principalID, data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").Exists(), + ), + }, + }) +} + +func TestAccLighthouseAssignment_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lighthouse_assignment", "test") + r := LighthouseAssignmentResource{} + secondTenantID := os.Getenv("ARM_TENANT_ID_ALT") + principalID := os.Getenv("ARM_PRINCIPAL_ID_ALT_TENANT") + id := uuid.New().String() + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(id, secondTenantID, principalID, data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").Exists(), + ), + }, + { + Config: r.requiresImport(id, secondTenantID, principalID, data), + ExpectError: acceptance.RequiresImportError("azurerm_lighthouse_assignment"), + }, + }) +} + +func TestAccLighthouseAssignment_emptyID(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lighthouse_assignment", "test") + r := LighthouseAssignmentResource{} + secondTenantID := os.Getenv("ARM_TENANT_ID_ALT") + principalID := os.Getenv("ARM_PRINCIPAL_ID_ALT_TENANT") + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.emptyId(secondTenantID, principalID, data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("id").Exists(), + check.That(data.ResourceName).Key("name").Exists(), + ), + }, + }) +} + +func (LighthouseAssignmentResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.LighthouseAssignmentID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Lighthouse.AssignmentsClient.Get(ctx, id.Scope, id.Name, utils.Bool(false)) + if err != nil { + return nil, fmt.Errorf("retrieving Lighthouse Assignment %q (Scope %q): %+v", id.Name, id.Scope, err) + } + + return utils.Bool(resp.Properties != nil), nil +} + +func (LighthouseAssignmentResource) basic(id string, secondTenantID string, principalID string, data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_subscription" "primary" { +} + +data "azurerm_role_definition" "contributor" { + role_definition_id = "b24988ac-6180-42a0-ab88-20f7382dd24c" +} + +resource "azurerm_lighthouse_definition" "test" { + name = "acctest-LD-%d" + description = "Acceptance Test Lighthouse Definition" + managing_tenant_id = "%s" + + authorization { + principal_id = "%s" + role_definition_id = data.azurerm_role_definition.contributor.role_definition_id + } +} + +resource "azurerm_lighthouse_assignment" "test" { + name = "%s" + scope = data.azurerm_subscription.primary.id + lighthouse_definition_id = azurerm_lighthouse_definition.test.id +} + +`, data.RandomInteger, secondTenantID, principalID, id) +} + +func (r LighthouseAssignmentResource) requiresImport(id string, secondTenantID string, principalID string, data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_lighthouse_assignment" "import" { + name = 
azurerm_lighthouse_assignment.test.name + lighthouse_definition_id = azurerm_lighthouse_assignment.test.lighthouse_definition_id + scope = azurerm_lighthouse_assignment.test.scope +} +`, r.basic(id, secondTenantID, principalID, data)) +} + +func (LighthouseAssignmentResource) emptyId(secondTenantID string, principalID string, data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_subscription" "primary" { +} + +data "azurerm_role_definition" "contributor" { + role_definition_id = "b24988ac-6180-42a0-ab88-20f7382dd24c" +} + +resource "azurerm_lighthouse_definition" "test" { + name = "acctest-LD-%d" + description = "Acceptance Test Lighthouse Definition" + managing_tenant_id = "%s" + + authorization { + principal_id = "%s" + role_definition_id = data.azurerm_role_definition.contributor.role_definition_id + } +} + +resource "azurerm_lighthouse_assignment" "test" { + scope = data.azurerm_subscription.primary.id + lighthouse_definition_id = azurerm_lighthouse_definition.test.id +} +`, data.RandomInteger, secondTenantID, principalID) +} diff --git a/azurerm/internal/services/lighthouse/lighthouse_definition_data_source_test.go b/azurerm/internal/services/lighthouse/lighthouse_definition_data_source_test.go new file mode 100644 index 000000000000..e7bb5a501292 --- /dev/null +++ b/azurerm/internal/services/lighthouse/lighthouse_definition_data_source_test.go @@ -0,0 +1,71 @@ +package lighthouse_test + +import ( + "fmt" + "os" + "testing" + + "github.com/google/uuid" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type LighthouseDefinitionDataSourceResource struct { +} + +func TestAccLighthouseDefinitionDataSource_basic(t *testing.T) { + // Multiple tenants are needed to test this resource. + // Second tenant ID needs to be set as a environment variable ARM_TENANT_ID_ALT. + // ObjectId for user, usergroup or service principal from second Tenant needs to be set as a environment variable ARM_PRINCIPAL_ID_ALT_TENANT. 
+ secondTenantID := os.Getenv("ARM_TENANT_ID_ALT") + principalID := os.Getenv("ARM_PRINCIPAL_ID_ALT_TENANT") + data := acceptance.BuildTestData(t, "data.azurerm_lighthouse_definition", "test") + r := LighthouseDefinitionDataSourceResource{} + id := uuid.New().String() + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(id, secondTenantID, principalID, data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("id").Exists(), + check.That(data.ResourceName).Key("scope").Exists(), + resource.TestMatchResourceAttr(data.ResourceName, "lighthouse_definition_id", validate.UUIDRegExp), + check.That(data.ResourceName).Key("name").HasValue(fmt.Sprintf("acctest-LD-%d", data.RandomInteger)), + check.That(data.ResourceName).Key("description").HasValue("Acceptance Test Lighthouse Definition"), + resource.TestMatchResourceAttr(data.ResourceName, "managing_tenant_id", validate.UUIDRegExp), + resource.TestMatchResourceAttr(data.ResourceName, "authorization.0.principal_id", validate.UUIDRegExp), + resource.TestMatchResourceAttr(data.ResourceName, "authorization.0.role_definition_id", validate.UUIDRegExp), + ), + }, + }) +} + +func (LighthouseDefinitionDataSourceResource) basic(id string, secondTenantID string, principalID string, data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_role_definition" "contributor" { + role_definition_id = "b24988ac-6180-42a0-ab88-20f7382dd24c" +} + +resource "azurerm_lighthouse_definition" "test" { + lighthouse_definition_id = "%s" + name = "acctest-LD-%d" + description = "Acceptance Test Lighthouse Definition" + managing_tenant_id = "%s" + + authorization { + principal_id = "%s" + role_definition_id = data.azurerm_role_definition.contributor.role_definition_id + } +} + +data "azurerm_lighthouse_definition" "test" { + lighthouse_definition_id = azurerm_lighthouse_definition.test.lighthouse_definition_id +} +`, id, data.RandomInteger, secondTenantID, principalID) +} diff --git a/azurerm/internal/services/lighthouse/lighthouse_definition_resource.go b/azurerm/internal/services/lighthouse/lighthouse_definition_resource.go new file mode 100644 index 000000000000..da08262a27a4 --- /dev/null +++ b/azurerm/internal/services/lighthouse/lighthouse_definition_resource.go @@ -0,0 +1,249 @@ +package lighthouse + +import ( + "fmt" + "log" + "time" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/subscription/validate" + + "github.com/Azure/azure-sdk-for-go/services/managedservices/mgmt/2019-06-01/managedservices" + "github.com/hashicorp/go-uuid" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/lighthouse/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceLighthouseDefinition() *schema.Resource { + return &schema.Resource{ + Create: resourceLighthouseDefinitionCreateUpdate, + Read: resourceLighthouseDefinitionRead, + Update: resourceLighthouseDefinitionCreateUpdate, + Delete: resourceLighthouseDefinitionDelete, + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, 
+ }, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "managing_tenant_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.IsUUID, + }, + + "scope": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.SubscriptionID, + }, + + "authorization": { + Type: schema.TypeSet, + Required: true, + MinItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "principal_id": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.IsUUID, + }, + + "role_definition_id": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.IsUUID, + }, + }, + }, + }, + + "description": { + Type: schema.TypeString, + Optional: true, + }, + + "lighthouse_definition_id": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + ValidateFunc: validation.IsUUID, + }, + }, + } +} + +func resourceLighthouseDefinitionCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Lighthouse.DefinitionsClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + lighthouseDefinitionID := d.Get("lighthouse_definition_id").(string) + if lighthouseDefinitionID == "" { + uuid, err := uuid.GenerateUUID() + if err != nil { + return fmt.Errorf("Error generating UUID for Lighthouse Definition: %+v", err) + } + + lighthouseDefinitionID = uuid + } + + subscriptionID := meta.(*clients.Client).Account.SubscriptionId + if subscriptionID == "" { + return fmt.Errorf("Error reading Subscription for Lighthouse Definition %q", lighthouseDefinitionID) + } + + scope := d.Get("scope").(string) + + if d.IsNewResource() { + existing, err := client.Get(ctx, scope, lighthouseDefinitionID) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for presence of existing Lighthouse Definition %q (Scope %q): %+v", lighthouseDefinitionID, scope, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_lighthouse_definition", *existing.ID) + } + } + + parameters := managedservices.RegistrationDefinition{ + Properties: &managedservices.RegistrationDefinitionProperties{ + Description: utils.String(d.Get("description").(string)), + Authorizations: expandLighthouseDefinitionAuthorization(d.Get("authorization").(*schema.Set).List()), + RegistrationDefinitionName: utils.String(d.Get("name").(string)), + ManagedByTenantID: utils.String(d.Get("managing_tenant_id").(string)), + }, + } + + if _, err := client.CreateOrUpdate(ctx, lighthouseDefinitionID, scope, parameters); err != nil { + return fmt.Errorf("Error Creating/Updating Lighthouse Definition %q (Scope %q): %+v", lighthouseDefinitionID, scope, err) + } + + read, err := client.Get(ctx, scope, lighthouseDefinitionID) + if err != nil { + return err + } + + if read.ID == nil { + return fmt.Errorf("Cannot read ID for Lighthouse Definition %q (Scope %q)", lighthouseDefinitionID, scope) + } + + d.SetId(*read.ID) + + return resourceLighthouseDefinitionRead(d, meta) +} + +func resourceLighthouseDefinitionRead(d *schema.ResourceData, meta
interface{}) error { + client := meta.(*clients.Client).Lighthouse.DefinitionsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.LighthouseDefinitionID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.Scope, id.LighthouseDefinitionID) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[WARN] Lighthouse Definition %q was not found (Scope %q)", id.LighthouseDefinitionID, id.Scope) + d.SetId("") + return nil + } + + return fmt.Errorf("Error making Read request on Lighthouse Definition %q (Scope %q): %+v", id.LighthouseDefinitionID, id.Scope, err) + } + + d.Set("lighthouse_definition_id", resp.Name) + d.Set("scope", id.Scope) + + if props := resp.Properties; props != nil { + if err := d.Set("authorization", flattenLighthouseDefinitionAuthorization(props.Authorizations)); err != nil { + return fmt.Errorf("setting `authorization`: %+v", err) + } + d.Set("description", props.Description) + d.Set("name", props.RegistrationDefinitionName) + d.Set("managing_tenant_id", props.ManagedByTenantID) + } + + return nil +} + +func resourceLighthouseDefinitionDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Lighthouse.DefinitionsClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.LighthouseDefinitionID(d.Id()) + if err != nil { + return err + } + + _, err = client.Delete(ctx, id.LighthouseDefinitionID, id.Scope) + if err != nil { + return fmt.Errorf("Error deleting Lighthouse Definition %q at Scope %q: %+v", id.LighthouseDefinitionID, id.Scope, err) + } + + return nil +} + +func flattenLighthouseDefinitionAuthorization(input *[]managedservices.Authorization) []interface{} { + results := make([]interface{}, 0) + if input == nil { + return results + } + + for _, item := range *input { + principalID := "" + if item.PrincipalID != nil { + principalID = *item.PrincipalID + } + + roleDefinitionID := "" + if item.RoleDefinitionID != nil { + roleDefinitionID = *item.RoleDefinitionID + } + + results = append(results, map[string]interface{}{ + "role_definition_id": roleDefinitionID, + "principal_id": principalID, + }) + } + + return results +} + +func expandLighthouseDefinitionAuthorization(input []interface{}) *[]managedservices.Authorization { + results := make([]managedservices.Authorization, 0) + for _, item := range input { + v := item.(map[string]interface{}) + result := managedservices.Authorization{ + RoleDefinitionID: utils.String(v["role_definition_id"].(string)), + PrincipalID: utils.String(v["principal_id"].(string)), + } + results = append(results, result) + } + return &results +} diff --git a/azurerm/internal/services/lighthouse/lighthouse_definition_resource_test.go b/azurerm/internal/services/lighthouse/lighthouse_definition_resource_test.go new file mode 100644 index 000000000000..cdd706d772dc --- /dev/null +++ b/azurerm/internal/services/lighthouse/lighthouse_definition_resource_test.go @@ -0,0 +1,231 @@ +package lighthouse_test + +import ( + "context" + "fmt" + "os" + "testing" + + "github.com/google/uuid" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/lighthouse/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type LighthouseDefinitionResource struct { +} + +func TestAccLighthouseDefinition_basic(t *testing.T) { + // Multiple tenants are needed to test this resource. + // Second tenant ID needs to be set as a environment variable ARM_TENANT_ID_ALT. + // ObjectId for user, usergroup or service principal from second Tenant needs to be set as a environment variable ARM_PRINCIPAL_ID_ALT_TENANT. + secondTenantID := os.Getenv("ARM_TENANT_ID_ALT") + principalID := os.Getenv("ARM_PRINCIPAL_ID_ALT_TENANT") + data := acceptance.BuildTestData(t, "azurerm_lighthouse_definition", "test") + r := LighthouseDefinitionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(uuid.New().String(), secondTenantID, principalID, data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("scope").Exists(), + resource.TestMatchResourceAttr(data.ResourceName, "lighthouse_definition_id", validate.UUIDRegExp), + ), + }, + }) +} + +func TestAccLighthouseDefinition_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lighthouse_definition", "test") + r := LighthouseDefinitionResource{} + secondTenantID := os.Getenv("ARM_TENANT_ID_ALT") + principalID := os.Getenv("ARM_PRINCIPAL_ID_ALT_TENANT") + id := uuid.New().String() + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(id, secondTenantID, principalID, data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("scope").Exists(), + resource.TestMatchResourceAttr(data.ResourceName, "lighthouse_definition_id", validate.UUIDRegExp), + ), + }, + { + Config: r.requiresImport(id, secondTenantID, principalID, data), + ExpectError: acceptance.RequiresImportError("azurerm_lighthouse_definition"), + }, + }) +} + +func TestAccLighthouseDefinition_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lighthouse_definition", "test") + r := LighthouseDefinitionResource{} + secondTenantID := os.Getenv("ARM_TENANT_ID_ALT") + principalID := os.Getenv("ARM_PRINCIPAL_ID_ALT_TENANT") + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(uuid.New().String(), secondTenantID, principalID, data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("scope").Exists(), + resource.TestMatchResourceAttr(data.ResourceName, "lighthouse_definition_id", validate.UUIDRegExp), + check.That(data.ResourceName).Key("description").HasValue("Acceptance Test Lighthouse Definition"), + ), + }, + data.ImportStep("lighthouse_definition_id"), + }) +} + +func TestAccLighthouseDefinition_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lighthouse_definition", "test") + r := LighthouseDefinitionResource{} + secondTenantID := os.Getenv("ARM_TENANT_ID_ALT") + principalID := os.Getenv("ARM_PRINCIPAL_ID_ALT_TENANT") + id := uuid.New().String() + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(id, secondTenantID, principalID, data), + Check: resource.ComposeTestCheckFunc( + 
check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("scope").Exists(), + resource.TestMatchResourceAttr(data.ResourceName, "lighthouse_definition_id", validate.UUIDRegExp), + ), + }, + { + Config: r.complete(id, secondTenantID, principalID, data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("scope").Exists(), + resource.TestMatchResourceAttr(data.ResourceName, "lighthouse_definition_id", validate.UUIDRegExp), + check.That(data.ResourceName).Key("description").HasValue("Acceptance Test Lighthouse Definition"), + ), + }, + }) +} + +func TestAccLighthouseDefinition_emptyID(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lighthouse_definition", "test") + r := LighthouseDefinitionResource{} + secondTenantID := os.Getenv("ARM_TENANT_ID_ALT") + principalID := os.Getenv("ARM_PRINCIPAL_ID_ALT_TENANT") + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.emptyId(secondTenantID, principalID, data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("id").Exists(), + check.That(data.ResourceName).Key("lighthouse_definition_id").Exists(), + ), + }, + }) +} + +func (LighthouseDefinitionResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.LighthouseDefinitionID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Lighthouse.DefinitionsClient.Get(ctx, id.Scope, id.LighthouseDefinitionID) + if err != nil { + return nil, fmt.Errorf("retrieving Lighthouse Definition %q (Scope %q): %+v", id.LighthouseDefinitionID, id.Scope, err) + } + + return utils.Bool(resp.Properties != nil), nil +} + +func (LighthouseDefinitionResource) basic(id string, secondTenantID string, principalID string, data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_role_definition" "contributor" { + role_definition_id = "b24988ac-6180-42a0-ab88-20f7382dd24c" +} + +resource "azurerm_lighthouse_definition" "test" { + lighthouse_definition_id = "%s" + name = "acctest-LD-%d" + managing_tenant_id = "%s" + + authorization { + principal_id = "%s" + role_definition_id = data.azurerm_role_definition.contributor.role_definition_id + } +} +`, id, data.RandomInteger, secondTenantID, principalID) +} + +func (r LighthouseDefinitionResource) requiresImport(id string, secondTenantID string, principalID string, data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_lighthouse_definition" "import" { + name = azurerm_lighthouse_definition.test.name + lighthouse_definition_id = azurerm_lighthouse_definition.test.lighthouse_definition_id + managing_tenant_id = azurerm_lighthouse_definition.test.managing_tenant_id + authorization { + principal_id = azurerm_lighthouse_definition.test.managing_tenant_id + role_definition_id = "b24988ac-6180-42a0-ab88-20f7382dd24c" + } +} +`, r.basic(id, secondTenantID, principalID, data)) +} + +func (LighthouseDefinitionResource) complete(id string, secondTenantID string, principalID string, data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_role_definition" "contributor" { + role_definition_id = "b24988ac-6180-42a0-ab88-20f7382dd24c" +} + +resource "azurerm_lighthouse_definition" "test" { + lighthouse_definition_id = "%s" + name = "acctest-LD-%d" + description = 
"Acceptance Test Lighthouse Definition" + managing_tenant_id = "%s" + + authorization { + principal_id = "%s" + role_definition_id = data.azurerm_role_definition.contributor.role_definition_id + } +} +`, id, data.RandomInteger, secondTenantID, principalID) +} + +func (LighthouseDefinitionResource) emptyId(secondTenantID string, principalID string, data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_role_definition" "contributor" { + role_definition_id = "b24988ac-6180-42a0-ab88-20f7382dd24c" +} + +resource "azurerm_lighthouse_definition" "test" { + name = "acctest-LD-%d" + description = "Acceptance Test Lighthouse Definition" + managing_tenant_id = "%s" + + authorization { + principal_id = "%s" + role_definition_id = data.azurerm_role_definition.contributor.role_definition_id + } +} +`, data.RandomInteger, secondTenantID, principalID) +} diff --git a/azurerm/internal/services/lighthouse/registration.go b/azurerm/internal/services/lighthouse/registration.go index eebf0b495ef9..258ffb5823fd 100644 --- a/azurerm/internal/services/lighthouse/registration.go +++ b/azurerm/internal/services/lighthouse/registration.go @@ -26,7 +26,7 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource { // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_lighthouse_definition": resourceArmLighthouseDefinition(), - "azurerm_lighthouse_assignment": resourceArmLighthouseAssignment(), + "azurerm_lighthouse_definition": resourceLighthouseDefinition(), + "azurerm_lighthouse_assignment": resourceLighthouseAssignment(), } } diff --git a/azurerm/internal/services/lighthouse/resource_arm_lighthouse_assignment.go b/azurerm/internal/services/lighthouse/resource_arm_lighthouse_assignment.go deleted file mode 100644 index 93a161ed3465..000000000000 --- a/azurerm/internal/services/lighthouse/resource_arm_lighthouse_assignment.go +++ /dev/null @@ -1,191 +0,0 @@ -package lighthouse - -import ( - "context" - "fmt" - "log" - "time" - - "github.com/Azure/azure-sdk-for-go/services/managedservices/mgmt/2019-06-01/managedservices" - "github.com/hashicorp/go-uuid" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/lighthouse/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/lighthouse/validate" - resourceValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/resource" - subscriptionValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/subscription/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmLighthouseAssignment() *schema.Resource { - return &schema.Resource{ - Create: resourceArmLighthouseAssignmentCreate, - Read: resourceArmLighthouseAssignmentRead, - Delete: resourceArmLighthouseAssignmentDelete, - Importer: &schema.ResourceImporter{ - State: 
schema.ImportStatePassthrough, - }, - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - Computed: true, - ValidateFunc: validation.IsUUID, - }, - - "lighthouse_definition_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validate.LighthouseDefinitionID, - }, - - "scope": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.Any(subscriptionValidate.SubscriptionID, resourceValidate.ValidateResourceGroupID), - }, - }, - } -} - -func resourceArmLighthouseAssignmentCreate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Lighthouse.AssignmentsClient - ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) - defer cancel() - - lighthouseAssignmentName := d.Get("name").(string) - if lighthouseAssignmentName == "" { - uuid, err := uuid.GenerateUUID() - if err != nil { - return fmt.Errorf("Error generating UUID for Lighthouse Assignment: %+v", err) - } - - lighthouseAssignmentName = uuid - } - - scope := d.Get("scope").(string) - - existing, err := client.Get(ctx, scope, lighthouseAssignmentName, utils.Bool(false)) - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing Lighthouse Assignment %q (Scope %q): %+v", lighthouseAssignmentName, scope, err) - } - } - - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_lighthouse_assignment", *existing.ID) - } - - parameters := managedservices.RegistrationAssignment{ - Properties: &managedservices.RegistrationAssignmentProperties{ - RegistrationDefinitionID: utils.String(d.Get("lighthouse_definition_id").(string)), - }, - } - - if _, err := client.CreateOrUpdate(ctx, scope, lighthouseAssignmentName, parameters); err != nil { - return fmt.Errorf("creating Lighthouse Assignment %q (Scope %q): %+v", lighthouseAssignmentName, scope, err) - } - - read, err := client.Get(ctx, scope, lighthouseAssignmentName, utils.Bool(false)) - if err != nil { - return fmt.Errorf("retrieving Lighthouse Assessment %q (Scope %q): %+v", lighthouseAssignmentName, scope, err) - } - - if read.ID == nil || *read.ID == "" { - return fmt.Errorf("ID was nil or empty for Lighthouse Assignment %q ID (scope %q) ID", lighthouseAssignmentName, scope) - } - - d.SetId(*read.ID) - - return resourceArmLighthouseAssignmentRead(d, meta) -} - -func resourceArmLighthouseAssignmentRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Lighthouse.AssignmentsClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.LighthouseAssignmentID(d.Id()) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.Scope, id.Name, utils.Bool(false)) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[WARN] Lighthouse Assignment %q was not found (Scope %q)", id.Name, id.Scope) - d.SetId("") - return nil - } - - return fmt.Errorf("Error making Read request on Lighthouse Assignment %q (Scope %q): %+v", id.Name, id.Scope, err) - } - - d.Set("name", resp.Name) - d.Set("scope", id.Scope) - - if props := resp.Properties; props != nil { - d.Set("lighthouse_definition_id", 
props.RegistrationDefinitionID) - } - - return nil -} - -func resourceArmLighthouseAssignmentDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Lighthouse.AssignmentsClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.LighthouseAssignmentID(d.Id()) - if err != nil { - return err - } - - _, err = client.Delete(ctx, id.Scope, id.Name) - if err != nil { - return fmt.Errorf("Error deleting Lighthouse Assignment %q at Scope %q: %+v", id.Name, id.Scope, err) - } - - stateConf := &resource.StateChangeConf{ - Pending: []string{"Deleting"}, - Target: []string{"Deleted"}, - Refresh: lighthouseAssignmentDeleteRefreshFunc(ctx, client, id.Scope, id.Name), - MinTimeout: 15 * time.Second, - Timeout: d.Timeout(schema.TimeoutDelete), - } - - if _, err := stateConf.WaitForState(); err != nil { - return fmt.Errorf("Error waiting for Lighthouse Assignment %q (Scope %q) to be deleted: %s", id.Name, id.Scope, err) - } - - return nil -} - -func lighthouseAssignmentDeleteRefreshFunc(ctx context.Context, client *managedservices.RegistrationAssignmentsClient, scope string, lighthouseAssignmentName string) resource.StateRefreshFunc { - return func() (interface{}, string, error) { - expandLighthouseDefinition := true - res, err := client.Get(ctx, scope, lighthouseAssignmentName, &expandLighthouseDefinition) - if err != nil { - if utils.ResponseWasNotFound(res.Response) { - return res, "Deleted", nil - } - return nil, "Error", fmt.Errorf("Error issuing read request in lighthouseAssignmentDeleteRefreshFunc to Lighthouse Assignment %q (Scope %q): %s", lighthouseAssignmentName, scope, err) - } - - return res, "Deleting", nil - } -} diff --git a/azurerm/internal/services/lighthouse/resource_arm_lighthouse_definition.go b/azurerm/internal/services/lighthouse/resource_arm_lighthouse_definition.go deleted file mode 100644 index 73617bd6ff75..000000000000 --- a/azurerm/internal/services/lighthouse/resource_arm_lighthouse_definition.go +++ /dev/null @@ -1,249 +0,0 @@ -package lighthouse - -import ( - "fmt" - "log" - "time" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/subscription/validate" - - "github.com/Azure/azure-sdk-for-go/services/managedservices/mgmt/2019-06-01/managedservices" - "github.com/hashicorp/go-uuid" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/lighthouse/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmLighthouseDefinition() *schema.Resource { - return &schema.Resource{ - Create: resourceArmLighthouseDefinitionCreateUpdate, - Read: resourceArmLighthouseDefinitionRead, - Update: resourceArmLighthouseDefinitionCreateUpdate, - Delete: resourceArmLighthouseDefinitionDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - 
}, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - - "managing_tenant_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.IsUUID, - }, - - "scope": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validate.SubscriptionID, - }, - - "authorization": { - Type: schema.TypeSet, - Required: true, - MinItems: 1, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "principal_id": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.IsUUID, - }, - - "role_definition_id": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.IsUUID, - }, - }, - }, - }, - - "description": { - Type: schema.TypeString, - Optional: true, - }, - - "lighthouse_definition_id": { - Type: schema.TypeString, - Optional: true, - Computed: true, - ForceNew: true, - ValidateFunc: validation.IsUUID, - }, - }, - } -} - -func resourceArmLighthouseDefinitionCreateUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Lighthouse.DefinitionsClient - ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - - lighthouseDefinitionID := d.Get("lighthouse_definition_id").(string) - if lighthouseDefinitionID == "" { - uuid, err := uuid.GenerateUUID() - if err != nil { - return fmt.Errorf("Error generating UUID for Lighthouse Definition: %+v", err) - } - - lighthouseDefinitionID = uuid - } - - subscriptionID := meta.(*clients.Client).Account.SubscriptionId - if subscriptionID == "" { - return fmt.Errorf("Error reading Subscription for Lighthouse Definition %q", lighthouseDefinitionID) - } - - scope := d.Get("scope").(string) - - if d.IsNewResource() { - existing, err := client.Get(ctx, scope, lighthouseDefinitionID) - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for presence of existing Lighthouse Definition %q (Scope %q): %+v", lighthouseDefinitionID, scope, err) - } - } - - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_lighthouse_definition", *existing.ID) - } - } - - parameters := managedservices.RegistrationDefinition{ - Properties: &managedservices.RegistrationDefinitionProperties{ - Description: utils.String(d.Get("description").(string)), - Authorizations: expandLighthouseDefinitionAuthorization(d.Get("authorization").(*schema.Set).List()), - RegistrationDefinitionName: utils.String(d.Get("name").(string)), - ManagedByTenantID: utils.String(d.Get("managing_tenant_id").(string)), - }, - } - - if _, err := client.CreateOrUpdate(ctx, lighthouseDefinitionID, scope, parameters); err != nil { - return fmt.Errorf("Error Creating/Updating Lighthouse Definition %q (Scope %q): %+v", lighthouseDefinitionID, scope, err) - } - - read, err := client.Get(ctx, scope, lighthouseDefinitionID) - if err != nil { - return err - } - - if read.ID == nil { - return fmt.Errorf("Cannot read Lighthouse Definition %q ID (scope %q) ID", lighthouseDefinitionID, scope) - } - - d.SetId(*read.ID) - - return resourceArmLighthouseDefinitionRead(d, meta) -} - -func resourceArmLighthouseDefinitionRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Lighthouse.DefinitionsClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.LighthouseDefinitionID(d.Id()) - if err != nil { - 
return err - } - - resp, err := client.Get(ctx, id.Scope, id.LighthouseDefinitionID) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[WARN] Lighthouse Definition %q was not found (Scope %q)", id.LighthouseDefinitionID, id.Scope) - d.SetId("") - return nil - } - - return fmt.Errorf("Error making Read request on Lighthouse Definition %q (Scope %q): %+v", id.LighthouseDefinitionID, id.Scope, err) - } - - d.Set("lighthouse_definition_id", resp.Name) - d.Set("scope", id.Scope) - - if props := resp.Properties; props != nil { - if err := d.Set("authorization", flattenLighthouseDefinitionAuthorization(props.Authorizations)); err != nil { - return fmt.Errorf("setting `authorization`: %+v", err) - } - d.Set("description", props.Description) - d.Set("name", props.RegistrationDefinitionName) - d.Set("managing_tenant_id", props.ManagedByTenantID) - } - - return nil -} - -func resourceArmLighthouseDefinitionDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Lighthouse.DefinitionsClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.LighthouseDefinitionID(d.Id()) - if err != nil { - return err - } - - _, err = client.Delete(ctx, id.LighthouseDefinitionID, id.Scope) - if err != nil { - return fmt.Errorf("Error deleting Lighthouse Definition %q at Scope %q: %+v", id.LighthouseDefinitionID, id.Scope, err) - } - - return nil -} - -func flattenLighthouseDefinitionAuthorization(input *[]managedservices.Authorization) []interface{} { - results := make([]interface{}, 0) - if input == nil { - return results - } - - for _, item := range *input { - principalID := "" - if item.PrincipalID != nil { - principalID = *item.PrincipalID - } - - roleDefinitionID := "" - if item.RoleDefinitionID != nil { - roleDefinitionID = *item.RoleDefinitionID - } - - results = append(results, map[string]interface{}{ - "role_definition_id": roleDefinitionID, - "principal_id": principalID, - }) - } - - return results -} - -func expandLighthouseDefinitionAuthorization(input []interface{}) *[]managedservices.Authorization { - results := make([]managedservices.Authorization, 0) - for _, item := range input { - v := item.(map[string]interface{}) - result := managedservices.Authorization{ - RoleDefinitionID: utils.String(v["role_definition_id"].(string)), - PrincipalID: utils.String(v["principal_id"].(string)), - } - results = append(results, result) - } - return &results -} diff --git a/azurerm/internal/services/lighthouse/tests/data_source_lighthouse_definition_test.go b/azurerm/internal/services/lighthouse/tests/data_source_lighthouse_definition_test.go deleted file mode 100644 index 22578c0ac7ee..000000000000 --- a/azurerm/internal/services/lighthouse/tests/data_source_lighthouse_definition_test.go +++ /dev/null @@ -1,70 +0,0 @@ -package tests - -import ( - "fmt" - "os" - "testing" - - "github.com/google/uuid" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMLighthouseDefinition_basic(t *testing.T) { - // Multiple tenants are needed to test this resource. - // Second tenant ID needs to be set as a environment variable ARM_TENANT_ID_ALT. - // ObjectId for user, usergroup or service principal from second Tenant needs to be set as a environment variable ARM_PRINCIPAL_ID_ALT_TENANT. 
- secondTenantID := os.Getenv("ARM_TENANT_ID_ALT") - principalID := os.Getenv("ARM_PRINCIPAL_ID_ALT_TENANT") - data := acceptance.BuildTestData(t, "data.azurerm_lighthouse_definition", "test") - id := uuid.New().String() - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceLighthouseDefinition_basic(id, secondTenantID, principalID, data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "scope"), - resource.TestMatchResourceAttr(data.ResourceName, "lighthouse_definition_id", validate.UUIDRegExp), - resource.TestCheckResourceAttr(data.ResourceName, "name", fmt.Sprintf("acctest-LD-%d", data.RandomInteger)), - resource.TestCheckResourceAttr(data.ResourceName, "description", "Acceptance Test Lighthouse Definition"), - resource.TestMatchResourceAttr(data.ResourceName, "managing_tenant_id", validate.UUIDRegExp), - resource.TestMatchResourceAttr(data.ResourceName, "authorization.0.principal_id", validate.UUIDRegExp), - resource.TestMatchResourceAttr(data.ResourceName, "authorization.0.role_definition_id", validate.UUIDRegExp), - ), - }, - }, - }) -} - -func testAccDataSourceLighthouseDefinition_basic(id string, secondTenantID string, principalID string, data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_role_definition" "contributor" { - role_definition_id = "b24988ac-6180-42a0-ab88-20f7382dd24c" -} - -resource "azurerm_lighthouse_definition" "test" { - lighthouse_definition_id = "%s" - name = "acctest-LD-%d" - description = "Acceptance Test Lighthouse Definition" - managing_tenant_id = "%s" - - authorization { - principal_id = "%s" - role_definition_id = data.azurerm_role_definition.contributor.role_definition_id - } -} - -data "azurerm_lighthouse_definition" "test" { - lighthouse_definition_id = azurerm_lighthouse_definition.test.lighthouse_definition_id -} -`, id, data.RandomInteger, secondTenantID, principalID) -} diff --git a/azurerm/internal/services/lighthouse/tests/resource_arm_lighthouse_assignment_test.go b/azurerm/internal/services/lighthouse/tests/resource_arm_lighthouse_assignment_test.go deleted file mode 100644 index b0113adc7555..000000000000 --- a/azurerm/internal/services/lighthouse/tests/resource_arm_lighthouse_assignment_test.go +++ /dev/null @@ -1,216 +0,0 @@ -package tests - -import ( - "fmt" - "os" - "testing" - - "github.com/google/uuid" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMLighthouseAssignment_basic(t *testing.T) { - // Multiple tenants are needed to test this resource. - // Second tenant ID needs to be set as a environment variable ARM_TENANT_ID_ALT. - // ObjectId for user, usergroup or service principal from second Tenant needs to be set as a environment variable ARM_PRINCIPAL_ID_ALT_TENANT. 
- secondTenantID := os.Getenv("ARM_TENANT_ID_ALT") - principalID := os.Getenv("ARM_PRINCIPAL_ID_ALT_TENANT") - data := acceptance.BuildTestData(t, "azurerm_lighthouse_assignment", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLighthouseAssignmentDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLighthouseAssignment_basic(uuid.New().String(), secondTenantID, principalID, data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLighthouseAssignmentExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "name"), - ), - }, - }, - }) -} - -func TestAccAzureRMLighthouseAssignment_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lighthouse_assignment", "test") - secondTenantID := os.Getenv("ARM_TENANT_ID_ALT") - principalID := os.Getenv("ARM_PRINCIPAL_ID_ALT_TENANT") - id := uuid.New().String() - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLighthouseAssignmentDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLighthouseAssignment_basic(id, secondTenantID, principalID, data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLighthouseAssignmentExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "name"), - ), - }, - { - Config: testAccAzureRMLighthouseAssignment_requiresImport(id, secondTenantID, principalID, data), - ExpectError: acceptance.RequiresImportError("azurerm_lighthouse_assignment"), - }, - }, - }) -} - -func TestAccAzureRMLighthouseAssignment_emptyID(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lighthouse_assignment", "test") - secondTenantID := os.Getenv("ARM_TENANT_ID_ALT") - principalID := os.Getenv("ARM_PRINCIPAL_ID_ALT_TENANT") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLighthouseAssignmentDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLighthouseAssignment_emptyId(secondTenantID, principalID, data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLighthouseAssignmentExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "name"), - ), - }, - }, - }) -} - -func testCheckAzureRMLighthouseAssignmentExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Lighthouse.AssignmentsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %q", resourceName) - } - - scope := rs.Primary.Attributes["scope"] - lighthouseAssignmentName := rs.Primary.Attributes["name"] - expandLighthouseDefinition := true - - resp, err := client.Get(ctx, scope, lighthouseAssignmentName, &expandLighthouseDefinition) - - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Lighthouse Assignment %q (Scope: %q) does not exist", lighthouseAssignmentName, scope) - } - return fmt.Errorf("Bad: Get on LighthouseAssignmentsClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMLighthouseAssignmentDestroy(s 
*terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Lighthouse.AssignmentsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_lighthouse_assignment" { - continue - } - - scope := rs.Primary.Attributes["scope"] - lighthouseAssignmentName := rs.Primary.Attributes["name"] - expandLighthouseDefinition := true - - resp, err := client.Get(ctx, scope, lighthouseAssignmentName, &expandLighthouseDefinition) - - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return err - } - } - - return nil - } - - return nil -} - -func testAccAzureRMLighthouseAssignment_basic(id string, secondTenantID string, principalID string, data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_subscription" "primary" { -} - -data "azurerm_role_definition" "contributor" { - role_definition_id = "b24988ac-6180-42a0-ab88-20f7382dd24c" -} - -resource "azurerm_lighthouse_definition" "test" { - name = "acctest-LD-%d" - description = "Acceptance Test Lighthouse Definition" - managing_tenant_id = "%s" - - authorization { - principal_id = "%s" - role_definition_id = data.azurerm_role_definition.contributor.role_definition_id - } -} - -resource "azurerm_lighthouse_assignment" "test" { - name = "%s" - scope = data.azurerm_subscription.primary.id - lighthouse_definition_id = azurerm_lighthouse_definition.test.id -} - -`, data.RandomInteger, secondTenantID, principalID, id) -} - -func testAccAzureRMLighthouseAssignment_requiresImport(id string, secondTenantID string, principalID string, data acceptance.TestData) string { - return fmt.Sprintf(` -%s - -resource "azurerm_lighthouse_assignment" "import" { - name = azurerm_lighthouse_assignment.test.name - lighthouse_definition_id = azurerm_lighthouse_assignment.test.lighthouse_definition_id - scope = azurerm_lighthouse_assignment.test.scope -} -`, testAccAzureRMLighthouseAssignment_basic(id, secondTenantID, principalID, data)) -} - -func testAccAzureRMLighthouseAssignment_emptyId(secondTenantID string, principalID string, data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_subscription" "primary" { -} - -data "azurerm_role_definition" "contributor" { - role_definition_id = "b24988ac-6180-42a0-ab88-20f7382dd24c" -} - -resource "azurerm_lighthouse_definition" "test" { - name = "acctest-LD-%d" - description = "Acceptance Test Lighthouse Definition" - managing_tenant_id = "%s" - - authorization { - principal_id = "%s" - role_definition_id = data.azurerm_role_definition.contributor.role_definition_id - } -} - -resource "azurerm_lighthouse_assignment" "test" { - scope = data.azurerm_subscription.primary.id - lighthouse_definition_id = azurerm_lighthouse_definition.test.id -} -`, data.RandomInteger, secondTenantID, principalID) -} diff --git a/azurerm/internal/services/lighthouse/tests/resource_arm_lighthouse_definition_test.go b/azurerm/internal/services/lighthouse/tests/resource_arm_lighthouse_definition_test.go deleted file mode 100644 index 75c75fad2263..000000000000 --- a/azurerm/internal/services/lighthouse/tests/resource_arm_lighthouse_definition_test.go +++ /dev/null @@ -1,283 +0,0 @@ -package tests - -import ( - "fmt" - "os" - "testing" - - "github.com/google/uuid" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMLighthouseDefinition_basic(t *testing.T) { - // Multiple tenants are needed to test this resource. - // Second tenant ID needs to be set as a environment variable ARM_TENANT_ID_ALT. - // ObjectId for user, usergroup or service principal from second Tenant needs to be set as a environment variable ARM_PRINCIPAL_ID_ALT_TENANT. - secondTenantID := os.Getenv("ARM_TENANT_ID_ALT") - principalID := os.Getenv("ARM_PRINCIPAL_ID_ALT_TENANT") - data := acceptance.BuildTestData(t, "azurerm_lighthouse_definition", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLighthouseDefinitionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLighthouseDefinition_basic(uuid.New().String(), secondTenantID, principalID, data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLighthouseDefinitionExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "scope"), - resource.TestMatchResourceAttr(data.ResourceName, "lighthouse_definition_id", validate.UUIDRegExp), - ), - }, - }, - }) -} - -func TestAccAzureRMLighthouseDefinition_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lighthouse_definition", "test") - secondTenantID := os.Getenv("ARM_TENANT_ID_ALT") - principalID := os.Getenv("ARM_PRINCIPAL_ID_ALT_TENANT") - id := uuid.New().String() - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLighthouseDefinitionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLighthouseDefinition_basic(id, secondTenantID, principalID, data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLighthouseDefinitionExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "scope"), - resource.TestMatchResourceAttr(data.ResourceName, "lighthouse_definition_id", validate.UUIDRegExp), - ), - }, - { - Config: testAccAzureRMLighthouseDefinition_requiresImport(id, secondTenantID, principalID, data), - ExpectError: acceptance.RequiresImportError("azurerm_lighthouse_definition"), - }, - }, - }) -} - -func TestAccAzureRMLighthouseDefinition_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lighthouse_definition", "test") - secondTenantID := os.Getenv("ARM_TENANT_ID_ALT") - principalID := os.Getenv("ARM_PRINCIPAL_ID_ALT_TENANT") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLighthouseDefinitionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLighthouseDefinition_complete(uuid.New().String(), secondTenantID, principalID, data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLighthouseDefinitionExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "scope"), - resource.TestMatchResourceAttr(data.ResourceName, "lighthouse_definition_id", validate.UUIDRegExp), - resource.TestCheckResourceAttr(data.ResourceName, "description", 
"Acceptance Test Lighthouse Definition"), - ), - }, - data.ImportStep("lighthouse_definition_id"), - }, - }) -} - -func TestAccAzureRMLighthouseDefinition_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lighthouse_definition", "test") - secondTenantID := os.Getenv("ARM_TENANT_ID_ALT") - principalID := os.Getenv("ARM_PRINCIPAL_ID_ALT_TENANT") - id := uuid.New().String() - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLighthouseDefinitionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLighthouseDefinition_basic(id, secondTenantID, principalID, data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLighthouseDefinitionExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "scope"), - resource.TestMatchResourceAttr(data.ResourceName, "lighthouse_definition_id", validate.UUIDRegExp), - ), - }, - { - Config: testAccAzureRMLighthouseDefinition_complete(id, secondTenantID, principalID, data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLighthouseDefinitionExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "scope"), - resource.TestMatchResourceAttr(data.ResourceName, "lighthouse_definition_id", validate.UUIDRegExp), - resource.TestCheckResourceAttr(data.ResourceName, "description", "Acceptance Test Lighthouse Definition"), - ), - }, - }, - }) -} - -func TestAccAzureRMLighthouseDefinition_emptyID(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lighthouse_definition", "test") - secondTenantID := os.Getenv("ARM_TENANT_ID_ALT") - principalID := os.Getenv("ARM_PRINCIPAL_ID_ALT_TENANT") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLighthouseDefinitionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLighthouseDefinition_emptyId(secondTenantID, principalID, data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLighthouseDefinitionExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "lighthouse_definition_id"), - ), - }, - }, - }) -} - -func testCheckAzureRMLighthouseDefinitionExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Lighthouse.DefinitionsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %q", resourceName) - } - - scope := rs.Primary.Attributes["scope"] - lighthouseDefinitionID := rs.Primary.Attributes["lighthouse_definition_id"] - - resp, err := client.Get(ctx, scope, lighthouseDefinitionID) - - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Lighthouse Definition %q (Scope: %q) does not exist", lighthouseDefinitionID, scope) - } - return fmt.Errorf("Bad: Get on lighthouseDefinitionsClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMLighthouseDefinitionDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Lighthouse.DefinitionsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != 
"azurerm_lighthouse_definition" { - continue - } - - scope := rs.Primary.Attributes["scope"] - lighthouseDefinitionID := rs.Primary.Attributes["lighthouse_definition_id"] - - resp, err := client.Get(ctx, scope, lighthouseDefinitionID) - - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return err - } - } - - return nil - } - - return nil -} - -func testAccAzureRMLighthouseDefinition_basic(id string, secondTenantID string, principalID string, data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_role_definition" "contributor" { - role_definition_id = "b24988ac-6180-42a0-ab88-20f7382dd24c" -} - -resource "azurerm_lighthouse_definition" "test" { - lighthouse_definition_id = "%s" - name = "acctest-LD-%d" - managing_tenant_id = "%s" - - authorization { - principal_id = "%s" - role_definition_id = data.azurerm_role_definition.contributor.role_definition_id - } -} -`, id, data.RandomInteger, secondTenantID, principalID) -} - -func testAccAzureRMLighthouseDefinition_requiresImport(id string, secondTenantID string, principalID string, data acceptance.TestData) string { - return fmt.Sprintf(` -%s - -resource "azurerm_lighthouse_definition" "import" { - name = azurerm_lighthouse_definition.test.name - lighthouse_definition_id = azurerm_lighthouse_definition.test.lighthouse_definition_id - managing_tenant_id = azurerm_lighthouse_definition.test.managing_tenant_id - authorization { - principal_id = azurerm_lighthouse_definition.test.managing_tenant_id - role_definition_id = "b24988ac-6180-42a0-ab88-20f7382dd24c" - } -} -`, testAccAzureRMLighthouseDefinition_basic(id, secondTenantID, principalID, data)) -} - -func testAccAzureRMLighthouseDefinition_complete(id string, secondTenantID string, principalID string, data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_role_definition" "contributor" { - role_definition_id = "b24988ac-6180-42a0-ab88-20f7382dd24c" -} - -resource "azurerm_lighthouse_definition" "test" { - lighthouse_definition_id = "%s" - name = "acctest-LD-%d" - description = "Acceptance Test Lighthouse Definition" - managing_tenant_id = "%s" - - authorization { - principal_id = "%s" - role_definition_id = data.azurerm_role_definition.contributor.role_definition_id - } -} -`, id, data.RandomInteger, secondTenantID, principalID) -} - -func testAccAzureRMLighthouseDefinition_emptyId(secondTenantID string, principalID string, data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_role_definition" "contributor" { - role_definition_id = "b24988ac-6180-42a0-ab88-20f7382dd24c" -} - -resource "azurerm_lighthouse_definition" "test" { - name = "acctest-LD-%d" - description = "Acceptance Test Lighthouse Definition" - managing_tenant_id = "%s" - - authorization { - principal_id = "%s" - role_definition_id = data.azurerm_role_definition.contributor.role_definition_id - } -} -`, data.RandomInteger, secondTenantID, principalID) -} diff --git a/azurerm/internal/services/loadbalancer/backend_address_pool_data_source.go b/azurerm/internal/services/loadbalancer/backend_address_pool_data_source.go new file mode 100644 index 000000000000..67a26705c68a --- /dev/null +++ b/azurerm/internal/services/loadbalancer/backend_address_pool_data_source.go @@ -0,0 +1,95 @@ +package loadbalancer + +import ( + "fmt" + "time" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" + + 
"github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" +) + +func dataSourceArmLoadBalancerBackendAddressPool() *schema.Resource { + return &schema.Resource{ + Read: dataSourceArmLoadBalancerBackendAddressPoolRead, + Timeouts: &schema.ResourceTimeout{ + Read: schema.DefaultTimeout(5 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.NoZeroValues, + }, + + "loadbalancer_id": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.LoadBalancerID, + }, + + "backend_ip_configurations": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "id": { + Type: schema.TypeString, + Computed: true, + }, + }, + }, + }, + }, + } +} + +func dataSourceArmLoadBalancerBackendAddressPoolRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LoadBalancers.LoadBalancersClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + loadBalancerId, err := parse.LoadBalancerID(d.Get("loadbalancer_id").(string)) + if err != nil { + return err + } + + resp, err := client.Get(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, "") + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Load Balancer %q (Resource Group %q) for Backend Pool %q was not found", loadBalancerId.Name, loadBalancerId.ResourceGroup, name) + } + return fmt.Errorf("failed to retrieve Load Balancer %q (resource group %q) for Backend Address Pool %q: %+v", loadBalancerId.Name, loadBalancerId.ResourceGroup, name, err) + } + + bap, _, exists := FindLoadBalancerBackEndAddressPoolByName(&resp, name) + if !exists { + return fmt.Errorf("Backend Address Pool %q was not found in Load Balancer %q (Resource Group %q)", name, loadBalancerId.Name, loadBalancerId.ResourceGroup) + } + + d.SetId(*bap.ID) + + backendIPConfigurations := make([]interface{}, 0) + if props := bap.BackendAddressPoolPropertiesFormat; props != nil { + if beipConfigs := props.BackendIPConfigurations; beipConfigs != nil { + for _, config := range *beipConfigs { + ipConfig := make(map[string]interface{}) + if id := config.ID; id != nil { + ipConfig["id"] = *id + backendIPConfigurations = append(backendIPConfigurations, ipConfig) + } + } + } + } + + d.Set("backend_ip_configurations", backendIPConfigurations) + + return nil +} diff --git a/azurerm/internal/services/loadbalancer/backend_address_pool_resource.go b/azurerm/internal/services/loadbalancer/backend_address_pool_resource.go new file mode 100644 index 000000000000..6145594ea673 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/backend_address_pool_resource.go @@ -0,0 +1,235 @@ +package loadbalancer + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-03-01/network" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmLoadBalancerBackendAddressPool() *schema.Resource { + return &schema.Resource{ + Create: resourceArmLoadBalancerBackendAddressPoolCreate, + Read: resourceArmLoadBalancerBackendAddressPoolRead, + Delete: resourceArmLoadBalancerBackendAddressPoolDelete, + + Importer: loadBalancerSubResourceImporter(func(input string) (*parse.LoadBalancerId, error) { + id, err := parse.LoadBalancerBackendAddressPoolID(input) + if err != nil { + return nil, err + } + + lbId := parse.NewLoadBalancerID(id.SubscriptionId, id.ResourceGroup, id.LoadBalancerName) + return &lbId, nil + }), + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "loadbalancer_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.LoadBalancerID, + }, + + "backend_ip_configurations": { + Type: schema.TypeSet, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringIsNotEmpty, + }, + Set: schema.HashString, + }, + + "load_balancing_rules": { + Type: schema.TypeSet, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringIsNotEmpty, + }, + Set: schema.HashString, + }, + }, + } +} + +func resourceArmLoadBalancerBackendAddressPoolCreate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LoadBalancers.LoadBalancersClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + loadBalancerId, err := parse.LoadBalancerID(d.Get("loadbalancer_id").(string)) + if err != nil { + return fmt.Errorf("parsing Load Balancer Name and Group: %+v", err) + } + + id := parse.NewLoadBalancerBackendAddressPoolID(subscriptionId, loadBalancerId.ResourceGroup, loadBalancerId.Name, d.Get("name").(string)) + + loadBalancerID := loadBalancerId.ID() + locks.ByID(loadBalancerID) + defer locks.UnlockByID(loadBalancerID) + + loadBalancer, err := client.Get(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, "") + if err != nil { + if utils.ResponseWasNotFound(loadBalancer.Response) { + return fmt.Errorf("Load Balancer %q (Resource Group %q) for Backend Address Pool %q was not found", id.LoadBalancerName, id.ResourceGroup, id.BackendAddressPoolName) + } + return fmt.Errorf("failed to retrieve Load Balancer %q (resource group %q) for Backend Address Pool %q: %+v", 
id.LoadBalancerName, id.ResourceGroup, id.BackendAddressPoolName, err) + } + + backendAddressPools := append(*loadBalancer.LoadBalancerPropertiesFormat.BackendAddressPools, network.BackendAddressPool{ + Name: utils.String(id.BackendAddressPoolName), + }) + existingPool, existingPoolIndex, exists := FindLoadBalancerBackEndAddressPoolByName(&loadBalancer, id.BackendAddressPoolName) + if exists { + if id.BackendAddressPoolName == *existingPool.Name { + if d.IsNewResource() { + return tf.ImportAsExistsError("azurerm_lb_backend_address_pool", *existingPool.ID) + } + + // this pool is being updated/reapplied remove old copy from the slice + backendAddressPools = append(backendAddressPools[:existingPoolIndex], backendAddressPools[existingPoolIndex+1:]...) + } + } + + loadBalancer.LoadBalancerPropertiesFormat.BackendAddressPools = &backendAddressPools + + future, err := client.CreateOrUpdate(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, loadBalancer) + if err != nil { + return fmt.Errorf("updating Load Balancer %q (Resource Group %q) for Backend Address Pool %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.BackendAddressPoolName, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for update of Load Balancer %q (Resource Group %q) for Backend Address Pool %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.BackendAddressPoolName, err) + } + + d.SetId(id.ID()) + + return resourceArmLoadBalancerBackendAddressPoolRead(d, meta) +} + +func resourceArmLoadBalancerBackendAddressPoolRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LoadBalancers.LoadBalancersClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.LoadBalancerBackendAddressPoolID(d.Id()) + if err != nil { + return err + } + + loadBalancer, err := client.Get(ctx, id.ResourceGroup, id.LoadBalancerName, "") + if err != nil { + if utils.ResponseWasNotFound(loadBalancer.Response) { + d.SetId("") + log.Printf("[INFO] Load Balancer %q (resource group %q) not found. Removing Backend Pool %q from state", id.LoadBalancerName, id.ResourceGroup, id.BackendAddressPoolName) + return nil + } + return fmt.Errorf("failed to retrieve Load Balancer %q (resource group %q) for Backend Address Pool %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.BackendAddressPoolName, err) + } + + config, _, exists := FindLoadBalancerBackEndAddressPoolByName(&loadBalancer, id.BackendAddressPoolName) + if !exists { + log.Printf("[INFO] Load Balancer Backend Address Pool %q not found. 
Removing from state", id.BackendAddressPoolName) + d.SetId("") + return nil + } + + d.Set("name", config.Name) + d.Set("resource_group_name", id.ResourceGroup) + + var backendIpConfigurations []string + var loadBalancingRules []string + + if props := config.BackendAddressPoolPropertiesFormat; props != nil { + if configs := props.BackendIPConfigurations; configs != nil { + for _, backendConfig := range *configs { + backendIpConfigurations = append(backendIpConfigurations, *backendConfig.ID) + } + } + + if rules := props.LoadBalancingRules; rules != nil { + for _, rule := range *rules { + loadBalancingRules = append(loadBalancingRules, *rule.ID) + } + } + } + + d.Set("backend_ip_configurations", backendIpConfigurations) + d.Set("load_balancing_rules", loadBalancingRules) + + return nil +} + +func resourceArmLoadBalancerBackendAddressPoolDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LoadBalancers.LoadBalancersClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.LoadBalancerBackendAddressPoolID(d.Id()) + if err != nil { + return err + } + + loadBalancerId := parse.NewLoadBalancerID(id.SubscriptionId, id.ResourceGroup, id.LoadBalancerName) + loadBalancerID := loadBalancerId.ID() + locks.ByID(loadBalancerID) + defer locks.UnlockByID(loadBalancerID) + + loadBalancer, err := client.Get(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, "") + if err != nil { + if utils.ResponseWasNotFound(loadBalancer.Response) { + d.SetId("") + return nil + } + return fmt.Errorf("failed to retrieve Load Balancer %q (resource group %q) for Backend Address Pool %q: %+v", loadBalancerId.Name, loadBalancerId.ResourceGroup, id.BackendAddressPoolName, err) + } + _, index, exists := FindLoadBalancerBackEndAddressPoolByName(&loadBalancer, d.Get("name").(string)) + if !exists { + return nil + } + + oldBackEndPools := *loadBalancer.LoadBalancerPropertiesFormat.BackendAddressPools + newBackEndPools := append(oldBackEndPools[:index], oldBackEndPools[index+1:]...) 
+ loadBalancer.LoadBalancerPropertiesFormat.BackendAddressPools = &newBackEndPools + + future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.LoadBalancerName, loadBalancer) + if err != nil { + return fmt.Errorf("updating Load Balancer %q (resource group %q) to remove Backend Address Pool %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.BackendAddressPoolName, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for update of Load Balancer %q (resource group %q) for Backend Address Pool %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.BackendAddressPoolName, err) + } + + return nil +} diff --git a/azurerm/internal/services/loadbalancer/client/client.go b/azurerm/internal/services/loadbalancer/client/client.go new file mode 100644 index 000000000000..8fbdc483b63c --- /dev/null +++ b/azurerm/internal/services/loadbalancer/client/client.go @@ -0,0 +1,24 @@ +package client + +import ( + networkLegacy "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-03-01/network" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/common" +) + +type Client struct { + LoadBalancersClient *networkLegacy.LoadBalancersClient + LoadBalancingRulesClient *networkLegacy.LoadBalancerLoadBalancingRulesClient +} + +func NewClient(o *common.ClientOptions) *Client { + loadBalancersClient := networkLegacy.NewLoadBalancersClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + o.ConfigureClient(&loadBalancersClient.Client, o.ResourceManagerAuthorizer) + + loadBalancingRulesClient := networkLegacy.NewLoadBalancerLoadBalancingRulesClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + o.ConfigureClient(&loadBalancingRulesClient.Client, o.ResourceManagerAuthorizer) + + return &Client{ + LoadBalancersClient: &loadBalancersClient, + LoadBalancingRulesClient: &loadBalancingRulesClient, + } +} diff --git a/azurerm/internal/services/network/loadbalancer.go b/azurerm/internal/services/loadbalancer/loadbalancer.go similarity index 80% rename from azurerm/internal/services/network/loadbalancer.go rename to azurerm/internal/services/loadbalancer/loadbalancer.go index f3e7c891dcf1..ff12d09a5e6e 100644 --- a/azurerm/internal/services/network/loadbalancer.go +++ b/azurerm/internal/services/loadbalancer/loadbalancer.go @@ -1,30 +1,13 @@ -package network +package loadbalancer import ( - "context" - "fmt" - "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-03-01/network" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/parse" ) // TODO: refactor this -func retrieveLoadBalancerById(ctx context.Context, client *network.LoadBalancersClient, loadBalancerId parse.LoadBalancerId) (*network.LoadBalancer, bool, error) { - resp, err := client.Get(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, "") - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil, false, nil - } - return nil, false, fmt.Errorf("retrieving Load Balancer %q (Resource Group %q): %+v", loadBalancerId.Name, loadBalancerId.ResourceGroup, err) - } - - return &resp, true, nil -} - func FindLoadBalancerBackEndAddressPoolByName(lb 
*network.LoadBalancer, name string) (*network.BackendAddressPool, int, bool) { if lb == nil || lb.LoadBalancerPropertiesFormat == nil || lb.LoadBalancerPropertiesFormat.BackendAddressPools == nil { return nil, -1, false @@ -126,13 +109,12 @@ func FindLoadBalancerProbeByName(lb *network.LoadBalancer, name string) (*networ func loadBalancerSubResourceImporter(parser func(input string) (*parse.LoadBalancerId, error)) *schema.ResourceImporter { return &schema.ResourceImporter{ State: func(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { - subscriptionId := meta.(*clients.Client).Account.SubscriptionId lbId, err := parser(d.Id()) if err != nil { return nil, err } - d.Set("loadbalancer_id", lbId.ID(subscriptionId)) + d.Set("loadbalancer_id", lbId.ID()) return []*schema.ResourceData{d}, nil }, } diff --git a/azurerm/internal/services/loadbalancer/loadbalancer_backend_address_pool_data_source_test.go b/azurerm/internal/services/loadbalancer/loadbalancer_backend_address_pool_data_source_test.go new file mode 100644 index 000000000000..b599507f090a --- /dev/null +++ b/azurerm/internal/services/loadbalancer/loadbalancer_backend_address_pool_data_source_test.go @@ -0,0 +1,36 @@ +package loadbalancer_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +func TestAccAzureRMDataSourceLoadBalancerBackEndAddressPool_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_lb_backend_address_pool", "test") + r := LoadBalancerBackendAddressPool{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.dataSourceBasic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("id").Exists(), + ), + }, + }) +} + +func (r LoadBalancerBackendAddressPool) dataSourceBasic(data acceptance.TestData) string { + resource := r.basic(data) + return fmt.Sprintf(` +%s + +data "azurerm_lb_backend_address_pool" "test" { + name = azurerm_lb_backend_address_pool.test.name + loadbalancer_id = azurerm_lb_backend_address_pool.test.loadbalancer_id +} +`, resource) +} diff --git a/azurerm/internal/services/loadbalancer/loadbalancer_backend_address_pool_resource_test.go b/azurerm/internal/services/loadbalancer/loadbalancer_backend_address_pool_resource_test.go new file mode 100644 index 000000000000..653de30d1ca4 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/loadbalancer_backend_address_pool_resource_test.go @@ -0,0 +1,221 @@ +package loadbalancer_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type LoadBalancerBackendAddressPool struct { +} + +func TestAccAzureRMLoadBalancerBackEndAddressPool_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_backend_address_pool", "test") + r := LoadBalancerBackendAddressPool{} + + 
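+	// create the pool, confirm it exists in Azure, then verify the resource imports cleanly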
data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMLoadBalancerBackEndAddressPool_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_backend_address_pool", "test") + r := LoadBalancerBackendAddressPool{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccAzureRMLoadBalancerBackEndAddressPool_removal(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_backend_address_pool", "test") + r := LoadBalancerBackendAddressPool{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.removal(data), + Check: resource.ComposeTestCheckFunc( + r.IsMissing("azurerm_lb.test", fmt.Sprintf("Address-pool-%d", data.RandomInteger)), + ), + }, + }) +} + +func (r LoadBalancerBackendAddressPool) Exists(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.LoadBalancerBackendAddressPoolID(state.ID) + if err != nil { + return nil, err + } + + lb, err := client.LoadBalancers.LoadBalancersClient.Get(ctx, id.ResourceGroup, id.LoadBalancerName, "") + if err != nil { + if utils.ResponseWasNotFound(lb.Response) { + return nil, fmt.Errorf("Load Balancer %q (resource group %q) not found for Backend Address Pool %q", id.LoadBalancerName, id.ResourceGroup, id.BackendAddressPoolName) + } + return nil, fmt.Errorf("failed reading Load Balancer %q (resource group %q) for Backend Address Pool %q", id.LoadBalancerName, id.ResourceGroup, id.BackendAddressPoolName) + } + props := lb.LoadBalancerPropertiesFormat + if props == nil || props.BackendAddressPools == nil || len(*props.BackendAddressPools) == 0 { + return nil, fmt.Errorf("Backend Pool %q not found in Load Balancer %q (resource group %q)", id.BackendAddressPoolName, id.LoadBalancerName, id.ResourceGroup) + } + + found := false + for _, v := range *props.BackendAddressPools { + if v.Name != nil && *v.Name == id.BackendAddressPoolName { + found = true + } + } + if !found { + return nil, fmt.Errorf("Backend Pool %q not found in Load Balancer %q (resource group %q)", id.BackendAddressPoolName, id.LoadBalancerName, id.ResourceGroup) + } + return utils.Bool(true), nil +} + +func (r LoadBalancerBackendAddressPool) IsMissing(loadBalancerName string, backendPoolName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).LoadBalancers.LoadBalancersClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + rs, ok := s.RootModule().Resources[loadBalancerName] + if !ok { + return fmt.Errorf("not found: %q", loadBalancerName) + } + + id, err := parse.LoadBalancerID(rs.Primary.ID) + if err != nil { + return err + } + + lb, err := client.Get(ctx, id.ResourceGroup, id.Name, "") + if err != nil { + if utils.ResponseWasNotFound(lb.Response) { + return fmt.Errorf("Load Balancer %q (resource group %q) not found while checking for Backend Address Pool removal", id.Name, id.ResourceGroup) + } + return fmt.Errorf("failed reading Load Balancer %q 
(resource group %q) for Backend Address Pool removal", id.Name, id.ResourceGroup) + } + props := lb.LoadBalancerPropertiesFormat + if props == nil || props.BackendAddressPools == nil { + return fmt.Errorf("Backend Pool %q not found in Load Balancer %q (resource group %q)", backendPoolName, id.Name, id.ResourceGroup) + } + + found := false + for _, v := range *props.BackendAddressPools { + if v.Name != nil && *v.Name == backendPoolName { + found = true + } + } + if found { + return fmt.Errorf("Backend Pool %q not removed from Load Balancer %q (resource group %q)", backendPoolName, id.Name, id.ResourceGroup) + } + return nil + } +} + +func (r LoadBalancerBackendAddressPool) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_public_ip" "test" { + name = "test-ip-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" +} + +resource "azurerm_lb" "test" { + name = "arm-test-loadbalancer-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + frontend_ip_configuration { + name = "one-%d" + public_ip_address_id = azurerm_public_ip.test.id + } +} + +resource "azurerm_lb_backend_address_pool" "test" { + resource_group_name = azurerm_resource_group.test.name + loadbalancer_id = azurerm_lb.test.id + name = "Address-pool-%d" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r LoadBalancerBackendAddressPool) requiresImport(data acceptance.TestData) string { + template := r.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_lb_backend_address_pool" "import" { + name = azurerm_lb_backend_address_pool.test.name + loadbalancer_id = azurerm_lb_backend_address_pool.test.loadbalancer_id + resource_group_name = azurerm_lb_backend_address_pool.test.resource_group_name +} +`, template) +} + +func (r LoadBalancerBackendAddressPool) removal(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_public_ip" "test" { + name = "test-ip-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" +} + +resource "azurerm_lb" "test" { + name = "arm-test-loadbalancer-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + frontend_ip_configuration { + name = "one-%d" + public_ip_address_id = azurerm_public_ip.test.id + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/loadbalancer/loadbalancer_data_source.go b/azurerm/internal/services/loadbalancer/loadbalancer_data_source.go new file mode 100644 index 000000000000..ee56d6b3a0d2 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/loadbalancer_data_source.go @@ -0,0 +1,204 @@ +package loadbalancer + +import ( + "fmt" + "time" + + "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-03-01/network" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func dataSourceArmLoadBalancer() *schema.Resource { + return &schema.Resource{ + Read: dataSourceArmLoadBalancerRead, + + Timeouts: &schema.ResourceTimeout{ + Read: schema.DefaultTimeout(5 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.NoZeroValues, + }, + + "resource_group_name": azure.SchemaResourceGroupNameForDataSource(), + + "location": azure.SchemaLocationForDataSource(), + + "sku": { + Type: schema.TypeString, + Computed: true, + }, + + "frontend_ip_configuration": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Computed: true, + }, + + "subnet_id": { + Type: schema.TypeString, + Computed: true, + }, + + "private_ip_address": { + Type: schema.TypeString, + Computed: true, + }, + + "private_ip_address_version": { + Type: schema.TypeString, + Computed: true, + }, + + "public_ip_address_id": { + Type: schema.TypeString, + Computed: true, + }, + + "private_ip_address_allocation": { + Type: schema.TypeString, + Computed: true, + }, + + "zones": azure.SchemaZonesComputed(), + + "id": { + Type: schema.TypeString, + Computed: true, + }, + }, + }, + }, + + "private_ip_address": { + Type: schema.TypeString, + Computed: true, + }, + + "private_ip_addresses": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + + "tags": tags.SchemaDataSource(), + }, + } +} + +func dataSourceArmLoadBalancerRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LoadBalancers.LoadBalancersClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + + resp, err := client.Get(ctx, resourceGroup, name, "") + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Load Balancer %q (resource group %q) was not found", name, resourceGroup) + } + + return fmt.Errorf("retrieving Load Balancer %s: %s", name, err) + } + + d.SetId(*resp.ID) + + if location := resp.Location; location != nil { + d.Set("location", azure.NormalizeLocation(*location)) + } + + if sku := resp.Sku; sku != nil { + d.Set("sku", string(sku.Name)) + } + + if props := resp.LoadBalancerPropertiesFormat; props != nil { + if feipConfigs := props.FrontendIPConfigurations; feipConfigs != nil { + if err := d.Set("frontend_ip_configuration", flattenLoadBalancerDataSourceFrontendIpConfiguration(feipConfigs)); err != nil { + return fmt.Errorf("flattening `frontend_ip_configuration`: %+v", err) + } + + privateIpAddress := "" + privateIpAddresses := make([]string, 0) + for _, config := range *feipConfigs { + if feipProps := config.FrontendIPConfigurationPropertiesFormat; feipProps != nil { + if ip := feipProps.PrivateIPAddress; ip != nil { + if privateIpAddress == "" { + privateIpAddress = *feipProps.PrivateIPAddress + } + + privateIpAddresses = append(privateIpAddresses, *feipProps.PrivateIPAddress) + } + } + } 
+ + d.Set("private_ip_address", privateIpAddress) + d.Set("private_ip_addresses", privateIpAddresses) + } + } + + return tags.FlattenAndSet(d, resp.Tags) +} + +func flattenLoadBalancerDataSourceFrontendIpConfiguration(ipConfigs *[]network.FrontendIPConfiguration) []interface{} { + result := make([]interface{}, 0) + if ipConfigs == nil { + return result + } + + for _, config := range *ipConfigs { + ipConfig := make(map[string]interface{}) + if config.Name != nil { + ipConfig["name"] = *config.Name + } + + if config.ID != nil { + ipConfig["id"] = *config.ID + } + + zones := make([]string, 0) + if zs := config.Zones; zs != nil { + zones = *zs + } + ipConfig["zones"] = zones + + if props := config.FrontendIPConfigurationPropertiesFormat; props != nil { + ipConfig["private_ip_address_allocation"] = props.PrivateIPAllocationMethod + + if subnet := props.Subnet; subnet != nil && subnet.ID != nil { + ipConfig["subnet_id"] = *subnet.ID + } + + if pip := props.PrivateIPAddress; pip != nil { + ipConfig["private_ip_address"] = *pip + } + + if props.PrivateIPAddressVersion != "" { + ipConfig["private_ip_address_version"] = string(props.PrivateIPAddressVersion) + } + + if pip := props.PublicIPAddress; pip != nil && pip.ID != nil { + ipConfig["public_ip_address_id"] = *pip.ID + } + } + + result = append(result, ipConfig) + } + return result +} diff --git a/azurerm/internal/services/loadbalancer/loadbalancer_data_source_test.go b/azurerm/internal/services/loadbalancer/loadbalancer_data_source_test.go new file mode 100644 index 000000000000..7dc8e13b3330 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/loadbalancer_data_source_test.go @@ -0,0 +1,39 @@ +package loadbalancer_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +func TestAccAzureRMDataSourceLoadBalancer_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_lb", "test") + d := LoadBalancer{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: d.dataSourceBasic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("sku").HasValue("Basic"), + check.That(data.ResourceName).Key("location").Exists(), + check.That(data.ResourceName).Key("tags.Environment").HasValue("production"), + check.That(data.ResourceName).Key("tags.Purpose").HasValue("AcceptanceTests"), + ), + }, + }) +} + +func (r LoadBalancer) dataSourceBasic(data acceptance.TestData) string { + resource := r.basic(data) + return fmt.Sprintf(` +%s + +data "azurerm_lb" "test" { + name = azurerm_lb.test.name + resource_group_name = azurerm_lb.test.resource_group_name +} +`, resource) +} diff --git a/azurerm/internal/services/loadbalancer/loadbalancer_nat_pool_resource_test.go b/azurerm/internal/services/loadbalancer/loadbalancer_nat_pool_resource_test.go new file mode 100644 index 000000000000..f047644c15bd --- /dev/null +++ b/azurerm/internal/services/loadbalancer/loadbalancer_nat_pool_resource_test.go @@ -0,0 +1,369 @@ +package loadbalancer_test + +import ( + "context" + "fmt" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type LoadBalancerNatPool struct { +} + +func TestAccAzureRMLoadBalancerNatPool_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_nat_pool", "test") + r := LoadBalancerNatPool{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMLoadBalancerNatPool_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_nat_pool", "test") + r := LoadBalancerNatPool{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccAzureRMLoadBalancerNatPool_removal(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_nat_pool", "test") + r := LoadBalancerNatPool{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.removal(data), + Check: resource.ComposeTestCheckFunc( + r.IsMissing("azurerm_lb.test", fmt.Sprintf("NatPool-%d", data.RandomInteger)), + ), + }, + }) +} + +func TestAccAzureRMLoadBalancerNatPool_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_nat_pool", "test") + data2 := acceptance.BuildTestData(t, "azurerm_lb_nat_pool", "test2") + + r := LoadBalancerNatPool{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.multiplePools(data, data2), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data2.ResourceName).ExistsInAzure(r), + check.That(data2.ResourceName).Key("backend_port").HasValue("3390"), + ), + }, + data.ImportStep(), + { + Config: r.multiplePoolsUpdate(data, data2), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data2.ResourceName).ExistsInAzure(r), + check.That(data2.ResourceName).Key("backend_port").HasValue("3391"), + ), + }, + data.ImportStep(), + }) +} + +func (r LoadBalancerNatPool) IsMissing(loadBalancerName string, natPoolName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).LoadBalancers.LoadBalancersClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + rs, ok := s.RootModule().Resources[loadBalancerName] + if !ok { + return fmt.Errorf("not found: %q", loadBalancerName) + } + + id, err := parse.LoadBalancerID(rs.Primary.ID) + if err != nil { + return err + } + + lb, err := client.Get(ctx, id.ResourceGroup, id.Name, "") + if err != nil { + if utils.ResponseWasNotFound(lb.Response) { + return fmt.Errorf("Load Balancer %q (resource group %q) not found while checking for Nat Pool removal", id.Name, id.ResourceGroup) + } + return fmt.Errorf("failed reading Load Balancer %q (resource group %q) for Nat Pool removal", id.Name, id.ResourceGroup) + } + props := lb.LoadBalancerPropertiesFormat + if 
props == nil || props.InboundNatPools == nil { + return fmt.Errorf("Nat Pool %q not found in Load Balancer %q (resource group %q)", natPoolName, id.Name, id.ResourceGroup) + } + + found := false + for _, v := range *props.InboundNatPools { + if v.Name != nil && *v.Name == natPoolName { + found = true + } + } + if found { + return fmt.Errorf("Nat Pool %q not removed from Load Balancer %q (resource group %q)", natPoolName, id.Name, id.ResourceGroup) + } + return nil + } +} + +func (r LoadBalancerNatPool) Exists(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.LoadBalancerInboundNatPoolID(state.ID) + if err != nil { + return nil, err + } + + lb, err := client.LoadBalancers.LoadBalancersClient.Get(ctx, id.ResourceGroup, id.LoadBalancerName, "") + if err != nil { + if utils.ResponseWasNotFound(lb.Response) { + return nil, fmt.Errorf("Load Balancer %q (resource group %q) not found for Nat Pool %q", id.LoadBalancerName, id.ResourceGroup, id.InboundNatPoolName) + } + return nil, fmt.Errorf("failed reading Load Balancer %q (resource group %q) for Nat Pool %q", id.LoadBalancerName, id.ResourceGroup, id.InboundNatPoolName) + } + props := lb.LoadBalancerPropertiesFormat + if props == nil || props.InboundNatPools == nil || len(*props.InboundNatPools) == 0 { + return nil, fmt.Errorf("Nat Pool %q not found in Load Balancer %q (resource group %q)", id.InboundNatPoolName, id.LoadBalancerName, id.ResourceGroup) + } + + found := false + for _, v := range *props.InboundNatPools { + if v.Name != nil && *v.Name == id.InboundNatPoolName { + found = true + } + } + if !found { + return nil, fmt.Errorf("Nat Pool %q not found in Load Balancer %q (resource group %q)", id.InboundNatPoolName, id.LoadBalancerName, id.ResourceGroup) + } + + return utils.Bool(found), nil +} + +func (r LoadBalancerNatPool) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_public_ip" "test" { + name = "test-ip-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" +} + +resource "azurerm_lb" "test" { + name = "arm-test-loadbalancer-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + frontend_ip_configuration { + name = "one-%d" + public_ip_address_id = azurerm_public_ip.test.id + } +} + +resource "azurerm_lb_nat_pool" "test" { + resource_group_name = azurerm_resource_group.test.name + loadbalancer_id = azurerm_lb.test.id + name = "NatPool-%d" + protocol = "Tcp" + frontend_port_start = 80 + frontend_port_end = 81 + backend_port = 3389 + frontend_ip_configuration_name = "one-%d" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r LoadBalancerNatPool) requiresImport(data acceptance.TestData) string { + template := r.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_lb_nat_pool" "import" { + name = azurerm_lb_nat_pool.test.name + loadbalancer_id = azurerm_lb_nat_pool.test.loadbalancer_id + resource_group_name = azurerm_lb_nat_pool.test.resource_group_name + frontend_ip_configuration_name = azurerm_lb_nat_pool.test.frontend_ip_configuration_name + protocol = "Tcp" + frontend_port_start = 80 + frontend_port_end = 81 + backend_port = 3389 +} +`, template) 
+} + +func (r LoadBalancerNatPool) removal(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_public_ip" "test" { + name = "test-ip-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" +} + +resource "azurerm_lb" "test" { + name = "arm-test-loadbalancer-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + frontend_ip_configuration { + name = "one-%d" + public_ip_address_id = azurerm_public_ip.test.id + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r LoadBalancerNatPool) multiplePools(data, data2 acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_public_ip" "test" { + name = "test-ip-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + allocation_method = "Static" +} + +resource "azurerm_lb" "test" { + name = "arm-test-loadbalancer-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + frontend_ip_configuration { + name = "one-%d" + public_ip_address_id = azurerm_public_ip.test.id + } +} + +resource "azurerm_lb_nat_pool" "test" { + resource_group_name = azurerm_resource_group.test.name + loadbalancer_id = azurerm_lb.test.id + name = "NatPool-%d" + protocol = "Tcp" + frontend_port_start = 80 + frontend_port_end = 81 + backend_port = 3389 + + frontend_ip_configuration_name = "one-%d" +} + +resource "azurerm_lb_nat_pool" "test2" { + resource_group_name = azurerm_resource_group.test.name + loadbalancer_id = azurerm_lb.test.id + name = "NatPool-%d" + protocol = "Tcp" + frontend_port_start = 82 + frontend_port_end = 83 + backend_port = 3390 + + frontend_ip_configuration_name = "one-%d" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data2.RandomInteger, data.RandomInteger) +} + +func (r LoadBalancerNatPool) multiplePoolsUpdate(data, data2 acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_public_ip" "test" { + name = "test-ip-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" +} + +resource "azurerm_lb" "test" { + name = "arm-test-loadbalancer-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + frontend_ip_configuration { + name = "one-%d" + public_ip_address_id = azurerm_public_ip.test.id + } +} + +resource "azurerm_lb_nat_pool" "test" { + resource_group_name = azurerm_resource_group.test.name + loadbalancer_id = azurerm_lb.test.id + name = "NatPool-%d" + protocol = "Tcp" + frontend_port_start = 80 + frontend_port_end = 81 + backend_port = 3389 + frontend_ip_configuration_name = "one-%d" +} + +resource "azurerm_lb_nat_pool" "test2" { + resource_group_name = azurerm_resource_group.test.name + loadbalancer_id = azurerm_lb.test.id + name = 
"NatPool-%d" + protocol = "Tcp" + frontend_port_start = 82 + frontend_port_end = 83 + backend_port = 3391 + frontend_ip_configuration_name = "one-%d" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data2.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/loadbalancer/loadbalancer_nat_rule_resource_test.go b/azurerm/internal/services/loadbalancer/loadbalancer_nat_rule_resource_test.go new file mode 100644 index 000000000000..15ce47e73942 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/loadbalancer_nat_rule_resource_test.go @@ -0,0 +1,356 @@ +package loadbalancer_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type LoadBalancerNatRule struct { +} + +func TestAccAzureRMLoadBalancerNatRule_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_nat_rule", "test") + r := LoadBalancerNatRule{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "Basic"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMLoadBalancerNatRule_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_nat_rule", "test") + r := LoadBalancerNatRule{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data, "Standard"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMLoadBalancerNatRule_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_nat_rule", "test") + r := LoadBalancerNatRule{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "Standard"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.complete(data, "Standard"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basic(data, "Standard"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMLoadBalancerNatRule_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_nat_rule", "test") + r := LoadBalancerNatRule{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "Basic"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccAzureRMLoadBalancerNatRule_removal(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_nat_rule", "test") + r := LoadBalancerNatRule{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "Basic"), + Check: 
resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.template(data, "Basic"), + Check: resource.ComposeTestCheckFunc( + r.IsMissing("azurerm_lb.test", fmt.Sprintf("NatRule-%d", data.RandomInteger)), + ), + }, + }) +} + +func TestAccAzureRMLoadBalancerNatRule_updateMultipleRules(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_nat_rule", "test") + data2 := acceptance.BuildTestData(t, "azurerm_lb_nat_rule", "test2") + + r := LoadBalancerNatRule{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.multipleRules(data, data2), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data2.ResourceName).ExistsInAzure(r), + check.That(data2.ResourceName).Key("frontend_port").HasValue("3390"), + check.That(data2.ResourceName).Key("backend_port").HasValue("3390"), + ), + }, + data.ImportStep(), + { + Config: r.multipleRulesUpdate(data, data2), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data2.ResourceName).ExistsInAzure(r), + check.That(data2.ResourceName).Key("frontend_port").HasValue("3391"), + check.That(data2.ResourceName).Key("backend_port").HasValue("3391"), + ), + }, + data.ImportStep(), + }) +} + +func (r LoadBalancerNatRule) Exists(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.LoadBalancerInboundNatRuleID(state.ID) + if err != nil { + return nil, err + } + + lb, err := client.LoadBalancers.LoadBalancersClient.Get(ctx, id.ResourceGroup, id.LoadBalancerName, "") + if err != nil { + if utils.ResponseWasNotFound(lb.Response) { + return nil, fmt.Errorf("Load Balancer %q (resource group %q) not found for Nat Rule %q", id.LoadBalancerName, id.ResourceGroup, id.InboundNatRuleName) + } + return nil, fmt.Errorf("failed reading Load Balancer %q (resource group %q) for Nat Rule %q", id.LoadBalancerName, id.ResourceGroup, id.InboundNatRuleName) + } + props := lb.LoadBalancerPropertiesFormat + if props == nil || props.InboundNatRules == nil || len(*props.InboundNatRules) == 0 { + return nil, fmt.Errorf("Nat Rule %q not found in Load Balancer %q (resource group %q)", id.InboundNatRuleName, id.LoadBalancerName, id.ResourceGroup) + } + + found := false + for _, v := range *props.InboundNatRules { + if v.Name != nil && *v.Name == id.InboundNatRuleName { + found = true + } + } + if !found { + return nil, fmt.Errorf("Nat Rule %q not found in Load Balancer %q (resource group %q)", id.InboundNatRuleName, id.LoadBalancerName, id.ResourceGroup) + } + return utils.Bool(found), nil +} + +func (r LoadBalancerNatRule) IsMissing(loadBalancerName string, natRuleName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).LoadBalancers.LoadBalancersClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + rs, ok := s.RootModule().Resources[loadBalancerName] + if !ok { + return fmt.Errorf("not found: %q", loadBalancerName) + } + + id, err := parse.LoadBalancerID(rs.Primary.ID) + if err != nil { + return err + } + + lb, err := client.Get(ctx, id.ResourceGroup, id.Name, "") + if err != nil { + if utils.ResponseWasNotFound(lb.Response) { + return fmt.Errorf("Load Balancer %q (resource group %q) not found while checking for Nat Rule removal", id.Name, id.ResourceGroup) + } + return fmt.Errorf("failed reading Load Balancer %q (resource group %q) for 
Nat Rule removal", id.Name, id.ResourceGroup) + } + props := lb.LoadBalancerPropertiesFormat + if props == nil || props.InboundNatRules == nil { + return fmt.Errorf("Nat Rule %q not found in Load Balancer %q (resource group %q)", natRuleName, id.Name, id.ResourceGroup) + } + + found := false + for _, v := range *props.InboundNatRules { + if v.Name != nil && *v.Name == natRuleName { + found = true + } + } + if found { + return fmt.Errorf("Nat Rule %q not removed from Load Balancer %q (resource group %q)", natRuleName, id.Name, id.ResourceGroup) + } + return nil + } +} + +func (r LoadBalancerNatRule) template(data acceptance.TestData, sku string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-lb-%[1]d" + location = "%[2]s" +} + +resource "azurerm_public_ip" "test" { + name = "test-ip-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" + sku = "%[3]s" +} + +resource "azurerm_lb" "test" { + name = "arm-test-loadbalancer-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "%[3]s" + + frontend_ip_configuration { + name = "one-%[1]d" + public_ip_address_id = azurerm_public_ip.test.id + } +} +`, data.RandomInteger, data.Locations.Primary, sku) +} + +func (r LoadBalancerNatRule) basic(data acceptance.TestData, sku string) string { + template := r.template(data, sku) + return fmt.Sprintf(` +%s + +resource "azurerm_lb_nat_rule" "test" { + resource_group_name = "${azurerm_resource_group.test.name}" + loadbalancer_id = "${azurerm_lb.test.id}" + name = "NatRule-%d" + protocol = "Tcp" + frontend_port = 3389 + backend_port = 3389 + frontend_ip_configuration_name = azurerm_lb.test.frontend_ip_configuration.0.name +} +`, template, data.RandomInteger) +} + +func (r LoadBalancerNatRule) complete(data acceptance.TestData, sku string) string { + template := r.template(data, sku) + return fmt.Sprintf(` +%s + +resource "azurerm_lb_nat_rule" "test" { + name = "NatRule-%d" + resource_group_name = "${azurerm_resource_group.test.name}" + loadbalancer_id = "${azurerm_lb.test.id}" + + protocol = "Tcp" + frontend_port = 3389 + backend_port = 3389 + + enable_floating_ip = true + enable_tcp_reset = true + idle_timeout_in_minutes = 10 + + frontend_ip_configuration_name = azurerm_lb.test.frontend_ip_configuration.0.name +} +`, template, data.RandomInteger) +} + +func (r LoadBalancerNatRule) requiresImport(data acceptance.TestData) string { + template := r.basic(data, "Basic") + return fmt.Sprintf(` +%s + +resource "azurerm_lb_nat_rule" "import" { + name = azurerm_lb_nat_rule.test.name + loadbalancer_id = azurerm_lb_nat_rule.test.loadbalancer_id + resource_group_name = azurerm_lb_nat_rule.test.resource_group_name + frontend_ip_configuration_name = azurerm_lb_nat_rule.test.frontend_ip_configuration_name + protocol = "Tcp" + frontend_port = 3389 + backend_port = 3389 +} +`, template) +} + +func (r LoadBalancerNatRule) multipleRules(data, data2 acceptance.TestData) string { + template := r.template(data, "Basic") + return fmt.Sprintf(` +%s + +resource "azurerm_lb_nat_rule" "test" { + resource_group_name = "${azurerm_resource_group.test.name}" + loadbalancer_id = "${azurerm_lb.test.id}" + name = "NatRule-%d" + protocol = "Tcp" + frontend_port = 3389 + backend_port = 3389 + frontend_ip_configuration_name = azurerm_lb.test.frontend_ip_configuration.0.name +} + +resource 
"azurerm_lb_nat_rule" "test2" { + resource_group_name = "${azurerm_resource_group.test.name}" + loadbalancer_id = "${azurerm_lb.test.id}" + name = "NatRule-%d" + protocol = "Tcp" + frontend_port = 3390 + backend_port = 3390 + frontend_ip_configuration_name = azurerm_lb.test.frontend_ip_configuration.0.name +} +`, template, data.RandomInteger, data2.RandomInteger) +} + +func (r LoadBalancerNatRule) multipleRulesUpdate(data, data2 acceptance.TestData) string { + template := r.template(data, "Basic") + return fmt.Sprintf(` +%s +resource "azurerm_lb_nat_rule" "test" { + resource_group_name = "${azurerm_resource_group.test.name}" + loadbalancer_id = "${azurerm_lb.test.id}" + name = "NatRule-%d" + protocol = "Tcp" + frontend_port = 3389 + backend_port = 3389 + frontend_ip_configuration_name = azurerm_lb.test.frontend_ip_configuration.0.name +} + +resource "azurerm_lb_nat_rule" "test2" { + resource_group_name = "${azurerm_resource_group.test.name}" + loadbalancer_id = "${azurerm_lb.test.id}" + name = "NatRule-%d" + protocol = "Tcp" + frontend_port = 3391 + backend_port = 3391 + frontend_ip_configuration_name = azurerm_lb.test.frontend_ip_configuration.0.name +} +`, template, data.RandomInteger, data2.RandomInteger) +} diff --git a/azurerm/internal/services/loadbalancer/loadbalancer_outbound_rule_resource_test.go b/azurerm/internal/services/loadbalancer/loadbalancer_outbound_rule_resource_test.go new file mode 100644 index 000000000000..41b04de41621 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/loadbalancer_outbound_rule_resource_test.go @@ -0,0 +1,492 @@ +package loadbalancer_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type LoadBalancerOutboundRule struct { +} + +func TestAccAzureRMLoadBalancerOutboundRule_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_outbound_rule", "test") + r := LoadBalancerOutboundRule{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMLoadBalancerOutboundRule_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_outbound_rule", "test") + r := LoadBalancerOutboundRule{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccAzureRMLoadBalancerOutboundRule_removal(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_outbound_rule", "test") + r := LoadBalancerOutboundRule{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.removal(data), + Check: resource.ComposeTestCheckFunc( + 
r.IsMissing("azurerm_lb.test", fmt.Sprintf("OutboundRule-%d", data.RandomInteger)), + ), + }, + }) +} + +func TestAccAzureRMLoadBalancerOutboundRule_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_outbound_rule", "test") + data2 := acceptance.BuildTestData(t, "azurerm_lb_outbound_rule", "test2") + r := LoadBalancerOutboundRule{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.multipleRules(data, data2), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data2.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + data2.ImportStep(), + { + Config: r.multipleRulesUpdate(data, data2), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data2.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + data2.ImportStep(), + }) +} + +func TestAccAzureRMLoadBalancerOutboundRule_withPublicIPPrefix(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_outbound_rule", "test") + r := LoadBalancerOutboundRule{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withPublicIPPrefix(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (r LoadBalancerOutboundRule) Exists(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.LoadBalancerOutboundRuleID(state.ID) + if err != nil { + return nil, err + } + + lb, err := client.LoadBalancers.LoadBalancersClient.Get(ctx, id.ResourceGroup, id.LoadBalancerName, "") + if err != nil { + if utils.ResponseWasNotFound(lb.Response) { + return nil, fmt.Errorf("Load Balancer %q (resource group %q) not found for Outbound Rule %q", id.LoadBalancerName, id.ResourceGroup, id.OutboundRuleName) + } + return nil, fmt.Errorf("failed reading Load Balancer %q (resource group %q) for Outbound Rule %q", id.LoadBalancerName, id.ResourceGroup, id.OutboundRuleName) + } + props := lb.LoadBalancerPropertiesFormat + if props == nil || props.OutboundRules == nil || len(*props.OutboundRules) == 0 { + return nil, fmt.Errorf("Outbound Rule %q not found in Load Balancer %q (resource group %q)", id.OutboundRuleName, id.LoadBalancerName, id.ResourceGroup) + } + + found := false + for _, v := range *props.OutboundRules { + if v.Name != nil && *v.Name == id.OutboundRuleName { + found = true + } + } + if !found { + return nil, fmt.Errorf("Outbound Rule %q not found in Load Balancer %q (resource group %q)", id.OutboundRuleName, id.LoadBalancerName, id.ResourceGroup) + } + return utils.Bool(found), nil +} + +func (r LoadBalancerOutboundRule) IsMissing(loadBalancerName string, outboundRuleName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).LoadBalancers.LoadBalancersClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + rs, ok := s.RootModule().Resources[loadBalancerName] + if !ok { + return fmt.Errorf("not found: %q", loadBalancerName) + } + + id, err := parse.LoadBalancerID(rs.Primary.ID) + if err != nil { + return err + } + + lb, err := client.Get(ctx, id.ResourceGroup, id.Name, "") + if err != nil { + if utils.ResponseWasNotFound(lb.Response) { + return fmt.Errorf("Load Balancer %q (resource group %q) not found while checking for Outbound Rule removal", id.Name, id.ResourceGroup) + } + return fmt.Errorf("failed reading Load Balancer %q (resource group %q) 
for Outbound Rule removal", id.Name, id.ResourceGroup) + } + props := lb.LoadBalancerPropertiesFormat + if props == nil || props.OutboundRules == nil { + return fmt.Errorf("Outbound Rule %q not found in Load Balancer %q (resource group %q)", outboundRuleName, id.Name, id.ResourceGroup) + } + + found := false + for _, v := range *props.OutboundRules { + if v.Name != nil && *v.Name == outboundRuleName { + found = true + } + } + if found { + return fmt.Errorf("Outbound Rule %q not removed from Load Balancer %q (resource group %q)", outboundRuleName, id.Name, id.ResourceGroup) + } + return nil + } +} + +func (r LoadBalancerOutboundRule) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_public_ip" "test" { + name = "test-ip-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" + sku = "Standard" +} + +resource "azurerm_lb" "test" { + name = "arm-test-loadbalancer-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + + frontend_ip_configuration { + name = "one-%d" + public_ip_address_id = azurerm_public_ip.test.id + } +} + +resource "azurerm_lb_backend_address_pool" "test" { + resource_group_name = azurerm_resource_group.test.name + loadbalancer_id = azurerm_lb.test.id + name = "be-%d" +} + +resource "azurerm_lb_outbound_rule" "test" { + resource_group_name = azurerm_resource_group.test.name + loadbalancer_id = azurerm_lb.test.id + name = "OutboundRule-%d" + backend_address_pool_id = azurerm_lb_backend_address_pool.test.id + protocol = "All" + + frontend_ip_configuration { + name = "one-%d" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r LoadBalancerOutboundRule) requiresImport(data acceptance.TestData) string { + template := r.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_lb_outbound_rule" "import" { + name = azurerm_lb_outbound_rule.test.name + resource_group_name = azurerm_lb_outbound_rule.test.resource_group_name + loadbalancer_id = azurerm_lb_outbound_rule.test.loadbalancer_id + backend_address_pool_id = azurerm_lb_backend_address_pool.test.id + protocol = "All" + + frontend_ip_configuration { + name = azurerm_lb_outbound_rule.test.frontend_ip_configuration[0].name + } +} +`, template) +} + +func (r LoadBalancerOutboundRule) removal(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_public_ip" "test" { + name = "test-ip-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" + sku = "Standard" +} + +resource "azurerm_lb_backend_address_pool" "test" { + resource_group_name = azurerm_resource_group.test.name + loadbalancer_id = azurerm_lb.test.id + name = "be-%d" +} + +resource "azurerm_lb" "test" { + name = "arm-test-loadbalancer-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + + frontend_ip_configuration { + name = "one-%d" + public_ip_address_id = azurerm_public_ip.test.id + } +} +`, 
data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r LoadBalancerOutboundRule) multipleRules(data, data2 acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_public_ip" "test1" { + name = "test-ip-1-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" + sku = "Standard" +} + +resource "azurerm_public_ip" "test2" { + name = "test-ip-2-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" + sku = "Standard" +} + +resource "azurerm_lb" "test" { + name = "arm-test-loadbalancer-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + + frontend_ip_configuration { + name = "fe1-%d" + public_ip_address_id = azurerm_public_ip.test1.id + } + + frontend_ip_configuration { + name = "fe2-%d" + public_ip_address_id = azurerm_public_ip.test2.id + } +} + +resource "azurerm_lb_backend_address_pool" "test" { + resource_group_name = azurerm_resource_group.test.name + loadbalancer_id = azurerm_lb.test.id + name = "be-%d" +} + +resource "azurerm_lb_outbound_rule" "test" { + resource_group_name = azurerm_resource_group.test.name + loadbalancer_id = azurerm_lb.test.id + name = "OutboundRule-%d" + protocol = "Tcp" + backend_address_pool_id = azurerm_lb_backend_address_pool.test.id + + frontend_ip_configuration { + name = "fe1-%d" + } +} + +resource "azurerm_lb_outbound_rule" "test2" { + resource_group_name = azurerm_resource_group.test.name + loadbalancer_id = azurerm_lb.test.id + name = "OutboundRule-%d" + protocol = "Udp" + backend_address_pool_id = azurerm_lb_backend_address_pool.test.id + + frontend_ip_configuration { + name = "fe2-%d" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data2.RandomInteger, data.RandomInteger) +} + +func (r LoadBalancerOutboundRule) multipleRulesUpdate(data, data2 acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_public_ip" "test1" { + name = "test-ip-1-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" + sku = "Standard" +} + +resource "azurerm_public_ip" "test2" { + name = "test-ip-2-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" + sku = "Standard" +} + +resource "azurerm_lb" "test" { + name = "arm-test-loadbalancer-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + + frontend_ip_configuration { + name = "fe1-%d" + public_ip_address_id = azurerm_public_ip.test1.id + } + + frontend_ip_configuration { + name = "fe2-%d" + public_ip_address_id = azurerm_public_ip.test2.id + } +} + +resource "azurerm_lb_backend_address_pool" "test" { + resource_group_name = azurerm_resource_group.test.name + loadbalancer_id = 
azurerm_lb.test.id + name = "be-%d" +} + +resource "azurerm_lb_outbound_rule" "test" { + resource_group_name = azurerm_resource_group.test.name + loadbalancer_id = azurerm_lb.test.id + name = "OutboundRule-%d" + protocol = "All" + backend_address_pool_id = azurerm_lb_backend_address_pool.test.id + + frontend_ip_configuration { + name = "fe1-%d" + } +} + +resource "azurerm_lb_outbound_rule" "test2" { + resource_group_name = azurerm_resource_group.test.name + loadbalancer_id = azurerm_lb.test.id + name = "OutboundRule-%d" + protocol = "All" + backend_address_pool_id = azurerm_lb_backend_address_pool.test.id + + frontend_ip_configuration { + name = "fe2-%d" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data2.RandomInteger, data.RandomInteger) +} + +func (r LoadBalancerOutboundRule) withPublicIPPrefix(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_public_ip_prefix" "test" { + name = "test-ip-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + prefix_length = 31 +} + +resource "azurerm_lb" "test" { + name = "arm-test-loadbalancer-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + + frontend_ip_configuration { + name = "one-%d" + public_ip_prefix_id = azurerm_public_ip_prefix.test.id + } +} + +resource "azurerm_lb_backend_address_pool" "test" { + resource_group_name = azurerm_resource_group.test.name + loadbalancer_id = azurerm_lb.test.id + name = "be-%d" +} + +resource "azurerm_lb_outbound_rule" "test" { + resource_group_name = azurerm_resource_group.test.name + loadbalancer_id = azurerm_lb.test.id + name = "OutboundRule-%d" + backend_address_pool_id = azurerm_lb_backend_address_pool.test.id + protocol = "All" + + frontend_ip_configuration { + name = "one-%d" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/loadbalancer/loadbalancer_probe_resource_test.go b/azurerm/internal/services/loadbalancer/loadbalancer_probe_resource_test.go new file mode 100644 index 000000000000..88bd89151929 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/loadbalancer_probe_resource_test.go @@ -0,0 +1,442 @@ +package loadbalancer_test + +import ( + "context" + "fmt" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type LoadBalancerProbe struct { +} + +func TestAccAzureRMLoadBalancerProbe_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_probe", "test") + r := LoadBalancerProbe{} + + data.ResourceTest(t, r, 
[]resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMLoadBalancerProbe_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_probe", "test") + r := LoadBalancerProbe{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccAzureRMLoadBalancerProbe_removal(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_probe", "test") + r := LoadBalancerProbe{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.removal(data), + Check: resource.ComposeTestCheckFunc( + r.IsMissing("azurerm_lb.test", fmt.Sprintf("probe-%d", data.RandomInteger)), + ), + }, + }) +} + +func TestAccAzureRMLoadBalancerProbe_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_probe", "test") + data2 := acceptance.BuildTestData(t, "azurerm_lb_probe", "test2") + r := LoadBalancerProbe{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.multipleProbes(data, data2), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data2.ResourceName).ExistsInAzure(r), + check.That(data2.ResourceName).Key("port").HasValue("80"), + ), + }, + data.ImportStep(), + data2.ImportStep(), + { + Config: r.multipleProbesUpdate(data, data2), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data2.ResourceName).ExistsInAzure(r), + check.That(data2.ResourceName).Key("port").HasValue("8080"), + ), + }, + }) +} + +func TestAccAzureRMLoadBalancerProbe_updateProtocol(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_probe", "test") + r := LoadBalancerProbe{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.updateProtocolBefore(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("protocol").HasValue("Http"), + ), + }, + data.ImportStep(), + { + Config: r.updateProtocolAfter(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("protocol").HasValue("Tcp"), + ), + }, + }) +} + +func (r LoadBalancerProbe) Exists(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.LoadBalancerProbeID(state.ID) + if err != nil { + return nil, err + } + + lb, err := client.LoadBalancers.LoadBalancersClient.Get(ctx, id.ResourceGroup, id.LoadBalancerName, "") + if err != nil { + if utils.ResponseWasNotFound(lb.Response) { + return nil, fmt.Errorf("Load Balancer %q (resource group %q) not found for Probe %q", id.LoadBalancerName, id.ResourceGroup, id.ProbeName) + } + return nil, fmt.Errorf("failed reading Load Balancer %q (resource group %q) for Probe %q", id.LoadBalancerName, id.ResourceGroup, id.ProbeName) + } + props := lb.LoadBalancerPropertiesFormat + if props == nil || props.Probes == nil || len(*props.Probes) == 0 { + return nil, fmt.Errorf("Probe %q not found in Load Balancer %q (resource group %q)", id.ProbeName, id.LoadBalancerName, id.ResourceGroup) + } + 
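+	// The Azure API exposes probes only as a nested collection on the Load Balancer, so existence is
+	// determined by scanning that list for a probe whose name matches the parsed resource ID.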
+ found := false + for _, v := range *props.Probes { + if v.Name != nil && *v.Name == id.ProbeName { + found = true + } + } + if !found { + return nil, fmt.Errorf("Probe %q not found in Load Balancer %q (resource group %q)", id.ProbeName, id.LoadBalancerName, id.ResourceGroup) + } + return utils.Bool(found), nil +} + +func (r LoadBalancerProbe) IsMissing(loadBalancerName string, probeName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).LoadBalancers.LoadBalancersClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + rs, ok := s.RootModule().Resources[loadBalancerName] + if !ok { + return fmt.Errorf("not found: %q", loadBalancerName) + } + + id, err := parse.LoadBalancerID(rs.Primary.ID) + if err != nil { + return err + } + + lb, err := client.Get(ctx, id.ResourceGroup, id.Name, "") + if err != nil { + if utils.ResponseWasNotFound(lb.Response) { + return fmt.Errorf("Load Balancer %q (resource group %q) not found while checking for Probe removal", id.Name, id.ResourceGroup) + } + return fmt.Errorf("failed reading Load Balancer %q (resource group %q) for Probe removal", id.Name, id.ResourceGroup) + } + props := lb.LoadBalancerPropertiesFormat + if props == nil || props.Probes == nil { + return fmt.Errorf("Probe %q not found in Load Balancer %q (resource group %q)", probeName, id.Name, id.ResourceGroup) + } + + found := false + for _, v := range *props.Probes { + if v.Name != nil && *v.Name == probeName { + found = true + } + } + if found { + return fmt.Errorf("Probe %q not removed from Load Balancer %q (resource group %q)", probeName, id.Name, id.ResourceGroup) + } + return nil + } +} + +func (r LoadBalancerProbe) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_public_ip" "test" { + name = "test-ip-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" +} + +resource "azurerm_lb" "test" { + name = "arm-test-loadbalancer-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + frontend_ip_configuration { + name = "one-%d" + public_ip_address_id = azurerm_public_ip.test.id + } +} + +resource "azurerm_lb_probe" "test" { + resource_group_name = azurerm_resource_group.test.name + loadbalancer_id = azurerm_lb.test.id + name = "probe-%d" + port = 22 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r LoadBalancerProbe) requiresImport(data acceptance.TestData) string { + template := r.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_lb_probe" "import" { + name = azurerm_lb_probe.test.name + loadbalancer_id = azurerm_lb_probe.test.loadbalancer_id + resource_group_name = azurerm_lb_probe.test.resource_group_name + port = 22 +} +`, template) +} + +func (r LoadBalancerProbe) removal(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_public_ip" "test" { + name = "test-ip-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" +} + +resource 
"azurerm_lb" "test" { + name = "arm-test-loadbalancer-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + frontend_ip_configuration { + name = "one-%d" + public_ip_address_id = azurerm_public_ip.test.id + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r LoadBalancerProbe) multipleProbes(data, data2 acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_public_ip" "test" { + name = "test-ip-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" +} + +resource "azurerm_lb" "test" { + name = "arm-test-loadbalancer-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + frontend_ip_configuration { + name = "one-%d" + public_ip_address_id = azurerm_public_ip.test.id + } +} + +resource "azurerm_lb_probe" "test" { + resource_group_name = azurerm_resource_group.test.name + loadbalancer_id = azurerm_lb.test.id + name = "probe-%d" + port = 22 +} + +resource "azurerm_lb_probe" "test2" { + resource_group_name = azurerm_resource_group.test.name + loadbalancer_id = azurerm_lb.test.id + name = "probe-%d" + port = 80 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data2.RandomInteger) +} + +func (r LoadBalancerProbe) multipleProbesUpdate(data, data2 acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_public_ip" "test" { + name = "test-ip-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" +} + +resource "azurerm_lb" "test" { + name = "arm-test-loadbalancer-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + frontend_ip_configuration { + name = "one-%d" + public_ip_address_id = azurerm_public_ip.test.id + } +} + +resource "azurerm_lb_probe" "test" { + resource_group_name = azurerm_resource_group.test.name + loadbalancer_id = azurerm_lb.test.id + name = "probe-%d" + port = 22 +} + +resource "azurerm_lb_probe" "test2" { + resource_group_name = azurerm_resource_group.test.name + loadbalancer_id = azurerm_lb.test.id + name = "probe-%d" + port = 8080 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data2.RandomInteger) +} + +func (r LoadBalancerProbe) updateProtocolBefore(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_public_ip" "test" { + name = "test-ip-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" +} + +resource "azurerm_lb" "test" { + name = "arm-test-loadbalancer-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + frontend_ip_configuration { + name = "one-%d" + public_ip_address_id = 
azurerm_public_ip.test.id + } +} + +resource "azurerm_lb_probe" "test" { + resource_group_name = azurerm_resource_group.test.name + loadbalancer_id = azurerm_lb.test.id + name = "probe-%d" + protocol = "Http" + request_path = "/" + port = 80 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r LoadBalancerProbe) updateProtocolAfter(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_public_ip" "test" { + name = "test-ip-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" +} + +resource "azurerm_lb" "test" { + name = "arm-test-loadbalancer-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + frontend_ip_configuration { + name = "one-%d" + public_ip_address_id = azurerm_public_ip.test.id + } +} + +resource "azurerm_lb_probe" "test" { + resource_group_name = azurerm_resource_group.test.name + loadbalancer_id = azurerm_lb.test.id + name = "probe-%d" + protocol = "Tcp" + port = 80 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/loadbalancer/loadbalancer_resource.go b/azurerm/internal/services/loadbalancer/loadbalancer_resource.go new file mode 100644 index 000000000000..85b98d52c763 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/loadbalancer_resource.go @@ -0,0 +1,449 @@ +package loadbalancer + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-03-01/network" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/state" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmLoadBalancer() *schema.Resource { + return &schema.Resource{ + Create: resourceArmLoadBalancerCreateUpdate, + Read: resourceArmLoadBalancerRead, + Update: resourceArmLoadBalancerCreateUpdate, + Delete: resourceArmLoadBalancerDelete, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.LoadBalancerID(id) + return err + }), + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + 
Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + + "location": azure.SchemaLocation(), + + "resource_group_name": azure.SchemaResourceGroupName(), + + "sku": { + Type: schema.TypeString, + Optional: true, + Default: string(network.LoadBalancerSkuNameBasic), + ForceNew: true, + ValidateFunc: validation.StringInSlice([]string{ + string(network.LoadBalancerSkuNameBasic), + string(network.LoadBalancerSkuNameStandard), + }, true), + DiffSuppressFunc: suppress.CaseDifference, + }, + + "frontend_ip_configuration": { + Type: schema.TypeList, + Optional: true, + MinItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "subnet_id": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ValidateFunc: azure.ValidateResourceIDOrEmpty, + }, + + "private_ip_address": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ValidateFunc: validation.Any( + validation.IsIPAddress, + validation.StringIsEmpty, + ), + }, + + "private_ip_address_version": { + Type: schema.TypeString, + Optional: true, + Default: string(network.IPv4), + ValidateFunc: validation.StringInSlice([]string{ + string(network.IPv4), + string(network.IPv6), + }, false), + }, + + "public_ip_address_id": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ValidateFunc: azure.ValidateResourceIDOrEmpty, + }, + + "public_ip_prefix_id": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ValidateFunc: azure.ValidateResourceIDOrEmpty, + }, + + "private_ip_address_allocation": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ValidateFunc: validation.StringInSlice([]string{ + string(network.Dynamic), + string(network.Static), + }, true), + StateFunc: state.IgnoreCase, + DiffSuppressFunc: suppress.CaseDifference, + }, + + "load_balancer_rules": { + Type: schema.TypeSet, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringIsNotEmpty, + }, + Set: schema.HashString, + }, + + "inbound_nat_rules": { + Type: schema.TypeSet, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringIsNotEmpty, + }, + Set: schema.HashString, + }, + + "outbound_rules": { + Type: schema.TypeSet, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringIsNotEmpty, + }, + Set: schema.HashString, + }, + + "zones": azure.SchemaSingleZone(), + + "id": { + Type: schema.TypeString, + Computed: true, + }, + }, + }, + }, + + "private_ip_address": { + Type: schema.TypeString, + Computed: true, + }, + "private_ip_addresses": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + + "tags": tags.Schema(), + }, + } +} + +func resourceArmLoadBalancerCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LoadBalancers.LoadBalancersClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + log.Printf("[INFO] preparing arguments for Azure ARM Load Balancer creation.") + + id := parse.NewLoadBalancerID(subscriptionId, d.Get("resource_group_name").(string), d.Get("name").(string)) + + if d.IsNewResource() { + existing, err := client.Get(ctx, id.ResourceGroup, id.Name, "") + if err != nil { + if 
!utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for presence of existing Load Balancer %q (Resource Group %q): %s", id.Name, id.ResourceGroup, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_lb", *existing.ID) + } + } + + location := azure.NormalizeLocation(d.Get("location").(string)) + sku := network.LoadBalancerSku{ + Name: network.LoadBalancerSkuName(d.Get("sku").(string)), + } + t := d.Get("tags").(map[string]interface{}) + expandedTags := tags.Expand(t) + + properties := network.LoadBalancerPropertiesFormat{} + + if _, ok := d.GetOk("frontend_ip_configuration"); ok { + properties.FrontendIPConfigurations = expandAzureRmLoadBalancerFrontendIpConfigurations(d) + } + + loadBalancer := network.LoadBalancer{ + Name: utils.String(id.Name), + Location: utils.String(location), + Tags: expandedTags, + Sku: &sku, + LoadBalancerPropertiesFormat: &properties, + } + + future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.Name, loadBalancer) + if err != nil { + return fmt.Errorf("creating/updating Load Balancer %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("creating/Updating Load Balancer %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + d.SetId(id.ID()) + + return resourceArmLoadBalancerRead(d, meta) +} + +func resourceArmLoadBalancerRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LoadBalancers.LoadBalancersClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.LoadBalancerID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.Name, "") + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + d.SetId("") + log.Printf("[INFO] Load Balancer %q not found. 
Removing from state", id.Name) + return nil + } + return fmt.Errorf("failed to retrieve Load Balancer By ID: %+v", err) + } + + d.Set("name", id.Name) + d.Set("resource_group_name", id.ResourceGroup) + if location := resp.Location; location != nil { + d.Set("location", azure.NormalizeLocation(*location)) + } + + if sku := resp.Sku; sku != nil { + d.Set("sku", string(sku.Name)) + } + + if props := resp.LoadBalancerPropertiesFormat; props != nil { + if feipConfigs := props.FrontendIPConfigurations; feipConfigs != nil { + if err := d.Set("frontend_ip_configuration", flattenLoadBalancerFrontendIpConfiguration(feipConfigs)); err != nil { + return fmt.Errorf("Error flattening `frontend_ip_configuration`: %+v", err) + } + + privateIpAddress := "" + privateIpAddresses := make([]string, 0) + for _, config := range *feipConfigs { + if feipProps := config.FrontendIPConfigurationPropertiesFormat; feipProps != nil { + if ip := feipProps.PrivateIPAddress; ip != nil { + if privateIpAddress == "" { + privateIpAddress = *feipProps.PrivateIPAddress + } + + privateIpAddresses = append(privateIpAddresses, *feipProps.PrivateIPAddress) + } + } + } + + d.Set("private_ip_address", privateIpAddress) + d.Set("private_ip_addresses", privateIpAddresses) + } + } + + return tags.FlattenAndSet(d, resp.Tags) +} + +func resourceArmLoadBalancerDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LoadBalancers.LoadBalancersClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.LoadBalancerID(d.Id()) + if err != nil { + return err + } + + future, err := client.Delete(ctx, id.ResourceGroup, id.Name) + if err != nil { + return fmt.Errorf("deleting Load Balancer %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for deletion of Load Balancer %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + return nil +} + +func expandAzureRmLoadBalancerFrontendIpConfigurations(d *schema.ResourceData) *[]network.FrontendIPConfiguration { + configs := d.Get("frontend_ip_configuration").([]interface{}) + frontEndConfigs := make([]network.FrontendIPConfiguration, 0, len(configs)) + + for _, configRaw := range configs { + data := configRaw.(map[string]interface{}) + + privateIpAllocationMethod := data["private_ip_address_allocation"].(string) + properties := network.FrontendIPConfigurationPropertiesFormat{ + PrivateIPAllocationMethod: network.IPAllocationMethod(privateIpAllocationMethod), + } + + if v := data["private_ip_address"].(string); v != "" { + properties.PrivateIPAddress = &v + } + + properties.PrivateIPAddressVersion = network.IPVersion(data["private_ip_address_version"].(string)) + + if v := data["public_ip_address_id"].(string); v != "" { + properties.PublicIPAddress = &network.PublicIPAddress{ + ID: &v, + } + } + + if v := data["public_ip_prefix_id"].(string); v != "" { + properties.PublicIPPrefix = &network.SubResource{ + ID: &v, + } + } + + if v := data["subnet_id"].(string); v != "" { + properties.Subnet = &network.Subnet{ + ID: &v, + } + } + + name := data["name"].(string) + zones := azure.ExpandZones(data["zones"].([]interface{})) + frontEndConfig := network.FrontendIPConfiguration{ + Name: &name, + FrontendIPConfigurationPropertiesFormat: &properties, + Zones: zones, + } + + frontEndConfigs = append(frontEndConfigs, frontEndConfig) + } + + return &frontEndConfigs +} + +func 
flattenLoadBalancerFrontendIpConfiguration(ipConfigs *[]network.FrontendIPConfiguration) []interface{} { + result := make([]interface{}, 0) + if ipConfigs == nil { + return result + } + + for _, config := range *ipConfigs { + ipConfig := make(map[string]interface{}) + + if config.Name != nil { + ipConfig["name"] = *config.Name + } + + if config.ID != nil { + ipConfig["id"] = *config.ID + } + + zones := make([]string, 0) + if zs := config.Zones; zs != nil { + zones = *zs + } + ipConfig["zones"] = zones + + if props := config.FrontendIPConfigurationPropertiesFormat; props != nil { + ipConfig["private_ip_address_allocation"] = string(props.PrivateIPAllocationMethod) + + if subnet := props.Subnet; subnet != nil { + ipConfig["subnet_id"] = *subnet.ID + } + + if pip := props.PrivateIPAddress; pip != nil { + ipConfig["private_ip_address"] = *pip + } + + if props.PrivateIPAddressVersion != "" { + ipConfig["private_ip_address_version"] = string(props.PrivateIPAddressVersion) + } + + if pip := props.PublicIPAddress; pip != nil { + ipConfig["public_ip_address_id"] = *pip.ID + } + + if pip := props.PublicIPPrefix; pip != nil { + ipConfig["public_ip_prefix_id"] = *pip.ID + } + + loadBalancingRules := make([]interface{}, 0) + if rules := props.LoadBalancingRules; rules != nil { + for _, rule := range *rules { + loadBalancingRules = append(loadBalancingRules, *rule.ID) + } + } + ipConfig["load_balancer_rules"] = schema.NewSet(schema.HashString, loadBalancingRules) + + inboundNatRules := make([]interface{}, 0) + if rules := props.InboundNatRules; rules != nil { + for _, rule := range *rules { + inboundNatRules = append(inboundNatRules, *rule.ID) + } + } + ipConfig["inbound_nat_rules"] = schema.NewSet(schema.HashString, inboundNatRules) + + outboundRules := make([]interface{}, 0) + if rules := props.OutboundRules; rules != nil { + for _, rule := range *rules { + outboundRules = append(outboundRules, *rule.ID) + } + } + ipConfig["outbound_rules"] = schema.NewSet(schema.HashString, outboundRules) + } + + result = append(result, ipConfig) + } + return result +} diff --git a/azurerm/internal/services/loadbalancer/loadbalancer_resource_test.go b/azurerm/internal/services/loadbalancer/loadbalancer_resource_test.go new file mode 100644 index 000000000000..072e4bcbcd18 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/loadbalancer_resource_test.go @@ -0,0 +1,500 @@ +package loadbalancer_test + +import ( + "context" + "fmt" + "net/http" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type LoadBalancer struct { +} + +func TestAccAzureRMLoadBalancer_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb", "test") + r := LoadBalancer{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMLoadBalancer_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb", "test") + r := LoadBalancer{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + 
Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccAzureRMLoadBalancer_standard(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb", "test") + r := LoadBalancer{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.standard(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMLoadBalancer_frontEndConfigPublicIPPrefix(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb", "test") + r := LoadBalancer{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.frontEndConfigPublicIPPrefix(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("frontend_ip_configuration.#").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMLoadBalancer_frontEndConfig(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb", "test") + r := LoadBalancer{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.frontEndConfig(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("frontend_ip_configuration.#").HasValue("2"), + ), + }, + data.ImportStep(), + { + Config: r.frontEndConfigRemovalWithIP(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("frontend_ip_configuration.#").HasValue("1"), + ), + }, + data.ImportStep(), + { + Config: r.frontEndConfigRemoval(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("frontend_ip_configuration.#").HasValue("1"), + ), + }, + }) +} + +func TestAccAzureRMLoadBalancer_tags(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb", "test") + r := LoadBalancer{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("2"), + check.That(data.ResourceName).Key("tags.Environment").HasValue("production"), + check.That(data.ResourceName).Key("tags.Purpose").HasValue("AcceptanceTests"), + ), + }, + data.ImportStep(), + { + Config: r.updatedTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.Purpose").HasValue("AcceptanceTests"), + ), + }, + }) +} + +func TestAccAzureRMLoadBalancer_emptyPrivateIP(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb", "test") + r := LoadBalancer{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.emptyPrivateIPAddress(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("frontend_ip_configuration.0.private_ip_address").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMLoadBalancer_privateIP(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb", "test") + r := LoadBalancer{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.privateIPAddress(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + 
check.That(data.ResourceName).Key("frontend_ip_configuration.0.private_ip_address").Exists(), + ), + }, + }) +} + +func (r LoadBalancer) Exists(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error) { + loadBalancerName := state.Attributes["name"] + resourceGroup := state.Attributes["resource_group_name"] + + resp, err := client.LoadBalancers.LoadBalancersClient.Get(ctx, resourceGroup, loadBalancerName, "") + if err != nil { + if resp.StatusCode == http.StatusNotFound { + return nil, fmt.Errorf("Bad: Load Balancer %q (resource group: %q) does not exist", loadBalancerName, resourceGroup) + } + + return nil, fmt.Errorf("Bad: Get on loadBalancerClient: %+v", err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (r LoadBalancer) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-lb-%d" + location = "%s" +} + +resource "azurerm_lb" "test" { + name = "acctest-loadbalancer-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + tags = { + Environment = "production" + Purpose = "AcceptanceTests" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r LoadBalancer) requiresImport(data acceptance.TestData) string { + template := r.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_lb" "import" { + name = azurerm_lb.test.name + location = azurerm_lb.test.location + resource_group_name = azurerm_lb.test.resource_group_name + + tags = { + Environment = "production" + Purpose = "AcceptanceTests" + } +} +`, template) +} + +func (r LoadBalancer) standard(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-lb-%d" + location = "%s" +} + +resource "azurerm_lb" "test" { + name = "acctest-loadbalancer-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + + tags = { + Environment = "production" + Purpose = "AcceptanceTests" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r LoadBalancer) updatedTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-lb-%d" + location = "%s" +} + +resource "azurerm_lb" "test" { + name = "acctest-loadbalancer-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + tags = { + Purpose = "AcceptanceTests" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r LoadBalancer) frontEndConfig(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-lb-%d" + location = "%s" +} + +resource "azurerm_public_ip" "test" { + name = "test-ip-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" +} + +resource "azurerm_public_ip" "test1" { + name = "another-test-ip-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" +} + +resource "azurerm_lb" "test" { + name = "acctest-loadbalancer-%d" + location = 
azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + frontend_ip_configuration { + name = "one-%d" + public_ip_address_id = azurerm_public_ip.test.id + } + + frontend_ip_configuration { + name = "two-%d" + public_ip_address_id = azurerm_public_ip.test1.id + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r LoadBalancer) frontEndConfigRemovalWithIP(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-lb-%d" + location = "%s" +} + +resource "azurerm_public_ip" "test" { + name = "test-ip-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" +} + +resource "azurerm_public_ip" "test1" { + name = "another-test-ip-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" +} + +resource "azurerm_lb" "test" { + name = "acctest-loadbalancer-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + frontend_ip_configuration { + name = "one-%d" + public_ip_address_id = azurerm_public_ip.test.id + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r LoadBalancer) frontEndConfigPublicIPPrefix(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-lb-%d" + location = "%s" +} + +resource "azurerm_public_ip_prefix" "test" { + name = "test-ip-prefix-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + prefix_length = 31 +} + +resource "azurerm_lb" "test" { + name = "acctest-loadbalancer-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + + frontend_ip_configuration { + name = "prefix-%d" + public_ip_prefix_id = azurerm_public_ip_prefix.test.id + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r LoadBalancer) frontEndConfigRemoval(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-lb-%d" + location = "%s" +} + +resource "azurerm_public_ip" "test" { + name = "test-ip-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" +} + +resource "azurerm_lb" "test" { + name = "acctest-loadbalancer-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + frontend_ip_configuration { + name = "one-%d" + public_ip_address_id = azurerm_public_ip.test.id + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r LoadBalancer) emptyPrivateIPAddress(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-lb-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctvn-%d" 
+ address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctsub-%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" +} + +resource "azurerm_lb" "test" { + name = "acctestlb-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + sku = "Basic" + + frontend_ip_configuration { + name = "Internal" + private_ip_address_allocation = "Dynamic" + private_ip_address = "" + subnet_id = azurerm_subnet.test.id + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r LoadBalancer) privateIPAddress(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-lb-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctvn-%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctsub-%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" +} + +resource "azurerm_lb" "test" { + name = "acctestlb-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + sku = "Basic" + + frontend_ip_configuration { + name = "Internal" + private_ip_address_allocation = "Static" + private_ip_address_version = "IPv4" + private_ip_address = "10.0.2.7" + subnet_id = azurerm_subnet.test.id + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/loadbalancer/loadbalancer_rule_data_source_test.go b/azurerm/internal/services/loadbalancer/loadbalancer_rule_data_source_test.go new file mode 100644 index 000000000000..0d21ad3f8214 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/loadbalancer_rule_data_source_test.go @@ -0,0 +1,112 @@ +package loadbalancer_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +func TestAccAzureRMDataSourceLoadBalancerRule_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_lb_rule", "test") + r := LoadBalancerRule{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basicDataSource(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("id").Exists(), + check.That(data.ResourceName).Key("frontend_ip_configuration_name").Exists(), + check.That(data.ResourceName).Key("protocol").Exists(), + check.That(data.ResourceName).Key("frontend_port").Exists(), + check.That(data.ResourceName).Key("backend_port").Exists(), + ), + }, + }) +} + +func TestAccAzureRMDataSourceLoadBalancerRule_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_lb_rule", "test") + r := LoadBalancerRule{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.completeDataSource(data), + Check: resource.ComposeTestCheckFunc( + 
check.That(data.ResourceName).Key("id").Exists(), + check.That(data.ResourceName).Key("frontend_ip_configuration_name").Exists(), + check.That(data.ResourceName).Key("protocol").Exists(), + check.That(data.ResourceName).Key("frontend_port").Exists(), + check.That(data.ResourceName).Key("backend_port").Exists(), + check.That(data.ResourceName).Key("backend_address_pool_id").Exists(), + check.That(data.ResourceName).Key("probe_id").Exists(), + check.That(data.ResourceName).Key("enable_floating_ip").Exists(), + check.That(data.ResourceName).Key("enable_tcp_reset").Exists(), + check.That(data.ResourceName).Key("disable_outbound_snat").Exists(), + check.That(data.ResourceName).Key("idle_timeout_in_minutes").Exists(), + check.That(data.ResourceName).Key("load_distribution").Exists(), + ), + }, + }) +} + +func (r LoadBalancerRule) basicDataSource(data acceptance.TestData) string { + template := r.basic(data, "Basic") + return fmt.Sprintf(` +%s + +data "azurerm_lb_rule" "test" { + name = azurerm_lb_rule.test.name + resource_group_name = azurerm_lb_rule.test.resource_group_name + loadbalancer_id = azurerm_lb_rule.test.loadbalancer_id +} +`, template) +} + +func (r LoadBalancerRule) completeDataSource(data acceptance.TestData) string { + template := r.template(data, "Standard") + return fmt.Sprintf(` +%s +resource "azurerm_lb_backend_address_pool" "test" { + name = "LbPool-%s" + resource_group_name = azurerm_resource_group.test.name + loadbalancer_id = azurerm_lb.test.id +} + +resource "azurerm_lb_probe" "test" { + name = "LbProbe-%s" + resource_group_name = azurerm_resource_group.test.name + loadbalancer_id = azurerm_lb.test.id + protocol = "Tcp" + port = 443 +} + +resource "azurerm_lb_rule" "test" { + name = "LbRule-%s" + resource_group_name = azurerm_resource_group.test.name + loadbalancer_id = azurerm_lb.test.id + + protocol = "Tcp" + frontend_port = 3389 + backend_port = 3389 + + disable_outbound_snat = true + enable_floating_ip = true + enable_tcp_reset = true + idle_timeout_in_minutes = 10 + + backend_address_pool_id = azurerm_lb_backend_address_pool.test.id + probe_id = azurerm_lb_probe.test.id + + frontend_ip_configuration_name = azurerm_lb.test.frontend_ip_configuration.0.name +} + +data "azurerm_lb_rule" "test" { + name = azurerm_lb_rule.test.name + resource_group_name = azurerm_lb_rule.test.resource_group_name + loadbalancer_id = azurerm_lb_rule.test.loadbalancer_id +} +`, template, data.RandomStringOfLength(8), data.RandomStringOfLength(8), data.RandomStringOfLength(8)) +} diff --git a/azurerm/internal/services/loadbalancer/loadbalancer_rule_resource_test.go b/azurerm/internal/services/loadbalancer/loadbalancer_rule_resource_test.go new file mode 100644 index 000000000000..51b240a24afe --- /dev/null +++ b/azurerm/internal/services/loadbalancer/loadbalancer_rule_resource_test.go @@ -0,0 +1,412 @@ +package loadbalancer_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type LoadBalancerRule struct { +} + +func 
TestAccAzureRMLoadBalancerRule_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_rule", "test") + r := LoadBalancerRule{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "Basic"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMLoadBalancerRule_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_rule", "test") + r := LoadBalancerRule{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMLoadBalancerRule_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_rule", "test") + r := LoadBalancerRule{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "Basic"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basic(data, "Basic"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMLoadBalancerRule_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_rule", "test") + r := LoadBalancerRule{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "Basic"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccAzureRMLoadBalancerRule_removal(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_rule", "test") + r := LoadBalancerRule{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "Basic"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.template(data, "Basic"), + Check: resource.ComposeTestCheckFunc( + r.IsMissing("azurerm_lb.test", fmt.Sprintf("LbRule-%s", data.RandomStringOfLength(8))), + ), + }, + }) +} + +// https://github.com/hashicorp/terraform/issues/9424 +func TestAccAzureRMLoadBalancerRule_inconsistentReads(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_rule", "test") + r := LoadBalancerRule{} + p := LoadBalancerProbe{} + b := LoadBalancerBackendAddressPool{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.inconsistentRead(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That("azurerm_lb_probe.test").ExistsInAzure(p), + check.That("azurerm_lb_backend_address_pool.test").ExistsInAzure(b), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMLoadBalancerRule_updateMultipleRules(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_lb_rule", "test") + data2 := acceptance.BuildTestData(t, "azurerm_lb_rule", "test2") + r := LoadBalancerRule{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.multipleRules(data, data2), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data2.ResourceName).ExistsInAzure(r), + check.That(data2.ResourceName).Key("frontend_port").HasValue("3390"), + 
check.That(data2.ResourceName).Key("backend_port").HasValue("3390"), + ), + }, + data.ImportStep(), + data2.ImportStep(), + { + Config: r.multipleRulesUpdate(data, data2), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data2.ResourceName).ExistsInAzure(r), + check.That(data2.ResourceName).Key("frontend_port").HasValue("3391"), + check.That(data2.ResourceName).Key("backend_port").HasValue("3391"), + ), + }, + data.ImportStep(), + data2.ImportStep(), + }) +} + +func (r LoadBalancerRule) Exists(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.LoadBalancingRuleID(state.ID) + if err != nil { + return nil, err + } + + lb, err := client.LoadBalancers.LoadBalancersClient.Get(ctx, id.ResourceGroup, id.LoadBalancerName, "") + if err != nil { + if utils.ResponseWasNotFound(lb.Response) { + return nil, fmt.Errorf("Load Balancer %q (resource group %q) not found for Load Balancing Rule %q", id.LoadBalancerName, id.ResourceGroup, id.Name) + } + return nil, fmt.Errorf("failed reading Load Balancer %q (resource group %q) for Load Balancing Rule %q", id.LoadBalancerName, id.ResourceGroup, id.Name) + } + props := lb.LoadBalancerPropertiesFormat + if props == nil || props.LoadBalancingRules == nil || len(*props.LoadBalancingRules) == 0 { + return nil, fmt.Errorf("Load Balancing Rule %q not found in Load Balancer %q (resource group %q)", id.Name, id.LoadBalancerName, id.ResourceGroup) + } + + found := false + for _, v := range *props.LoadBalancingRules { + if v.Name != nil && *v.Name == id.Name { + found = true + } + } + if !found { + return nil, fmt.Errorf("Load Balancing Rule %q not found in Load Balancer %q (resource group %q)", id.Name, id.LoadBalancerName, id.ResourceGroup) + } + return utils.Bool(found), nil +} + +func (r LoadBalancerRule) IsMissing(loadBalancerName string, loadBalancingRuleName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).LoadBalancers.LoadBalancersClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + rs, ok := s.RootModule().Resources[loadBalancerName] + if !ok { + return fmt.Errorf("not found: %q", loadBalancerName) + } + + id, err := parse.LoadBalancerID(rs.Primary.ID) + if err != nil { + return err + } + + lb, err := client.Get(ctx, id.ResourceGroup, id.Name, "") + if err != nil { + if utils.ResponseWasNotFound(lb.Response) { + return fmt.Errorf("Load Balancer %q (resource group %q) not found while checking for Load Balancing Rule removal", id.Name, id.ResourceGroup) + } + return fmt.Errorf("failed reading Load Balancer %q (resource group %q) for Load Balancing Rule removal", id.Name, id.ResourceGroup) + } + props := lb.LoadBalancerPropertiesFormat + if props == nil || props.LoadBalancingRules == nil { + return fmt.Errorf("Load Balancing Rule %q not found in Load Balancer %q (resource group %q)", loadBalancingRuleName, id.Name, id.ResourceGroup) + } + + found := false + for _, v := range *props.LoadBalancingRules { + if v.Name != nil && *v.Name == loadBalancingRuleName { + found = true + } + } + if found { + return fmt.Errorf("Outbound Rule %q not removed from Load Balancer %q (resource group %q)", loadBalancingRuleName, id.Name, id.ResourceGroup) + } + return nil + } +} + +func (r LoadBalancerRule) template(data acceptance.TestData, sku string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource 
"azurerm_resource_group" "test" { + name = "acctestRG-lb-%[1]d" + location = "%[2]s" +} + +resource "azurerm_public_ip" "test" { + name = "test-ip-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" + sku = "%[3]s" +} + +resource "azurerm_lb" "test" { + name = "arm-test-loadbalancer-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "%[3]s" + + frontend_ip_configuration { + name = "one-%[1]d" + public_ip_address_id = azurerm_public_ip.test.id + } +} +`, data.RandomInteger, data.Locations.Primary, sku) +} + +// nolint: unparam +func (r LoadBalancerRule) basic(data acceptance.TestData, sku string) string { + template := r.template(data, sku) + return fmt.Sprintf(` +%s + +resource "azurerm_lb_rule" "test" { + resource_group_name = "${azurerm_resource_group.test.name}" + loadbalancer_id = "${azurerm_lb.test.id}" + name = "LbRule-%s" + protocol = "Tcp" + frontend_port = 3389 + backend_port = 3389 + frontend_ip_configuration_name = azurerm_lb.test.frontend_ip_configuration.0.name +} +`, template, data.RandomStringOfLength(8)) +} + +func (r LoadBalancerRule) complete(data acceptance.TestData) string { + template := r.template(data, "Standard") + return fmt.Sprintf(` +%s + +resource "azurerm_lb_rule" "test" { + name = "LbRule-%s" + resource_group_name = "${azurerm_resource_group.test.name}" + loadbalancer_id = "${azurerm_lb.test.id}" + + protocol = "Tcp" + frontend_port = 3389 + backend_port = 3389 + + disable_outbound_snat = true + enable_floating_ip = true + enable_tcp_reset = true + idle_timeout_in_minutes = 10 + + frontend_ip_configuration_name = azurerm_lb.test.frontend_ip_configuration.0.name +} +`, template, data.RandomStringOfLength(8)) +} + +func (r LoadBalancerRule) requiresImport(data acceptance.TestData) string { + template := r.basic(data, "Basic") + return fmt.Sprintf(` +%s + +resource "azurerm_lb_rule" "import" { + name = azurerm_lb_rule.test.name + resource_group_name = azurerm_lb_rule.test.resource_group_name + loadbalancer_id = azurerm_lb_rule.test.loadbalancer_id + frontend_ip_configuration_name = azurerm_lb_rule.test.frontend_ip_configuration_name + protocol = "Tcp" + frontend_port = 3389 + backend_port = 3389 +} +`, template) +} + +// https://github.com/hashicorp/terraform/issues/9424 +func (r LoadBalancerRule) inconsistentRead(data acceptance.TestData) string { + template := r.template(data, "Basic") + return fmt.Sprintf(` +%s + +resource "azurerm_lb_backend_address_pool" "test" { + name = "%d-address-pool" + resource_group_name = "${azurerm_resource_group.test.name}" + loadbalancer_id = "${azurerm_lb.test.id}" +} + +resource "azurerm_lb_probe" "test" { + name = "probe-%d" + resource_group_name = "${azurerm_resource_group.test.name}" + loadbalancer_id = "${azurerm_lb.test.id}" + protocol = "Tcp" + port = 443 +} + +resource "azurerm_lb_rule" "test" { + name = "LbRule-%s" + resource_group_name = "${azurerm_resource_group.test.name}" + loadbalancer_id = "${azurerm_lb.test.id}" + protocol = "Tcp" + frontend_port = 3389 + backend_port = 3389 + frontend_ip_configuration_name = azurerm_lb.test.frontend_ip_configuration.0.name +} +`, template, data.RandomInteger, data.RandomInteger, data.RandomStringOfLength(8)) +} + +func (r LoadBalancerRule) multipleRules(data, data2 acceptance.TestData) string { + template := r.template(data, "Basic") + return fmt.Sprintf(` +%s + +resource "azurerm_lb_rule" "test" { + 
resource_group_name = "${azurerm_resource_group.test.name}" + loadbalancer_id = "${azurerm_lb.test.id}" + name = "LbRule-%s" + protocol = "Udp" + frontend_port = 3389 + backend_port = 3389 + frontend_ip_configuration_name = azurerm_lb.test.frontend_ip_configuration.0.name +} + +resource "azurerm_lb_rule" "test2" { + resource_group_name = "${azurerm_resource_group.test.name}" + loadbalancer_id = "${azurerm_lb.test.id}" + name = "LbRule-%s" + protocol = "Udp" + frontend_port = 3390 + backend_port = 3390 + frontend_ip_configuration_name = azurerm_lb.test.frontend_ip_configuration.0.name +} +`, template, data.RandomStringOfLength(8), data2.RandomStringOfLength(8)) +} + +func (r LoadBalancerRule) multipleRulesUpdate(data, data2 acceptance.TestData) string { + template := r.template(data, "Basic") + return fmt.Sprintf(` +%s + +resource "azurerm_lb_rule" "test" { + resource_group_name = "${azurerm_resource_group.test.name}" + loadbalancer_id = "${azurerm_lb.test.id}" + name = "LbRule-%s" + protocol = "Udp" + frontend_port = 3389 + backend_port = 3389 + frontend_ip_configuration_name = azurerm_lb.test.frontend_ip_configuration.0.name +} + +resource "azurerm_lb_rule" "test2" { + resource_group_name = "${azurerm_resource_group.test.name}" + loadbalancer_id = "${azurerm_lb.test.id}" + name = "LbRule-%s" + protocol = "Udp" + frontend_port = 3391 + backend_port = 3391 + frontend_ip_configuration_name = azurerm_lb.test.frontend_ip_configuration.0.name +} +`, template, data.RandomStringOfLength(8), data2.RandomStringOfLength(8)) +} diff --git a/azurerm/internal/services/loadbalancer/nat_pool_resource.go b/azurerm/internal/services/loadbalancer/nat_pool_resource.go new file mode 100644 index 000000000000..9799ecd38cc6 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/nat_pool_resource.go @@ -0,0 +1,306 @@ +package loadbalancer + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-03-01/network" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/parse" + loadBalancerValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/state" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmLoadBalancerNatPool() *schema.Resource { + return &schema.Resource{ + Create: resourceArmLoadBalancerNatPoolCreateUpdate, + Read: resourceArmLoadBalancerNatPoolRead, + Update: resourceArmLoadBalancerNatPoolCreateUpdate, + Delete: resourceArmLoadBalancerNatPoolDelete, + + Importer: loadBalancerSubResourceImporter(func(input string) (*parse.LoadBalancerId, error) { + id, err := parse.LoadBalancerInboundNatPoolID(input) + if err != nil 
{ + return nil, err + } + + lbId := parse.NewLoadBalancerID(id.SubscriptionId, id.ResourceGroup, id.LoadBalancerName) + return &lbId, nil + }), + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "loadbalancer_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: loadBalancerValidate.LoadBalancerID, + }, + + "protocol": { + Type: schema.TypeString, + Required: true, + StateFunc: state.IgnoreCase, + DiffSuppressFunc: suppress.CaseDifference, + ValidateFunc: validation.StringInSlice([]string{ + string(network.TransportProtocolAll), + string(network.TransportProtocolTCP), + string(network.TransportProtocolUDP), + }, true), + }, + + "frontend_port_start": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validate.PortNumber, + }, + + "frontend_port_end": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validate.PortNumber, + }, + + "backend_port": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validate.PortNumber, + }, + + "frontend_ip_configuration_name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "frontend_ip_configuration_id": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} + +func resourceArmLoadBalancerNatPoolCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LoadBalancers.LoadBalancersClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + loadBalancerId, err := parse.LoadBalancerID(d.Get("loadbalancer_id").(string)) + if err != nil { + return fmt.Errorf("parsing Load Balancer Name and Group: %+v", err) + } + + id := parse.NewLoadBalancerInboundNatPoolID(subscriptionId, loadBalancerId.ResourceGroup, loadBalancerId.Name, d.Get("name").(string)) + + loadBalancerID := loadBalancerId.ID() + locks.ByID(loadBalancerID) + defer locks.UnlockByID(loadBalancerID) + + loadBalancer, err := client.Get(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, "") + if err != nil { + if utils.ResponseWasNotFound(loadBalancer.Response) { + d.SetId("") + log.Printf("[INFO] Load Balancer %q not found. 
Removing from state", id.LoadBalancerName) + return nil + } + return fmt.Errorf("failed to retrieve Load Balancer %q (resource group %q) for Nat Pool %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.InboundNatPoolName, err) + } + newNatPool, err := expandAzureRmLoadBalancerNatPool(d, &loadBalancer) + if err != nil { + return fmt.Errorf("expanding NAT Pool: %+v", err) + } + + natPools := append(*loadBalancer.LoadBalancerPropertiesFormat.InboundNatPools, *newNatPool) + + existingNatPool, existingNatPoolIndex, exists := FindLoadBalancerNatPoolByName(&loadBalancer, id.InboundNatPoolName) + if exists { + if id.InboundNatPoolName == *existingNatPool.Name { + if d.IsNewResource() { + return tf.ImportAsExistsError("azurerm_lb_nat_pool", *existingNatPool.ID) + } + + // this pool is being updated/reapplied remove old copy from the slice + natPools = append(natPools[:existingNatPoolIndex], natPools[existingNatPoolIndex+1:]...) + } + } + + loadBalancer.LoadBalancerPropertiesFormat.InboundNatPools = &natPools + + future, err := client.CreateOrUpdate(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, loadBalancer) + if err != nil { + return fmt.Errorf("updating Load Balancer %q (Resource Group %q) for Nat Pool %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.InboundNatPoolName, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for the update of Load Balancer %q (Resource Group %q) for Nat Pool %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.InboundNatPoolName, err) + } + + d.SetId(id.ID()) + + return resourceArmLoadBalancerNatPoolRead(d, meta) +} + +func resourceArmLoadBalancerNatPoolRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LoadBalancers.LoadBalancersClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.LoadBalancerInboundNatPoolID(d.Id()) + if err != nil { + return err + } + + loadBalancer, err := client.Get(ctx, id.ResourceGroup, id.LoadBalancerName, "") + if err != nil { + if utils.ResponseWasNotFound(loadBalancer.Response) { + d.SetId("") + log.Printf("[INFO] Load Balancer %q not found. Removing from state", id.LoadBalancerName) + return nil + } + return fmt.Errorf("failed to retrieve Load Balancer %q (resource group %q) for Nat Pool %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.InboundNatPoolName, err) + } + + config, _, exists := FindLoadBalancerNatPoolByName(&loadBalancer, id.InboundNatPoolName) + if !exists { + d.SetId("") + log.Printf("[INFO] Load Balancer Nat Pool %q not found. 
Removing from state", id.InboundNatPoolName) + return nil + } + + d.Set("name", config.Name) + d.Set("resource_group_name", id.ResourceGroup) + + if props := config.InboundNatPoolPropertiesFormat; props != nil { + backendPort := 0 + if props.BackendPort != nil { + backendPort = int(*props.BackendPort) + } + d.Set("backend_port", backendPort) + + frontendIPConfigName := "" + frontendIPConfigID := "" + if props.FrontendIPConfiguration != nil && props.FrontendIPConfiguration.ID != nil { + feid, err := parse.LoadBalancerFrontendIpConfigurationID(*props.FrontendIPConfiguration.ID) + if err != nil { + return err + } + + frontendIPConfigName = feid.FrontendIPConfigurationName + frontendIPConfigID = feid.ID() + } + d.Set("frontend_ip_configuration_id", frontendIPConfigID) + d.Set("frontend_ip_configuration_name", frontendIPConfigName) + + frontendPortRangeEnd := 0 + if props.FrontendPortRangeEnd != nil { + frontendPortRangeEnd = int(*props.FrontendPortRangeEnd) + } + d.Set("frontend_port_end", frontendPortRangeEnd) + + frontendPortRangeStart := 0 + if props.FrontendPortRangeStart != nil { + frontendPortRangeStart = int(*props.FrontendPortRangeStart) + } + d.Set("frontend_port_start", frontendPortRangeStart) + d.Set("protocol", string(props.Protocol)) + } + + return nil +} + +func resourceArmLoadBalancerNatPoolDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LoadBalancers.LoadBalancersClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.LoadBalancerInboundNatPoolID(d.Id()) + if err != nil { + return err + } + + loadBalancerId := parse.NewLoadBalancerID(id.SubscriptionId, id.ResourceGroup, id.LoadBalancerName) + loadBalancerID := loadBalancerId.ID() + locks.ByID(loadBalancerID) + defer locks.UnlockByID(loadBalancerID) + + loadBalancer, err := client.Get(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, "") + if err != nil { + if utils.ResponseWasNotFound(loadBalancer.Response) { + d.SetId("") + return nil + } + return fmt.Errorf("failed to retrieve Load Balancer %q (resource group %q) for deletion of Nat Pool %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.InboundNatPoolName, err) + } + + _, index, exists := FindLoadBalancerNatPoolByName(&loadBalancer, id.InboundNatPoolName) + if !exists { + return nil + } + + oldNatPools := *loadBalancer.LoadBalancerPropertiesFormat.InboundNatPools + newNatPools := append(oldNatPools[:index], oldNatPools[index+1:]...) 
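+	// write the trimmed slice back onto the Load Balancer model; the CreateOrUpdate call below applies the removal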
+ loadBalancer.LoadBalancerPropertiesFormat.InboundNatPools = &newNatPools + + future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.LoadBalancerName, loadBalancer) + if err != nil { + return fmt.Errorf("updating Load Balancer %q (Resource Group %q) for Nat Pool %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.InboundNatPoolName, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for update of the Load Balancer %q (Resource Group %q) for Nat Pool %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.InboundNatPoolName, err) + } + + return nil +} + +func expandAzureRmLoadBalancerNatPool(d *schema.ResourceData, lb *network.LoadBalancer) (*network.InboundNatPool, error) { + properties := network.InboundNatPoolPropertiesFormat{ + Protocol: network.TransportProtocol(d.Get("protocol").(string)), + FrontendPortRangeStart: utils.Int32(int32(d.Get("frontend_port_start").(int))), + FrontendPortRangeEnd: utils.Int32(int32(d.Get("frontend_port_end").(int))), + BackendPort: utils.Int32(int32(d.Get("backend_port").(int))), + } + + if v := d.Get("frontend_ip_configuration_name").(string); v != "" { + rule, exists := FindLoadBalancerFrontEndIpConfigurationByName(lb, v) + if !exists { + return nil, fmt.Errorf("[ERROR] Cannot find FrontEnd IP Configuration with the name %s", v) + } + + properties.FrontendIPConfiguration = &network.SubResource{ + ID: rule.ID, + } + } + + return &network.InboundNatPool{ + Name: utils.String(d.Get("name").(string)), + InboundNatPoolPropertiesFormat: &properties, + }, nil +} diff --git a/azurerm/internal/services/loadbalancer/nat_rule_resource.go b/azurerm/internal/services/loadbalancer/nat_rule_resource.go new file mode 100644 index 000000000000..2fc5b167336c --- /dev/null +++ b/azurerm/internal/services/loadbalancer/nat_rule_resource.go @@ -0,0 +1,340 @@ +package loadbalancer + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-03-01/network" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/parse" + loadBalancerValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/state" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmLoadBalancerNatRule() *schema.Resource { + return &schema.Resource{ + Create: resourceArmLoadBalancerNatRuleCreateUpdate, + Read: resourceArmLoadBalancerNatRuleRead, + Update: resourceArmLoadBalancerNatRuleCreateUpdate, + Delete: resourceArmLoadBalancerNatRuleDelete, + + Importer: loadBalancerSubResourceImporter(func(input string) (*parse.LoadBalancerId, error) { + id, err := 
parse.LoadBalancerInboundNatRuleID(input) + if err != nil { + return nil, err + } + + lbId := parse.NewLoadBalancerID(id.SubscriptionId, id.ResourceGroup, id.LoadBalancerName) + return &lbId, nil + }), + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "loadbalancer_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: loadBalancerValidate.LoadBalancerID, + }, + + "protocol": { + Type: schema.TypeString, + Required: true, + StateFunc: state.IgnoreCase, + DiffSuppressFunc: suppress.CaseDifference, + ValidateFunc: validation.StringInSlice([]string{ + string(network.TransportProtocolAll), + string(network.TransportProtocolTCP), + string(network.TransportProtocolUDP), + }, true), + }, + + "frontend_port": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validate.PortNumber, + }, + + "backend_port": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validate.PortNumber, + }, + + "frontend_ip_configuration_name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "enable_floating_ip": { + Type: schema.TypeBool, + Optional: true, + Computed: true, + }, + + "enable_tcp_reset": { + Type: schema.TypeBool, + Optional: true, + }, + + "idle_timeout_in_minutes": { + Type: schema.TypeInt, + Optional: true, + Computed: true, + ValidateFunc: validation.IntBetween(4, 30), + }, + + "frontend_ip_configuration_id": { + Type: schema.TypeString, + Computed: true, + }, + + "backend_ip_configuration_id": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} + +func resourceArmLoadBalancerNatRuleCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LoadBalancers.LoadBalancersClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + loadBalancerId, err := parse.LoadBalancerID(d.Get("loadbalancer_id").(string)) + if err != nil { + return fmt.Errorf("retrieving Load Balancer Name and Group: %+v", err) + } + id := parse.NewLoadBalancerInboundNatRuleID(subscriptionId, loadBalancerId.ResourceGroup, loadBalancerId.Name, d.Get("name").(string)) + + loadBalancerIdRaw := loadBalancerId.ID() + locks.ByID(loadBalancerIdRaw) + defer locks.UnlockByID(loadBalancerIdRaw) + + loadBalancer, err := client.Get(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, "") + if err != nil { + if utils.ResponseWasNotFound(loadBalancer.Response) { + d.SetId("") + log.Printf("[INFO] Load Balancer %q not found. 
Removing from state", id.LoadBalancerName) + return nil + } + return fmt.Errorf("failed to retrieve Load Balancer %q (resource group %q) for Nat Rule %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.InboundNatRuleName, err) + } + + newNatRule, err := expandAzureRmLoadBalancerNatRule(d, &loadBalancer, *loadBalancerId) + if err != nil { + return fmt.Errorf("expanding NAT Rule: %+v", err) + } + + natRules := append(*loadBalancer.LoadBalancerPropertiesFormat.InboundNatRules, *newNatRule) + + existingNatRule, existingNatRuleIndex, exists := FindLoadBalancerNatRuleByName(&loadBalancer, id.InboundNatRuleName) + if exists { + if id.InboundNatRuleName == *existingNatRule.Name { + if d.IsNewResource() { + return tf.ImportAsExistsError("azurerm_lb_nat_rule", *existingNatRule.ID) + } + + // this nat rule is being updated/reapplied remove old copy from the slice + natRules = append(natRules[:existingNatRuleIndex], natRules[existingNatRuleIndex+1:]...) + } + } + + loadBalancer.LoadBalancerPropertiesFormat.InboundNatRules = &natRules + + future, err := client.CreateOrUpdate(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, loadBalancer) + if err != nil { + return fmt.Errorf("updating Load Balancer %q (Resource Group %q) for Nat Rule %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.InboundNatRuleName, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for update of Load Balancer %q (Resource Group %q) for Nat Rule %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.InboundNatRuleName, err) + } + + d.SetId(id.ID()) + + return resourceArmLoadBalancerNatRuleRead(d, meta) +} + +func resourceArmLoadBalancerNatRuleRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LoadBalancers.LoadBalancersClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.LoadBalancerInboundNatRuleID(d.Id()) + if err != nil { + return err + } + + loadBalancer, err := client.Get(ctx, id.ResourceGroup, id.LoadBalancerName, "") + if err != nil { + if utils.ResponseWasNotFound(loadBalancer.Response) { + d.SetId("") + log.Printf("[INFO] Load Balancer %q not found. Removing from state", id.LoadBalancerName) + return nil + } + return fmt.Errorf("failed to retrieve Load Balancer %q (resource group %q) for Nat Rule %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.InboundNatRuleName, err) + } + + config, _, exists := FindLoadBalancerNatRuleByName(&loadBalancer, id.InboundNatRuleName) + if !exists { + d.SetId("") + log.Printf("[INFO] Load Balancer Nat Rule %q not found. 
Removing from state", id.InboundNatRuleName) + return nil + } + + d.Set("name", config.Name) + d.Set("resource_group_name", id.ResourceGroup) + + if props := config.InboundNatRulePropertiesFormat; props != nil { + backendIPConfigId := "" + if props.BackendIPConfiguration != nil && props.BackendIPConfiguration.ID != nil { + backendIPConfigId = *props.BackendIPConfiguration.ID + } + d.Set("backend_ip_configuration_id", backendIPConfigId) + + backendPort := 0 + if props.BackendPort != nil { + backendPort = int(*props.BackendPort) + } + d.Set("backend_port", backendPort) + d.Set("enable_floating_ip", props.EnableFloatingIP) + d.Set("enable_tcp_reset", props.EnableTCPReset) + + frontendIPConfigName := "" + frontendIPConfigID := "" + if props.FrontendIPConfiguration != nil && props.FrontendIPConfiguration.ID != nil { + feid, err := parse.LoadBalancerFrontendIpConfigurationID(*props.FrontendIPConfiguration.ID) + if err != nil { + return err + } + + frontendIPConfigName = feid.FrontendIPConfigurationName + frontendIPConfigID = feid.ID() + } + d.Set("frontend_ip_configuration_name", frontendIPConfigName) + d.Set("frontend_ip_configuration_id", frontendIPConfigID) + + frontendPort := 0 + if props.FrontendPort != nil { + frontendPort = int(*props.FrontendPort) + } + d.Set("frontend_port", frontendPort) + + idleTimeoutInMinutes := 0 + if props.IdleTimeoutInMinutes != nil { + idleTimeoutInMinutes = int(*props.IdleTimeoutInMinutes) + } + d.Set("idle_timeout_in_minutes", idleTimeoutInMinutes) + d.Set("protocol", string(props.Protocol)) + } + + return nil +} + +func resourceArmLoadBalancerNatRuleDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LoadBalancers.LoadBalancersClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.LoadBalancerInboundNatRuleID(d.Id()) + if err != nil { + return err + } + + loadBalancerId := parse.NewLoadBalancerID(id.SubscriptionId, id.ResourceGroup, id.LoadBalancerName) + loadBalancerID := loadBalancerId.ID() + locks.ByID(loadBalancerID) + defer locks.UnlockByID(loadBalancerID) + + loadBalancer, err := client.Get(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, "") + if err != nil { + if utils.ResponseWasNotFound(loadBalancer.Response) { + d.SetId("") + return nil + } + return fmt.Errorf("failed to retrieve Load Balancer %q (resource group %q) for Nat Rule %q: %+v", loadBalancerId.Name, loadBalancerId.ResourceGroup, id.InboundNatRuleName, err) + } + _, index, exists := FindLoadBalancerNatRuleByName(&loadBalancer, id.InboundNatRuleName) + if !exists { + return nil + } + + oldNatRules := *loadBalancer.LoadBalancerPropertiesFormat.InboundNatRules + newNatRules := append(oldNatRules[:index], oldNatRules[index+1:]...) 
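+	// re-attach the reduced rule list to the Load Balancer model so the CreateOrUpdate below removes the NAT Rule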
+ loadBalancer.LoadBalancerPropertiesFormat.InboundNatRules = &newNatRules + + future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.LoadBalancerName, loadBalancer) + if err != nil { + return fmt.Errorf("Creating/Updating Load Balancer %q (Resource Group %q) %+v", id.LoadBalancerName, id.ResourceGroup, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for the completion of Load Balancer updates for %q (Resource Group %q) %+v", id.LoadBalancerName, id.ResourceGroup, err) + } + + return nil +} + +func expandAzureRmLoadBalancerNatRule(d *schema.ResourceData, lb *network.LoadBalancer, loadBalancerId parse.LoadBalancerId) (*network.InboundNatRule, error) { + properties := network.InboundNatRulePropertiesFormat{ + Protocol: network.TransportProtocol(d.Get("protocol").(string)), + FrontendPort: utils.Int32(int32(d.Get("frontend_port").(int))), + BackendPort: utils.Int32(int32(d.Get("backend_port").(int))), + EnableTCPReset: utils.Bool(d.Get("enable_tcp_reset").(bool)), + } + + if v, ok := d.GetOk("enable_floating_ip"); ok { + properties.EnableFloatingIP = utils.Bool(v.(bool)) + } + + if v, ok := d.GetOk("idle_timeout_in_minutes"); ok { + properties.IdleTimeoutInMinutes = utils.Int32(int32(v.(int))) + } + + if v := d.Get("frontend_ip_configuration_name").(string); v != "" { + if _, exists := FindLoadBalancerFrontEndIpConfigurationByName(lb, v); !exists { + return nil, fmt.Errorf("[ERROR] Cannot find FrontEnd IP Configuration with the name %s", v) + } + + id := parse.NewLoadBalancerFrontendIpConfigurationID(loadBalancerId.SubscriptionId, loadBalancerId.ResourceGroup, loadBalancerId.Name, v).ID() + properties.FrontendIPConfiguration = &network.SubResource{ + ID: utils.String(id), + } + } + + natRule := network.InboundNatRule{ + Name: utils.String(d.Get("name").(string)), + InboundNatRulePropertiesFormat: &properties, + } + + return &natRule, nil +} diff --git a/azurerm/internal/services/loadbalancer/outbound_rule_resource.go b/azurerm/internal/services/loadbalancer/outbound_rule_resource.go new file mode 100644 index 000000000000..229a51f9b2fc --- /dev/null +++ b/azurerm/internal/services/loadbalancer/outbound_rule_resource.go @@ -0,0 +1,352 @@ +package loadbalancer + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-03-01/network" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmLoadBalancerOutboundRule() *schema.Resource { + return &schema.Resource{ + Create: resourceArmLoadBalancerOutboundRuleCreateUpdate, + Read: resourceArmLoadBalancerOutboundRuleRead, + Update: resourceArmLoadBalancerOutboundRuleCreateUpdate, + Delete: resourceArmLoadBalancerOutboundRuleDelete, + + 
Importer: loadBalancerSubResourceImporter(func(input string) (*parse.LoadBalancerId, error) { + id, err := parse.LoadBalancerOutboundRuleID(input) + if err != nil { + return nil, err + } + + lbId := parse.NewLoadBalancerID(id.SubscriptionId, id.ResourceGroup, id.LoadBalancerName) + return &lbId, nil + }), + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "loadbalancer_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.LoadBalancerID, + }, + + "frontend_ip_configuration": { + Type: schema.TypeList, + Optional: true, + MinItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "id": { + Type: schema.TypeString, + Computed: true, + }, + }, + }, + }, + + "backend_address_pool_id": { + Type: schema.TypeString, + Required: true, + }, + + "protocol": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{ + string(network.TransportProtocolAll), + string(network.TransportProtocolTCP), + string(network.TransportProtocolUDP), + }, false), + }, + + "enable_tcp_reset": { + Type: schema.TypeBool, + Optional: true, + Default: false, + }, + + "allocated_outbound_ports": { + Type: schema.TypeInt, + Optional: true, + Default: 1024, + }, + + "idle_timeout_in_minutes": { + Type: schema.TypeInt, + Optional: true, + Default: 4, + }, + }, + } +} + +func resourceArmLoadBalancerOutboundRuleCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LoadBalancers.LoadBalancersClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + loadBalancerId, err := parse.LoadBalancerID(d.Get("loadbalancer_id").(string)) + if err != nil { + return err + } + loadBalancerIDRaw := loadBalancerId.ID() + id := parse.NewLoadBalancerOutboundRuleID(subscriptionId, loadBalancerId.ResourceGroup, loadBalancerId.Name, d.Get("name").(string)) + locks.ByID(loadBalancerIDRaw) + defer locks.UnlockByID(loadBalancerIDRaw) + + loadBalancer, err := client.Get(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, "") + if err != nil { + if utils.ResponseWasNotFound(loadBalancer.Response) { + d.SetId("") + log.Printf("[INFO] Load Balancer %q not found. 
Removing from state", id.LoadBalancerName) + return nil + } + return fmt.Errorf("failed to retrieve Load Balancer %q (resource group %q) for Outbound Rule %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.OutboundRuleName, err) + } + + newOutboundRule, err := expandAzureRmLoadBalancerOutboundRule(d, &loadBalancer) + if err != nil { + return fmt.Errorf("expanding Load Balancer Outbound Rule: %+v", err) + } + + outboundRules := make([]network.OutboundRule, 0) + + if loadBalancer.LoadBalancerPropertiesFormat.OutboundRules != nil { + outboundRules = *loadBalancer.LoadBalancerPropertiesFormat.OutboundRules + } + + existingOutboundRule, existingOutboundRuleIndex, exists := FindLoadBalancerOutboundRuleByName(&loadBalancer, id.OutboundRuleName) + if exists { + if id.OutboundRuleName == *existingOutboundRule.Name { + if d.IsNewResource() { + return tf.ImportAsExistsError("azurerm_lb_outbound_rule", *existingOutboundRule.ID) + } + + // this outbound rule is being updated/reapplied remove old copy from the slice + outboundRules = append(outboundRules[:existingOutboundRuleIndex], outboundRules[existingOutboundRuleIndex+1:]...) + } + } + + outboundRules = append(outboundRules, *newOutboundRule) + + loadBalancer.LoadBalancerPropertiesFormat.OutboundRules = &outboundRules + + future, err := client.CreateOrUpdate(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, loadBalancer) + if err != nil { + return fmt.Errorf("updating LoadBalancer %q (resource group %q) for Outbound Rule %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.OutboundRuleName, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for update of Load Balancer %q (resource group %q) for Outbound Rule %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.OutboundRuleName, err) + } + + d.SetId(id.ID()) + + return resourceArmLoadBalancerOutboundRuleRead(d, meta) +} + +func resourceArmLoadBalancerOutboundRuleRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LoadBalancers.LoadBalancersClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.LoadBalancerOutboundRuleID(d.Id()) + if err != nil { + return err + } + + loadBalancer, err := client.Get(ctx, id.ResourceGroup, id.LoadBalancerName, "") + if err != nil { + if utils.ResponseWasNotFound(loadBalancer.Response) { + d.SetId("") + log.Printf("[INFO] Load Balancer %q not found. Removing from state", id.LoadBalancerName) + return nil + } + return fmt.Errorf("failed to retrieve Load Balancer %q (resource group %q) for Outbound Rule %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.OutboundRuleName, err) + } + + config, _, exists := FindLoadBalancerOutboundRuleByName(&loadBalancer, id.OutboundRuleName) + if !exists { + d.SetId("") + log.Printf("[INFO] Load Balancer Outbound Rule %q not found. 
Removing from state", id.OutboundRuleName) + return nil + } + + d.Set("name", config.Name) + d.Set("resource_group_name", id.ResourceGroup) + + if props := config.OutboundRulePropertiesFormat; props != nil { + allocatedOutboundPorts := 0 + if props.AllocatedOutboundPorts != nil { + allocatedOutboundPorts = int(*props.AllocatedOutboundPorts) + } + d.Set("allocated_outbound_ports", allocatedOutboundPorts) + + backendAddressPoolId := "" + if props.BackendAddressPool != nil && props.BackendAddressPool.ID != nil { + bapid, err := parse.LoadBalancerBackendAddressPoolID(*props.BackendAddressPool.ID) + if err != nil { + return err + } + + backendAddressPoolId = bapid.ID() + } + d.Set("backend_address_pool_id", backendAddressPoolId) + d.Set("enable_tcp_reset", props.EnableTCPReset) + + frontendIpConfigurations := make([]interface{}, 0) + for _, feConfig := range *props.FrontendIPConfigurations { + if feConfig.ID == nil { + continue + } + feid, err := parse.LoadBalancerFrontendIpConfigurationID(*feConfig.ID) + if err != nil { + return err + } + + frontendIpConfigurations = append(frontendIpConfigurations, map[string]interface{}{ + "id": feid.ID(), + "name": feid.FrontendIPConfigurationName, + }) + } + d.Set("frontend_ip_configuration", frontendIpConfigurations) + + idleTimeoutInMinutes := 0 + if props.IdleTimeoutInMinutes != nil { + idleTimeoutInMinutes = int(*props.IdleTimeoutInMinutes) + } + d.Set("idle_timeout_in_minutes", idleTimeoutInMinutes) + d.Set("protocol", string(props.Protocol)) + } + + return nil +} + +func resourceArmLoadBalancerOutboundRuleDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LoadBalancers.LoadBalancersClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.LoadBalancerOutboundRuleID(d.Id()) + if err != nil { + return err + } + + loadBalancerId := parse.NewLoadBalancerID(id.SubscriptionId, id.ResourceGroup, id.LoadBalancerName) + loadBalancerID := loadBalancerId.ID() + locks.ByID(loadBalancerID) + defer locks.UnlockByID(loadBalancerID) + + loadBalancer, err := client.Get(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, "") + if err != nil { + if utils.ResponseWasNotFound(loadBalancer.Response) { + d.SetId("") + return nil + } + return fmt.Errorf("failed to retrieve Load Balancer %q (resource group %q) for Outbound Rule %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.OutboundRuleName, err) + } + + _, index, exists := FindLoadBalancerOutboundRuleByName(&loadBalancer, id.OutboundRuleName) + if !exists { + return nil + } + + oldOutboundRules := *loadBalancer.LoadBalancerPropertiesFormat.OutboundRules + newOutboundRules := append(oldOutboundRules[:index], oldOutboundRules[index+1:]...) 
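+	// assign the filtered slice back to the Load Balancer model; CreateOrUpdate then removes the Outbound Rule in Azure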
+ loadBalancer.LoadBalancerPropertiesFormat.OutboundRules = &newOutboundRules + + future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.LoadBalancerName, loadBalancer) + if err != nil { + return fmt.Errorf("updating Load Balancer %q (Resource Group %q) for Outbound Rule %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.OutboundRuleName, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for update of Load Balancer %q (Resource Group %q) for Outbound Rule %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.OutboundRuleName, err) + } + + return nil +} + +func expandAzureRmLoadBalancerOutboundRule(d *schema.ResourceData, lb *network.LoadBalancer) (*network.OutboundRule, error) { + properties := network.OutboundRulePropertiesFormat{ + Protocol: network.LoadBalancerOutboundRuleProtocol(d.Get("protocol").(string)), + } + + feConfigs := d.Get("frontend_ip_configuration").([]interface{}) + feConfigSubResources := make([]network.SubResource, 0) + + for _, raw := range feConfigs { + v := raw.(map[string]interface{}) + rule, exists := FindLoadBalancerFrontEndIpConfigurationByName(lb, v["name"].(string)) + if !exists { + return nil, fmt.Errorf("[ERROR] Cannot find FrontEnd IP Configuration with the name %s", v["name"]) + } + + feConfigSubResource := network.SubResource{ + ID: rule.ID, + } + + feConfigSubResources = append(feConfigSubResources, feConfigSubResource) + } + + properties.FrontendIPConfigurations = &feConfigSubResources + + if v := d.Get("backend_address_pool_id").(string); v != "" { + properties.BackendAddressPool = &network.SubResource{ + ID: &v, + } + } + + if v, ok := d.GetOk("idle_timeout_in_minutes"); ok { + properties.IdleTimeoutInMinutes = utils.Int32(int32(v.(int))) + } + + if v, ok := d.GetOk("enable_tcp_reset"); ok { + properties.EnableTCPReset = utils.Bool(v.(bool)) + } + + if v, ok := d.GetOk("allocated_outbound_ports"); ok { + properties.AllocatedOutboundPorts = utils.Int32(int32(v.(int))) + } + + return &network.OutboundRule{ + Name: utils.String(d.Get("name").(string)), + OutboundRulePropertiesFormat: &properties, + }, nil +} diff --git a/azurerm/internal/services/loadbalancer/parse/load_balancer.go b/azurerm/internal/services/loadbalancer/parse/load_balancer.go new file mode 100644 index 000000000000..0d967111f1bf --- /dev/null +++ b/azurerm/internal/services/loadbalancer/parse/load_balancer.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type LoadBalancerId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewLoadBalancerID(subscriptionId, resourceGroup, name string) LoadBalancerId { + return LoadBalancerId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id LoadBalancerId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Load Balancer", segmentsStr) +} + +func (id LoadBalancerId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/loadBalancers/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// LoadBalancerID parses a LoadBalancer ID into an LoadBalancerId struct +func 
LoadBalancerID(input string) (*LoadBalancerId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := LoadBalancerId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("loadBalancers"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/loadbalancer/parse/load_balancer_backend_address_pool.go b/azurerm/internal/services/loadbalancer/parse/load_balancer_backend_address_pool.go new file mode 100644 index 000000000000..0eeb7b50367d --- /dev/null +++ b/azurerm/internal/services/loadbalancer/parse/load_balancer_backend_address_pool.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type LoadBalancerBackendAddressPoolId struct { + SubscriptionId string + ResourceGroup string + LoadBalancerName string + BackendAddressPoolName string +} + +func NewLoadBalancerBackendAddressPoolID(subscriptionId, resourceGroup, loadBalancerName, backendAddressPoolName string) LoadBalancerBackendAddressPoolId { + return LoadBalancerBackendAddressPoolId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + LoadBalancerName: loadBalancerName, + BackendAddressPoolName: backendAddressPoolName, + } +} + +func (id LoadBalancerBackendAddressPoolId) String() string { + segments := []string{ + fmt.Sprintf("Backend Address Pool Name %q", id.BackendAddressPoolName), + fmt.Sprintf("Load Balancer Name %q", id.LoadBalancerName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Load Balancer Backend Address Pool", segmentsStr) +} + +func (id LoadBalancerBackendAddressPoolId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/loadBalancers/%s/backendAddressPools/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.LoadBalancerName, id.BackendAddressPoolName) +} + +// LoadBalancerBackendAddressPoolID parses a LoadBalancerBackendAddressPool ID into an LoadBalancerBackendAddressPoolId struct +func LoadBalancerBackendAddressPoolID(input string) (*LoadBalancerBackendAddressPoolId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := LoadBalancerBackendAddressPoolId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.LoadBalancerName, err = id.PopSegment("loadBalancers"); err != nil { + return nil, err + } + if resourceId.BackendAddressPoolName, err = id.PopSegment("backendAddressPools"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git 
a/azurerm/internal/services/loadbalancer/parse/load_balancer_backend_address_pool_test.go b/azurerm/internal/services/loadbalancer/parse/load_balancer_backend_address_pool_test.go new file mode 100644 index 000000000000..e88d1b4960fe --- /dev/null +++ b/azurerm/internal/services/loadbalancer/parse/load_balancer_backend_address_pool_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = LoadBalancerBackendAddressPoolId{} + +func TestLoadBalancerBackendAddressPoolIDFormatter(t *testing.T) { + actual := NewLoadBalancerBackendAddressPoolID("12345678-1234-9876-4563-123456789012", "resGroup1", "loadBalancer1", "backendAddressPool1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/backendAddressPools/backendAddressPool1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestLoadBalancerBackendAddressPoolID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *LoadBalancerBackendAddressPoolId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing LoadBalancerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for LoadBalancerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/", + Error: true, + }, + + { + // missing BackendAddressPoolName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/", + Error: true, + }, + + { + // missing value for BackendAddressPoolName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/backendAddressPools/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/backendAddressPools/backendAddressPool1", + Expected: &LoadBalancerBackendAddressPoolId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + LoadBalancerName: "loadBalancer1", + BackendAddressPoolName: "backendAddressPool1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/LOADBALANCERS/LOADBALANCER1/BACKENDADDRESSPOOLS/BACKENDADDRESSPOOL1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := LoadBalancerBackendAddressPoolID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but 
didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.LoadBalancerName != v.Expected.LoadBalancerName { + t.Fatalf("Expected %q but got %q for LoadBalancerName", v.Expected.LoadBalancerName, actual.LoadBalancerName) + } + if actual.BackendAddressPoolName != v.Expected.BackendAddressPoolName { + t.Fatalf("Expected %q but got %q for BackendAddressPoolName", v.Expected.BackendAddressPoolName, actual.BackendAddressPoolName) + } + } +} diff --git a/azurerm/internal/services/loadbalancer/parse/load_balancer_frontend_ip_configuration.go b/azurerm/internal/services/loadbalancer/parse/load_balancer_frontend_ip_configuration.go new file mode 100644 index 000000000000..94dc40eb5dad --- /dev/null +++ b/azurerm/internal/services/loadbalancer/parse/load_balancer_frontend_ip_configuration.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type LoadBalancerFrontendIpConfigurationId struct { + SubscriptionId string + ResourceGroup string + LoadBalancerName string + FrontendIPConfigurationName string +} + +func NewLoadBalancerFrontendIpConfigurationID(subscriptionId, resourceGroup, loadBalancerName, frontendIPConfigurationName string) LoadBalancerFrontendIpConfigurationId { + return LoadBalancerFrontendIpConfigurationId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + LoadBalancerName: loadBalancerName, + FrontendIPConfigurationName: frontendIPConfigurationName, + } +} + +func (id LoadBalancerFrontendIpConfigurationId) String() string { + segments := []string{ + fmt.Sprintf("Frontend I P Configuration Name %q", id.FrontendIPConfigurationName), + fmt.Sprintf("Load Balancer Name %q", id.LoadBalancerName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Load Balancer Frontend Ip Configuration", segmentsStr) +} + +func (id LoadBalancerFrontendIpConfigurationId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/loadBalancers/%s/frontendIPConfigurations/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.LoadBalancerName, id.FrontendIPConfigurationName) +} + +// LoadBalancerFrontendIpConfigurationID parses a LoadBalancerFrontendIpConfiguration ID into an LoadBalancerFrontendIpConfigurationId struct +func LoadBalancerFrontendIpConfigurationID(input string) (*LoadBalancerFrontendIpConfigurationId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := LoadBalancerFrontendIpConfigurationId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.LoadBalancerName, err = id.PopSegment("loadBalancers"); err != nil { + return nil, err + } + if resourceId.FrontendIPConfigurationName, err = 
id.PopSegment("frontendIPConfigurations"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/loadbalancer/parse/load_balancer_frontend_ip_configuration_test.go b/azurerm/internal/services/loadbalancer/parse/load_balancer_frontend_ip_configuration_test.go new file mode 100644 index 000000000000..41d5d9f20ee8 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/parse/load_balancer_frontend_ip_configuration_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = LoadBalancerFrontendIpConfigurationId{} + +func TestLoadBalancerFrontendIpConfigurationIDFormatter(t *testing.T) { + actual := NewLoadBalancerFrontendIpConfigurationID("12345678-1234-9876-4563-123456789012", "resGroup1", "loadBalancer1", "frontendIPConfig1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/frontendIPConfigurations/frontendIPConfig1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestLoadBalancerFrontendIpConfigurationID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *LoadBalancerFrontendIpConfigurationId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing LoadBalancerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for LoadBalancerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/", + Error: true, + }, + + { + // missing FrontendIPConfigurationName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/", + Error: true, + }, + + { + // missing value for FrontendIPConfigurationName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/frontendIPConfigurations/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/frontendIPConfigurations/frontendIPConfig1", + Expected: &LoadBalancerFrontendIpConfigurationId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + LoadBalancerName: "loadBalancer1", + FrontendIPConfigurationName: "frontendIPConfig1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/LOADBALANCERS/LOADBALANCER1/FRONTENDIPCONFIGURATIONS/FRONTENDIPCONFIG1", + Error: true, + }, + } + + for _, v := 
range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := LoadBalancerFrontendIpConfigurationID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.LoadBalancerName != v.Expected.LoadBalancerName { + t.Fatalf("Expected %q but got %q for LoadBalancerName", v.Expected.LoadBalancerName, actual.LoadBalancerName) + } + if actual.FrontendIPConfigurationName != v.Expected.FrontendIPConfigurationName { + t.Fatalf("Expected %q but got %q for FrontendIPConfigurationName", v.Expected.FrontendIPConfigurationName, actual.FrontendIPConfigurationName) + } + } +} diff --git a/azurerm/internal/services/loadbalancer/parse/load_balancer_inbound_nat_pool.go b/azurerm/internal/services/loadbalancer/parse/load_balancer_inbound_nat_pool.go new file mode 100644 index 000000000000..0983bc37e39c --- /dev/null +++ b/azurerm/internal/services/loadbalancer/parse/load_balancer_inbound_nat_pool.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type LoadBalancerInboundNatPoolId struct { + SubscriptionId string + ResourceGroup string + LoadBalancerName string + InboundNatPoolName string +} + +func NewLoadBalancerInboundNatPoolID(subscriptionId, resourceGroup, loadBalancerName, inboundNatPoolName string) LoadBalancerInboundNatPoolId { + return LoadBalancerInboundNatPoolId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + LoadBalancerName: loadBalancerName, + InboundNatPoolName: inboundNatPoolName, + } +} + +func (id LoadBalancerInboundNatPoolId) String() string { + segments := []string{ + fmt.Sprintf("Inbound Nat Pool Name %q", id.InboundNatPoolName), + fmt.Sprintf("Load Balancer Name %q", id.LoadBalancerName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Load Balancer Inbound Nat Pool", segmentsStr) +} + +func (id LoadBalancerInboundNatPoolId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/loadBalancers/%s/inboundNatPools/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.LoadBalancerName, id.InboundNatPoolName) +} + +// LoadBalancerInboundNatPoolID parses a LoadBalancerInboundNatPool ID into an LoadBalancerInboundNatPoolId struct +func LoadBalancerInboundNatPoolID(input string) (*LoadBalancerInboundNatPoolId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := LoadBalancerInboundNatPoolId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.LoadBalancerName, err = id.PopSegment("loadBalancers"); err != nil { + return nil, err + 
} + if resourceId.InboundNatPoolName, err = id.PopSegment("inboundNatPools"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/loadbalancer/parse/load_balancer_inbound_nat_pool_test.go b/azurerm/internal/services/loadbalancer/parse/load_balancer_inbound_nat_pool_test.go new file mode 100644 index 000000000000..d4b3927ca84d --- /dev/null +++ b/azurerm/internal/services/loadbalancer/parse/load_balancer_inbound_nat_pool_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = LoadBalancerInboundNatPoolId{} + +func TestLoadBalancerInboundNatPoolIDFormatter(t *testing.T) { + actual := NewLoadBalancerInboundNatPoolID("12345678-1234-9876-4563-123456789012", "resGroup1", "loadBalancer1", "pool1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/inboundNatPools/pool1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestLoadBalancerInboundNatPoolID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *LoadBalancerInboundNatPoolId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing LoadBalancerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for LoadBalancerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/", + Error: true, + }, + + { + // missing InboundNatPoolName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/", + Error: true, + }, + + { + // missing value for InboundNatPoolName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/inboundNatPools/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/inboundNatPools/pool1", + Expected: &LoadBalancerInboundNatPoolId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + LoadBalancerName: "loadBalancer1", + InboundNatPoolName: "pool1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/LOADBALANCERS/LOADBALANCER1/INBOUNDNATPOOLS/POOL1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := LoadBalancerInboundNatPoolID(v.Input) + if err != nil { + if v.Error { + continue + } + + 
t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.LoadBalancerName != v.Expected.LoadBalancerName { + t.Fatalf("Expected %q but got %q for LoadBalancerName", v.Expected.LoadBalancerName, actual.LoadBalancerName) + } + if actual.InboundNatPoolName != v.Expected.InboundNatPoolName { + t.Fatalf("Expected %q but got %q for InboundNatPoolName", v.Expected.InboundNatPoolName, actual.InboundNatPoolName) + } + } +} diff --git a/azurerm/internal/services/loadbalancer/parse/load_balancer_inbound_nat_rule.go b/azurerm/internal/services/loadbalancer/parse/load_balancer_inbound_nat_rule.go new file mode 100644 index 000000000000..1a66c71e2572 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/parse/load_balancer_inbound_nat_rule.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type LoadBalancerInboundNatRuleId struct { + SubscriptionId string + ResourceGroup string + LoadBalancerName string + InboundNatRuleName string +} + +func NewLoadBalancerInboundNatRuleID(subscriptionId, resourceGroup, loadBalancerName, inboundNatRuleName string) LoadBalancerInboundNatRuleId { + return LoadBalancerInboundNatRuleId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + LoadBalancerName: loadBalancerName, + InboundNatRuleName: inboundNatRuleName, + } +} + +func (id LoadBalancerInboundNatRuleId) String() string { + segments := []string{ + fmt.Sprintf("Inbound Nat Rule Name %q", id.InboundNatRuleName), + fmt.Sprintf("Load Balancer Name %q", id.LoadBalancerName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Load Balancer Inbound Nat Rule", segmentsStr) +} + +func (id LoadBalancerInboundNatRuleId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/loadBalancers/%s/inboundNatRules/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.LoadBalancerName, id.InboundNatRuleName) +} + +// LoadBalancerInboundNatRuleID parses a LoadBalancerInboundNatRule ID into an LoadBalancerInboundNatRuleId struct +func LoadBalancerInboundNatRuleID(input string) (*LoadBalancerInboundNatRuleId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := LoadBalancerInboundNatRuleId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.LoadBalancerName, err = id.PopSegment("loadBalancers"); err != nil { + return nil, err + } + if resourceId.InboundNatRuleName, err = id.PopSegment("inboundNatRules"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, 
nil +} diff --git a/azurerm/internal/services/loadbalancer/parse/load_balancer_inbound_nat_rule_test.go b/azurerm/internal/services/loadbalancer/parse/load_balancer_inbound_nat_rule_test.go new file mode 100644 index 000000000000..bb88d5cd9541 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/parse/load_balancer_inbound_nat_rule_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = LoadBalancerInboundNatRuleId{} + +func TestLoadBalancerInboundNatRuleIDFormatter(t *testing.T) { + actual := NewLoadBalancerInboundNatRuleID("12345678-1234-9876-4563-123456789012", "resGroup1", "loadBalancer1", "rule1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/inboundNatRules/rule1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestLoadBalancerInboundNatRuleID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *LoadBalancerInboundNatRuleId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing LoadBalancerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for LoadBalancerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/", + Error: true, + }, + + { + // missing InboundNatRuleName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/", + Error: true, + }, + + { + // missing value for InboundNatRuleName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/inboundNatRules/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/inboundNatRules/rule1", + Expected: &LoadBalancerInboundNatRuleId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + LoadBalancerName: "loadBalancer1", + InboundNatRuleName: "rule1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/LOADBALANCERS/LOADBALANCER1/INBOUNDNATRULES/RULE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := LoadBalancerInboundNatRuleID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for 
SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.LoadBalancerName != v.Expected.LoadBalancerName { + t.Fatalf("Expected %q but got %q for LoadBalancerName", v.Expected.LoadBalancerName, actual.LoadBalancerName) + } + if actual.InboundNatRuleName != v.Expected.InboundNatRuleName { + t.Fatalf("Expected %q but got %q for InboundNatRuleName", v.Expected.InboundNatRuleName, actual.InboundNatRuleName) + } + } +} diff --git a/azurerm/internal/services/loadbalancer/parse/load_balancer_outbound_rule.go b/azurerm/internal/services/loadbalancer/parse/load_balancer_outbound_rule.go new file mode 100644 index 000000000000..dc487be7deeb --- /dev/null +++ b/azurerm/internal/services/loadbalancer/parse/load_balancer_outbound_rule.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type LoadBalancerOutboundRuleId struct { + SubscriptionId string + ResourceGroup string + LoadBalancerName string + OutboundRuleName string +} + +func NewLoadBalancerOutboundRuleID(subscriptionId, resourceGroup, loadBalancerName, outboundRuleName string) LoadBalancerOutboundRuleId { + return LoadBalancerOutboundRuleId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + LoadBalancerName: loadBalancerName, + OutboundRuleName: outboundRuleName, + } +} + +func (id LoadBalancerOutboundRuleId) String() string { + segments := []string{ + fmt.Sprintf("Outbound Rule Name %q", id.OutboundRuleName), + fmt.Sprintf("Load Balancer Name %q", id.LoadBalancerName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Load Balancer Outbound Rule", segmentsStr) +} + +func (id LoadBalancerOutboundRuleId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/loadBalancers/%s/outboundRules/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.LoadBalancerName, id.OutboundRuleName) +} + +// LoadBalancerOutboundRuleID parses a LoadBalancerOutboundRule ID into an LoadBalancerOutboundRuleId struct +func LoadBalancerOutboundRuleID(input string) (*LoadBalancerOutboundRuleId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := LoadBalancerOutboundRuleId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.LoadBalancerName, err = id.PopSegment("loadBalancers"); err != nil { + return nil, err + } + if resourceId.OutboundRuleName, err = id.PopSegment("outboundRules"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/loadbalancer/parse/load_balancer_outbound_rule_test.go b/azurerm/internal/services/loadbalancer/parse/load_balancer_outbound_rule_test.go new file mode 100644 index 000000000000..13966ea192a1 --- /dev/null +++ 
b/azurerm/internal/services/loadbalancer/parse/load_balancer_outbound_rule_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = LoadBalancerOutboundRuleId{} + +func TestLoadBalancerOutboundRuleIDFormatter(t *testing.T) { + actual := NewLoadBalancerOutboundRuleID("12345678-1234-9876-4563-123456789012", "resGroup1", "loadBalancer1", "rule1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/outboundRules/rule1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestLoadBalancerOutboundRuleID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *LoadBalancerOutboundRuleId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing LoadBalancerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for LoadBalancerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/", + Error: true, + }, + + { + // missing OutboundRuleName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/", + Error: true, + }, + + { + // missing value for OutboundRuleName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/outboundRules/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/outboundRules/rule1", + Expected: &LoadBalancerOutboundRuleId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + LoadBalancerName: "loadBalancer1", + OutboundRuleName: "rule1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/LOADBALANCERS/LOADBALANCER1/OUTBOUNDRULES/RULE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := LoadBalancerOutboundRuleID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.LoadBalancerName != v.Expected.LoadBalancerName { 
+ t.Fatalf("Expected %q but got %q for LoadBalancerName", v.Expected.LoadBalancerName, actual.LoadBalancerName) + } + if actual.OutboundRuleName != v.Expected.OutboundRuleName { + t.Fatalf("Expected %q but got %q for OutboundRuleName", v.Expected.OutboundRuleName, actual.OutboundRuleName) + } + } +} diff --git a/azurerm/internal/services/loadbalancer/parse/load_balancer_probe.go b/azurerm/internal/services/loadbalancer/parse/load_balancer_probe.go new file mode 100644 index 000000000000..ebdb95d0fa20 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/parse/load_balancer_probe.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type LoadBalancerProbeId struct { + SubscriptionId string + ResourceGroup string + LoadBalancerName string + ProbeName string +} + +func NewLoadBalancerProbeID(subscriptionId, resourceGroup, loadBalancerName, probeName string) LoadBalancerProbeId { + return LoadBalancerProbeId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + LoadBalancerName: loadBalancerName, + ProbeName: probeName, + } +} + +func (id LoadBalancerProbeId) String() string { + segments := []string{ + fmt.Sprintf("Probe Name %q", id.ProbeName), + fmt.Sprintf("Load Balancer Name %q", id.LoadBalancerName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Load Balancer Probe", segmentsStr) +} + +func (id LoadBalancerProbeId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/loadBalancers/%s/probes/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.LoadBalancerName, id.ProbeName) +} + +// LoadBalancerProbeID parses a LoadBalancerProbe ID into an LoadBalancerProbeId struct +func LoadBalancerProbeID(input string) (*LoadBalancerProbeId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := LoadBalancerProbeId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.LoadBalancerName, err = id.PopSegment("loadBalancers"); err != nil { + return nil, err + } + if resourceId.ProbeName, err = id.PopSegment("probes"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/loadbalancer/parse/load_balancer_probe_test.go b/azurerm/internal/services/loadbalancer/parse/load_balancer_probe_test.go new file mode 100644 index 000000000000..8fd9a315d25e --- /dev/null +++ b/azurerm/internal/services/loadbalancer/parse/load_balancer_probe_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = LoadBalancerProbeId{} + +func TestLoadBalancerProbeIDFormatter(t *testing.T) { + actual := NewLoadBalancerProbeID("12345678-1234-9876-4563-123456789012", "resGroup1", "loadBalancer1", 
"probe1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/probes/probe1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestLoadBalancerProbeID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *LoadBalancerProbeId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing LoadBalancerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for LoadBalancerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/", + Error: true, + }, + + { + // missing ProbeName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/", + Error: true, + }, + + { + // missing value for ProbeName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/probes/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/probes/probe1", + Expected: &LoadBalancerProbeId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + LoadBalancerName: "loadBalancer1", + ProbeName: "probe1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/LOADBALANCERS/LOADBALANCER1/PROBES/PROBE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := LoadBalancerProbeID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.LoadBalancerName != v.Expected.LoadBalancerName { + t.Fatalf("Expected %q but got %q for LoadBalancerName", v.Expected.LoadBalancerName, actual.LoadBalancerName) + } + if actual.ProbeName != v.Expected.ProbeName { + t.Fatalf("Expected %q but got %q for ProbeName", v.Expected.ProbeName, actual.ProbeName) + } + } +} diff --git a/azurerm/internal/services/loadbalancer/parse/load_balancer_test.go b/azurerm/internal/services/loadbalancer/parse/load_balancer_test.go new file mode 100644 index 000000000000..ac2ce16613ff --- /dev/null +++ b/azurerm/internal/services/loadbalancer/parse/load_balancer_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 
'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = LoadBalancerId{} + +func TestLoadBalancerIDFormatter(t *testing.T) { + actual := NewLoadBalancerID("12345678-1234-9876-4563-123456789012", "resGroup1", "loadBalancer1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestLoadBalancerID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *LoadBalancerId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1", + Expected: &LoadBalancerId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "loadBalancer1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/LOADBALANCERS/LOADBALANCER1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := LoadBalancerID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/loadbalancer/parse/load_balancing_rule.go b/azurerm/internal/services/loadbalancer/parse/load_balancing_rule.go new file mode 100644 index 000000000000..006f8dc036e7 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/parse/load_balancing_rule.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type LoadBalancingRuleId struct { + SubscriptionId string + ResourceGroup string + LoadBalancerName string + Name string +} + +func NewLoadBalancingRuleID(subscriptionId, resourceGroup, 
loadBalancerName, name string) LoadBalancingRuleId { + return LoadBalancingRuleId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + LoadBalancerName: loadBalancerName, + Name: name, + } +} + +func (id LoadBalancingRuleId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Load Balancer Name %q", id.LoadBalancerName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Load Balancing Rule", segmentsStr) +} + +func (id LoadBalancingRuleId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/loadBalancers/%s/loadBalancingRules/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.LoadBalancerName, id.Name) +} + +// LoadBalancingRuleID parses a LoadBalancingRule ID into an LoadBalancingRuleId struct +func LoadBalancingRuleID(input string) (*LoadBalancingRuleId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := LoadBalancingRuleId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.LoadBalancerName, err = id.PopSegment("loadBalancers"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("loadBalancingRules"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/loadbalancer/parse/load_balancing_rule_test.go b/azurerm/internal/services/loadbalancer/parse/load_balancing_rule_test.go new file mode 100644 index 000000000000..49ad4b3dd057 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/parse/load_balancing_rule_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = LoadBalancingRuleId{} + +func TestLoadBalancingRuleIDFormatter(t *testing.T) { + actual := NewLoadBalancingRuleID("12345678-1234-9876-4563-123456789012", "resGroup1", "loadBalancer1", "rule1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/loadBalancingRules/rule1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestLoadBalancingRuleID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *LoadBalancingRuleId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing LoadBalancerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + 
Error: true, + }, + + { + // missing value for LoadBalancerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/loadBalancingRules/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/loadBalancingRules/rule1", + Expected: &LoadBalancingRuleId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + LoadBalancerName: "loadBalancer1", + Name: "rule1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/LOADBALANCERS/LOADBALANCER1/LOADBALANCINGRULES/RULE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := LoadBalancingRuleID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.LoadBalancerName != v.Expected.LoadBalancerName { + t.Fatalf("Expected %q but got %q for LoadBalancerName", v.Expected.LoadBalancerName, actual.LoadBalancerName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/loadbalancer/probe_resource.go b/azurerm/internal/services/loadbalancer/probe_resource.go new file mode 100644 index 000000000000..1979e1241354 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/probe_resource.go @@ -0,0 +1,301 @@ +package loadbalancer + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-03-01/network" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/parse" + loadBalancerValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmLoadBalancerProbe() *schema.Resource { + return &schema.Resource{ + Create: resourceArmLoadBalancerProbeCreateUpdate, + Read: resourceArmLoadBalancerProbeRead, + Update: resourceArmLoadBalancerProbeCreateUpdate, + Delete: resourceArmLoadBalancerProbeDelete, + + Importer: loadBalancerSubResourceImporter(func(input string) (*parse.LoadBalancerId, error) { + id, err := parse.LoadBalancerProbeID(input) + if err != nil { + return nil, err + } + + lbId := parse.NewLoadBalancerID(id.SubscriptionId, id.ResourceGroup, id.LoadBalancerName) + return &lbId, nil + }), + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "loadbalancer_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: loadBalancerValidate.LoadBalancerID, + }, + + "protocol": { + Type: schema.TypeString, + Computed: true, + Optional: true, + DiffSuppressFunc: suppress.CaseDifference, + ValidateFunc: validation.StringInSlice([]string{ + string(network.ProbeProtocolHTTP), + string(network.ProbeProtocolHTTPS), + string(network.ProbeProtocolTCP), + }, true), + }, + + "port": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validate.PortNumber, + }, + + "request_path": { + Type: schema.TypeString, + Optional: true, + }, + + "interval_in_seconds": { + Type: schema.TypeInt, + Optional: true, + Default: 15, + ValidateFunc: validation.IntAtLeast(5), + }, + + "number_of_probes": { + Type: schema.TypeInt, + Optional: true, + Default: 2, + }, + + "load_balancer_rules": { + Type: schema.TypeSet, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringIsNotEmpty, + }, + Set: schema.HashString, + }, + }, + } +} + +func resourceArmLoadBalancerProbeCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LoadBalancers.LoadBalancersClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + loadBalancerId, err := parse.LoadBalancerID(d.Get("loadbalancer_id").(string)) + if err != nil { + return err + } + loadBalancerIDRaw := loadBalancerId.ID() + id := parse.NewLoadBalancerProbeID(subscriptionId, loadBalancerId.ResourceGroup, loadBalancerId.Name, d.Get("name").(string)) + locks.ByID(loadBalancerIDRaw) + defer locks.UnlockByID(loadBalancerIDRaw) + + loadBalancer, err := client.Get(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, "") + if err != nil { + if utils.ResponseWasNotFound(loadBalancer.Response) { + d.SetId("") + log.Printf("[INFO] Load Balancer %q not found. 
Removing Probe %q from state", id.LoadBalancerName, id.ProbeName) + return nil + } + return fmt.Errorf("failed to retrieve Load Balancer %q (resource group %q) for Probe %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.ProbeName, err) + } + + newProbe := expandAzureRmLoadBalancerProbe(d) + probes := append(*loadBalancer.LoadBalancerPropertiesFormat.Probes, *newProbe) + + existingProbe, existingProbeIndex, exists := FindLoadBalancerProbeByName(&loadBalancer, id.ProbeName) + if exists { + if id.ProbeName == *existingProbe.Name { + if d.IsNewResource() { + return tf.ImportAsExistsError("azurerm_lb_probe", *existingProbe.ID) + } + + // this probe is being updated/reapplied, so remove the old copy from the slice + probes = append(probes[:existingProbeIndex], probes[existingProbeIndex+1:]...) + } + } + + loadBalancer.LoadBalancerPropertiesFormat.Probes = &probes + + future, err := client.CreateOrUpdate(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, loadBalancer) + if err != nil { + return fmt.Errorf("updating Load Balancer %q (Resource Group %q) for Probe %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.ProbeName, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for update of Load Balancer %q (Resource Group %q) for Probe %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.ProbeName, err) + } + + d.SetId(id.ID()) + + return resourceArmLoadBalancerProbeRead(d, meta) +} + +func resourceArmLoadBalancerProbeRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LoadBalancers.LoadBalancersClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.LoadBalancerProbeID(d.Id()) + if err != nil { + return err + } + + loadBalancer, err := client.Get(ctx, id.ResourceGroup, id.LoadBalancerName, "") + if err != nil { + if utils.ResponseWasNotFound(loadBalancer.Response) { + d.SetId("") + log.Printf("[INFO] Load Balancer %q not found. Removing from state", id.LoadBalancerName) + return nil + } + return fmt.Errorf("failed to retrieve Load Balancer %q (resource group %q) for Probe %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.ProbeName, err) + } + + config, _, exists := FindLoadBalancerProbeByName(&loadBalancer, id.ProbeName) + if !exists { + d.SetId("") + log.Printf("[INFO] Load Balancer Probe %q not found. 
Removing from state", id.ProbeName) + return nil + } + + d.Set("name", config.Name) + d.Set("resource_group_name", id.ResourceGroup) + + if props := config.ProbePropertiesFormat; props != nil { + intervalInSeconds := 0 + if props.IntervalInSeconds != nil { + intervalInSeconds = int(*props.IntervalInSeconds) + } + d.Set("interval_in_seconds", intervalInSeconds) + + numberOfProbes := 0 + if props.NumberOfProbes != nil { + numberOfProbes = int(*props.NumberOfProbes) + } + d.Set("number_of_probes", numberOfProbes) + + port := 0 + if props.Port != nil { + port = int(*props.Port) + } + d.Set("port", port) + d.Set("protocol", string(props.Protocol)) + d.Set("request_path", props.RequestPath) + + // TODO: parse/make these consistent + var loadBalancerRules []string + if rules := props.LoadBalancingRules; rules != nil { + for _, ruleConfig := range *rules { + if id := ruleConfig.ID; id != nil { + loadBalancerRules = append(loadBalancerRules, *id) + } + } + } + if err := d.Set("load_balancer_rules", loadBalancerRules); err != nil { + return fmt.Errorf("setting `load_balancer_rules` (Load Balancer %q Probe %q): %+v", id.LoadBalancerName, id.ProbeName, err) + } + } + + return nil +} + +func resourceArmLoadBalancerProbeDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LoadBalancers.LoadBalancersClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.LoadBalancerProbeID(d.Id()) + if err != nil { + return err + } + + loadBalancerId := parse.NewLoadBalancerID(id.SubscriptionId, id.ResourceGroup, id.LoadBalancerName) + loadBalancerID := loadBalancerId.ID() + locks.ByID(loadBalancerID) + defer locks.UnlockByID(loadBalancerID) + + loadBalancer, err := client.Get(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, "") + if err != nil { + if utils.ResponseWasNotFound(loadBalancer.Response) { + d.SetId("") + return nil + } + return fmt.Errorf("failed to retrieve Load Balancer %q (resource group %q) for Probe %q: %+v", loadBalancerId.Name, loadBalancerId.ResourceGroup, id.ProbeName, err) + } + + _, index, exists := FindLoadBalancerProbeByName(&loadBalancer, id.ProbeName) + if !exists { + return nil + } + + oldProbes := *loadBalancer.LoadBalancerPropertiesFormat.Probes + newProbes := append(oldProbes[:index], oldProbes[index+1:]...) 
+ loadBalancer.LoadBalancerPropertiesFormat.Probes = &newProbes + + future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.LoadBalancerName, loadBalancer) + if err != nil { + return fmt.Errorf("updating Load Balancer %q (Resource Group %q) for deletion of Probe %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.ProbeName, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for update of Load Balancer %q (Resource Group %q) for deletion of Probe %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.ProbeName, err) + } + + return nil +} + +func expandAzureRmLoadBalancerProbe(d *schema.ResourceData) *network.Probe { + properties := network.ProbePropertiesFormat{ + NumberOfProbes: utils.Int32(int32(d.Get("number_of_probes").(int))), + IntervalInSeconds: utils.Int32(int32(d.Get("interval_in_seconds").(int))), + Port: utils.Int32(int32(d.Get("port").(int))), + } + + if v, ok := d.GetOk("protocol"); ok { + properties.Protocol = network.ProbeProtocol(v.(string)) + } + + if v, ok := d.GetOk("request_path"); ok { + properties.RequestPath = utils.String(v.(string)) + } + + return &network.Probe{ + Name: utils.String(d.Get("name").(string)), + ProbePropertiesFormat: &properties, + } +} diff --git a/azurerm/internal/services/loadbalancer/registration.go b/azurerm/internal/services/loadbalancer/registration.go new file mode 100644 index 000000000000..2face438cce0 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/registration.go @@ -0,0 +1,41 @@ +package loadbalancer + +import ( + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" +) + +type Registration struct{} + +// Name is the name of this Service +func (r Registration) Name() string { + return "Load Balancer" +} + +// WebsiteCategories returns a list of categories which can be used for the sidebar +func (r Registration) WebsiteCategories() []string { + return []string{ + "Load Balancer", + } +} + +// SupportedDataSources returns the supported Data Sources supported by this Service +func (r Registration) SupportedDataSources() map[string]*schema.Resource { + return map[string]*schema.Resource{ + "azurerm_lb": dataSourceArmLoadBalancer(), + "azurerm_lb_backend_address_pool": dataSourceArmLoadBalancerBackendAddressPool(), + "azurerm_lb_rule": dataSourceArmLoadBalancerRule(), + } +} + +// SupportedResources returns the supported Resources supported by this Service +func (r Registration) SupportedResources() map[string]*schema.Resource { + return map[string]*schema.Resource{ + "azurerm_lb_backend_address_pool": resourceArmLoadBalancerBackendAddressPool(), + "azurerm_lb_nat_pool": resourceArmLoadBalancerNatPool(), + "azurerm_lb_nat_rule": resourceArmLoadBalancerNatRule(), + "azurerm_lb_probe": resourceArmLoadBalancerProbe(), + "azurerm_lb_outbound_rule": resourceArmLoadBalancerOutboundRule(), + "azurerm_lb_rule": resourceArmLoadBalancerRule(), + "azurerm_lb": resourceArmLoadBalancer(), + } +} diff --git a/azurerm/internal/services/loadbalancer/resourceids.go b/azurerm/internal/services/loadbalancer/resourceids.go new file mode 100644 index 000000000000..40d59d9ae753 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/resourceids.go @@ -0,0 +1,11 @@ +package loadbalancer + +// Load Balancers +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=LoadBalancer -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1 +//go:generate go run 
../../tools/generator-resource-id/main.go -path=./ -name=LoadBalancerBackendAddressPool -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/backendAddressPools/backendAddressPool1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=LoadBalancerFrontendIpConfiguration -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/frontendIPConfigurations/frontendIPConfig1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=LoadBalancerInboundNatPool -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/inboundNatPools/pool1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=LoadBalancerInboundNatRule -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/inboundNatRules/rule1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=LoadBalancerOutboundRule -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/outboundRules/rule1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=LoadBalancerProbe -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/probes/probe1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=LoadBalancingRule -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/loadBalancingRules/rule1 diff --git a/azurerm/internal/services/loadbalancer/rule_data_source.go b/azurerm/internal/services/loadbalancer/rule_data_source.go new file mode 100644 index 000000000000..b8daad7d4dc8 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/rule_data_source.go @@ -0,0 +1,180 @@ +package loadbalancer + +import ( + "fmt" + "log" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func dataSourceArmLoadBalancerRule() *schema.Resource { + return &schema.Resource{ + Read: dataSourceArmLoadBalancerRuleRead, + + Timeouts: &schema.ResourceTimeout{ + Read: schema.DefaultTimeout(5 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.RuleName, + }, + + "resource_group_name": azure.SchemaResourceGroupNameForDataSource(), + + "loadbalancer_id": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.LoadBalancerID, + }, + + "frontend_ip_configuration_name": { + Type: schema.TypeString, + Computed: true, + }, + + "protocol": { + Type: schema.TypeString, + Computed: true, + }, + + "frontend_port": { + Type: 
schema.TypeInt, + Computed: true, + }, + + "backend_port": { + Type: schema.TypeInt, + Computed: true, + }, + + "backend_address_pool_id": { + Type: schema.TypeString, + Computed: true, + }, + + "probe_id": { + Type: schema.TypeString, + Computed: true, + }, + + "enable_floating_ip": { + Type: schema.TypeBool, + Computed: true, + }, + + "enable_tcp_reset": { + Type: schema.TypeBool, + Computed: true, + }, + + "disable_outbound_snat": { + Type: schema.TypeBool, + Computed: true, + }, + + "idle_timeout_in_minutes": { + Type: schema.TypeInt, + Computed: true, + }, + + "load_distribution": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} + +func dataSourceArmLoadBalancerRuleRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LoadBalancers.LoadBalancersClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + loadBalancerId, err := parse.LoadBalancerID(d.Get("loadbalancer_id").(string)) + if err != nil { + return err + } + + loadBalancer, err := client.Get(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, "") + if err != nil { + if utils.ResponseWasNotFound(loadBalancer.Response) { + d.SetId("") + log.Printf("[INFO] Load Balancer %q not found. Removing from state", loadBalancerId.Name) + return nil + } + return fmt.Errorf("failed to retrieve Load Balancer %q (resource group %q) for Rule %q: %+v", loadBalancerId.Name, loadBalancerId.ResourceGroup, name, err) + } + + lbRuleClient := meta.(*clients.Client).LoadBalancers.LoadBalancingRulesClient + ctx, cancel = timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + resp, err := lbRuleClient.Get(ctx, resourceGroup, *loadBalancer.Name, name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Load Balancer Rule %q was not found in Load Balancer %q (Resource Group: %q)", name, loadBalancerId.Name, loadBalancerId.ResourceGroup) + } + + return fmt.Errorf("retrieving Load Balancer %s (resource group %q) for Rule %q: %s", loadBalancerId.Name, loadBalancerId.ResourceGroup, name, err) + } + + d.SetId(*resp.ID) + + if props := resp.LoadBalancingRulePropertiesFormat; props != nil { + frontendIPConfigurationName, err := parse.LoadBalancerFrontendIpConfigurationID(*props.FrontendIPConfiguration.ID) + if err != nil { + return err + } + + d.Set("frontend_ip_configuration_name", frontendIPConfigurationName.FrontendIPConfigurationName) + d.Set("protocol", props.Protocol) + d.Set("frontend_port", props.FrontendPort) + d.Set("backend_port", props.BackendPort) + + if props.BackendAddressPool != nil { + if err := d.Set("backend_address_pool_id", props.BackendAddressPool.ID); err != nil { + return fmt.Errorf("setting `backend_address_pool_id`: %+v", err) + } + } + + if props.Probe != nil { + if err := d.Set("probe_id", props.Probe.ID); err != nil { + return fmt.Errorf("setting `probe_id`: %+v", err) + } + } + + if err := d.Set("enable_floating_ip", props.EnableFloatingIP); err != nil { + return fmt.Errorf("setting `enable_floating_ip`: %+v", err) + } + + if err := d.Set("enable_tcp_reset", props.EnableTCPReset); err != nil { + return fmt.Errorf("setting `enable_tcp_reset`: %+v", err) + } + + if err := d.Set("disable_outbound_snat", props.DisableOutboundSnat); err != nil { + return fmt.Errorf("setting `disable_outbound_snat`: %+v", err) + } + + if err := d.Set("idle_timeout_in_minutes", props.IdleTimeoutInMinutes); err != nil 
{ + return fmt.Errorf("setting `idle_timeout_in_minutes`: %+v", err) + } + + if err := d.Set("load_distribution", props.LoadDistribution); err != nil { + return fmt.Errorf("setting `load_distribution`: %+v", err) + } + } + + return nil +} diff --git a/azurerm/internal/services/loadbalancer/rule_resource.go b/azurerm/internal/services/loadbalancer/rule_resource.go new file mode 100644 index 000000000000..fb86e3eb44bc --- /dev/null +++ b/azurerm/internal/services/loadbalancer/rule_resource.go @@ -0,0 +1,386 @@ +package loadbalancer + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-03-01/network" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/parse" + loadBalancerValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmLoadBalancerRule() *schema.Resource { + return &schema.Resource{ + Create: resourceArmLoadBalancerRuleCreateUpdate, + Read: resourceArmLoadBalancerRuleRead, + Update: resourceArmLoadBalancerRuleCreateUpdate, + Delete: resourceArmLoadBalancerRuleDelete, + + Importer: loadBalancerSubResourceImporter(func(input string) (*parse.LoadBalancerId, error) { + id, err := parse.LoadBalancingRuleID(input) + if err != nil { + return nil, err + } + + lbId := parse.NewLoadBalancerID(id.SubscriptionId, id.ResourceGroup, id.LoadBalancerName) + return &lbId, nil + }), + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: loadBalancerValidate.RuleName, + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "loadbalancer_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: loadBalancerValidate.LoadBalancerID, + }, + + "frontend_ip_configuration_name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "frontend_ip_configuration_id": { + Type: schema.TypeString, + Computed: true, + }, + + "backend_address_pool_id": { + Type: schema.TypeString, + Optional: true, + Computed: true, + }, + + "protocol": { + Type: schema.TypeString, + Required: true, + DiffSuppressFunc: suppress.CaseDifference, + ValidateFunc: validation.StringInSlice([]string{ + string(network.TransportProtocolAll), + string(network.TransportProtocolTCP), + string(network.TransportProtocolUDP), + }, true), + }, + + "frontend_port": { + Type: schema.TypeInt, 
+ Required: true, + ValidateFunc: validate.PortNumberOrZero, + }, + + "backend_port": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validate.PortNumberOrZero, + }, + + "probe_id": { + Type: schema.TypeString, + Optional: true, + Computed: true, + }, + + "enable_floating_ip": { + Type: schema.TypeBool, + Optional: true, + Default: false, + }, + + "enable_tcp_reset": { + Type: schema.TypeBool, + Optional: true, + }, + + "disable_outbound_snat": { + Type: schema.TypeBool, + Optional: true, + Default: false, + }, + + "idle_timeout_in_minutes": { + Type: schema.TypeInt, + Optional: true, + Computed: true, + ValidateFunc: validation.IntBetween(4, 30), + }, + + "load_distribution": { + Type: schema.TypeString, + Optional: true, + Computed: true, + }, + }, + } +} + +func resourceArmLoadBalancerRuleCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LoadBalancers.LoadBalancersClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + loadBalancerId, err := parse.LoadBalancerID(d.Get("loadbalancer_id").(string)) + if err != nil { + return err + } + + id := parse.NewLoadBalancingRuleID(subscriptionId, loadBalancerId.ResourceGroup, loadBalancerId.Name, d.Get("name").(string)) + + loadBalancerID := loadBalancerId.ID() + locks.ByID(loadBalancerID) + defer locks.UnlockByID(loadBalancerID) + + loadBalancer, err := client.Get(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, "") + if err != nil { + if utils.ResponseWasNotFound(loadBalancer.Response) { + d.SetId("") + log.Printf("[INFO] Load Balancer %q not found. Removing from state", id.LoadBalancerName) + return nil + } + return fmt.Errorf("failed to retrieve Load Balancer %q (resource group %q) for Rule %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.Name, err) + } + + newLbRule, err := expandAzureRmLoadBalancerRule(d, &loadBalancer) + if err != nil { + return fmt.Errorf("expanding Load Balancer Rule: %+v", err) + } + + lbRules := append(*loadBalancer.LoadBalancerPropertiesFormat.LoadBalancingRules, *newLbRule) + + existingRule, existingRuleIndex, exists := FindLoadBalancerRuleByName(&loadBalancer, id.Name) + if exists { + if id.Name == *existingRule.Name { + if d.IsNewResource() { + return tf.ImportAsExistsError("azurerm_lb_rule", *existingRule.ID) + } + + // this rule is being updated/reapplied remove old copy from the slice + lbRules = append(lbRules[:existingRuleIndex], lbRules[existingRuleIndex+1:]...) 
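+			// lbRules now holds every other rule plus the new definition appended earlier, with the stale
+			// copy removed, so the CreateOrUpdate below pushes the replacement definition to Azure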
+ } + } + + loadBalancer.LoadBalancerPropertiesFormat.LoadBalancingRules = &lbRules + + future, err := client.CreateOrUpdate(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, loadBalancer) + if err != nil { + return fmt.Errorf("updating Load Balancer %q (resource group %q) for Rule %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.Name, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for update of Load Balancer %q (resource group %q) for Rule %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.Name, err) + } + + d.SetId(id.ID()) + + return resourceArmLoadBalancerRuleRead(d, meta) +} + +func resourceArmLoadBalancerRuleRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LoadBalancers.LoadBalancersClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.LoadBalancingRuleID(d.Id()) + if err != nil { + return err + } + + loadBalancer, err := client.Get(ctx, id.ResourceGroup, id.LoadBalancerName, "") + if err != nil { + if utils.ResponseWasNotFound(loadBalancer.Response) { + d.SetId("") + log.Printf("[INFO] Load Balancer %q not found. Removing from state", id.LoadBalancerName) + return nil + } + return fmt.Errorf("failed to retrieve Load Balancer %q (resource group %q) for Rule %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.Name, err) + } + + config, _, exists := FindLoadBalancerRuleByName(&loadBalancer, id.Name) + if !exists { + d.SetId("") + log.Printf("[INFO] Load Balancer Rule %q not found. Removing from state", id.Name) + return nil + } + + d.Set("name", config.Name) + d.Set("resource_group_name", id.ResourceGroup) + + if props := config.LoadBalancingRulePropertiesFormat; props != nil { + d.Set("disable_outbound_snat", props.DisableOutboundSnat) + d.Set("enable_floating_ip", props.EnableFloatingIP) + d.Set("enable_tcp_reset", props.EnableTCPReset) + d.Set("protocol", string(props.Protocol)) + + backendPort := 0 + if props.BackendPort != nil { + backendPort = int(*props.BackendPort) + } + d.Set("backend_port", backendPort) + + backendAddressPoolId := "" + if props.BackendAddressPool != nil && props.BackendAddressPool.ID != nil { + backendAddressPoolId = *props.BackendAddressPool.ID + } + d.Set("backend_address_pool_id", backendAddressPoolId) + + frontendIPConfigName := "" + frontendIPConfigID := "" + if props.FrontendIPConfiguration != nil && props.FrontendIPConfiguration.ID != nil { + feid, err := parse.LoadBalancerFrontendIpConfigurationID(*props.FrontendIPConfiguration.ID) + if err != nil { + return err + } + + frontendIPConfigName = feid.FrontendIPConfigurationName + frontendIPConfigID = feid.ID() + } + d.Set("frontend_ip_configuration_name", frontendIPConfigName) + d.Set("frontend_ip_configuration_id", frontendIPConfigID) + + frontendPort := 0 + if props.FrontendPort != nil { + frontendPort = int(*props.FrontendPort) + } + d.Set("frontend_port", frontendPort) + + idleTimeoutInMinutes := 0 + if props.IdleTimeoutInMinutes != nil { + idleTimeoutInMinutes = int(*props.IdleTimeoutInMinutes) + } + d.Set("idle_timeout_in_minutes", idleTimeoutInMinutes) + + loadDistribution := "" + if props.LoadDistribution != "" { + loadDistribution = string(props.LoadDistribution) + } + d.Set("load_distribution", loadDistribution) + + probeId := "" + if props.Probe != nil && props.Probe.ID != nil { + probeId = *props.Probe.ID + } + d.Set("probe_id", probeId) + } + + return nil +} + +func resourceArmLoadBalancerRuleDelete(d 
*schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LoadBalancers.LoadBalancersClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.LoadBalancingRuleID(d.Id()) + if err != nil { + return err + } + + loadBalancerId := parse.NewLoadBalancerID(id.SubscriptionId, id.ResourceGroup, id.LoadBalancerName) + loadBalancerIDRaw := loadBalancerId.ID() + locks.ByID(loadBalancerIDRaw) + defer locks.UnlockByID(loadBalancerIDRaw) + + loadBalancer, err := client.Get(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, "") + if err != nil { + if utils.ResponseWasNotFound(loadBalancer.Response) { + d.SetId("") + return nil + } + return fmt.Errorf("failed to retrieve Load Balancer %q (resource group %q) for Rule %q: %+v", id.LoadBalancerName, id.ResourceGroup, id.Name, err) + } + + _, index, exists := FindLoadBalancerRuleByName(&loadBalancer, d.Get("name").(string)) + if !exists { + return nil + } + + oldLbRules := *loadBalancer.LoadBalancerPropertiesFormat.LoadBalancingRules + newLbRules := append(oldLbRules[:index], oldLbRules[index+1:]...) + loadBalancer.LoadBalancerPropertiesFormat.LoadBalancingRules = &newLbRules + + future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.LoadBalancerName, loadBalancer) + if err != nil { + return fmt.Errorf("Creating/Updating Load Balancer %q (Resource Group %q): %+v", id.LoadBalancerName, id.ResourceGroup, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for completion of Load Balancer %q (Resource Group %q): %+v", id.LoadBalancerName, id.ResourceGroup, err) + } + + return nil +} + +func expandAzureRmLoadBalancerRule(d *schema.ResourceData, lb *network.LoadBalancer) (*network.LoadBalancingRule, error) { + properties := network.LoadBalancingRulePropertiesFormat{ + Protocol: network.TransportProtocol(d.Get("protocol").(string)), + FrontendPort: utils.Int32(int32(d.Get("frontend_port").(int))), + BackendPort: utils.Int32(int32(d.Get("backend_port").(int))), + EnableFloatingIP: utils.Bool(d.Get("enable_floating_ip").(bool)), + EnableTCPReset: utils.Bool(d.Get("enable_tcp_reset").(bool)), + DisableOutboundSnat: utils.Bool(d.Get("disable_outbound_snat").(bool)), + } + + if v, ok := d.GetOk("idle_timeout_in_minutes"); ok { + properties.IdleTimeoutInMinutes = utils.Int32(int32(v.(int))) + } + + if v := d.Get("load_distribution").(string); v != "" { + properties.LoadDistribution = network.LoadDistribution(v) + } + + // TODO: ensure these ID's are consistent + if v := d.Get("frontend_ip_configuration_name").(string); v != "" { + rule, exists := FindLoadBalancerFrontEndIpConfigurationByName(lb, v) + if !exists { + return nil, fmt.Errorf("[ERROR] Cannot find FrontEnd IP Configuration with the name %s", v) + } + + properties.FrontendIPConfiguration = &network.SubResource{ + ID: rule.ID, + } + } + + if v := d.Get("backend_address_pool_id").(string); v != "" { + properties.BackendAddressPool = &network.SubResource{ + ID: &v, + } + } + + if v := d.Get("probe_id").(string); v != "" { + properties.Probe = &network.SubResource{ + ID: &v, + } + } + + return &network.LoadBalancingRule{ + Name: utils.String(d.Get("name").(string)), + LoadBalancingRulePropertiesFormat: &properties, + }, nil +} diff --git a/azurerm/internal/services/loadbalancer/validate/load_balancer_backend_address_pool_id.go b/azurerm/internal/services/loadbalancer/validate/load_balancer_backend_address_pool_id.go new file mode 100644 index 
000000000000..2b576a4c9d1f --- /dev/null +++ b/azurerm/internal/services/loadbalancer/validate/load_balancer_backend_address_pool_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/parse" +) + +func LoadBalancerBackendAddressPoolID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.LoadBalancerBackendAddressPoolID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/loadbalancer/validate/load_balancer_backend_address_pool_id_test.go b/azurerm/internal/services/loadbalancer/validate/load_balancer_backend_address_pool_id_test.go new file mode 100644 index 000000000000..d6001d31cf5f --- /dev/null +++ b/azurerm/internal/services/loadbalancer/validate/load_balancer_backend_address_pool_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestLoadBalancerBackendAddressPoolID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing LoadBalancerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for LoadBalancerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/", + Valid: false, + }, + + { + // missing BackendAddressPoolName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/", + Valid: false, + }, + + { + // missing value for BackendAddressPoolName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/backendAddressPools/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/backendAddressPools/backendAddressPool1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/LOADBALANCERS/LOADBALANCER1/BACKENDADDRESSPOOLS/BACKENDADDRESSPOOL1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := LoadBalancerBackendAddressPoolID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/loadbalancer/validate/load_balancer_frontend_ip_configuration_id.go 
b/azurerm/internal/services/loadbalancer/validate/load_balancer_frontend_ip_configuration_id.go new file mode 100644 index 000000000000..129bc919d670 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/validate/load_balancer_frontend_ip_configuration_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/parse" +) + +func LoadBalancerFrontendIpConfigurationID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.LoadBalancerFrontendIpConfigurationID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/loadbalancer/validate/load_balancer_frontend_ip_configuration_id_test.go b/azurerm/internal/services/loadbalancer/validate/load_balancer_frontend_ip_configuration_id_test.go new file mode 100644 index 000000000000..a5e697bb1f06 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/validate/load_balancer_frontend_ip_configuration_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestLoadBalancerFrontendIpConfigurationID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing LoadBalancerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for LoadBalancerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/", + Valid: false, + }, + + { + // missing FrontendIPConfigurationName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/", + Valid: false, + }, + + { + // missing value for FrontendIPConfigurationName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/frontendIPConfigurations/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/frontendIPConfigurations/frontendIPConfig1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/LOADBALANCERS/LOADBALANCER1/FRONTENDIPCONFIGURATIONS/FRONTENDIPCONFIG1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := LoadBalancerFrontendIpConfigurationID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", 
tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/loadbalancer/validate/load_balancer_id.go b/azurerm/internal/services/loadbalancer/validate/load_balancer_id.go new file mode 100644 index 000000000000..845221c8ef2f --- /dev/null +++ b/azurerm/internal/services/loadbalancer/validate/load_balancer_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/parse" +) + +func LoadBalancerID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.LoadBalancerID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/loadbalancer/validate/load_balancer_id_test.go b/azurerm/internal/services/loadbalancer/validate/load_balancer_id_test.go new file mode 100644 index 000000000000..d7582fd7b177 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/validate/load_balancer_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestLoadBalancerID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/LOADBALANCERS/LOADBALANCER1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := LoadBalancerID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/loadbalancer/validate/load_balancer_inbound_nat_pool_id.go b/azurerm/internal/services/loadbalancer/validate/load_balancer_inbound_nat_pool_id.go new file mode 100644 index 000000000000..792a76ca2783 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/validate/load_balancer_inbound_nat_pool_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/parse" +) + +func LoadBalancerInboundNatPoolID(input interface{}, key 
string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.LoadBalancerInboundNatPoolID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/loadbalancer/validate/load_balancer_inbound_nat_pool_id_test.go b/azurerm/internal/services/loadbalancer/validate/load_balancer_inbound_nat_pool_id_test.go new file mode 100644 index 000000000000..7bea0d8ea8a1 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/validate/load_balancer_inbound_nat_pool_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestLoadBalancerInboundNatPoolID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing LoadBalancerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for LoadBalancerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/", + Valid: false, + }, + + { + // missing InboundNatPoolName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/", + Valid: false, + }, + + { + // missing value for InboundNatPoolName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/inboundNatPools/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/inboundNatPools/pool1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/LOADBALANCERS/LOADBALANCER1/INBOUNDNATPOOLS/POOL1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := LoadBalancerInboundNatPoolID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/loadbalancer/validate/load_balancer_inbound_nat_rule_id.go b/azurerm/internal/services/loadbalancer/validate/load_balancer_inbound_nat_rule_id.go new file mode 100644 index 000000000000..a604fbfabc7b --- /dev/null +++ b/azurerm/internal/services/loadbalancer/validate/load_balancer_inbound_nat_rule_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/parse" +) + +func LoadBalancerInboundNatRuleID(input interface{}, key string) (warnings []string, 
errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.LoadBalancerInboundNatRuleID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/loadbalancer/validate/load_balancer_inbound_nat_rule_id_test.go b/azurerm/internal/services/loadbalancer/validate/load_balancer_inbound_nat_rule_id_test.go new file mode 100644 index 000000000000..b71d6b037787 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/validate/load_balancer_inbound_nat_rule_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestLoadBalancerInboundNatRuleID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing LoadBalancerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for LoadBalancerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/", + Valid: false, + }, + + { + // missing InboundNatRuleName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/", + Valid: false, + }, + + { + // missing value for InboundNatRuleName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/inboundNatRules/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/inboundNatRules/rule1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/LOADBALANCERS/LOADBALANCER1/INBOUNDNATRULES/RULE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := LoadBalancerInboundNatRuleID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/loadbalancer/validate/load_balancer_outbound_rule_id.go b/azurerm/internal/services/loadbalancer/validate/load_balancer_outbound_rule_id.go new file mode 100644 index 000000000000..706feb9e793c --- /dev/null +++ b/azurerm/internal/services/loadbalancer/validate/load_balancer_outbound_rule_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/parse" +) + +func LoadBalancerOutboundRuleID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := 
input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.LoadBalancerOutboundRuleID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/loadbalancer/validate/load_balancer_outbound_rule_id_test.go b/azurerm/internal/services/loadbalancer/validate/load_balancer_outbound_rule_id_test.go new file mode 100644 index 000000000000..fb496b02bff4 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/validate/load_balancer_outbound_rule_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestLoadBalancerOutboundRuleID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing LoadBalancerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for LoadBalancerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/", + Valid: false, + }, + + { + // missing OutboundRuleName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/", + Valid: false, + }, + + { + // missing value for OutboundRuleName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/outboundRules/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/outboundRules/rule1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/LOADBALANCERS/LOADBALANCER1/OUTBOUNDRULES/RULE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := LoadBalancerOutboundRuleID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/loadbalancer/validate/load_balancer_probe_id.go b/azurerm/internal/services/loadbalancer/validate/load_balancer_probe_id.go new file mode 100644 index 000000000000..7a23b5e36bd7 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/validate/load_balancer_probe_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/parse" +) + +func LoadBalancerProbeID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", 
key)) + return + } + + if _, err := parse.LoadBalancerProbeID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/loadbalancer/validate/load_balancer_probe_id_test.go b/azurerm/internal/services/loadbalancer/validate/load_balancer_probe_id_test.go new file mode 100644 index 000000000000..5df903a560ed --- /dev/null +++ b/azurerm/internal/services/loadbalancer/validate/load_balancer_probe_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestLoadBalancerProbeID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing LoadBalancerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for LoadBalancerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/", + Valid: false, + }, + + { + // missing ProbeName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/", + Valid: false, + }, + + { + // missing value for ProbeName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/probes/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/probes/probe1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/LOADBALANCERS/LOADBALANCER1/PROBES/PROBE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := LoadBalancerProbeID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/loadbalancer/validate/load_balancing_rule_id.go b/azurerm/internal/services/loadbalancer/validate/load_balancing_rule_id.go new file mode 100644 index 000000000000..0732c1ea6587 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/validate/load_balancing_rule_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loadbalancer/parse" +) + +func LoadBalancingRuleID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.LoadBalancingRuleID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git 
a/azurerm/internal/services/loadbalancer/validate/load_balancing_rule_id_test.go b/azurerm/internal/services/loadbalancer/validate/load_balancing_rule_id_test.go new file mode 100644 index 000000000000..fcebbde5d321 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/validate/load_balancing_rule_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestLoadBalancingRuleID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing LoadBalancerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for LoadBalancerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/loadBalancingRules/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/loadBalancers/loadBalancer1/loadBalancingRules/rule1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/LOADBALANCERS/LOADBALANCER1/LOADBALANCINGRULES/RULE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := LoadBalancingRuleID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/loadbalancer/validate/rule_name.go b/azurerm/internal/services/loadbalancer/validate/rule_name.go new file mode 100644 index 000000000000..5764a9e2da3d --- /dev/null +++ b/azurerm/internal/services/loadbalancer/validate/rule_name.go @@ -0,0 +1,36 @@ +package validate + +import ( + "fmt" + "regexp" +) + +func RuleName(v interface{}, k string) (warnings []string, errors []error) { + value := v.(string) + if !regexp.MustCompile(`^[a-zA-Z_0-9.-]+$`).MatchString(value) { + errors = append(errors, fmt.Errorf( + "only word characters, numbers, underscores, periods, and hyphens allowed in %q: %q", + k, value)) + } + + if len(value) > 80 { + errors = append(errors, fmt.Errorf( + "%q cannot be longer than 80 characters: %q", k, value)) + } + + if len(value) == 0 { + errors = append(errors, fmt.Errorf( + "%q cannot be an empty string: %q", k, value)) + } + if !regexp.MustCompile(`[a-zA-Z0-9_]$`).MatchString(value) { + errors = append(errors, fmt.Errorf( + "%q must end with a word character, number, or underscore: 
%q", k, value)) + } + + if !regexp.MustCompile(`^[a-zA-Z0-9]`).MatchString(value) { + errors = append(errors, fmt.Errorf( + "%q must start with a word character or number: %q", k, value)) + } + + return warnings, errors +} diff --git a/azurerm/internal/services/loadbalancer/validate/rule_name_test.go b/azurerm/internal/services/loadbalancer/validate/rule_name_test.go new file mode 100644 index 000000000000..c3367c989ae3 --- /dev/null +++ b/azurerm/internal/services/loadbalancer/validate/rule_name_test.go @@ -0,0 +1,63 @@ +package validate + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/acctest" +) + +func TestRuleName_validation(t *testing.T) { + cases := []struct { + Value string + ErrCount int + }{ + { + Value: "-word", + ErrCount: 1, + }, + { + Value: "testing-", + ErrCount: 1, + }, + { + Value: "test#test", + ErrCount: 1, + }, + { + Value: acctest.RandStringFromCharSet(81, "abcdedfed"), + ErrCount: 1, + }, + { + Value: "test.rule", + ErrCount: 0, + }, + { + Value: "test_rule", + ErrCount: 0, + }, + { + Value: "test-rule", + ErrCount: 0, + }, + { + Value: "TestRule", + ErrCount: 0, + }, + { + Value: "Test123Rule", + ErrCount: 0, + }, + { + Value: "TestRule", + ErrCount: 0, + }, + } + + for _, tc := range cases { + _, errors := RuleName(tc.Value, "azurerm_lb_rule") + + if len(errors) != tc.ErrCount { + t.Fatalf("Expected the Azure RM Load Balancer Rule Name Label to trigger a validation error") + } + } +} diff --git a/azurerm/internal/services/loganalytics/client/client.go b/azurerm/internal/services/loganalytics/client/client.go index ae07726000ed..1b2a48b4f80c 100644 --- a/azurerm/internal/services/loganalytics/client/client.go +++ b/azurerm/internal/services/loganalytics/client/client.go @@ -1,12 +1,13 @@ package client import ( - "github.com/Azure/azure-sdk-for-go/services/preview/operationalinsights/mgmt/2020-03-01-preview/operationalinsights" + "github.com/Azure/azure-sdk-for-go/services/operationalinsights/mgmt/2020-08-01/operationalinsights" "github.com/Azure/azure-sdk-for-go/services/preview/operationsmanagement/mgmt/2015-11-01-preview/operationsmanagement" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/common" ) type Client struct { + ClusterClient *operationalinsights.ClustersClient DataExportClient *operationalinsights.DataExportsClient DataSourcesClient *operationalinsights.DataSourcesClient LinkedServicesClient *operationalinsights.LinkedServicesClient @@ -14,10 +15,14 @@ type Client struct { SavedSearchesClient *operationalinsights.SavedSearchesClient SharedKeysClient *operationalinsights.SharedKeysClient SolutionsClient *operationsmanagement.SolutionsClient + StorageInsightsClient *operationalinsights.StorageInsightConfigsClient WorkspacesClient *operationalinsights.WorkspacesClient } func NewClient(o *common.ClientOptions) *Client { + ClusterClient := operationalinsights.NewClustersClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + o.ConfigureClient(&ClusterClient.Client, o.ResourceManagerAuthorizer) + DataExportClient := operationalinsights.NewDataExportsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) o.ConfigureClient(&DataExportClient.Client, o.ResourceManagerAuthorizer) @@ -36,6 +41,9 @@ func NewClient(o *common.ClientOptions) *Client { SolutionsClient := operationsmanagement.NewSolutionsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId, "Microsoft.OperationsManagement", "solutions", "testing") o.ConfigureClient(&SolutionsClient.Client, o.ResourceManagerAuthorizer) + 
StorageInsightsClient := operationalinsights.NewStorageInsightConfigsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + o.ConfigureClient(&StorageInsightsClient.Client, o.ResourceManagerAuthorizer) + LinkedServicesClient := operationalinsights.NewLinkedServicesClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) o.ConfigureClient(&LinkedServicesClient.Client, o.ResourceManagerAuthorizer) @@ -43,6 +51,7 @@ func NewClient(o *common.ClientOptions) *Client { o.ConfigureClient(&LinkedStorageAccountClient.Client, o.ResourceManagerAuthorizer) return &Client{ + ClusterClient: &ClusterClient, DataExportClient: &DataExportClient, DataSourcesClient: &DataSourcesClient, LinkedServicesClient: &LinkedServicesClient, @@ -50,6 +59,7 @@ func NewClient(o *common.ClientOptions) *Client { SavedSearchesClient: &SavedSearchesClient, SharedKeysClient: &SharedKeysClient, SolutionsClient: &SolutionsClient, + StorageInsightsClient: &StorageInsightsClient, WorkspacesClient: &WorkspacesClient, } } diff --git a/azurerm/internal/services/loganalytics/log_analytics_cluster.go b/azurerm/internal/services/loganalytics/log_analytics_cluster.go new file mode 100644 index 000000000000..3575d1e94e68 --- /dev/null +++ b/azurerm/internal/services/loganalytics/log_analytics_cluster.go @@ -0,0 +1,46 @@ +package loganalytics + +import ( + "context" + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/operationalinsights/mgmt/2020-08-01/operationalinsights" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" +) + +func logAnalyticsClusterWaitForState(ctx context.Context, meta interface{}, timeout time.Duration, resourceGroup string, clusterName string) *resource.StateChangeConf { + return &resource.StateChangeConf{ + Pending: []string{string(operationalinsights.Updating)}, + Target: []string{string(operationalinsights.Succeeded)}, + MinTimeout: 1 * time.Minute, + Timeout: timeout, + Refresh: logAnalyticsClusterRefresh(ctx, meta, resourceGroup, clusterName), + } +} + +func logAnalyticsClusterRefresh(ctx context.Context, meta interface{}, resourceGroup string, clusterName string) resource.StateRefreshFunc { + return func() (interface{}, string, error) { + client := meta.(*clients.Client).LogAnalytics.ClusterClient + + log.Printf("[INFO] checking on state of Log Analytics Cluster %q", clusterName) + + resp, err := client.Get(ctx, resourceGroup, clusterName) + if err != nil { + return nil, "nil", fmt.Errorf("polling for the status of Log Analytics Cluster %q (Resource Group %q): %v", clusterName, resourceGroup, err) + } + + if resp.ClusterProperties != nil { + if resp.ClusterProperties.ProvisioningState != operationalinsights.Updating && resp.ClusterProperties.ProvisioningState != operationalinsights.Succeeded { + return nil, "nil", fmt.Errorf("Log Analytics Cluster %q (Resource Group %q) unexpected Provisioning State encountered: %q", clusterName, resourceGroup, string(resp.ClusterProperties.ProvisioningState)) + } + + return resp, string(resp.ClusterProperties.ProvisioningState), nil + } + + // I am not returning an error here as this might have just been a bad get + return resp, "nil", nil + } +} diff --git a/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go b/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go new file mode 100644 index 000000000000..ce7a8069e1a9 --- /dev/null +++ 
b/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go @@ -0,0 +1,204 @@ +package loganalytics + +import ( + "fmt" + "log" + "strings" + "time" + + "github.com/Azure/azure-sdk-for-go/services/operationalinsights/mgmt/2020-08-01/operationalinsights" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceLogAnalyticsClusterCustomerManagedKey() *schema.Resource { + return &schema.Resource{ + Create: resourceLogAnalyticsClusterCustomerManagedKeyCreate, + Read: resourceLogAnalyticsClusterCustomerManagedKeyRead, + Update: resourceLogAnalyticsClusterCustomerManagedKeyUpdate, + Delete: resourceLogAnalyticsClusterCustomerManagedKeyDelete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(6 * time.Hour), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(6 * time.Hour), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + + Schema: map[string]*schema.Schema{ + "log_analytics_cluster_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.LogAnalyticsClusterID, + }, + + "key_vault_key_id": { + Type: schema.TypeString, + Required: true, + ValidateFunc: azure.ValidateKeyVaultChildIdVersionOptional, + }, + }, + } +} + +func resourceLogAnalyticsClusterCustomerManagedKeyCreate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LogAnalytics.ClusterClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + clusterIdRaw := d.Get("log_analytics_cluster_id").(string) + clusterId, err := parse.LogAnalyticsClusterID(clusterIdRaw) + if err != nil { + return err + } + + resp, err := client.Get(ctx, clusterId.ResourceGroup, clusterId.ClusterName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Log Analytics Cluster %q (resource group %q) was not found", clusterId.ClusterName, clusterId.ResourceGroup) + } + return fmt.Errorf("failed to get details of Log Analytics Cluster %q (resource group %q): %+v", clusterId.ClusterName, clusterId.ResourceGroup, err) + } + if resp.ClusterProperties != nil && resp.ClusterProperties.KeyVaultProperties != nil { + keyProps := *resp.ClusterProperties.KeyVaultProperties + if keyProps.KeyName != nil && *keyProps.KeyName != "" { + return tf.ImportAsExistsError("azurerm_log_analytics_cluster_customer_managed_key", fmt.Sprintf("%s/CMK", clusterIdRaw)) + } + } + + d.SetId(fmt.Sprintf("%s/CMK", clusterIdRaw)) + return resourceLogAnalyticsClusterCustomerManagedKeyUpdate(d, meta) +} + +func resourceLogAnalyticsClusterCustomerManagedKeyUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LogAnalytics.ClusterClient + ctx, cancel := 
timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + keyId, err := azure.ParseKeyVaultChildIDVersionOptional(d.Get("key_vault_key_id").(string)) + if err != nil { + return fmt.Errorf("could not parse Key Vault Key ID: %+v", err) + } + + clusterId, err := parse.LogAnalyticsClusterID(d.Get("log_analytics_cluster_id").(string)) + if err != nil { + return err + } + + clusterPatch := operationalinsights.ClusterPatch{ + ClusterPatchProperties: &operationalinsights.ClusterPatchProperties{ + KeyVaultProperties: &operationalinsights.KeyVaultProperties{ + KeyVaultURI: utils.String(keyId.KeyVaultBaseUrl), + KeyName: utils.String(keyId.Name), + KeyVersion: utils.String(keyId.Version), + }, + }, + } + + if _, err := client.Update(ctx, clusterId.ResourceGroup, clusterId.ClusterName, clusterPatch); err != nil { + return fmt.Errorf("updating Log Analytics Cluster %q (Resource Group %q): %+v", clusterId.ClusterName, clusterId.ResourceGroup, err) + } + + updateWait := logAnalyticsClusterWaitForState(ctx, meta, d.Timeout(schema.TimeoutUpdate), clusterId.ResourceGroup, clusterId.ClusterName) + + if _, err := updateWait.WaitForState(); err != nil { + return fmt.Errorf("waiting for Log Analytics Cluster to finish updating %q (Resource Group %q): %v", clusterId.ClusterName, clusterId.ResourceGroup, err) + } + + return resourceLogAnalyticsClusterCustomerManagedKeyRead(d, meta) +} + +func resourceLogAnalyticsClusterCustomerManagedKeyRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LogAnalytics.ClusterClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + idRaw := strings.TrimRight(d.Id(), "/CMK") + + id, err := parse.LogAnalyticsClusterID(idRaw) + if err != nil { + return err + } + + d.Set("log_analytics_cluster_id", idRaw) + + resp, err := client.Get(ctx, id.ResourceGroup, id.ClusterName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[INFO] Log Analytics %q does not exist - removing from state", d.Id()) + d.SetId("") + return nil + } + return fmt.Errorf("retrieving Log Analytics Cluster %q (Resource Group %q): %+v", id.ClusterName, id.ResourceGroup, err) + } + + if props := resp.ClusterProperties; props != nil { + if kvProps := props.KeyVaultProperties; kvProps != nil { + var keyVaultUri, keyName, keyVersion string + if kvProps.KeyVaultURI != nil && *kvProps.KeyVaultURI != "" { + keyVaultUri = *kvProps.KeyVaultURI + } else { + return fmt.Errorf("empty value returned for Key Vault URI") + } + if kvProps.KeyName != nil && *kvProps.KeyName != "" { + keyName = *kvProps.KeyName + } else { + return fmt.Errorf("empty value returned for Key Vault Key Name") + } + if kvProps.KeyVersion != nil { + keyVersion = *kvProps.KeyVersion + } + keyVaultKeyId, err := azure.NewKeyVaultChildResourceID(keyVaultUri, "keys", keyName, keyVersion) + if err != nil { + return err + } + d.Set("key_vault_key_id", keyVaultKeyId) + } + } + + return nil +} + +func resourceLogAnalyticsClusterCustomerManagedKeyDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LogAnalytics.ClusterClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + clusterId, err := parse.LogAnalyticsClusterID(d.Get("log_analytics_cluster_id").(string)) + if err != nil { + return err + } + + clusterPatch := operationalinsights.ClusterPatch{ + ClusterPatchProperties: &operationalinsights.ClusterPatchProperties{ + KeyVaultProperties: 
&operationalinsights.KeyVaultProperties{ + KeyVaultURI: nil, + KeyName: nil, + KeyVersion: nil, + }, + }, + } + + if _, err = client.Update(ctx, clusterId.ResourceGroup, clusterId.ClusterName, clusterPatch); err != nil { + return fmt.Errorf("removing Log Analytics Cluster Customer Managed Key from cluster %q (resource group %q): %+v", clusterId.ClusterName, clusterId.ResourceGroup, err) + } + + deleteWait := logAnalyticsClusterWaitForState(ctx, meta, d.Timeout(schema.TimeoutDelete), clusterId.ResourceGroup, clusterId.ClusterName) + + if _, err := deleteWait.WaitForState(); err != nil { + return fmt.Errorf("waiting for Log Analytics Cluster to finish updating %q (Resource Group %q): %v", clusterId.ClusterName, clusterId.ResourceGroup, err) + } + + return nil +} diff --git a/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource_test.go b/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource_test.go new file mode 100644 index 000000000000..03e9f79eb4ad --- /dev/null +++ b/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource_test.go @@ -0,0 +1,155 @@ +package loganalytics_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type LogAnalyticsClusterCustomerManagedKeyResource struct { +} + +func TestAccLogAnalyticsClusterCustomerManagedKey_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_cluster_customer_managed_key", "test") + r := LogAnalyticsClusterCustomerManagedKeyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t LogAnalyticsClusterCustomerManagedKeyResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.LogAnalyticsClusterID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.LogAnalytics.ClusterClient.Get(ctx, id.ResourceGroup, id.ClusterName) + if err != nil { + return nil, fmt.Errorf("reading Log Analytics Cluster Customer Managed Key (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (LogAnalyticsClusterCustomerManagedKeyResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-la-%[1]d" + location = "%[2]s" +} + +data "azurerm_client_config" "current" {} + +resource "azurerm_log_analytics_cluster" "test" { + name = "acctest-LA-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + identity { + type = "SystemAssigned" + } +} + + +resource "azurerm_key_vault" "test" { + name = "vault%[3]s" + location = azurerm_resource_group.test.location + resource_group_name = 
azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + + sku_name = "premium" + + soft_delete_enabled = true + soft_delete_retention_days = 7 + purge_protection_enabled = true +} + + +resource "azurerm_key_vault_access_policy" "terraform" { + key_vault_id = azurerm_key_vault.test.id + + key_permissions = [ + "create", + "delete", + "get", + "update", + "list", + ] + + secret_permissions = [ + "get", + "delete", + "set", + ] + + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id +} + +resource "azurerm_key_vault_key" "test" { + name = "key-%[3]s" + key_vault_id = azurerm_key_vault.test.id + key_type = "RSA" + key_size = 2048 + + key_opts = [ + "decrypt", + "encrypt", + "sign", + "unwrapKey", + "verify", + "wrapKey", + ] + + depends_on = [azurerm_key_vault_access_policy.terraform] +} + +resource "azurerm_key_vault_access_policy" "test" { + key_vault_id = azurerm_key_vault.test.id + + key_permissions = [ + "get", + "unwrapkey", + "wrapkey" + ] + + tenant_id = azurerm_log_analytics_cluster.test.identity.0.tenant_id + object_id = azurerm_log_analytics_cluster.test.identity.0.principal_id + + depends_on = [azurerm_key_vault_access_policy.terraform] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (r LogAnalyticsClusterCustomerManagedKeyResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_cluster_customer_managed_key" "test" { + log_analytics_cluster_id = azurerm_log_analytics_cluster.test.id + key_vault_key_id = azurerm_key_vault_key.test.id + + depends_on = [azurerm_key_vault_access_policy.test] +} + +`, r.template(data)) +} diff --git a/azurerm/internal/services/loganalytics/log_analytics_cluster_resource.go b/azurerm/internal/services/loganalytics/log_analytics_cluster_resource.go new file mode 100644 index 000000000000..02e6face4185 --- /dev/null +++ b/azurerm/internal/services/loganalytics/log_analytics_cluster_resource.go @@ -0,0 +1,296 @@ +package loganalytics + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/operationalinsights/mgmt/2020-08-01/operationalinsights" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceLogAnalyticsCluster() *schema.Resource { + return &schema.Resource{ + Create: resourceLogAnalyticsClusterCreate, + Read: resourceLogAnalyticsClusterRead, + Update: resourceLogAnalyticsClusterUpdate, + Delete: resourceLogAnalyticsClusterDelete, + + Timeouts: 
&schema.ResourceTimeout{ + Create: schema.DefaultTimeout(6 * time.Hour), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(6 * time.Hour), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.LogAnalyticsClusterID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.LogAnalyticsClusterName, + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "location": azure.SchemaLocation(), + + "identity": { + Type: schema.TypeList, + Required: true, + ForceNew: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "type": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringInSlice([]string{ + string(operationalinsights.SystemAssigned), + }, false), + }, + + "principal_id": { + Type: schema.TypeString, + Computed: true, + }, + + "tenant_id": { + Type: schema.TypeString, + Computed: true, + }, + }, + }, + }, + + // Per the documentation cluster capacity must start at 1000 GB and can go above 3000 GB with an exception by Microsoft + // so I am not limiting the upperbound here by design + // https://docs.microsoft.com/en-us/azure/azure-monitor/platform/manage-cost-storage#log-analytics-dedicated-clusters + "size_gb": { + Type: schema.TypeInt, + Optional: true, + Default: 1000, + ValidateFunc: validation.All( + validation.IntAtLeast(1000), + validation.IntDivisibleBy(100), + ), + }, + + "cluster_id": { + Type: schema.TypeString, + Computed: true, + }, + + "tags": tags.Schema(), + }, + } +} + +func resourceLogAnalyticsClusterCreate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LogAnalytics.ClusterClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + + id := parse.NewLogAnalyticsClusterID(subscriptionId, resourceGroup, name) + + existing, err := client.Get(ctx, resourceGroup, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for presence of existing Log Analytics Cluster %q (Resource Group %q): %+v", name, resourceGroup, err) + } + } + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_log_analytics_cluster", *existing.ID) + } + + parameters := operationalinsights.Cluster{ + Location: utils.String(location.Normalize(d.Get("location").(string))), + Identity: expandLogAnalyticsClusterIdentity(d.Get("identity").([]interface{})), + Sku: &operationalinsights.ClusterSku{ + Capacity: utils.Int64(int64(d.Get("size_gb").(int))), + Name: operationalinsights.CapacityReservation, + }, + Tags: tags.Expand(d.Get("tags").(map[string]interface{})), + } + + future, err := client.CreateOrUpdate(ctx, resourceGroup, name, parameters) + if err != nil { + return fmt.Errorf("creating Log Analytics Cluster %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting on creating future for Log Analytics Cluster %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + if _, err = client.Get(ctx, resourceGroup, name); err != nil { + return fmt.Errorf("retrieving Log 
Analytics Cluster %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + createWait := logAnalyticsClusterWaitForState(ctx, meta, d.Timeout(schema.TimeoutCreate), id.ResourceGroup, id.ClusterName) + + if _, err := createWait.WaitForState(); err != nil { + return fmt.Errorf("waiting for Log Analytics Cluster to finish updating %q (Resource Group %q): %v", id.ClusterName, id.ResourceGroup, err) + } + + d.SetId(id.ID()) + return resourceLogAnalyticsClusterRead(d, meta) +} + +func resourceLogAnalyticsClusterRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LogAnalytics.ClusterClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.LogAnalyticsClusterID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.ClusterName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[INFO] Log Analytics %q does not exist - removing from state", d.Id()) + d.SetId("") + return nil + } + return fmt.Errorf("retrieving Log Analytics Cluster %q (Resource Group %q): %+v", id.ClusterName, id.ResourceGroup, err) + } + d.Set("name", id.ClusterName) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("location", location.NormalizeNilable(resp.Location)) + if err := d.Set("identity", flattenLogAnalyticsIdentity(resp.Identity)); err != nil { + return fmt.Errorf("setting `identity`: %+v", err) + } + if props := resp.ClusterProperties; props != nil { + d.Set("cluster_id", props.ClusterID) + } + + capacity := 0 + if sku := resp.Sku; sku != nil { + if sku.Capacity != nil { + capacity = int(*sku.Capacity) + } + } + d.Set("size_gb", capacity) + + return tags.FlattenAndSet(d, resp.Tags) +} + +func resourceLogAnalyticsClusterUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LogAnalytics.ClusterClient + ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.LogAnalyticsClusterID(d.Id()) + if err != nil { + return err + } + + parameters := operationalinsights.ClusterPatch{} + + if d.HasChange("size_gb") { + parameters.Sku = &operationalinsights.ClusterSku{ + Capacity: utils.Int64(int64(d.Get("size_gb").(int))), + Name: operationalinsights.CapacityReservation, + } + } + + if d.HasChange("tags") { + parameters.Tags = tags.Expand(d.Get("tags").(map[string]interface{})) + } + + if _, err := client.Update(ctx, id.ResourceGroup, id.ClusterName, parameters); err != nil { + return fmt.Errorf("updating Log Analytics Cluster %q (Resource Group %q): %+v", id.ClusterName, id.ResourceGroup, err) + } + + // Need to wait for the cluster to actually finish updating the resource before continuing + // since the service returns a 200 instantly while it's still updating in the background + log.Printf("[INFO] Checking for Log Analytics Cluster provisioning state") + + updateWait := logAnalyticsClusterWaitForState(ctx, meta, d.Timeout(schema.TimeoutUpdate), id.ResourceGroup, id.ClusterName) + + if _, err := updateWait.WaitForState(); err != nil { + return fmt.Errorf("waiting for Log Analytics Cluster to finish updating %q (Resource Group %q): %v", id.ClusterName, id.ResourceGroup, err) + } + + return resourceLogAnalyticsClusterRead(d, meta) +} + +func resourceLogAnalyticsClusterDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LogAnalytics.ClusterClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + 
defer cancel() + + id, err := parse.LogAnalyticsClusterID(d.Id()) + if err != nil { + return err + } + + future, err := client.Delete(ctx, id.ResourceGroup, id.ClusterName) + if err != nil { + return fmt.Errorf("deleting Log Analytics Cluster %q (Resource Group %q): %+v", id.ClusterName, id.ResourceGroup, err) + } + + if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting on deleting future for Log Analytics Cluster %q (Resource Group %q): %+v", id.ClusterName, id.ResourceGroup, err) + } + + return nil +} + +func expandLogAnalyticsClusterIdentity(input []interface{}) *operationalinsights.Identity { + if len(input) == 0 { + return nil + } + v := input[0].(map[string]interface{}) + return &operationalinsights.Identity{ + Type: operationalinsights.IdentityType(v["type"].(string)), + } +} + +func flattenLogAnalyticsIdentity(input *operationalinsights.Identity) []interface{} { + if input == nil { + return make([]interface{}, 0) + } + + var t operationalinsights.IdentityType + if input.Type != "" { + t = input.Type + } + var principalId string + if input.PrincipalID != nil { + principalId = *input.PrincipalID + } + var tenantId string + if input.TenantID != nil { + tenantId = *input.TenantID + } + return []interface{}{ + map[string]interface{}{ + "type": t, + "principal_id": principalId, + "tenant_id": tenantId, + }, + } +} diff --git a/azurerm/internal/services/loganalytics/log_analytics_cluster_resource_test.go b/azurerm/internal/services/loganalytics/log_analytics_cluster_resource_test.go new file mode 100644 index 000000000000..0163b9a33cb7 --- /dev/null +++ b/azurerm/internal/services/loganalytics/log_analytics_cluster_resource_test.go @@ -0,0 +1,143 @@ +package loganalytics_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type LogAnalyticsClusterResource struct { +} + +func TestAccLogAnalyticsCluster_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_cluster", "test") + r := LogAnalyticsClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccLogAnalyticsCluster_resize(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_cluster", "test") + r := LogAnalyticsClusterResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.resize(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccLogAnalyticsCluster_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_cluster", "test") + r := LogAnalyticsClusterResource{} + data.ResourceTest(t, r, 
[]resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (t LogAnalyticsClusterResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.LogAnalyticsClusterID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.LogAnalytics.ClusterClient.Get(ctx, id.ResourceGroup, id.ClusterName) + if err != nil { + return nil, fmt.Errorf("readingLog Analytics Cluster (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (LogAnalyticsClusterResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-la-%d" + location = "%s" +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (r LogAnalyticsClusterResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_cluster" "test" { + name = "acctest-LA-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + identity { + type = "SystemAssigned" + } +} +`, r.template(data), data.RandomInteger) +} + +func (r LogAnalyticsClusterResource) resize(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_cluster" "test" { + name = "acctest-LA-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + size_gb = 1100 + + identity { + type = "SystemAssigned" + } +} +`, r.template(data), data.RandomInteger) +} + +func (r LogAnalyticsClusterResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_cluster" "import" { + name = azurerm_log_analytics_cluster.test.name + resource_group_name = azurerm_log_analytics_cluster.test.resource_group_name + location = azurerm_log_analytics_cluster.test.location + + identity { + type = "SystemAssigned" + } +} +`, r.basic(data)) +} diff --git a/azurerm/internal/services/loganalytics/log_analytics_data_export_resource.go b/azurerm/internal/services/loganalytics/log_analytics_data_export_resource.go index 9b2133d33b3c..9d824e881061 100644 --- a/azurerm/internal/services/loganalytics/log_analytics_data_export_resource.go +++ b/azurerm/internal/services/loganalytics/log_analytics_data_export_resource.go @@ -5,7 +5,7 @@ import ( "log" "time" - "github.com/Azure/azure-sdk-for-go/services/preview/operationalinsights/mgmt/2020-03-01-preview/operationalinsights" + "github.com/Azure/azure-sdk-for-go/services/operationalinsights/mgmt/2020-08-01/operationalinsights" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" @@ -18,12 +18,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmLogAnalyticsDataExport() *schema.Resource { +func resourceLogAnalyticsDataExport() *schema.Resource { return &schema.Resource{ - Create: resourceArmOperationalinsightsDataExportCreateUpdate, - Read: resourceArmOperationalinsightsDataExportRead, - Update: resourceArmOperationalinsightsDataExportCreateUpdate, - Delete: resourceArmOperationalinsightsDataExportDelete, + Create: 
resourceOperationalinsightsDataExportCreateUpdate, + Read: resourceOperationalinsightsDataExportRead, + Update: resourceOperationalinsightsDataExportCreateUpdate, + Delete: resourceOperationalinsightsDataExportDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -50,7 +50,7 @@ func resourceArmLogAnalyticsDataExport() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: azure.ValidateResourceID, + ValidateFunc: validate.LogAnalyticsWorkspaceID, }, "destination_resource_id": { @@ -83,7 +83,7 @@ func resourceArmLogAnalyticsDataExport() *schema.Resource { } } -func resourceArmOperationalinsightsDataExportCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceOperationalinsightsDataExportCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).LogAnalytics.DataExportClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -96,10 +96,10 @@ func resourceArmOperationalinsightsDataExportCreateUpdate(d *schema.ResourceData } if d.IsNewResource() { - existing, err := client.Get(ctx, resourceGroup, workspace.Name, name) + existing, err := client.Get(ctx, resourceGroup, workspace.WorkspaceName, name) if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for present of existing Log Analytics Data Export Rule %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, workspace.Name, err) + return fmt.Errorf("checking for present of existing Log Analytics Data Export Rule %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, workspace.WorkspaceName, err) } } if existing.ID != nil && *existing.ID != "" { @@ -117,24 +117,24 @@ func resourceArmOperationalinsightsDataExportCreateUpdate(d *schema.ResourceData }, } - if _, err := client.CreateOrUpdate(ctx, resourceGroup, workspace.Name, name, parameters); err != nil { - return fmt.Errorf("creating/updating Log Analytics Data Export Rule %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, workspace.Name, err) + if _, err := client.CreateOrUpdate(ctx, resourceGroup, workspace.WorkspaceName, name, parameters); err != nil { + return fmt.Errorf("creating/updating Log Analytics Data Export Rule %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, workspace.WorkspaceName, err) } - resp, err := client.Get(ctx, resourceGroup, workspace.Name, name) + resp, err := client.Get(ctx, resourceGroup, workspace.WorkspaceName, name) if err != nil { - return fmt.Errorf("retrieving Log Analytics Data Export Rule %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, workspace.Name, err) + return fmt.Errorf("retrieving Log Analytics Data Export Rule %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, workspace.WorkspaceName, err) } if resp.ID == nil || *resp.ID == "" { - return fmt.Errorf("empty or nil ID returned for Log Analytics Data Export Rule %q (Resource Group %q / workspaceName %q) ID", name, resourceGroup, workspace.Name) + return fmt.Errorf("empty or nil ID returned for Log Analytics Data Export Rule %q (Resource Group %q / workspaceName %q) ID", name, resourceGroup, workspace.WorkspaceName) } d.SetId(*resp.ID) - return resourceArmOperationalinsightsDataExportRead(d, meta) + return resourceOperationalinsightsDataExportRead(d, meta) } -func resourceArmOperationalinsightsDataExportRead(d *schema.ResourceData, meta interface{}) error { +func 
resourceOperationalinsightsDataExportRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).LogAnalytics.DataExportClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -144,28 +144,28 @@ func resourceArmOperationalinsightsDataExportRead(d *schema.ResourceData, meta i return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.WorkspaceName, id.Name) + resp, err := client.Get(ctx, id.ResourceGroup, id.WorkspaceName, id.DataexportName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { log.Printf("[INFO] Log Analytics %q does not exist - removing from state", d.Id()) d.SetId("") return nil } - return fmt.Errorf("retrieving Log Analytics Data Export Rule %q (Resource Group %q / workspaceName %q): %+v", id.Name, id.ResourceGroup, id.WorkspaceName, err) + return fmt.Errorf("retrieving Log Analytics Data Export Rule %q (Resource Group %q / workspaceName %q): %+v", id.DataexportName, id.ResourceGroup, id.WorkspaceName, err) } - d.Set("name", id.Name) + d.Set("name", id.DataexportName) d.Set("resource_group_name", id.ResourceGroup) - d.Set("workspace_resource_id", id.WorkspaceID) + d.Set("workspace_resource_id", parse.NewLogAnalyticsWorkspaceID(id.SubscriptionId, id.ResourceGroup, id.WorkspaceName).ID()) if props := resp.DataExportProperties; props != nil { d.Set("export_rule_id", props.DataExportID) - d.Set("destination_resource_id", flattenArmDataExportDestination(props.Destination)) + d.Set("destination_resource_id", flattenDataExportDestination(props.Destination)) d.Set("enabled", props.Enable) d.Set("table_names", utils.FlattenStringSlice(props.TableNames)) } return nil } -func resourceArmOperationalinsightsDataExportDelete(d *schema.ResourceData, meta interface{}) error { +func resourceOperationalinsightsDataExportDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).LogAnalytics.DataExportClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() @@ -175,13 +175,13 @@ func resourceArmOperationalinsightsDataExportDelete(d *schema.ResourceData, meta return err } - if _, err := client.Delete(ctx, id.ResourceGroup, id.WorkspaceName, id.Name); err != nil { - return fmt.Errorf("deleting Log Analytics Data Export Rule %q (Resource Group %q / workspaceName %q): %+v", id.Name, id.ResourceGroup, id.WorkspaceName, err) + if _, err := client.Delete(ctx, id.ResourceGroup, id.WorkspaceName, id.DataexportName); err != nil { + return fmt.Errorf("deleting Log Analytics Data Export Rule %q (Resource Group %q / workspaceName %q): %+v", id.DataexportName, id.ResourceGroup, id.WorkspaceName, err) } return nil } -func flattenArmDataExportDestination(input *operationalinsights.Destination) string { +func flattenDataExportDestination(input *operationalinsights.Destination) string { if input == nil { return "" } diff --git a/azurerm/internal/services/loganalytics/log_analytics_data_export_resource_test.go b/azurerm/internal/services/loganalytics/log_analytics_data_export_resource_test.go new file mode 100644 index 000000000000..0ca08e2eb875 --- /dev/null +++ b/azurerm/internal/services/loganalytics/log_analytics_data_export_resource_test.go @@ -0,0 +1,210 @@ +package loganalytics_test + +import ( + "context" + "fmt" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" +) + +type LogAnalyticsDataExportRuleResource struct { +} + +func TestAccLogAnalyticsDataExportRule_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_data_export_rule", "test") + r := LogAnalyticsDataExportRuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + ExpectNonEmptyPlan: true, // Due to API changing case of attributes you need to ignore a non-empty plan for this resource + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccLogAnalyticsDataExportRule_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_data_export_rule", "test") + r := LogAnalyticsDataExportRuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicLower(data), + ExpectNonEmptyPlan: true, // Due to API changing case of attributes you need to ignore a non-empty plan for this resource + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectNonEmptyPlan: true, // Due to API changing case of attributes you need to ignore a non-empty plan for this resource + ExpectError: acceptance.RequiresImportError("azurerm_log_analytics_data_export_rule"), + }, + }) +} + +func TestAccLogAnalyticsDataExportRule_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_data_export_rule", "test") + r := LogAnalyticsDataExportRuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + ExpectNonEmptyPlan: true, // Due to API changing case of attributes you need to ignore a non-empty plan for this resource + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.update(data), + ExpectNonEmptyPlan: true, // Due to API changing case of attributes you need to ignore a non-empty plan for this resource + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccLogAnalyticsDataExportRule_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_data_export_rule", "test") + r := LogAnalyticsDataExportRuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + ExpectNonEmptyPlan: true, // Due to API changing case of attributes you need to ignore a non-empty plan for this resource + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t LogAnalyticsDataExportRuleResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.LogAnalyticsDataExportID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.LogAnalytics.DataExportClient.Get(ctx, id.ResourceGroup, id.WorkspaceName, id.DataexportName) + if err != nil { + return nil, fmt.Errorf("readingLog Analytics Data Export (%s): %+v", id.String(), err) + } + + 
return utils.Bool(resp.ID != nil), nil +} + +func (LogAnalyticsDataExportRuleResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-la-%d" + location = "%s" +} + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestLAW-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "PerGB2018" +} + +resource "azurerm_storage_account" "test" { + name = "acctestsads%s" + resource_group_name = azurerm_resource_group.test.name + + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomString) +} + +func (r LogAnalyticsDataExportRuleResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_data_export_rule" "test" { + name = "acctest-DER-%d" + resource_group_name = azurerm_resource_group.test.name + workspace_resource_id = azurerm_log_analytics_workspace.test.id + destination_resource_id = azurerm_storage_account.test.id + table_names = ["Heartbeat"] +} +`, r.template(data), data.RandomInteger) +} + +// I have to make this a lower case to get the requiresImport test to pass since the RP lowercases everything when it sends the data back to you +func (r LogAnalyticsDataExportRuleResource) basicLower(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_data_export_rule" "test" { + name = "acctest-der-%d" + resource_group_name = azurerm_resource_group.test.name + workspace_resource_id = azurerm_log_analytics_workspace.test.id + destination_resource_id = azurerm_storage_account.test.id + table_names = ["Heartbeat"] +} +`, r.template(data), data.RandomInteger) +} + +func (r LogAnalyticsDataExportRuleResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_data_export_rule" "import" { + name = azurerm_log_analytics_data_export_rule.test.name + resource_group_name = azurerm_resource_group.test.name + workspace_resource_id = azurerm_log_analytics_workspace.test.id + destination_resource_id = azurerm_storage_account.test.id + table_names = ["Heartbeat"] +} +`, r.basicLower(data)) +} + +func (r LogAnalyticsDataExportRuleResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_data_export_rule" "test" { + name = "acctest-DER-%d" + resource_group_name = azurerm_resource_group.test.name + workspace_resource_id = azurerm_log_analytics_workspace.test.id + destination_resource_id = azurerm_storage_account.test.id + table_names = ["Heartbeat", "Event"] +} +`, r.template(data), data.RandomInteger) +} + +func (r LogAnalyticsDataExportRuleResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_data_export_rule" "test" { + name = "acctest-DER-%d" + resource_group_name = azurerm_resource_group.test.name + workspace_resource_id = azurerm_log_analytics_workspace.test.id + destination_resource_id = azurerm_storage_account.test.id + table_names = ["Heartbeat"] + enabled = true +} +`, r.template(data), data.RandomInteger) +} diff --git a/azurerm/internal/services/loganalytics/log_analytics_datasource_import.go b/azurerm/internal/services/loganalytics/log_analytics_datasource_import.go index 
6aeb91a5fb50..2c5684270d2e 100644 --- a/azurerm/internal/services/loganalytics/log_analytics_datasource_import.go +++ b/azurerm/internal/services/loganalytics/log_analytics_datasource_import.go @@ -3,7 +3,7 @@ package loganalytics import ( "fmt" - "github.com/Azure/azure-sdk-for-go/services/preview/operationalinsights/mgmt/2020-03-01-preview/operationalinsights" + "github.com/Azure/azure-sdk-for-go/services/operationalinsights/mgmt/2020-08-01/operationalinsights" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" diff --git a/azurerm/internal/services/loganalytics/log_analytics_datasource_windows_event_resource.go b/azurerm/internal/services/loganalytics/log_analytics_datasource_windows_event_resource.go index 1e7ce42cc765..2e67197f1e96 100644 --- a/azurerm/internal/services/loganalytics/log_analytics_datasource_windows_event_resource.go +++ b/azurerm/internal/services/loganalytics/log_analytics_datasource_windows_event_resource.go @@ -6,7 +6,7 @@ import ( "log" "time" - "github.com/Azure/azure-sdk-for-go/services/preview/operationalinsights/mgmt/2020-03-01-preview/operationalinsights" + "github.com/Azure/azure-sdk-for-go/services/operationalinsights/mgmt/2020-08-01/operationalinsights" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/structure" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" @@ -15,6 +15,7 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/validate" azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/set" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/state" @@ -22,12 +23,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmLogAnalyticsDataSourceWindowsEvent() *schema.Resource { +func resourceLogAnalyticsDataSourceWindowsEvent() *schema.Resource { return &schema.Resource{ - Create: resourceArmLogAnalyticsDataSourceWindowsEventCreateUpdate, - Read: resourceArmLogAnalyticsDataSourceWindowsEventRead, - Update: resourceArmLogAnalyticsDataSourceWindowsEventCreateUpdate, - Delete: resourceArmLogAnalyticsDataSourceWindowsEventDelete, + Create: resourceLogAnalyticsDataSourceWindowsEventCreateUpdate, + Read: resourceLogAnalyticsDataSourceWindowsEventRead, + Update: resourceLogAnalyticsDataSourceWindowsEventCreateUpdate, + Delete: resourceLogAnalyticsDataSourceWindowsEventDelete, Importer: azSchema.ValidateResourceIDPriorToImportThen(func(id string) error { _, err := parse.LogAnalyticsDataSourceID(id) @@ -56,7 +57,7 @@ func resourceArmLogAnalyticsDataSourceWindowsEvent() *schema.Resource { Required: true, ForceNew: true, DiffSuppressFunc: suppress.CaseDifference, - ValidateFunc: ValidateAzureRmLogAnalyticsWorkspaceName, + ValidateFunc: validate.LogAnalyticsWorkspaceName, }, "event_log_name": { @@ -93,7 +94,7 @@ type dataSourceWindowsEventEventType struct { EventType string 
`json:"eventType"` } -func resourceArmLogAnalyticsDataSourceWindowsEventCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceLogAnalyticsDataSourceWindowsEventCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).LogAnalytics.DataSourcesClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -136,10 +137,10 @@ func resourceArmLogAnalyticsDataSourceWindowsEventCreateUpdate(d *schema.Resourc } d.SetId(*resp.ID) - return resourceArmLogAnalyticsDataSourceWindowsEventRead(d, meta) + return resourceLogAnalyticsDataSourceWindowsEventRead(d, meta) } -func resourceArmLogAnalyticsDataSourceWindowsEventRead(d *schema.ResourceData, meta interface{}) error { +func resourceLogAnalyticsDataSourceWindowsEventRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).LogAnalytics.DataSourcesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -181,7 +182,7 @@ func resourceArmLogAnalyticsDataSourceWindowsEventRead(d *schema.ResourceData, m return nil } -func resourceArmLogAnalyticsDataSourceWindowsEventDelete(d *schema.ResourceData, meta interface{}) error { +func resourceLogAnalyticsDataSourceWindowsEventDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).LogAnalytics.DataSourcesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/loganalytics/log_analytics_datasource_windows_event_resource_test.go b/azurerm/internal/services/loganalytics/log_analytics_datasource_windows_event_resource_test.go new file mode 100644 index 000000000000..dc10d40091ae --- /dev/null +++ b/azurerm/internal/services/loganalytics/log_analytics_datasource_windows_event_resource_test.go @@ -0,0 +1,168 @@ +package loganalytics_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type LogAnalyticsDataSourceWindowsEventResource struct { +} + +func TestAccLogAnalyticsDataSourceWindowsEvent_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_datasource_windows_event", "test") + r := LogAnalyticsDataSourceWindowsEventResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccLogAnalyticsDataSourceWindowsEvent_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_datasource_windows_event", "test") + r := LogAnalyticsDataSourceWindowsEventResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccLogAnalyticsDataSourceWindowsEvent_update(t *testing.T) { + data := 
acceptance.BuildTestData(t, "azurerm_log_analytics_datasource_windows_event", "test") + r := LogAnalyticsDataSourceWindowsEventResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccLogAnalyticsDataSourceWindowsEvent_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_datasource_windows_event", "test") + r := LogAnalyticsDataSourceWindowsEventResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (t LogAnalyticsDataSourceWindowsEventResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.LogAnalyticsDataSourceID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.LogAnalytics.DataSourcesClient.Get(ctx, id.ResourceGroup, id.Workspace, id.Name) + if err != nil { + return nil, fmt.Errorf("readingLog Analytics Data Source Windows Event (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (r LogAnalyticsDataSourceWindowsEventResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_datasource_windows_event" "test" { + name = "acctestLADS-WE-%d" + resource_group_name = azurerm_resource_group.test.name + workspace_name = azurerm_log_analytics_workspace.test.name + event_log_name = "Application" + event_types = ["error"] +} +`, r.template(data), data.RandomInteger) +} + +func (r LogAnalyticsDataSourceWindowsEventResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_datasource_windows_event" "test" { + name = "acctestLADS-WE-%d" + resource_group_name = azurerm_resource_group.test.name + workspace_name = azurerm_log_analytics_workspace.test.name + event_log_name = "Application" + event_types = ["InforMation", "warning", "Error"] +} +`, r.template(data), data.RandomInteger) +} + +func (r LogAnalyticsDataSourceWindowsEventResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_datasource_windows_event" "import" { + name = azurerm_log_analytics_datasource_windows_event.test.name + resource_group_name = azurerm_log_analytics_datasource_windows_event.test.resource_group_name + workspace_name = azurerm_log_analytics_datasource_windows_event.test.workspace_name + event_log_name = azurerm_log_analytics_datasource_windows_event.test.event_log_name + event_types = azurerm_log_analytics_datasource_windows_event.test.event_types +} +`, r.basic(data)) +} + +func (LogAnalyticsDataSourceWindowsEventResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-la-%d" + location = "%s" +} + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestLAW-%d" + location = 
azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "PerGB2018" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/loganalytics/log_analytics_datasource_windows_performance_counter_resource.go b/azurerm/internal/services/loganalytics/log_analytics_datasource_windows_performance_counter_resource.go index fb142cdbbd2d..dc13f5a90898 100644 --- a/azurerm/internal/services/loganalytics/log_analytics_datasource_windows_performance_counter_resource.go +++ b/azurerm/internal/services/loganalytics/log_analytics_datasource_windows_performance_counter_resource.go @@ -7,7 +7,7 @@ import ( "math" "time" - "github.com/Azure/azure-sdk-for-go/services/preview/operationalinsights/mgmt/2020-03-01-preview/operationalinsights" + "github.com/Azure/azure-sdk-for-go/services/operationalinsights/mgmt/2020-08-01/operationalinsights" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/structure" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" @@ -16,17 +16,18 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/validate" azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmLogAnalyticsDataSourceWindowsPerformanceCounter() *schema.Resource { +func resourceLogAnalyticsDataSourceWindowsPerformanceCounter() *schema.Resource { return &schema.Resource{ - Create: resourceArmLogAnalyticsDataSourceWindowsPerformanceCounterCreateUpdate, - Read: resourceArmLogAnalyticsDataSourceWindowsPerformanceCounterRead, - Update: resourceArmLogAnalyticsDataSourceWindowsPerformanceCounterCreateUpdate, - Delete: resourceArmLogAnalyticsDataSourceWindowsPerformanceCounterDelete, + Create: resourceLogAnalyticsDataSourceWindowsPerformanceCounterCreateUpdate, + Read: resourceLogAnalyticsDataSourceWindowsPerformanceCounterRead, + Update: resourceLogAnalyticsDataSourceWindowsPerformanceCounterCreateUpdate, + Delete: resourceLogAnalyticsDataSourceWindowsPerformanceCounterDelete, Importer: azSchema.ValidateResourceIDPriorToImportThen(func(id string) error { _, err := parse.LogAnalyticsDataSourceID(id) @@ -55,7 +56,7 @@ func resourceArmLogAnalyticsDataSourceWindowsPerformanceCounter() *schema.Resour Required: true, ForceNew: true, DiffSuppressFunc: suppress.CaseDifference, - ValidateFunc: ValidateAzureRmLogAnalyticsWorkspaceName, + ValidateFunc: validate.LogAnalyticsWorkspaceName, }, "counter_name": { @@ -93,7 +94,7 @@ type dataSourceWindowsPerformanceCounterProperty struct { ObjectName string `json:"objectName"` } -func resourceArmLogAnalyticsDataSourceWindowsPerformanceCounterCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceLogAnalyticsDataSourceWindowsPerformanceCounterCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).LogAnalytics.DataSourcesClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -142,10 
+143,10 @@ func resourceArmLogAnalyticsDataSourceWindowsPerformanceCounterCreateUpdate(d *s d.SetId(*resp.ID) - return resourceArmLogAnalyticsDataSourceWindowsPerformanceCounterRead(d, meta) + return resourceLogAnalyticsDataSourceWindowsPerformanceCounterRead(d, meta) } -func resourceArmLogAnalyticsDataSourceWindowsPerformanceCounterRead(d *schema.ResourceData, meta interface{}) error { +func resourceLogAnalyticsDataSourceWindowsPerformanceCounterRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).LogAnalytics.DataSourcesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -189,7 +190,7 @@ func resourceArmLogAnalyticsDataSourceWindowsPerformanceCounterRead(d *schema.Re return nil } -func resourceArmLogAnalyticsDataSourceWindowsPerformanceCounterDelete(d *schema.ResourceData, meta interface{}) error { +func resourceLogAnalyticsDataSourceWindowsPerformanceCounterDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).LogAnalytics.DataSourcesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/loganalytics/log_analytics_datasource_windows_performance_counter_resource_test.go b/azurerm/internal/services/loganalytics/log_analytics_datasource_windows_performance_counter_resource_test.go new file mode 100644 index 000000000000..80cc18d00c40 --- /dev/null +++ b/azurerm/internal/services/loganalytics/log_analytics_datasource_windows_performance_counter_resource_test.go @@ -0,0 +1,183 @@ +package loganalytics_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type LogAnalyticsDataSourceWindowsPerformanceCounterResource struct { +} + +func TestAccLogAnalyticsDataSourceWindowsPerformanceCounter_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_datasource_windows_performance_counter", "test") + r := LogAnalyticsDataSourceWindowsPerformanceCounterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("object_name").HasValue("CPU"), + check.That(data.ResourceName).Key("instance_name").HasValue("*"), + check.That(data.ResourceName).Key("counter_name").HasValue("CPU"), + check.That(data.ResourceName).Key("interval_seconds").HasValue("10"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccLogAnalyticsDataSourceWindowsPerformanceCounter_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_datasource_windows_performance_counter", "test") + r := LogAnalyticsDataSourceWindowsPerformanceCounterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + 
check.That(data.ResourceName).Key("object_name").HasValue("Mem"), + check.That(data.ResourceName).Key("instance_name").HasValue("inst1"), + check.That(data.ResourceName).Key("counter_name").HasValue("Mem"), + check.That(data.ResourceName).Key("interval_seconds").HasValue("20"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccLogAnalyticsDataSourceWindowsPerformanceCounter_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_datasource_windows_performance_counter", "test") + r := LogAnalyticsDataSourceWindowsPerformanceCounterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("object_name").HasValue("CPU"), + check.That(data.ResourceName).Key("instance_name").HasValue("*"), + check.That(data.ResourceName).Key("counter_name").HasValue("CPU"), + check.That(data.ResourceName).Key("interval_seconds").HasValue("10"), + ), + }, + data.ImportStep(), + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("object_name").HasValue("Mem"), + check.That(data.ResourceName).Key("instance_name").HasValue("inst1"), + check.That(data.ResourceName).Key("counter_name").HasValue("Mem"), + check.That(data.ResourceName).Key("interval_seconds").HasValue("20"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccLogAnalyticsDataSourceWindowsPerformanceCounter_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_datasource_windows_performance_counter", "test") + r := LogAnalyticsDataSourceWindowsPerformanceCounterResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (t LogAnalyticsDataSourceWindowsPerformanceCounterResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.LogAnalyticsDataSourceID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.LogAnalytics.DataSourcesClient.Get(ctx, id.ResourceGroup, id.Workspace, id.Name) + if err != nil { + return nil, fmt.Errorf("readingLog Analytics Data Source Windows Event (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (r LogAnalyticsDataSourceWindowsPerformanceCounterResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_datasource_windows_performance_counter" "test" { + name = "acctestLADS-WPC-%d" + resource_group_name = azurerm_resource_group.test.name + workspace_name = azurerm_log_analytics_workspace.test.name + object_name = "CPU" + instance_name = "*" + counter_name = "CPU" + interval_seconds = 10 +} +`, r.template(data), data.RandomInteger) +} + +func (r LogAnalyticsDataSourceWindowsPerformanceCounterResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_datasource_windows_performance_counter" "test" { + name = "acctestLADS-WPC-%d" + resource_group_name = azurerm_resource_group.test.name + workspace_name = azurerm_log_analytics_workspace.test.name + object_name = "Mem" + instance_name = "inst1" + counter_name = "Mem" + interval_seconds = 20 +} +`, r.template(data), data.RandomInteger) +} 
+ +func (r LogAnalyticsDataSourceWindowsPerformanceCounterResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_datasource_windows_performance_counter" "import" { + name = azurerm_log_analytics_datasource_windows_performance_counter.test.name + resource_group_name = azurerm_log_analytics_datasource_windows_performance_counter.test.resource_group_name + workspace_name = azurerm_log_analytics_datasource_windows_performance_counter.test.workspace_name + object_name = azurerm_log_analytics_datasource_windows_performance_counter.test.object_name + instance_name = azurerm_log_analytics_datasource_windows_performance_counter.test.instance_name + counter_name = azurerm_log_analytics_datasource_windows_performance_counter.test.counter_name + interval_seconds = azurerm_log_analytics_datasource_windows_performance_counter.test.interval_seconds +} +`, r.basic(data)) +} + +func (LogAnalyticsDataSourceWindowsPerformanceCounterResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-la-%d" + location = "%s" +} + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestLAW-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "PerGB2018" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/loganalytics/log_analytics_linked_service.go b/azurerm/internal/services/loganalytics/log_analytics_linked_service.go new file mode 100644 index 000000000000..a7df9734e2fb --- /dev/null +++ b/azurerm/internal/services/loganalytics/log_analytics_linked_service.go @@ -0,0 +1,42 @@ +package loganalytics + +import ( + "context" + "fmt" + "log" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" +) + +func logAnalyticsLinkedServiceDeleteWaitForState(ctx context.Context, meta interface{}, timeout time.Duration, resourceGroup string, workspaceName string, serviceType string) *resource.StateChangeConf { + return &resource.StateChangeConf{ + Pending: []string{"Deleting"}, + Target: []string{"Deleted"}, + MinTimeout: 30 * time.Second, + Timeout: timeout, + Refresh: logAnalyticsLinkedServiceRefresh(ctx, meta, resourceGroup, workspaceName, serviceType), + } +} + +func logAnalyticsLinkedServiceRefresh(ctx context.Context, meta interface{}, resourceGroup string, workspaceName string, serviceType string) resource.StateRefreshFunc { + return func() (interface{}, string, error) { + client := meta.(*clients.Client).LogAnalytics.LinkedServicesClient + + log.Printf("[INFO] checking on state of Log Analytics Linked Service '%s/%s' (Resource Group %q)", workspaceName, serviceType, resourceGroup) + + resp, err := client.Get(ctx, resourceGroup, workspaceName, serviceType) + if err != nil { + return nil, "nil", fmt.Errorf("polling for the status of Log Analytics Linked Service '%s/%s' (Resource Group %q): %+v", workspaceName, serviceType, resourceGroup, err) + } + + // (@WodansSon) - The service returns status code 200 even if the resource does not exist + // instead it returns an empty slice...
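+ // so a response with nil LinkedServiceProperties is treated below as confirmation that the linked service has been deleted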
+ if props := resp.LinkedServiceProperties; props == nil { + return resp, "Deleted", nil + } + + return resp, "Deleting", nil + } +} diff --git a/azurerm/internal/services/loganalytics/log_analytics_linked_service_resource.go b/azurerm/internal/services/loganalytics/log_analytics_linked_service_resource.go index c3ae33889c25..ee4eabb0c644 100644 --- a/azurerm/internal/services/loganalytics/log_analytics_linked_service_resource.go +++ b/azurerm/internal/services/loganalytics/log_analytics_linked_service_resource.go @@ -3,9 +3,10 @@ package loganalytics import ( "fmt" "log" + "strings" "time" - "github.com/Azure/azure-sdk-for-go/services/preview/operationalinsights/mgmt/2020-03-01-preview/operationalinsights" + "github.com/Azure/azure-sdk-for-go/services/operationalinsights/mgmt/2020-08-01/operationalinsights" "github.com/hashicorp/go-azure-helpers/response" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" @@ -13,17 +14,20 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + validateAuto "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/automation/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmLogAnalyticsLinkedService() *schema.Resource { +func resourceLogAnalyticsLinkedService() *schema.Resource { return &schema.Resource{ - Create: resourceArmLogAnalyticsLinkedServiceCreateUpdate, - Read: resourceArmLogAnalyticsLinkedServiceRead, - Update: resourceArmLogAnalyticsLinkedServiceCreateUpdate, - Delete: resourceArmLogAnalyticsLinkedServiceDelete, + Create: resourceLogAnalyticsLinkedServiceCreateUpdate, + Read: resourceLogAnalyticsLinkedServiceRead, + Update: resourceLogAnalyticsLinkedServiceCreateUpdate, + Delete: resourceLogAnalyticsLinkedServiceDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -39,29 +43,62 @@ func resourceArmLogAnalyticsLinkedService() *schema.Resource { Schema: map[string]*schema.Schema{ "resource_group_name": azure.SchemaResourceGroupNameDiffSuppress(), + // TODO: Remove in 3.0 "workspace_name": { Type: schema.TypeString, - Required: true, - ForceNew: true, + Computed: true, + Optional: true, DiffSuppressFunc: suppress.CaseDifference, - ValidateFunc: ValidateAzureRmLogAnalyticsWorkspaceName, + ValidateFunc: validate.LogAnalyticsWorkspaceName, + ExactlyOneOf: []string{"workspace_name", "workspace_id"}, + Deprecated: "This field has been deprecated in favour of `workspace_id` and will be removed in a future version of the provider", }, + "workspace_id": { + Type: schema.TypeString, + Computed: true, + Optional: true, + DiffSuppressFunc: suppress.CaseDifference, + ValidateFunc: azure.ValidateResourceID, + ExactlyOneOf: []string{"workspace_name", "workspace_id"}, + }, + + // TODO: Remove in 3.0 "linked_service_name": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - Default: 
"automation", + Type: schema.TypeString, + Computed: true, + Optional: true, + DiffSuppressFunc: suppress.CaseDifference, ValidateFunc: validation.StringInSlice([]string{ "automation", + "cluster", }, false), + Deprecated: "This field has been deprecated and will be removed in a future version of the provider", }, + // TODO: Remove in 3.0 "resource_id": { Type: schema.TypeString, - Required: true, - ForceNew: true, + Computed: true, + Optional: true, + ValidateFunc: azure.ValidateResourceID, + ExactlyOneOf: []string{"read_access_id", "write_access_id", "resource_id"}, + Deprecated: "This field has been deprecated in favour of `read_access_id` and will be removed in a future version of the provider", + }, + + "read_access_id": { + Type: schema.TypeString, + Computed: true, + Optional: true, + ValidateFunc: azure.ValidateResourceID, + ExactlyOneOf: []string{"read_access_id", "write_access_id", "resource_id"}, + }, + + "write_access_id": { + Type: schema.TypeString, + Optional: true, ValidateFunc: azure.ValidateResourceID, + ExactlyOneOf: []string{"read_access_id", "write_access_id", "resource_id"}, }, // Exported properties @@ -72,25 +109,150 @@ func resourceArmLogAnalyticsLinkedService() *schema.Resource { "tags": tags.Schema(), }, + + // TODO: Remove in 3.0 + CustomizeDiff: func(d *schema.ResourceDiff, v interface{}) error { + if d.HasChange("linked_service_name") { + oldServiceName, newServiceName := d.GetChange("linked_service_name") + + // This is an unneeded field, if it is removed you can safely ignore it + // as it's value can be(and is) derived via the 'read_access_id' field. It + // is only here for backwards compatibility to avoid a breaking change + if newServiceName.(string) != "" { + // Ignore change if it's in case only + if !strings.EqualFold(oldServiceName.(string), newServiceName.(string)) { + d.ForceNew("linked_service_name") + } + } + } + + if d.HasChange("workspace_id") { + forceNew := true + _, newWorkspaceName := d.GetChange("workspace_name") + oldWorkspaceID, newWorkspaceID := d.GetChange("workspace_id") + + // If the workspcae ID has been removed, only do a force new if the new workspace name + // and the old workspace ID points to different workspaces + if oldWorkspaceID.(string) != "" && newWorkspaceName.(string) != "" && newWorkspaceID.(string) == "" { + workspace, err := parse.LogAnalyticsWorkspaceID(oldWorkspaceID.(string)) + if err == nil { + if workspace.WorkspaceName == newWorkspaceName.(string) { + forceNew = false + } + } + } + + if forceNew { + d.ForceNew("workspace_id") + } + } + + if d.HasChange("workspace_name") { + forceNew := true + oldWorkspaceName, newWorkspaceName := d.GetChange("workspace_name") + _, newWorkspaceID := d.GetChange("workspace_id") + + // If the workspcae name has been removed, only do a force new if the new workspace ID + // and the old workspace name points to different workspaces + if oldWorkspaceName.(string) != "" && newWorkspaceID.(string) != "" && newWorkspaceName.(string) == "" { + workspace, err := parse.LogAnalyticsWorkspaceID(newWorkspaceID.(string)) + if err == nil { + if workspace.WorkspaceName == oldWorkspaceName.(string) { + forceNew = false + } + } + } + + if forceNew { + d.ForceNew("workspace_name") + } + } + + // TODO: Remove in 3.0 + if d.HasChange("resource_id") { + if resourceID := d.Get("resource_id").(string); resourceID != "" { + if _, err := validateAuto.AutomationAccountID(resourceID, "resource_id"); err != nil { + return fmt.Errorf("'resource_id' must be an Automation Account resource ID, got %q", 
resourceID) + } + } + + if d.HasChange("read_access_id") { + if readAccessID := d.Get("read_access_id").(string); readAccessID != "" { + if _, err := validateAuto.AutomationAccountID(readAccessID, "read_access_id"); err != nil { + return fmt.Errorf("'read_access_id' must be an Automation Account resource ID, got %q", readAccessID) + } + } + } + + if d.HasChange("write_access_id") { + if writeAccessID := d.Get("write_access_id").(string); writeAccessID != "" { + if _, err := validate.LogAnalyticsClusterID(writeAccessID, "write_access_id"); err != nil { + return fmt.Errorf("'write_access_id' must be a Log Analytics Cluster resource ID, got %q", writeAccessID) + } + } + } + + return nil + }, } } -func resourceArmLogAnalyticsLinkedServiceCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceLogAnalyticsLinkedServiceCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).LogAnalytics.LinkedServicesClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() log.Printf("[INFO] preparing arguments for AzureRM Log Analytics Linked Services creation.") - resGroup := d.Get("resource_group_name").(string) - workspaceName := d.Get("workspace_name").(string) - lsName := d.Get("linked_service_name").(string) + // TODO: Remove in 3.0 + var tmpSpace parse.LogAnalyticsWorkspaceId + var workspaceId string + + resourceGroup := d.Get("resource_group_name").(string) + readAccess := d.Get("read_access_id").(string) + writeAccess := d.Get("write_access_id").(string) + linkedServiceName := d.Get("linked_service_name").(string) + t := d.Get("tags").(map[string]interface{}) + + if resourceId := d.Get("resource_id").(string); resourceId != "" { + readAccess = resourceId + } + + if workspaceName := d.Get("workspace_name").(string); workspaceName != "" { + tmpSpace = parse.NewLogAnalyticsWorkspaceID(subscriptionId, resourceGroup, workspaceName) + workspaceId = tmpSpace.ID() + } else { + workspaceId = d.Get("workspace_id").(string) + } + + workspace, err := parse.LogAnalyticsWorkspaceID(workspaceId) + if err != nil { + return fmt.Errorf("Linked Service (Resource Group %q) unable to parse workspace id: %+v", resourceGroup, err) + } + + id := parse.NewLogAnalyticsLinkedServiceID(subscriptionId, resourceGroup, workspace.WorkspaceName, LogAnalyticsLinkedServiceType(readAccess)) + + if linkedServiceName != "" { + if !strings.EqualFold(linkedServiceName, LogAnalyticsLinkedServiceType(readAccess)) { + return fmt.Errorf("Linked Service '%s/%s' (Resource Group %q): 'linked_service_name' %q does not match expected value of %q", workspace.WorkspaceName, id.LinkedServiceName, resourceGroup, linkedServiceName, LogAnalyticsLinkedServiceType(readAccess)) + } + } + + if strings.EqualFold(id.LinkedServiceName, "Cluster") && writeAccess == "" { + return fmt.Errorf("Linked Service '%s/%s' (Resource Group %q): A linked Log Analytics Cluster requires the 'write_access_id' attribute to be set", workspace.WorkspaceName, id.LinkedServiceName, resourceGroup) + } + + if strings.EqualFold(id.LinkedServiceName, "Automation") && readAccess == "" { + return fmt.Errorf("Linked Service '%s/%s' (Resource Group %q): A linked Automation Account requires the 'read_access_id' attribute to be set", workspace.WorkspaceName, id.LinkedServiceName, resourceGroup) + } if d.IsNewResource() { - existing, err := client.Get(ctx, resGroup, workspaceName, lsName) + existing, err := client.Get(ctx,
resourceGroup, workspace.WorkspaceName, id.LinkedServiceName) if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing Linked Service %q (Workspace %q / Resource Group %q): %s", lsName, workspaceName, resGroup, err) + return fmt.Errorf("checking for presence of existing Linked Service '%s/%s' (Resource Group %q): %+v", workspace.WorkspaceName, id.LinkedServiceName, resourceGroup, err) } } @@ -99,35 +261,41 @@ func resourceArmLogAnalyticsLinkedServiceCreateUpdate(d *schema.ResourceData, me } } - resourceId := d.Get("resource_id").(string) - t := d.Get("tags").(map[string]interface{}) - parameters := operationalinsights.LinkedService{ - LinkedServiceProperties: &operationalinsights.LinkedServiceProperties{ - ResourceID: utils.String(resourceId), - }, - Tags: tags.Expand(t), + LinkedServiceProperties: &operationalinsights.LinkedServiceProperties{}, + Tags: tags.Expand(t), + } + + if id.LinkedServiceName == "Automation" { + parameters.LinkedServiceProperties.ResourceID = utils.String(readAccess) } - if _, err := client.CreateOrUpdate(ctx, resGroup, workspaceName, lsName, parameters); err != nil { - return fmt.Errorf("Error creating Linked Service %q (Workspace %q / Resource Group %q): %+v", lsName, workspaceName, resGroup, err) + if id.LinkedServiceName == "Cluster" { + parameters.LinkedServiceProperties.WriteAccessResourceID = utils.String(writeAccess) } - read, err := client.Get(ctx, resGroup, workspaceName, lsName) + future, err := client.CreateOrUpdate(ctx, resourceGroup, workspace.WorkspaceName, id.LinkedServiceName, parameters) if err != nil { - return fmt.Errorf("Error retrieving Linked Service %q (Worksppce %q / Resource Group %q): %+v", lsName, workspaceName, resGroup, err) + return fmt.Errorf("creating Linked Service '%s/%s' (Resource Group %q): %+v", workspace.WorkspaceName, id.LinkedServiceName, resourceGroup, err) } - if read.ID == nil { - return fmt.Errorf("Cannot read Linked Service %q (Workspace %q / Resource Group %q) ID", lsName, workspaceName, resGroup) + + if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting on creating future for Linked Service '%s/%s' (Resource Group %q): %+v", workspace.WorkspaceName, id.LinkedServiceName, resourceGroup, err) + } + + _, err = client.Get(ctx, resourceGroup, workspace.WorkspaceName, id.LinkedServiceName) + if err != nil { + return fmt.Errorf("retrieving Linked Service '%s/%s' (Resource Group %q): %+v", workspace.WorkspaceName, id.LinkedServiceName, resourceGroup, err) } - d.SetId(*read.ID) + d.SetId(id.ID()) - return resourceArmLogAnalyticsLinkedServiceRead(d, meta) + return resourceLogAnalyticsLinkedServiceRead(d, meta) } -func resourceArmLogAnalyticsLinkedServiceRead(d *schema.ResourceData, meta interface{}) error { +func resourceLogAnalyticsLinkedServiceRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).LogAnalytics.LinkedServicesClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -136,32 +304,36 @@ func resourceArmLogAnalyticsLinkedServiceRead(d *schema.ResourceData, meta inter return err } - resGroup := id.ResourceGroup + resourceGroup := id.ResourceGroup workspaceName := id.Path["workspaces"] - lsName := id.Path["linkedservices"] + serviceType := id.Path["linkedServices"] + workspace := parse.NewLogAnalyticsWorkspaceID(subscriptionId, resourceGroup, workspaceName) - resp, 
err := client.Get(ctx, resGroup, workspaceName, lsName) + resp, err := client.Get(ctx, resourceGroup, workspaceName, serviceType) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("Error making Read request on AzureRM Log Analytics Linked Service '%s': %+v", lsName, err) + return fmt.Errorf("making Read request on AzureRM Log Analytics Linked Service '%s/%s' (Resource Group %q): %+v", workspace.WorkspaceName, serviceType, resourceGroup, err) } d.Set("name", resp.Name) - d.Set("resource_group_name", resGroup) + d.Set("resource_group_name", resourceGroup) + d.Set("workspace_id", workspace.ID()) d.Set("workspace_name", workspaceName) - d.Set("linked_service_name", lsName) + d.Set("linked_service_name", serviceType) if props := resp.LinkedServiceProperties; props != nil { d.Set("resource_id", props.ResourceID) + d.Set("read_access_id", props.ResourceID) + d.Set("write_access_id", props.WriteAccessResourceID) } return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmLogAnalyticsLinkedServiceDelete(d *schema.ResourceData, meta interface{}) error { +func resourceLogAnalyticsLinkedServiceDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).LogAnalytics.LinkedServicesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() @@ -171,20 +343,36 @@ func resourceArmLogAnalyticsLinkedServiceDelete(d *schema.ResourceData, meta int return err } - resGroup := id.ResourceGroup + resourceGroup := id.ResourceGroup workspaceName := id.Path["workspaces"] - lsName := id.Path["linkedservices"] + serviceType := id.Path["linkedServices"] - future, err := client.Delete(ctx, resGroup, workspaceName, lsName) + future, err := client.Delete(ctx, resourceGroup, workspaceName, serviceType) if err != nil { - return fmt.Errorf("error deleting Log Analytics Linked Service %q (Workspace %q / Resource Group %q): %+v", lsName, workspaceName, resGroup, err) + return fmt.Errorf("deleting Log Analytics Linked Service '%s/%s' (Resource Group %q): %+v", workspaceName, serviceType, resourceGroup, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { if !response.WasNotFound(future.Response()) { - return fmt.Errorf("waiting for deletion of Log Analytics Linked Service %q (Workspace %q / Resource Group %q): %+v", lsName, workspaceName, resGroup, err) + return fmt.Errorf("waiting for deletion of Log Analytics Linked Service '%s/%s' (Resource Group %q): %+v", workspaceName, serviceType, resourceGroup, err) } } + // (@WodansSon) - This is a bug in the service API, it returns instantly from the delete call with a 200 + // so we must wait for the state to change before we return from the delete function + deleteWait := logAnalyticsLinkedServiceDeleteWaitForState(ctx, meta, d.Timeout(schema.TimeoutDelete), resourceGroup, workspaceName, serviceType) + + if _, err := deleteWait.WaitForState(); err != nil { + return fmt.Errorf("waiting for Log Analytics Linked Service '%s/%s' (Resource Group %q) to finish deleting: %+v", workspaceName, serviceType, resourceGroup, err) + } + return nil } + +func LogAnalyticsLinkedServiceType(readAccessId string) string { + if readAccessId != "" { + return "Automation" + } + + return "Cluster" +} diff --git a/azurerm/internal/services/loganalytics/log_analytics_linked_service_resource_test.go b/azurerm/internal/services/loganalytics/log_analytics_linked_service_resource_test.go new file mode 100644 index 000000000000..bcc30288b3c5 --- /dev/null +++
b/azurerm/internal/services/loganalytics/log_analytics_linked_service_resource_test.go @@ -0,0 +1,223 @@ +package loganalytics_test + +import ( + "context" + "fmt" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" +) + +type LogAnalyticsLinkedServiceResource struct { +} + +func TestAccLogAnalyticsLinkedService_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_linked_service", "test") + r := LogAnalyticsLinkedServiceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue(fmt.Sprintf("acctestLAW-%d/Automation", data.RandomInteger)), + ), + }, + data.ImportStep(), + }) +} + +func TestAccLogAnalyticsLinkedService_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_linked_service", "test") + r := LogAnalyticsLinkedServiceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue(fmt.Sprintf("acctestLAW-%d/Automation", data.RandomInteger)), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_log_analytics_linked_service"), + }, + }) +} + +func TestAccLogAnalyticsLinkedService_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_linked_service", "test") + r := LogAnalyticsLinkedServiceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +// TODO: Remove in 3.0 +func TestAccLogAnalyticsLinkedService_legacy(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_linked_service", "test") + r := LogAnalyticsLinkedServiceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.legacy(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccLogAnalyticsLinkedService_withWriteAccessResourceId(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_linked_service", "test") + r := LogAnalyticsLinkedServiceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withWriteAccessResourceId(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t LogAnalyticsLinkedServiceResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + resourceGroup := id.ResourceGroup + workspaceName := id.Path["workspaces"] + serviceType := 
id.Path["linkedServices"] + + resp, err := clients.LogAnalytics.LinkedServicesClient.Get(ctx, resourceGroup, workspaceName, serviceType) + if err != nil { + return nil, fmt.Errorf("readingLog Analytics Linked Service (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (r LogAnalyticsLinkedServiceResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_linked_service" "test" { + resource_group_name = azurerm_resource_group.test.name + workspace_id = azurerm_log_analytics_workspace.test.id + read_access_id = azurerm_automation_account.test.id +} +`, r.template(data)) +} + +func (r LogAnalyticsLinkedServiceResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_linked_service" "import" { + resource_group_name = azurerm_log_analytics_linked_service.test.resource_group_name + workspace_id = azurerm_log_analytics_linked_service.test.workspace_id + read_access_id = azurerm_log_analytics_linked_service.test.read_access_id +} +`, r.basic(data)) +} + +func (r LogAnalyticsLinkedServiceResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_linked_service" "test" { + resource_group_name = azurerm_resource_group.test.name + workspace_id = azurerm_log_analytics_workspace.test.id + read_access_id = azurerm_automation_account.test.id +} +`, r.template(data)) +} + +// TODO: Remove in 3.0 +func (r LogAnalyticsLinkedServiceResource) legacy(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_linked_service" "test" { + resource_group_name = azurerm_resource_group.test.name + workspace_name = azurerm_log_analytics_workspace.test.name + linked_service_name = "automation" + resource_id = azurerm_automation_account.test.id +} +`, r.template(data)) +} + +func (LogAnalyticsLinkedServiceResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-la-%d" + location = "%s" +} + +resource "azurerm_automation_account" "test" { + name = "acctestAutomation-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku_name = "Basic" + + tags = { + Environment = "Test" + } +} + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestLAW-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "PerGB2018" + retention_in_days = 30 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r LogAnalyticsLinkedServiceResource) withWriteAccessResourceId(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_cluster" "test" { + name = "acctest-LA-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + identity { + type = "SystemAssigned" + } +} + +resource "azurerm_log_analytics_linked_service" "test" { + resource_group_name = azurerm_resource_group.test.name + workspace_id = azurerm_log_analytics_workspace.test.id + write_access_id = azurerm_log_analytics_cluster.test.id +} +`, r.template(data), data.RandomInteger) +} diff --git a/azurerm/internal/services/loganalytics/log_analytics_linked_storage_account_resource.go 
b/azurerm/internal/services/loganalytics/log_analytics_linked_storage_account_resource.go index ed09b642e1a4..5fb1cec85a04 100644 --- a/azurerm/internal/services/loganalytics/log_analytics_linked_storage_account_resource.go +++ b/azurerm/internal/services/loganalytics/log_analytics_linked_storage_account_resource.go @@ -6,7 +6,9 @@ import ( "strings" "time" - "github.com/Azure/azure-sdk-for-go/services/preview/operationalinsights/mgmt/2020-03-01-preview/operationalinsights" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/validate" + + "github.com/Azure/azure-sdk-for-go/services/operationalinsights/mgmt/2020-08-01/operationalinsights" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" @@ -14,17 +16,16 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/suppress" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmLogAnalyticsLinkedStorageAccount() *schema.Resource { +func resourceLogAnalyticsLinkedStorageAccount() *schema.Resource { return &schema.Resource{ - Create: resourceArmLogAnalyticsLinkedStorageAccountCreateUpdate, - Read: resourceArmLogAnalyticsLinkedStorageAccountRead, - Update: resourceArmLogAnalyticsLinkedStorageAccountCreateUpdate, - Delete: resourceArmLogAnalyticsLinkedStorageAccountDelete, + Create: resourceLogAnalyticsLinkedStorageAccountCreateUpdate, + Read: resourceLogAnalyticsLinkedStorageAccountRead, + Update: resourceLogAnalyticsLinkedStorageAccountCreateUpdate, + Delete: resourceLogAnalyticsLinkedStorageAccountDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -47,8 +48,9 @@ func resourceArmLogAnalyticsLinkedStorageAccount() *schema.Resource { strings.ToLower(string(operationalinsights.CustomLogs)), strings.ToLower(string(operationalinsights.AzureWatson)), strings.ToLower(string(operationalinsights.Query)), - strings.ToLower(string(operationalinsights.Ingestion)), strings.ToLower(string(operationalinsights.Alerts)), + // Value removed from enum in 2020-08-01, but effectively still works + "Ingestion", }, false), }, @@ -58,9 +60,7 @@ func resourceArmLogAnalyticsLinkedStorageAccount() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: azure.ValidateResourceID, - // https://github.com/Azure/azure-rest-api-specs/issues/9633 - DiffSuppressFunc: suppress.CaseDifference, + ValidateFunc: validate.LogAnalyticsWorkspaceID, }, "storage_account_ids": { @@ -76,7 +76,7 @@ func resourceArmLogAnalyticsLinkedStorageAccount() *schema.Resource { } } -func resourceArmLogAnalyticsLinkedStorageAccountCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceLogAnalyticsLinkedStorageAccountCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).LogAnalytics.LinkedStorageAccountClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -89,10 +89,10 @@ func 
resourceArmLogAnalyticsLinkedStorageAccountCreateUpdate(d *schema.ResourceD } if d.IsNewResource() { - existing, err := client.Get(ctx, resourceGroup, workspace.Name, dataSourceType) + existing, err := client.Get(ctx, resourceGroup, workspace.WorkspaceName, dataSourceType) if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for present of existing Log Analytics Linked Storage Account %q (Resource Group %q / workspaceName %q): %+v", dataSourceType, resourceGroup, workspace.Name, err) + return fmt.Errorf("checking for presence of existing Log Analytics Linked Storage Account %q (Resource Group %q / workspaceName %q): %+v", dataSourceType, resourceGroup, workspace.WorkspaceName, err) } } if existing.ID != nil && *existing.ID != "" { @@ -105,24 +105,24 @@ func resourceArmLogAnalyticsLinkedStorageAccountCreateUpdate(d *schema.ResourceD StorageAccountIds: utils.ExpandStringSlice(d.Get("storage_account_ids").(*schema.Set).List()), }, } - if _, err := client.CreateOrUpdate(ctx, resourceGroup, workspace.Name, dataSourceType, parameters); err != nil { - return fmt.Errorf("creating/updating Log Analytics Linked Storage Account %q (Resource Group %q / workspaceName %q): %+v", dataSourceType, resourceGroup, workspace.Name, err) + if _, err := client.CreateOrUpdate(ctx, resourceGroup, workspace.WorkspaceName, dataSourceType, parameters); err != nil { + return fmt.Errorf("creating/updating Log Analytics Linked Storage Account %q (Resource Group %q / workspaceName %q): %+v", dataSourceType, resourceGroup, workspace.WorkspaceName, err) } - resp, err := client.Get(ctx, resourceGroup, workspace.Name, dataSourceType) + resp, err := client.Get(ctx, resourceGroup, workspace.WorkspaceName, dataSourceType) if err != nil { - return fmt.Errorf("retrieving Log Analytics Linked Storage Account %q (Resource Group %q / workspaceName %q): %+v", dataSourceType, resourceGroup, workspace.Name, err) + return fmt.Errorf("retrieving Log Analytics Linked Storage Account %q (Resource Group %q / workspaceName %q): %+v", dataSourceType, resourceGroup, workspace.WorkspaceName, err) } if resp.ID == nil || *resp.ID == "" { - return fmt.Errorf("empty or nil ID returned for Log Analytics Linked Storage Account %q (Resource Group %q / workspaceName %q) ID", dataSourceType, resourceGroup, workspace.Name) + return fmt.Errorf("empty or nil ID returned for Log Analytics Linked Storage Account %q (Resource Group %q / workspaceName %q) ID", dataSourceType, resourceGroup, workspace.WorkspaceName) } d.SetId(*resp.ID) - return resourceArmLogAnalyticsLinkedStorageAccountRead(d, meta) + return resourceLogAnalyticsLinkedStorageAccountRead(d, meta) } -func resourceArmLogAnalyticsLinkedStorageAccountRead(d *schema.ResourceData, meta interface{}) error { +func resourceLogAnalyticsLinkedStorageAccountRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).LogAnalytics.LinkedStorageAccountClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -132,7 +132,7 @@ func resourceArmLogAnalyticsLinkedStorageAccountRead(d *schema.ResourceData, met return err } - dataSourceType := operationalinsights.DataSourceType(id.Name) + dataSourceType := operationalinsights.DataSourceType(id.LinkedStorageAccountName) resp, err := client.Get(ctx, id.ResourceGroup, id.WorkspaceName, dataSourceType) if err != nil { if utils.ResponseWasNotFound(resp.Response) { @@ -140,12 +140,12 @@ func resourceArmLogAnalyticsLinkedStorageAccountRead(d *schema.ResourceData, met
d.SetId("") return nil } - return fmt.Errorf("retrieving Log Analytics Linked Storage Account %q (Resource Group %q / workspaceName %q): %+v", id.Name, id.ResourceGroup, id.WorkspaceName, err) + return fmt.Errorf("retrieving Log Analytics Linked Storage Account %q (Resource Group %q / workspaceName %q): %+v", id.LinkedStorageAccountName, id.ResourceGroup, id.WorkspaceName, err) } - d.Set("data_source_type", *resp.Name) + d.Set("data_source_type", resp.Name) d.Set("resource_group_name", id.ResourceGroup) - d.Set("workspace_resource_id", id.WorkspaceID) + d.Set("workspace_resource_id", parse.NewLogAnalyticsWorkspaceID(id.SubscriptionId, id.ResourceGroup, id.WorkspaceName).ID()) if props := resp.LinkedStorageAccountsProperties; props != nil { d.Set("storage_account_ids", utils.FlattenStringSlice(props.StorageAccountIds)) } @@ -153,7 +153,7 @@ func resourceArmLogAnalyticsLinkedStorageAccountRead(d *schema.ResourceData, met return nil } -func resourceArmLogAnalyticsLinkedStorageAccountDelete(d *schema.ResourceData, meta interface{}) error { +func resourceLogAnalyticsLinkedStorageAccountDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).LogAnalytics.LinkedStorageAccountClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() @@ -163,9 +163,9 @@ func resourceArmLogAnalyticsLinkedStorageAccountDelete(d *schema.ResourceData, m return err } - dataSourceType := operationalinsights.DataSourceType(id.Name) + dataSourceType := operationalinsights.DataSourceType(id.LinkedStorageAccountName) if _, err := client.Delete(ctx, id.ResourceGroup, id.WorkspaceName, dataSourceType); err != nil { - return fmt.Errorf("deleting Log Analytics Linked Storage Account %q (Resource Group %q / workspaceName %q): %+v", id.Name, id.ResourceGroup, id.WorkspaceName, err) + return fmt.Errorf("deleting Log Analytics Linked Storage Account %q (Resource Group %q / workspaceName %q): %+v", id.LinkedStorageAccountName, id.ResourceGroup, id.WorkspaceName, err) } return nil } diff --git a/azurerm/internal/services/loganalytics/log_analytics_linked_storage_account_resource_test.go b/azurerm/internal/services/loganalytics/log_analytics_linked_storage_account_resource_test.go new file mode 100644 index 000000000000..c8639e9c11f6 --- /dev/null +++ b/azurerm/internal/services/loganalytics/log_analytics_linked_storage_account_resource_test.go @@ -0,0 +1,179 @@ +package loganalytics_test + +import ( + "context" + "fmt" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/operationalinsights/mgmt/2020-08-01/operationalinsights" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type LogAnalyticsLinkedStorageAccountResource struct { +} + +func TestAcclogAnalyticsLinkedStorageAccount_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_linked_storage_account", "test") + r := LogAnalyticsLinkedStorageAccountResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: 
resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAcclogAnalyticsLinkedStorageAccount_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_linked_storage_account", "test") + r := LogAnalyticsLinkedStorageAccountResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAcclogAnalyticsLinkedStorageAccount_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_linked_storage_account", "test") + r := LogAnalyticsLinkedStorageAccountResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAcclogAnalyticsLinkedStorageAccount_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_linked_storage_account", "test") + r := LogAnalyticsLinkedStorageAccountResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t LogAnalyticsLinkedStorageAccountResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.LogAnalyticsLinkedStorageAccountID(state.ID) + if err != nil { + return nil, err + } + dataSourceType := operationalinsights.DataSourceType(id.LinkedStorageAccountName) + + resp, err := clients.LogAnalytics.LinkedStorageAccountClient.Get(ctx, id.ResourceGroup, id.WorkspaceName, dataSourceType) + if err != nil { + return nil, fmt.Errorf("reading Log Analytics Linked Storage Account (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (LogAnalyticsLinkedStorageAccountResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-la-%d" + location = "%s" +} + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestLAW-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "PerGB2018" +} + +resource "azurerm_storage_account" "test" { + name = "acctestsap%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "GRS" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomString) +} + +func (r LogAnalyticsLinkedStorageAccountResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_linked_storage_account" "test" { + data_source_type = "customlogs" + resource_group_name = azurerm_resource_group.test.name + workspace_resource_id = azurerm_log_analytics_workspace.test.id + storage_account_ids =
[azurerm_storage_account.test.id] +} +`, r.template(data)) +} + +func (r LogAnalyticsLinkedStorageAccountResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_linked_storage_account" "import" { + data_source_type = azurerm_log_analytics_linked_storage_account.test.data_source_type + resource_group_name = azurerm_log_analytics_linked_storage_account.test.resource_group_name + workspace_resource_id = azurerm_log_analytics_linked_storage_account.test.workspace_resource_id + storage_account_ids = [azurerm_storage_account.test.id] +} +`, r.basic(data)) +} + +func (r LogAnalyticsLinkedStorageAccountResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_storage_account" "test2" { + name = "acctestsas%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "GRS" +} + +resource "azurerm_log_analytics_linked_storage_account" "test" { + data_source_type = "customlogs" + resource_group_name = azurerm_resource_group.test.name + workspace_resource_id = azurerm_log_analytics_workspace.test.id + storage_account_ids = [azurerm_storage_account.test.id, azurerm_storage_account.test2.id] +} +`, r.template(data), data.RandomString) +} diff --git a/azurerm/internal/services/loganalytics/log_analytics_saved_search_resource.go b/azurerm/internal/services/loganalytics/log_analytics_saved_search_resource.go index e9c48842f6fa..32772aa42070 100644 --- a/azurerm/internal/services/loganalytics/log_analytics_saved_search_resource.go +++ b/azurerm/internal/services/loganalytics/log_analytics_saved_search_resource.go @@ -7,7 +7,7 @@ import ( "strings" "time" - "github.com/Azure/azure-sdk-for-go/services/preview/operationalinsights/mgmt/2020-03-01-preview/operationalinsights" + "github.com/Azure/azure-sdk-for-go/services/operationalinsights/mgmt/2020-08-01/operationalinsights" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" @@ -15,15 +15,16 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmLogAnalyticsSavedSearch() *schema.Resource { +func resourceLogAnalyticsSavedSearch() *schema.Resource { return &schema.Resource{ - Create: resourceArmLogAnalyticsSavedSearchCreate, - Read: resourceArmLogAnalyticsSavedSearchRead, - Delete: resourceArmLogAnalyticsSavedSearchDelete, + Create: resourceLogAnalyticsSavedSearchCreate, + Read: resourceLogAnalyticsSavedSearchRead, + Delete: resourceLogAnalyticsSavedSearchDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -94,11 +95,13 @@ func resourceArmLogAnalyticsSavedSearch() *schema.Resource { ), }, }, + + "tags": tags.ForceNewSchema(), }, } } -func resourceArmLogAnalyticsSavedSearchCreate(d *schema.ResourceData, meta interface{}) error { +func 
resourceLogAnalyticsSavedSearchCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).LogAnalytics.SavedSearchesClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -112,10 +115,10 @@ func resourceArmLogAnalyticsSavedSearchCreate(d *schema.ResourceData, meta inter } if d.IsNewResource() { - existing, err := client.Get(ctx, id.ResourceGroup, id.Name, name) + existing, err := client.Get(ctx, id.ResourceGroup, id.WorkspaceName, name) if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for presence of existing Log Analytics Saved Search %q (WorkSpace %q / Resource Group %q): %s", name, id.Name, id.ResourceGroup, err) + return fmt.Errorf("checking for presence of existing Log Analytics Saved Search %q (WorkSpace %q / Resource Group %q): %s", name, id.WorkspaceName, id.ResourceGroup, err) } } @@ -130,6 +133,7 @@ func resourceArmLogAnalyticsSavedSearchCreate(d *schema.ResourceData, meta inter DisplayName: utils.String(d.Get("display_name").(string)), Query: utils.String(d.Get("query").(string)), FunctionAlias: utils.String(d.Get("function_alias").(string)), + Tags: expandSavedSearchTag(d.Get("tags").(map[string]interface{})), // expand tags because it's defined as object set in service }, } @@ -144,44 +148,46 @@ func resourceArmLogAnalyticsSavedSearchCreate(d *schema.ResourceData, meta inter parameters.SavedSearchProperties.FunctionParameters = utils.String(strings.Join(result, ", ")) } - if _, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.Name, name, parameters); err != nil { - return fmt.Errorf("creating Saved Search %q (Log Analytics Workspace %q / Resource Group %q): %+v", name, id.Name, id.ResourceGroup, err) + if _, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.WorkspaceName, name, parameters); err != nil { + return fmt.Errorf("creating Saved Search %q (Log Analytics Workspace %q / Resource Group %q): %+v", name, id.WorkspaceName, id.ResourceGroup, err) } - read, err := client.Get(ctx, id.ResourceGroup, id.Name, name) + read, err := client.Get(ctx, id.ResourceGroup, id.WorkspaceName, name) if err != nil { - return fmt.Errorf("retrieving Saved Search %q (Log Analytics Workspace %q / Resource Group %q): %+v", name, id.Name, id.ResourceGroup, err) + return fmt.Errorf("retrieving Saved Search %q (Log Analytics Workspace %q / Resource Group %q): %+v", name, id.WorkspaceName, id.ResourceGroup, err) } if read.ID == nil { - return fmt.Errorf("cannot read Log Analytics Saved Search %q (WorkSpace %q / Resource Group %q): %s", name, id.Name, id.ResourceGroup, err) + return fmt.Errorf("cannot read Log Analytics Saved Search %q (WorkSpace %q / Resource Group %q): %s", name, id.WorkspaceName, id.ResourceGroup, err) } d.SetId(*read.ID) - return resourceArmLogAnalyticsSavedSearchRead(d, meta) + return resourceLogAnalyticsSavedSearchRead(d, meta) } -func resourceArmLogAnalyticsSavedSearchRead(d *schema.ResourceData, meta interface{}) error { +func resourceLogAnalyticsSavedSearchRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).LogAnalytics.SavedSearchesClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.LogAnalyticsSavedSearchID(d.Id()) + // FIXME: @favoretti: API returns ID without a leading slash + id, err := parse.LogAnalyticsSavedSearchID(fmt.Sprintf("/%s", strings.TrimPrefix(d.Id(), "/"))) if 
err != nil { return err } - workspaceId := parse.NewLogAnalyticsWorkspaceID(id.WorkspaceName, id.ResourceGroup).ID(meta.(*clients.Client).Account.SubscriptionId) + workspaceId := parse.NewLogAnalyticsWorkspaceID(subscriptionId, id.ResourceGroup, id.WorkspaceName).ID() - resp, err := client.Get(ctx, id.ResourceGroup, id.WorkspaceName, id.Name) + resp, err := client.Get(ctx, id.ResourceGroup, id.WorkspaceName, id.SavedSearcheName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("retrieving Saved Search %q (Log Analytics Workspace %q / Resource Group %q): %s", id.Name, id.WorkspaceName, id.ResourceGroup, err) + return fmt.Errorf("retrieving Saved Search %q (Log Analytics Workspace %q / Resource Group %q): %s", id.SavedSearcheName, id.WorkspaceName, id.ResourceGroup, err) } - d.Set("name", id.Name) + d.Set("name", id.SavedSearcheName) d.Set("log_analytics_workspace_id", workspaceId) if props := resp.SavedSearchProperties; props != nil { @@ -194,23 +200,61 @@ func resourceArmLogAnalyticsSavedSearchRead(d *schema.ResourceData, meta interfa functionParams = strings.Split(*props.FunctionParameters, ", ") } d.Set("function_parameters", functionParams) + + // flatten tags because it's defined as object set in service + if err := d.Set("tags", flattenSavedSearchTag(props.Tags)); err != nil { + return fmt.Errorf("setting `tags`: %+v", err) + } } return nil } -func resourceArmLogAnalyticsSavedSearchDelete(d *schema.ResourceData, meta interface{}) error { +func resourceLogAnalyticsSavedSearchDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).LogAnalytics.SavedSearchesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.LogAnalyticsSavedSearchID(d.Id()) + // FIXME: @favoretti: API returns ID without a leading slash + id, err := parse.LogAnalyticsSavedSearchID(fmt.Sprintf("/%s", strings.TrimPrefix(d.Id(), "/"))) if err != nil { return err } - if _, err = client.Delete(ctx, id.ResourceGroup, id.WorkspaceName, id.Name); err != nil { - return fmt.Errorf("deleting Saved Search %q (Log Analytics Workspace %q / Resource Group %q): %s", id.Name, id.WorkspaceName, id.ResourceGroup, err) + if _, err = client.Delete(ctx, id.ResourceGroup, id.WorkspaceName, id.SavedSearcheName); err != nil { + return fmt.Errorf("deleting Saved Search %q (Log Analytics Workspace %q / Resource Group %q): %s", id.SavedSearcheName, id.WorkspaceName, id.ResourceGroup, err) } return nil } + +func expandSavedSearchTag(input map[string]interface{}) *[]operationalinsights.Tag { + results := make([]operationalinsights.Tag, 0) + for key, value := range input { + result := operationalinsights.Tag{ + Name: utils.String(key), + Value: utils.String(value.(string)), + } + results = append(results, result) + } + return &results +} + +func flattenSavedSearchTag(input *[]operationalinsights.Tag) map[string]interface{} { + results := make(map[string]interface{}) + if input == nil { + return results + } + + for _, item := range *input { + var key string + if item.Name != nil { + key = *item.Name + } + var value string + if item.Value != nil { + value = *item.Value + } + results[key] = value + } + return results +} diff --git a/azurerm/internal/services/loganalytics/log_analytics_saved_search_resource_test.go b/azurerm/internal/services/loganalytics/log_analytics_saved_search_resource_test.go new file mode 100644 index 000000000000..c6dff3a3059c --- /dev/null +++
b/azurerm/internal/services/loganalytics/log_analytics_saved_search_resource_test.go @@ -0,0 +1,206 @@ +package loganalytics_test + +import ( + "context" + "fmt" + "strings" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" +) + +type LogAnalyticsSavedSearchResource struct { +} + +func TestAccLogAnalyticsSavedSearch_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_saved_search", "test") + r := LogAnalyticsSavedSearchResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccLogAnalyticsSavedSearch_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_saved_search", "test") + r := LogAnalyticsSavedSearchResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccLogAnalyticsSavedSearch_withTag(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_saved_search", "test") + r := LogAnalyticsSavedSearchResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withTag(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccLogAnalyticsSavedSearch_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_saved_search", "test") + r := LogAnalyticsSavedSearchResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_log_analytics_saved_search"), + }, + }) +} + +func (t LogAnalyticsSavedSearchResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.LogAnalyticsSavedSearchID(fmt.Sprintf("/%s", strings.TrimPrefix(state.ID, "/"))) + if err != nil { + return nil, err + } + + resp, err := clients.LogAnalytics.SavedSearchesClient.Get(ctx, id.ResourceGroup, id.WorkspaceName, id.SavedSearcheName) + if err != nil { + return nil, fmt.Errorf("reading Log Analytics Saved Search (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (LogAnalyticsSavedSearchResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestLAW-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "PerGB2018" +} + +resource
"azurerm_log_analytics_saved_search" "test" { + name = "acctestLASS-%d" + log_analytics_workspace_id = azurerm_log_analytics_workspace.test.id + + category = "Saved Search Test Category" + display_name = "Create or Update Saved Search Test" + query = "Heartbeat | summarize Count() by Computer | take a" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r LogAnalyticsSavedSearchResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_saved_search" "import" { + name = azurerm_log_analytics_saved_search.test.name + log_analytics_workspace_id = azurerm_log_analytics_saved_search.test.log_analytics_workspace_id + + category = azurerm_log_analytics_saved_search.test.category + display_name = azurerm_log_analytics_saved_search.test.display_name + query = azurerm_log_analytics_saved_search.test.query +} +`, r.basic(data)) +} + +func (LogAnalyticsSavedSearchResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestLAW-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "PerGB2018" +} + +resource "azurerm_log_analytics_saved_search" "test" { + name = "acctestLASS-%d" + log_analytics_workspace_id = azurerm_log_analytics_workspace.test.id + + category = "Saved Search Test Category" + display_name = "Create or Update Saved Search Test" + query = "Heartbeat | summarize Count() by Computer | take a" + + function_alias = "heartbeat_func" + function_parameters = ["a:int=1"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (LogAnalyticsSavedSearchResource) withTag(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestLAW-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "PerGB2018" +} + +resource "azurerm_log_analytics_saved_search" "test" { + name = "acctestLASS-%d" + log_analytics_workspace_id = azurerm_log_analytics_workspace.test.id + + category = "Saved Search Test Category" + display_name = "Create or Update Saved Search Test" + query = "Heartbeat | summarize Count() by Computer | take a" + + tags = { + "Environment" = "Test" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/loganalytics/log_analytics_solution_resource.go b/azurerm/internal/services/loganalytics/log_analytics_solution_resource.go index 279dd6d26ccf..fc35e90a88d4 100644 --- a/azurerm/internal/services/loganalytics/log_analytics_solution_resource.go +++ b/azurerm/internal/services/loganalytics/log_analytics_solution_resource.go @@ -6,6 +6,9 @@ import ( "strings" "time" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + "github.com/Azure/azure-sdk-for-go/services/preview/operationsmanagement/mgmt/2015-11-01-preview/operationsmanagement" "github.com/hashicorp/go-azure-helpers/response" 
"github.com/hashicorp/terraform-plugin-sdk/helper/schema" @@ -18,12 +21,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmLogAnalyticsSolution() *schema.Resource { +func resourceLogAnalyticsSolution() *schema.Resource { return &schema.Resource{ - Create: resourceArmLogAnalyticsSolutionCreateUpdate, - Read: resourceArmLogAnalyticsSolutionRead, - Update: resourceArmLogAnalyticsSolutionCreateUpdate, - Delete: resourceArmLogAnalyticsSolutionDelete, + Create: resourceLogAnalyticsSolutionCreateUpdate, + Read: resourceLogAnalyticsSolutionRead, + Update: resourceLogAnalyticsSolutionCreateUpdate, + Delete: resourceLogAnalyticsSolutionDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -47,7 +50,7 @@ func resourceArmLogAnalyticsSolution() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: ValidateAzureRmLogAnalyticsWorkspaceName, + ValidateFunc: validate.LogAnalyticsWorkspaceName, }, "workspace_resource_id": { @@ -89,11 +92,13 @@ func resourceArmLogAnalyticsSolution() *schema.Resource { }, }, }, + + "tags": tags.Schema(), }, } } -func resourceArmLogAnalyticsSolutionCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceLogAnalyticsSolutionCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).LogAnalytics.SolutionsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -130,6 +135,7 @@ func resourceArmLogAnalyticsSolutionCreateUpdate(d *schema.ResourceData, meta in Properties: &operationsmanagement.SolutionProperties{ WorkspaceResourceID: utils.String(workspaceID), }, + Tags: tags.Expand(d.Get("tags").(map[string]interface{})), } future, err := client.CreateOrUpdate(ctx, resGroup, name, parameters) @@ -152,10 +158,10 @@ func resourceArmLogAnalyticsSolutionCreateUpdate(d *schema.ResourceData, meta in d.SetId(*solution.ID) - return resourceArmLogAnalyticsSolutionRead(d, meta) + return resourceLogAnalyticsSolutionRead(d, meta) } -func resourceArmLogAnalyticsSolutionRead(d *schema.ResourceData, meta interface{}) error { +func resourceLogAnalyticsSolutionRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).LogAnalytics.SolutionsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -207,10 +213,10 @@ func resourceArmLogAnalyticsSolutionRead(d *schema.ResourceData, meta interface{ return fmt.Errorf("Error setting `plan`: %+v", err) } - return nil + return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmLogAnalyticsSolutionDelete(d *schema.ResourceData, meta interface{}) error { +func resourceLogAnalyticsSolutionDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).LogAnalytics.SolutionsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/loganalytics/log_analytics_solution_resource_test.go b/azurerm/internal/services/loganalytics/log_analytics_solution_resource_test.go new file mode 100644 index 000000000000..8b3f78ee6de0 --- /dev/null +++ b/azurerm/internal/services/loganalytics/log_analytics_solution_resource_test.go @@ -0,0 +1,173 @@ +package loganalytics_test + +import ( + "context" + "fmt" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" + + 
"github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" +) + +type LogAnalyticsSolutionResource struct { +} + +func TestAccLogAnalyticsSolution_basicContainerMonitoring(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_solution", "test") + r := LogAnalyticsSolutionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.containerMonitoring(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccLogAnalyticsSolution_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_solution", "test") + r := LogAnalyticsSolutionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.containerMonitoring(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_log_analytics_solution"), + }, + }) +} + +func TestAccLogAnalyticsSolution_basicSecurity(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_solution", "test") + r := LogAnalyticsSolutionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.security(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t LogAnalyticsSolutionResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resGroup := id.ResourceGroup + name := id.Path["solutions"] + + resp, err := clients.LogAnalytics.SolutionsClient.Get(ctx, resGroup, name) + if err != nil { + return nil, fmt.Errorf("reading Log Analytics Solution (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (LogAnalyticsSolutionResource) containerMonitoring(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestLAW-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "PerGB2018" +} + +resource "azurerm_log_analytics_solution" "test" { + solution_name = "ContainerInsights" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + workspace_resource_id = azurerm_log_analytics_workspace.test.id + workspace_name = azurerm_log_analytics_workspace.test.name + + plan { + publisher = "Microsoft" + product = "OMSGallery/ContainerInsights" + } + + tags = { + Environment = "Test" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r LogAnalyticsSolutionResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_solution" "import" { + solution_name = azurerm_log_analytics_solution.test.solution_name + location = 
azurerm_log_analytics_solution.test.location + resource_group_name = azurerm_log_analytics_solution.test.resource_group_name + workspace_resource_id = azurerm_log_analytics_solution.test.workspace_resource_id + workspace_name = azurerm_log_analytics_solution.test.workspace_name + + plan { + publisher = "Microsoft" + product = "OMSGallery/ContainerInsights" + } +} +`, r.containerMonitoring(data)) +} + +func (LogAnalyticsSolutionResource) security(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestLAW-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "PerGB2018" +} + +resource "azurerm_log_analytics_solution" "test" { + solution_name = "Security" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + workspace_resource_id = azurerm_log_analytics_workspace.test.id + workspace_name = azurerm_log_analytics_workspace.test.name + + plan { + publisher = "Microsoft" + product = "OMSGallery/Security" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/loganalytics/log_analytics_storage_insights_import.go b/azurerm/internal/services/loganalytics/log_analytics_storage_insights_import.go new file mode 100644 index 000000000000..006e135158f3 --- /dev/null +++ b/azurerm/internal/services/loganalytics/log_analytics_storage_insights_import.go @@ -0,0 +1,18 @@ +package loganalytics + +import ( + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" +) + +func logAnalyticsStorageInsightsImporter(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { + if _, err := parse.LogAnalyticsStorageInsightsID(d.Id()); err != nil { + return []*schema.ResourceData{d}, err + } + + if v, ok := d.GetOk("storage_account_key"); ok && v.(string) != "" { + d.Set("storage_account_key", v) + } + + return []*schema.ResourceData{d}, nil +} diff --git a/azurerm/internal/services/loganalytics/log_analytics_storage_insights_resource.go b/azurerm/internal/services/loganalytics/log_analytics_storage_insights_resource.go new file mode 100644 index 000000000000..4333a7ff9b98 --- /dev/null +++ b/azurerm/internal/services/loganalytics/log_analytics_storage_insights_resource.go @@ -0,0 +1,207 @@ +package loganalytics + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/operationalinsights/mgmt/2020-08-01/operationalinsights" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/validate" + storageValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/storage/validate" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceLogAnalyticsStorageInsights() *schema.Resource { + return &schema.Resource{ + Create: resourceLogAnalyticsStorageInsightsCreateUpdate, + Read: resourceLogAnalyticsStorageInsightsRead, + Update: resourceLogAnalyticsStorageInsightsCreateUpdate, + Delete: resourceLogAnalyticsStorageInsightsDelete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: &schema.ResourceImporter{ + State: logAnalyticsStorageInsightsImporter, + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.LogAnalyticsStorageInsightsName, + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "workspace_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.LogAnalyticsWorkspaceID, + }, + + "storage_account_id": { + Type: schema.TypeString, + Required: true, + ValidateFunc: storageValidate.StorageAccountID, + }, + + "storage_account_key": { + Type: schema.TypeString, + Required: true, + Sensitive: true, + ValidateFunc: validation.All( + validation.StringIsNotEmpty, + validate.IsBase64Encoded, + ), + }, + + "blob_container_names": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.NoZeroValues, + }, + }, + + "table_names": { + Type: schema.TypeSet, + Optional: true, + MinItems: 1, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.NoZeroValues, + }, + }, + + "tags": tags.Schema(), + }, + } +} + +func resourceLogAnalyticsStorageInsightsCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LogAnalytics.StorageInsightsClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + storageAccountId := d.Get("storage_account_id").(string) + storageAccountKey := d.Get("storage_account_key").(string) + + workspace, err := parse.LogAnalyticsWorkspaceID(d.Get("workspace_id").(string)) + if err != nil { + return err + } + id := parse.NewLogAnalyticsStorageInsightsID(subscriptionId, resourceGroup, workspace.WorkspaceName, name) + + if d.IsNewResource() { + existing, err := client.Get(ctx, resourceGroup, id.WorkspaceName, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for present of existing Log Analytics Storage Insights %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, id.WorkspaceName, err) + } + } + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_log_analytics_storage_insights", id.ID()) + } + } + + parameters := operationalinsights.StorageInsight{ + StorageInsightProperties: &operationalinsights.StorageInsightProperties{ + StorageAccount: expandStorageInsightConfigStorageAccount(storageAccountId, storageAccountKey), + }, + Tags: 
tags.Expand(d.Get("tags").(map[string]interface{})), + } + + if _, ok := d.GetOk("table_names"); ok { + parameters.StorageInsightProperties.Tables = utils.ExpandStringSlice(d.Get("table_names").(*schema.Set).List()) + } + + if _, ok := d.GetOk("blob_container_names"); ok { + parameters.StorageInsightProperties.Containers = utils.ExpandStringSlice(d.Get("blob_container_names").(*schema.Set).List()) + } + + if _, err := client.CreateOrUpdate(ctx, resourceGroup, id.WorkspaceName, name, parameters); err != nil { + return fmt.Errorf("creating/updating Log Analytics Storage Insights %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, id.WorkspaceName, err) + } + + d.SetId(id.ID()) + return resourceLogAnalyticsStorageInsightsRead(d, meta) +} + +func resourceLogAnalyticsStorageInsightsRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LogAnalytics.StorageInsightsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.LogAnalyticsStorageInsightsID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.WorkspaceName, id.StorageInsightConfigName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[INFO] Log Analytics Storage Insights %q does not exist - removing from state", d.Id()) + d.SetId("") + return nil + } + return fmt.Errorf("retrieving Log Analytics Storage Insights %q (Resource Group %q / workspaceName %q): %+v", id.StorageInsightConfigName, id.ResourceGroup, id.WorkspaceName, err) + } + + d.Set("name", id.StorageInsightConfigName) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("workspace_id", parse.NewLogAnalyticsWorkspaceID(id.SubscriptionId, id.ResourceGroup, id.WorkspaceName).ID()) + + if props := resp.StorageInsightProperties; props != nil { + d.Set("blob_container_names", utils.FlattenStringSlice(props.Containers)) + storageAccountId := "" + if props.StorageAccount != nil && props.StorageAccount.ID != nil { + storageAccountId = *props.StorageAccount.ID + } + d.Set("storage_account_id", storageAccountId) + d.Set("table_names", utils.FlattenStringSlice(props.Tables)) + } + + return tags.FlattenAndSet(d, resp.Tags) +} + +func resourceLogAnalyticsStorageInsightsDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).LogAnalytics.StorageInsightsClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.LogAnalyticsStorageInsightsID(d.Id()) + if err != nil { + return err + } + + if _, err := client.Delete(ctx, id.ResourceGroup, id.WorkspaceName, id.StorageInsightConfigName); err != nil { + return fmt.Errorf("deleting LogAnalytics Storage Insight Config %q (Resource Group %q / workspaceName %q): %+v", id.StorageInsightConfigName, id.ResourceGroup, id.WorkspaceName, err) + } + return nil +} + +func expandStorageInsightConfigStorageAccount(id string, key string) *operationalinsights.StorageAccount { + return &operationalinsights.StorageAccount{ + ID: utils.String(id), + Key: utils.String(key), + } +} diff --git a/azurerm/internal/services/loganalytics/log_analytics_storage_insights_resource_test.go b/azurerm/internal/services/loganalytics/log_analytics_storage_insights_resource_test.go new file mode 100644 index 000000000000..cfc0d0181d1e --- /dev/null +++ b/azurerm/internal/services/loganalytics/log_analytics_storage_insights_resource_test.go @@ -0,0 +1,228 @@ +package loganalytics_test + +import ( + 
"context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type LogAnalyticsStorageInsightsResource struct { +} + +func TestAccLogAnalyticsStorageInsights_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_storage_insights", "test") + r := LogAnalyticsStorageInsightsResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("storage_account_key"), // key is not returned by the API + }) +} + +func TestAccLogAnalyticsStorageInsights_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_storage_insights", "test") + r := LogAnalyticsStorageInsightsResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccLogAnalyticsStorageInsights_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_storage_insights", "test") + r := LogAnalyticsStorageInsightsResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("storage_account_key"), // key is not returned by the API + }) +} + +func TestAccLogAnalyticsStorageInsights_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_storage_insights", "test") + r := LogAnalyticsStorageInsightsResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("storage_account_key"), + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("storage_account_key"), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("storage_account_key"), // key is not returned by the API + }) +} + +func TestAccLogAnalyticsStorageInsights_updateStorageAccount(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_storage_insights", "test") + r := LogAnalyticsStorageInsightsResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("storage_account_key"), + { + Config: r.updateStorageAccount(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("storage_account_key"), // key is not returned by the API + }) +} + +func (t LogAnalyticsStorageInsightsResource) Exists(ctx 
context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.LogAnalyticsStorageInsightsID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.LogAnalytics.StorageInsightsClient.Get(ctx, id.ResourceGroup, id.WorkspaceName, id.StorageInsightConfigName) + if err != nil { + return nil, fmt.Errorf("readingLog Analytics Storage Insights (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (LogAnalyticsStorageInsightsResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-la-%d" + location = "%s" +} + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestLAW-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "PerGB2018" + retention_in_days = 30 +} + +resource "azurerm_storage_account" "test" { + name = "acctestsads%s" + resource_group_name = azurerm_resource_group.test.name + + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomString) +} + +func (r LogAnalyticsStorageInsightsResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_storage_insights" "test" { + name = "acctest-la-%d" + resource_group_name = azurerm_resource_group.test.name + workspace_id = azurerm_log_analytics_workspace.test.id + + storage_account_id = azurerm_storage_account.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key +} +`, r.template(data), data.RandomInteger) +} + +func (r LogAnalyticsStorageInsightsResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_storage_insights" "import" { + name = azurerm_log_analytics_storage_insights.test.name + resource_group_name = azurerm_log_analytics_storage_insights.test.resource_group_name + workspace_id = azurerm_log_analytics_storage_insights.test.workspace_id + + storage_account_id = azurerm_storage_account.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key +} +`, r.basic(data)) +} + +func (r LogAnalyticsStorageInsightsResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_storage_insights" "test" { + name = "acctest-LA-%d" + resource_group_name = azurerm_resource_group.test.name + workspace_id = azurerm_log_analytics_workspace.test.id + + blob_container_names = ["wad-iis-logfiles"] + table_names = ["WADWindowsEventLogsTable", "LinuxSyslogVer2v0"] + + storage_account_id = azurerm_storage_account.test.id + storage_account_key = azurerm_storage_account.test.primary_access_key +} +`, r.template(data), data.RandomInteger) +} + +func (r LogAnalyticsStorageInsightsResource) updateStorageAccount(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_storage_account" "test2" { + name = "acctestsads%s" + resource_group_name = azurerm_resource_group.test.name + + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_log_analytics_storage_insights" "test" { + name = "acctest-la-%d" + resource_group_name = azurerm_resource_group.test.name + workspace_id = azurerm_log_analytics_workspace.test.id + 
+ blob_container_names = ["wad-iis-logfiles"] + table_names = ["WADWindowsEventLogsTable", "LinuxSyslogVer2v0"] + + storage_account_id = azurerm_storage_account.test2.id + storage_account_key = azurerm_storage_account.test2.primary_access_key +} +`, r.template(data), data.RandomStringOfLength(6), data.RandomInteger) +} diff --git a/azurerm/internal/services/loganalytics/log_analytics_workspace_data_source_test.go b/azurerm/internal/services/loganalytics/log_analytics_workspace_data_source_test.go new file mode 100644 index 000000000000..4fb453bbda91 --- /dev/null +++ b/azurerm/internal/services/loganalytics/log_analytics_workspace_data_source_test.go @@ -0,0 +1,67 @@ +package loganalytics_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type LogAnalyticsWorkspaceDataSource struct { +} + +func TestAccDataSourceLogAnalyticsWorkspace_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_log_analytics_workspace", "test") + r := LogAnalyticsWorkspaceDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basicWithDataSource(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("sku").HasValue("pergb2018"), + check.That(data.ResourceName).Key("retention_in_days").HasValue("30"), + check.That(data.ResourceName).Key("daily_quota_gb").HasValue("-1"), + ), + }, + }) +} + +func TestAccDataSourceLogAnalyticsWorkspace_volumeCapWithDataSource(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_log_analytics_workspace", "test") + r := LogAnalyticsWorkspaceDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.volumeCapWithDataSource(data, 4.5), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("sku").HasValue("pergb2018"), + check.That(data.ResourceName).Key("retention_in_days").HasValue("30"), + check.That(data.ResourceName).Key("daily_quota_gb").HasValue("4.5"), + ), + }, + }) +} + +func (LogAnalyticsWorkspaceDataSource) basicWithDataSource(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_log_analytics_workspace" "test" { + name = azurerm_log_analytics_workspace.test.name + resource_group_name = azurerm_log_analytics_workspace.test.resource_group_name +} +`, LogAnalyticsWorkspaceResource{}.complete(data)) +} + +func (LogAnalyticsWorkspaceDataSource) volumeCapWithDataSource(data acceptance.TestData, volumeCapGb float64) string { + return fmt.Sprintf(` +%s + +data "azurerm_log_analytics_workspace" "test" { + name = azurerm_log_analytics_workspace.test.name + resource_group_name = azurerm_log_analytics_workspace.test.resource_group_name +} +`, LogAnalyticsWorkspaceResource{}.withVolumeCap(data, volumeCapGb)) +} diff --git a/azurerm/internal/services/loganalytics/log_analytics_workspace_resource.go b/azurerm/internal/services/loganalytics/log_analytics_workspace_resource.go index f314dea4bea1..83314cdc6dfb 100644 --- a/azurerm/internal/services/loganalytics/log_analytics_workspace_resource.go +++ b/azurerm/internal/services/loganalytics/log_analytics_workspace_resource.go @@ -3,10 +3,10 @@ package loganalytics import ( "fmt" "log" - "regexp" + "strings" "time" - "github.com/Azure/azure-sdk-for-go/services/preview/operationalinsights/mgmt/2020-03-01-preview/operationalinsights" + 
"github.com/Azure/azure-sdk-for-go/services/operationalinsights/mgmt/2020-08-01/operationalinsights" "github.com/hashicorp/go-azure-helpers/response" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" @@ -14,20 +14,31 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/migration" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmLogAnalyticsWorkspace() *schema.Resource { +func resourceLogAnalyticsWorkspace() *schema.Resource { return &schema.Resource{ - Create: resourceArmLogAnalyticsWorkspaceCreateUpdate, - Read: resourceArmLogAnalyticsWorkspaceRead, - Update: resourceArmLogAnalyticsWorkspaceCreateUpdate, - Delete: resourceArmLogAnalyticsWorkspaceDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, + Create: resourceLogAnalyticsWorkspaceCreateUpdate, + Read: resourceLogAnalyticsWorkspaceRead, + Update: resourceLogAnalyticsWorkspaceCreateUpdate, + Delete: resourceLogAnalyticsWorkspaceDelete, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.LogAnalyticsWorkspaceID(id) + return err + }), + + SchemaVersion: 2, + StateUpgraders: []schema.StateUpgrader{ + migration.WorkspaceV0ToV1(), + migration.WorkspaceV1ToV2(), }, Timeouts: &schema.ResourceTimeout{ @@ -42,13 +53,25 @@ func resourceArmLogAnalyticsWorkspace() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: ValidateAzureRmLogAnalyticsWorkspaceName, + ValidateFunc: validate.LogAnalyticsWorkspaceName, }, "location": azure.SchemaLocation(), "resource_group_name": azure.SchemaResourceGroupNameDiffSuppress(), + "internet_ingestion_enabled": { + Type: schema.TypeBool, + Optional: true, + Default: true, + }, + + "internet_query_enabled": { + Type: schema.TypeBool, + Optional: true, + Default: true, + }, + "sku": { Type: schema.TypeString, Optional: true, @@ -63,7 +86,7 @@ func resourceArmLogAnalyticsWorkspace() *schema.Resource { string(operationalinsights.WorkspaceSkuNameEnumStandard), "Unlimited", // TODO check if this is actually no longer valid, removed in v28.0.0 of the SDK }, true), - DiffSuppressFunc: suppress.CaseDifference, + DiffSuppressFunc: logAnalyticsLinkedServiceSkuChangeCaseDifference, }, "retention_in_days": { @@ -74,10 +97,11 @@ func resourceArmLogAnalyticsWorkspace() *schema.Resource { }, "daily_quota_gb": { - Type: schema.TypeFloat, - Optional: true, - Default: -1.0, - ValidateFunc: validation.Any(validation.FloatBetween(-1, -1), validation.FloatAtLeast(0)), + Type: schema.TypeFloat, + Optional: true, + Default: -1.0, + DiffSuppressFunc: dailyQuotaGbDiffSuppressFunc, + ValidateFunc: validation.FloatAtLeast(0), }, 
"workspace_id": { @@ -108,20 +132,22 @@ func resourceArmLogAnalyticsWorkspace() *schema.Resource { } } -func resourceArmLogAnalyticsWorkspaceCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceLogAnalyticsWorkspaceCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).LogAnalytics.WorkspacesClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() log.Printf("[INFO] preparing arguments for AzureRM Log Analytics Workspace creation.") name := d.Get("name").(string) - resGroup := d.Get("resource_group_name").(string) + resourceGroup := d.Get("resource_group_name").(string) + id := parse.NewLogAnalyticsWorkspaceID(subscriptionId, resourceGroup, name) if d.IsNewResource() { - existing, err := client.Get(ctx, resGroup, name) + existing, err := client.Get(ctx, resourceGroup, name) if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing Log Analytics Workspace %q (Resource Group %q): %s", name, resGroup, err) + return fmt.Errorf("Error checking for presence of existing Log Analytics Workspace %q (Resource Group %q): %s", name, resourceGroup, err) } } @@ -136,8 +162,30 @@ func resourceArmLogAnalyticsWorkspaceCreateUpdate(d *schema.ResourceData, meta i Name: operationalinsights.WorkspaceSkuNameEnum(skuName), } + // (@WodansSon) - If the workspace is connected to a cluster via the linked service resource + // the workspace cannot be modified since the linked service changes the sku value within + // the workspace + if !d.IsNewResource() { + resp, err := client.Get(ctx, id.ResourceGroup, id.WorkspaceName) + if err == nil { + if azSku := resp.Sku; azSku != nil { + if strings.EqualFold(string(azSku.Name), "lacluster") { + return fmt.Errorf("Log Analytics Workspace %q (Resource Group %q): cannot be modified while it is connected to a Log Analytics cluster", name, resourceGroup) + } + } + } + } + + internetIngestionEnabled := operationalinsights.Disabled + if d.Get("internet_ingestion_enabled").(bool) { + internetIngestionEnabled = operationalinsights.Enabled + } + internetQueryEnabled := operationalinsights.Disabled + if d.Get("internet_query_enabled").(bool) { + internetQueryEnabled = operationalinsights.Enabled + } + retentionInDays := int32(d.Get("retention_in_days").(int)) - dailyQuotaGb := d.Get("daily_quota_gb").(float64) t := d.Get("tags").(map[string]interface{}) @@ -146,38 +194,37 @@ func resourceArmLogAnalyticsWorkspaceCreateUpdate(d *schema.ResourceData, meta i Location: &location, Tags: tags.Expand(t), WorkspaceProperties: &operationalinsights.WorkspaceProperties{ - Sku: sku, - RetentionInDays: &retentionInDays, - WorkspaceCapping: &operationalinsights.WorkspaceCapping{ - DailyQuotaGb: &dailyQuotaGb, - }, + Sku: sku, + PublicNetworkAccessForIngestion: internetIngestionEnabled, + PublicNetworkAccessForQuery: internetQueryEnabled, + RetentionInDays: &retentionInDays, }, } - future, err := client.CreateOrUpdate(ctx, resGroup, name, parameters) - if err != nil { - return err - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return err + dailyQuotaGb, ok := d.GetOk("daily_quota_gb") + if ok && strings.EqualFold(skuName, string(operationalinsights.WorkspaceSkuNameEnumFree)) { + return fmt.Errorf("`Free` tier SKU quota is not configurable and is hard set to 0.5GB") + } else if !strings.EqualFold(skuName, 
string(operationalinsights.WorkspaceSkuNameEnumFree)) { + parameters.WorkspaceProperties.WorkspaceCapping = &operationalinsights.WorkspaceCapping{ + DailyQuotaGb: utils.Float(dailyQuotaGb.(float64)), + } } - read, err := client.Get(ctx, resGroup, name) + future, err := client.CreateOrUpdate(ctx, resourceGroup, name, parameters) if err != nil { return err } - if read.ID == nil { - return fmt.Errorf("Cannot read Log Analytics Workspace '%s' (resource group %s) ID", name, resGroup) + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return err } - d.SetId(*read.ID) + d.SetId(id.ID()) - return resourceArmLogAnalyticsWorkspaceRead(d, meta) + return resourceLogAnalyticsWorkspaceRead(d, meta) } -func resourceArmLogAnalyticsWorkspaceRead(d *schema.ResourceData, meta interface{}) error { +func resourceLogAnalyticsWorkspaceRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).LogAnalytics.WorkspacesClient sharedKeysClient := meta.(*clients.Client).LogAnalytics.SharedKeysClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) @@ -187,13 +234,13 @@ func resourceArmLogAnalyticsWorkspaceRead(d *schema.ResourceData, meta interface return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.Name) + resp, err := client.Get(ctx, id.ResourceGroup, id.WorkspaceName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("Error making Read request on AzureRM Log Analytics workspaces '%s': %+v", id.Name, err) + return fmt.Errorf("Error making Read request on AzureRM Log Analytics workspaces '%s': %+v", id.WorkspaceName, err) } d.Set("name", resp.Name) @@ -202,21 +249,27 @@ func resourceArmLogAnalyticsWorkspaceRead(d *schema.ResourceData, meta interface d.Set("location", azure.NormalizeLocation(*location)) } + d.Set("internet_ingestion_enabled", resp.PublicNetworkAccessForIngestion == operationalinsights.Enabled) + d.Set("internet_query_enabled", resp.PublicNetworkAccessForQuery == operationalinsights.Enabled) + d.Set("workspace_id", resp.CustomerID) d.Set("portal_url", "") if sku := resp.Sku; sku != nil { d.Set("sku", sku.Name) } d.Set("retention_in_days", resp.RetentionInDays) - if workspaceCapping := resp.WorkspaceCapping; workspaceCapping != nil { + if resp.WorkspaceProperties != nil && resp.WorkspaceProperties.Sku != nil && strings.EqualFold(string(resp.WorkspaceProperties.Sku.Name), string(operationalinsights.WorkspaceSkuNameEnumFree)) { + // Special case for "Free" tier + d.Set("daily_quota_gb", utils.Float(0.5)) + } else if workspaceCapping := resp.WorkspaceCapping; workspaceCapping != nil { d.Set("daily_quota_gb", resp.WorkspaceCapping.DailyQuotaGb) } else { d.Set("daily_quota_gb", utils.Float(-1)) } - sharedKeys, err := sharedKeysClient.GetSharedKeys(ctx, id.ResourceGroup, id.Name) + sharedKeys, err := sharedKeysClient.GetSharedKeys(ctx, id.ResourceGroup, id.WorkspaceName) if err != nil { - log.Printf("[ERROR] Unable to List Shared keys for Log Analytics workspaces %s: %+v", id.Name, err) + log.Printf("[ERROR] Unable to List Shared keys for Log Analytics workspaces %s: %+v", id.WorkspaceName, err) } else { d.Set("primary_shared_key", sharedKeys.PrimarySharedKey) d.Set("secondary_shared_key", sharedKeys.SecondarySharedKey) @@ -225,7 +278,7 @@ func resourceArmLogAnalyticsWorkspaceRead(d *schema.ResourceData, meta interface return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmLogAnalyticsWorkspaceDelete(d *schema.ResourceData, meta interface{}) error { +func 
resourceLogAnalyticsWorkspaceDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).LogAnalytics.WorkspacesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() @@ -235,31 +288,36 @@ func resourceArmLogAnalyticsWorkspaceDelete(d *schema.ResourceData, meta interfa } force := false - future, err := client.Delete(ctx, id.ResourceGroup, id.Name, utils.Bool(force)) + future, err := client.Delete(ctx, id.ResourceGroup, id.WorkspaceName, utils.Bool(force)) if err != nil { - return fmt.Errorf("issuing AzureRM delete request for Log Analytics Workspaces '%s': %+v", id.Name, err) + return fmt.Errorf("issuing AzureRM delete request for Log Analytics Workspaces '%s': %+v", id.WorkspaceName, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { if !response.WasNotFound(future.Response()) { - return fmt.Errorf("waiting for deletion of Log Analytics Worspace %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("waiting for deletion of Log Analytics Workspace %q (Resource Group %q): %+v", id.WorkspaceName, id.ResourceGroup, err) } } return nil } -func ValidateAzureRmLogAnalyticsWorkspaceName(v interface{}, _ string) (warnings []string, errors []error) { - value := v.(string) - - if !regexp.MustCompile("^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$").MatchString(value) { - errors = append(errors, fmt.Errorf("Workspace Name can only contain alphabet, number, and '-' character. You can not use '-' as the start and end of the name")) +func dailyQuotaGbDiffSuppressFunc(_, _, _ string, d *schema.ResourceData) bool { + // (@jackofallops) - 'free' is a legacy special case that is always set to 0.5GB + if skuName := d.Get("sku").(string); strings.EqualFold(skuName, string(operationalinsights.WorkspaceSkuNameEnumFree)) { + return true } - length := len(value) - if length > 63 || 4 > length { - errors = append(errors, fmt.Errorf("Workspace Name can only be between 4 and 63 letters")) + return false +} + +func logAnalyticsLinkedServiceSkuChangeCaseDifference(k, old, new string, d *schema.ResourceData) bool { + // (@WodansSon) - This is needed because if you connect your workspace to a log analytics linked service resource it + // will modify the value of your sku to "lacluster". 
We are currently in negotiations with the service team to + // see if there is another way of doing this, for now this is the workaround + if old == "lacluster" { + old = new } - return warnings, errors + return suppress.CaseDifference(k, old, new, d) } diff --git a/azurerm/internal/services/loganalytics/log_analytics_workspace_resource_test.go b/azurerm/internal/services/loganalytics/log_analytics_workspace_resource_test.go new file mode 100644 index 000000000000..7c504944e1e0 --- /dev/null +++ b/azurerm/internal/services/loganalytics/log_analytics_workspace_resource_test.go @@ -0,0 +1,432 @@ +package loganalytics_test + +import ( + "context" + "fmt" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" +) + +type LogAnalyticsWorkspaceResource struct { +} + +func TestAccLogAnalyticsWorkspace_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_workspace", "test") + r := LogAnalyticsWorkspaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccLogAnalyticsWorkspace_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_workspace", "test") + r := LogAnalyticsWorkspaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_log_analytics_workspace"), + }, + }) +} + +func TestAccLogAnalyticsWorkspace_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_workspace", "test") + r := LogAnalyticsWorkspaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccLogAnalyticsWorkspace_freeTier(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_workspace", "test") + r := LogAnalyticsWorkspaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.freeTier(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccLogAnalyticsWorkspace_withDefaultSku(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_workspace", "test") + r := LogAnalyticsWorkspaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withDefaultSku(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccLogAnalyticsWorkspace_withVolumeCap(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_workspace", "test") + r := 
LogAnalyticsWorkspaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withVolumeCap(data, 4.5), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccLogAnalyticsWorkspace_removeVolumeCap(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_workspace", "test") + r := LogAnalyticsWorkspaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withVolumeCap(data, 5.5), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.removeVolumeCap(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("daily_quota_gb").HasValue("-1"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccLogAnalyticsWorkspace_withInternetIngestionEnabled(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_workspace", "test") + r := LogAnalyticsWorkspaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withInternetIngestionEnabled(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.withInternetIngestionEnabledUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccLogAnalyticsWorkspace_withInternetQueryEnabled(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_log_analytics_workspace", "test") + r := LogAnalyticsWorkspaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withInternetQueryEnabled(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.withInternetQueryEnabledUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t LogAnalyticsWorkspaceResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.LogAnalyticsWorkspaceID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.LogAnalytics.WorkspacesClient.Get(ctx, id.ResourceGroup, id.WorkspaceName) + if err != nil { + return nil, fmt.Errorf("readingLog Analytics Workspace (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (LogAnalyticsWorkspaceResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestLAW-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "PerGB2018" + retention_in_days = 30 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r LogAnalyticsWorkspaceResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_log_analytics_workspace" "import" { + name = azurerm_log_analytics_workspace.test.name + location = azurerm_log_analytics_workspace.test.location + resource_group_name = azurerm_log_analytics_workspace.test.resource_group_name + sku = "PerGB2018" +} +`, r.basic(data)) +} + +func 
(LogAnalyticsWorkspaceResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestLAW-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "PerGB2018" + retention_in_days = 30 + + tags = { + Environment = "Test" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (LogAnalyticsWorkspaceResource) freeTier(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestLAW-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Free" + retention_in_days = 7 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (LogAnalyticsWorkspaceResource) withDefaultSku(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestLAW-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + retention_in_days = 30 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (LogAnalyticsWorkspaceResource) withVolumeCap(data acceptance.TestData, volumeCapGb float64) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestLAW-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "PerGB2018" + retention_in_days = 30 + daily_quota_gb = %f + + tags = { + Environment = "Test" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, volumeCapGb) +} + +func (LogAnalyticsWorkspaceResource) removeVolumeCap(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestLAW-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "PerGB2018" + retention_in_days = 30 + + tags = { + Environment = "Test" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (LogAnalyticsWorkspaceResource) withInternetIngestionEnabled(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestLAW-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + internet_ingestion_enabled = true + sku = "PerGB2018" + retention_in_days = 30 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (LogAnalyticsWorkspaceResource) withInternetIngestionEnabledUpdate(data 
acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestLAW-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + internet_ingestion_enabled = false + sku = "PerGB2018" + retention_in_days = 30 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (LogAnalyticsWorkspaceResource) withInternetQueryEnabled(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestLAW-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + internet_query_enabled = true + sku = "PerGB2018" + retention_in_days = 30 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (LogAnalyticsWorkspaceResource) withInternetQueryEnabledUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestLAW-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + internet_query_enabled = false + sku = "PerGB2018" + retention_in_days = 30 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/loganalytics/migration/workspace_v0_to_v1.go b/azurerm/internal/services/loganalytics/migration/workspace_v0_to_v1.go new file mode 100644 index 000000000000..aa64651ab5b0 --- /dev/null +++ b/azurerm/internal/services/loganalytics/migration/workspace_v0_to_v1.go @@ -0,0 +1,101 @@ +package migration + +import ( + "log" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" +) + +func WorkspaceV0ToV1() schema.StateUpgrader { + return schema.StateUpgrader{ + Version: 0, + Type: workspaceV0V1Schema().CoreConfigSchema().ImpliedType(), + Upgrade: workspaceUpgradeV0ToV1, + } +} + +func workspaceV0V1Schema() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + + "location": azure.SchemaLocation(), + + "resource_group_name": azure.SchemaResourceGroupNameDiffSuppress(), + + "internet_ingestion_enabled": { + Type: schema.TypeBool, + Optional: true, + Default: true, + }, + + "internet_query_enabled": { + Type: schema.TypeBool, + Optional: true, + Default: true, + }, + + "sku": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + }, + + "retention_in_days": { + Type: schema.TypeInt, + Optional: true, + Computed: true, + }, + + "daily_quota_gb": { + Type: schema.TypeFloat, + Optional: true, + Default: -1.0, + }, + + "workspace_id": { + Type: schema.TypeString, + Computed: true, + }, + + 
"portal_url": { + Type: schema.TypeString, + Computed: true, + }, + + "primary_shared_key": { + Type: schema.TypeString, + Computed: true, + Sensitive: true, + }, + + "secondary_shared_key": { + Type: schema.TypeString, + Computed: true, + Sensitive: true, + }, + + "tags": tags.Schema(), + }, + } +} + +func workspaceUpgradeV0ToV1(rawState map[string]interface{}, meta interface{}) (map[string]interface{}, error) { + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + + log.Printf("[DEBUG] Migrating IDs to correct casing for Log Analytics Workspace") + name := rawState["name"].(string) + resourceGroup := rawState["resource_group_name"].(string) + id := parse.NewLogAnalyticsWorkspaceID(subscriptionId, resourceGroup, name) + + rawState["id"] = id.ID() + return rawState, nil +} diff --git a/azurerm/internal/services/loganalytics/migration/workspace_v1_to_v2.go b/azurerm/internal/services/loganalytics/migration/workspace_v1_to_v2.go new file mode 100644 index 000000000000..107c1aecf3c4 --- /dev/null +++ b/azurerm/internal/services/loganalytics/migration/workspace_v1_to_v2.go @@ -0,0 +1,31 @@ +package migration + +import ( + "log" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" +) + +func WorkspaceV1ToV2() schema.StateUpgrader { + // V1 to V2 is the same as v0 to v1 - to workaround a historical issue where `resource_group` was + // used in place of `resource_group_name` - ergo using the same schema is fine. + return schema.StateUpgrader{ + Version: 1, + Type: workspaceV0V1Schema().CoreConfigSchema().ImpliedType(), + Upgrade: workspaceUpgradeV1ToV2, + } +} + +func workspaceUpgradeV1ToV2(rawState map[string]interface{}, meta interface{}) (map[string]interface{}, error) { + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + + log.Printf("[DEBUG] Migrating IDs to correct casing for Log Analytics Workspace") + name := rawState["name"].(string) + resourceGroup := rawState["resource_group_name"].(string) + id := parse.NewLogAnalyticsWorkspaceID(subscriptionId, resourceGroup, name) + + rawState["id"] = id.ID() + return rawState, nil +} diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_cluster.go b/azurerm/internal/services/loganalytics/parse/log_analytics_cluster.go new file mode 100644 index 000000000000..2623f328a067 --- /dev/null +++ b/azurerm/internal/services/loganalytics/parse/log_analytics_cluster.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type LogAnalyticsClusterId struct { + SubscriptionId string + ResourceGroup string + ClusterName string +} + +func NewLogAnalyticsClusterID(subscriptionId, resourceGroup, clusterName string) LogAnalyticsClusterId { + return LogAnalyticsClusterId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ClusterName: clusterName, + } +} + +func (id LogAnalyticsClusterId) String() string { + segments := []string{ + fmt.Sprintf("Cluster Name %q", id.ClusterName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Log Analytics Cluster", segmentsStr) +} + +func (id LogAnalyticsClusterId) ID() string { + fmtString 
:= "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.OperationalInsights/clusters/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ClusterName) +} + +// LogAnalyticsClusterID parses a LogAnalyticsCluster ID into an LogAnalyticsClusterId struct +func LogAnalyticsClusterID(input string) (*LogAnalyticsClusterId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := LogAnalyticsClusterId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ClusterName, err = id.PopSegment("clusters"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_cluster_test.go b/azurerm/internal/services/loganalytics/parse/log_analytics_cluster_test.go new file mode 100644 index 000000000000..5b9aae650424 --- /dev/null +++ b/azurerm/internal/services/loganalytics/parse/log_analytics_cluster_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = LogAnalyticsClusterId{} + +func TestLogAnalyticsClusterIDFormatter(t *testing.T) { + actual := NewLogAnalyticsClusterID("12345678-1234-9876-4563-123456789012", "resGroup1", "cluster1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/clusters/cluster1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestLogAnalyticsClusterID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *LogAnalyticsClusterId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/", + Error: true, + }, + + { + // missing value for ClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/clusters/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/clusters/cluster1", + Expected: &LogAnalyticsClusterId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + ClusterName: "cluster1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.OPERATIONALINSIGHTS/CLUSTERS/CLUSTER1", + Error: true, + }, + } + + for _, v := 
range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := LogAnalyticsClusterID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ClusterName != v.Expected.ClusterName { + t.Fatalf("Expected %q but got %q for ClusterName", v.Expected.ClusterName, actual.ClusterName) + } + } +} diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_data_export.go b/azurerm/internal/services/loganalytics/parse/log_analytics_data_export.go index 67f15d50edfe..7e0f64fc6284 100644 --- a/azurerm/internal/services/loganalytics/parse/log_analytics_data_export.go +++ b/azurerm/internal/services/loganalytics/parse/log_analytics_data_export.go @@ -1,43 +1,75 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type LogAnalyticsDataExportId struct { - ResourceGroup string - WorkspaceName string - WorkspaceID string - Name string + SubscriptionId string + ResourceGroup string + WorkspaceName string + DataexportName string +} + +func NewLogAnalyticsDataExportID(subscriptionId, resourceGroup, workspaceName, dataexportName string) LogAnalyticsDataExportId { + return LogAnalyticsDataExportId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + WorkspaceName: workspaceName, + DataexportName: dataexportName, + } +} + +func (id LogAnalyticsDataExportId) String() string { + segments := []string{ + fmt.Sprintf("Dataexport Name %q", id.DataexportName), + fmt.Sprintf("Workspace Name %q", id.WorkspaceName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Log Analytics Data Export", segmentsStr) +} + +func (id LogAnalyticsDataExportId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.OperationalInsights/workspaces/%s/dataexports/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.WorkspaceName, id.DataexportName) } +// LogAnalyticsDataExportID parses a LogAnalyticsDataExport ID into an LogAnalyticsDataExportId struct func LogAnalyticsDataExportID(input string) (*LogAnalyticsDataExportId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("parsing Log Analytics Data Export Rule ID %q: %+v", input, err) + return nil, err } - logAnalyticsDataExport := LogAnalyticsDataExportId{ - ResourceGroup: id.ResourceGroup, + resourceId := LogAnalyticsDataExportId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, } - if logAnalyticsDataExport.WorkspaceName, err = id.PopSegment("workspaces"); err != nil { - return nil, err + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if logAnalyticsDataExport.WorkspaceID = 
fmt.Sprintf("/subscriptions/%s/resourcegroups/%s/providers/%s/workspaces/%s", id.SubscriptionID, id.ResourceGroup, id.Provider, logAnalyticsDataExport.WorkspaceName); err != nil { - return nil, fmt.Errorf("formatting Log Analytics Data Export Rule workspace ID %q", input) + + if resourceId.WorkspaceName, err = id.PopSegment("workspaces"); err != nil { + return nil, err } - if logAnalyticsDataExport.Name, err = id.PopSegment("dataExports"); err != nil { - // API Issue the casing changes for the ID - if logAnalyticsDataExport.Name, err = id.PopSegment("dataexports"); err != nil { - return nil, err - } + if resourceId.DataexportName, err = id.PopSegment("dataexports"); err != nil { + return nil, err } + if err := id.ValidateNoEmptySegments(input); err != nil { return nil, err } - return &logAnalyticsDataExport, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_data_export_test.go b/azurerm/internal/services/loganalytics/parse/log_analytics_data_export_test.go index cc53682a7f5e..519a5e2f4272 100644 --- a/azurerm/internal/services/loganalytics/parse/log_analytics_data_export_test.go +++ b/azurerm/internal/services/loganalytics/parse/log_analytics_data_export_test.go @@ -1,77 +1,128 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" ) +var _ resourceid.Formatter = LogAnalyticsDataExportId{} + +func TestLogAnalyticsDataExportIDFormatter(t *testing.T) { + actual := NewLogAnalyticsDataExportID("12345678-1234-9876-4563-123456789012", "resGroup1", "workspace1", "dataExport1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/dataexports/dataExport1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + func TestLogAnalyticsDataExportID(t *testing.T) { testData := []struct { - Name string Input string + Error bool Expected *LogAnalyticsDataExportId }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + { - Name: "Empty", - Input: "", - Expected: nil, + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, + // missing WorkspaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/", + Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, + // missing value for WorkspaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/", + Error: true, }, + { - Name: "Missing DataExport Value", - Input: 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/resourceGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/dataExports", - Expected: nil, + // missing DataexportName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/", + Error: true, }, + { - Name: "operationalinsights DataExport ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/resourceGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/dataExports/dataExport1", + // missing value for DataexportName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/dataexports/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/dataexports/dataExport1", Expected: &LogAnalyticsDataExportId{ - ResourceGroup: "resourceGroup1", - WorkspaceName: "workspace1", - Name: "dataExport1", + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + WorkspaceName: "workspace1", + DataexportName: "dataExport1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/resourceGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/DataExports/dataExport1", - Expected: nil, + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.OPERATIONALINSIGHTS/WORKSPACES/WORKSPACE1/DATAEXPORTS/DATAEXPORT1", + Error: true, }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q..", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := LogAnalyticsDataExportID(v.Input) if err != nil { - if v.Expected == nil { + if v.Error { continue } - t.Fatalf("Expected a value but got an error: %s", err) + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") } + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } if actual.ResourceGroup != v.Expected.ResourceGroup { t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) } - if actual.WorkspaceName != v.Expected.WorkspaceName { t.Fatalf("Expected %q but got %q for WorkspaceName", v.Expected.WorkspaceName, actual.WorkspaceName) } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + if actual.DataexportName != v.Expected.DataexportName { + t.Fatalf("Expected %q but got %q for DataexportName", v.Expected.DataexportName, actual.DataexportName) } } } diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_data_source_windows_event.go b/azurerm/internal/services/loganalytics/parse/log_analytics_data_source_windows_event.go new file mode 100644 index 000000000000..9528f8cbe65b --- /dev/null +++ b/azurerm/internal/services/loganalytics/parse/log_analytics_data_source_windows_event.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type 
LogAnalyticsDataSourceWindowsEventId struct { + SubscriptionId string + ResourceGroup string + WorkspaceName string + DataSourceName string +} + +func NewLogAnalyticsDataSourceWindowsEventID(subscriptionId, resourceGroup, workspaceName, dataSourceName string) LogAnalyticsDataSourceWindowsEventId { + return LogAnalyticsDataSourceWindowsEventId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + WorkspaceName: workspaceName, + DataSourceName: dataSourceName, + } +} + +func (id LogAnalyticsDataSourceWindowsEventId) String() string { + segments := []string{ + fmt.Sprintf("Data Source Name %q", id.DataSourceName), + fmt.Sprintf("Workspace Name %q", id.WorkspaceName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Log Analytics Data Source Windows Event", segmentsStr) +} + +func (id LogAnalyticsDataSourceWindowsEventId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.OperationalInsights/workspaces/%s/dataSources/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.WorkspaceName, id.DataSourceName) +} + +// LogAnalyticsDataSourceWindowsEventID parses a LogAnalyticsDataSourceWindowsEvent ID into an LogAnalyticsDataSourceWindowsEventId struct +func LogAnalyticsDataSourceWindowsEventID(input string) (*LogAnalyticsDataSourceWindowsEventId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := LogAnalyticsDataSourceWindowsEventId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.WorkspaceName, err = id.PopSegment("workspaces"); err != nil { + return nil, err + } + if resourceId.DataSourceName, err = id.PopSegment("dataSources"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_data_source_windows_event_test.go b/azurerm/internal/services/loganalytics/parse/log_analytics_data_source_windows_event_test.go new file mode 100644 index 000000000000..c76b3541fdd6 --- /dev/null +++ b/azurerm/internal/services/loganalytics/parse/log_analytics_data_source_windows_event_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = LogAnalyticsDataSourceWindowsEventId{} + +func TestLogAnalyticsDataSourceWindowsEventIDFormatter(t *testing.T) { + actual := NewLogAnalyticsDataSourceWindowsEventID("12345678-1234-9876-4563-123456789012", "resGroup1", "workspace1", "dataSource1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/dataSources/dataSource1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestLogAnalyticsDataSourceWindowsEventID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *LogAnalyticsDataSourceWindowsEventId + }{ + + { + // empty + 
Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing WorkspaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/", + Error: true, + }, + + { + // missing value for WorkspaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/", + Error: true, + }, + + { + // missing DataSourceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/", + Error: true, + }, + + { + // missing value for DataSourceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/dataSources/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/dataSources/dataSource1", + Expected: &LogAnalyticsDataSourceWindowsEventId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + WorkspaceName: "workspace1", + DataSourceName: "dataSource1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.OPERATIONALINSIGHTS/WORKSPACES/WORKSPACE1/DATASOURCES/DATASOURCE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := LogAnalyticsDataSourceWindowsEventID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.WorkspaceName != v.Expected.WorkspaceName { + t.Fatalf("Expected %q but got %q for WorkspaceName", v.Expected.WorkspaceName, actual.WorkspaceName) + } + if actual.DataSourceName != v.Expected.DataSourceName { + t.Fatalf("Expected %q but got %q for DataSourceName", v.Expected.DataSourceName, actual.DataSourceName) + } + } +} diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_linked_service.go b/azurerm/internal/services/loganalytics/parse/log_analytics_linked_service.go new file mode 100644 index 000000000000..438ab126052e --- /dev/null +++ b/azurerm/internal/services/loganalytics/parse/log_analytics_linked_service.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type LogAnalyticsLinkedServiceId struct { + SubscriptionId string + ResourceGroup string + 
WorkspaceName string + LinkedServiceName string +} + +func NewLogAnalyticsLinkedServiceID(subscriptionId, resourceGroup, workspaceName, linkedServiceName string) LogAnalyticsLinkedServiceId { + return LogAnalyticsLinkedServiceId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + WorkspaceName: workspaceName, + LinkedServiceName: linkedServiceName, + } +} + +func (id LogAnalyticsLinkedServiceId) String() string { + segments := []string{ + fmt.Sprintf("Linked Service Name %q", id.LinkedServiceName), + fmt.Sprintf("Workspace Name %q", id.WorkspaceName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Log Analytics Linked Service", segmentsStr) +} + +func (id LogAnalyticsLinkedServiceId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.OperationalInsights/workspaces/%s/linkedServices/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.WorkspaceName, id.LinkedServiceName) +} + +// LogAnalyticsLinkedServiceID parses a LogAnalyticsLinkedService ID into an LogAnalyticsLinkedServiceId struct +func LogAnalyticsLinkedServiceID(input string) (*LogAnalyticsLinkedServiceId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := LogAnalyticsLinkedServiceId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.WorkspaceName, err = id.PopSegment("workspaces"); err != nil { + return nil, err + } + if resourceId.LinkedServiceName, err = id.PopSegment("linkedServices"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_linked_service_test.go b/azurerm/internal/services/loganalytics/parse/log_analytics_linked_service_test.go new file mode 100644 index 000000000000..4ef59f4c90ab --- /dev/null +++ b/azurerm/internal/services/loganalytics/parse/log_analytics_linked_service_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = LogAnalyticsLinkedServiceId{} + +func TestLogAnalyticsLinkedServiceIDFormatter(t *testing.T) { + actual := NewLogAnalyticsLinkedServiceID("12345678-1234-9876-4563-123456789012", "resGroup1", "workspace1", "linkedService1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/linkedServices/linkedService1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestLogAnalyticsLinkedServiceID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *LogAnalyticsLinkedServiceId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing WorkspaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/", + Error: true, + }, + + { + // missing value for WorkspaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/", + Error: true, + }, + + { + // missing LinkedServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/", + Error: true, + }, + + { + // missing value for LinkedServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/linkedServices/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/linkedServices/linkedService1", + Expected: &LogAnalyticsLinkedServiceId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + WorkspaceName: "workspace1", + LinkedServiceName: "linkedService1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.OPERATIONALINSIGHTS/WORKSPACES/WORKSPACE1/LINKEDSERVICES/LINKEDSERVICE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := LogAnalyticsLinkedServiceID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.WorkspaceName != v.Expected.WorkspaceName { + t.Fatalf("Expected %q but got %q for WorkspaceName", v.Expected.WorkspaceName, actual.WorkspaceName) + } + if actual.LinkedServiceName != v.Expected.LinkedServiceName { + t.Fatalf("Expected %q but got %q for LinkedServiceName", v.Expected.LinkedServiceName, actual.LinkedServiceName) + } + } +} diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_linked_storage_account.go b/azurerm/internal/services/loganalytics/parse/log_analytics_linked_storage_account.go index 0bc7e6e200e9..62f23a1c19c3 100644 --- a/azurerm/internal/services/loganalytics/parse/log_analytics_linked_storage_account.go +++ b/azurerm/internal/services/loganalytics/parse/log_analytics_linked_storage_account.go @@ -1,43 +1,75 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" - "github.com/Azure/azure-sdk-for-go/services/preview/operationalinsights/mgmt/2020-03-01-preview/operationalinsights" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type LogAnalyticsLinkedStorageAccountId struct { - ResourceGroup string - WorkspaceName string - WorkspaceID string - 
Name string + SubscriptionId string + ResourceGroup string + WorkspaceName string + LinkedStorageAccountName string +} + +func NewLogAnalyticsLinkedStorageAccountID(subscriptionId, resourceGroup, workspaceName, linkedStorageAccountName string) LogAnalyticsLinkedStorageAccountId { + return LogAnalyticsLinkedStorageAccountId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + WorkspaceName: workspaceName, + LinkedStorageAccountName: linkedStorageAccountName, + } +} + +func (id LogAnalyticsLinkedStorageAccountId) String() string { + segments := []string{ + fmt.Sprintf("Linked Storage Account Name %q", id.LinkedStorageAccountName), + fmt.Sprintf("Workspace Name %q", id.WorkspaceName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Log Analytics Linked Storage Account", segmentsStr) +} + +func (id LogAnalyticsLinkedStorageAccountId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.OperationalInsights/workspaces/%s/linkedStorageAccounts/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.WorkspaceName, id.LinkedStorageAccountName) } +// LogAnalyticsLinkedStorageAccountID parses a LogAnalyticsLinkedStorageAccount ID into an LogAnalyticsLinkedStorageAccountId struct func LogAnalyticsLinkedStorageAccountID(input string) (*LogAnalyticsLinkedStorageAccountId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("parsing Log Analytics Linked Storage Account ID %q: %+v", input, err) + return nil, err } - logAnalyticsLinkedStorageAccount := LogAnalyticsLinkedStorageAccountId{ - ResourceGroup: id.ResourceGroup, + resourceId := LogAnalyticsLinkedStorageAccountId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, } - if logAnalyticsLinkedStorageAccount.WorkspaceName, err = id.PopSegment("workspaces"); err != nil { - return nil, err + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if logAnalyticsLinkedStorageAccount.WorkspaceID = fmt.Sprintf("/subscriptions/%s/resourcegroups/%s/providers/%s/workspaces/%s", id.SubscriptionID, id.ResourceGroup, id.Provider, logAnalyticsLinkedStorageAccount.WorkspaceName); err != nil { - return nil, fmt.Errorf("formatting Log Analytics Data Export Rule workspace ID %q", input) + + if resourceId.WorkspaceName, err = id.PopSegment("workspaces"); err != nil { + return nil, err } - var name string - if name, err = id.PopSegment("linkedStorageAccounts"); err == nil { - logAnalyticsLinkedStorageAccount.Name = string(operationalinsights.DataSourceType(name)) - } else { + if resourceId.LinkedStorageAccountName, err = id.PopSegment("linkedStorageAccounts"); err != nil { return nil, err } + if err := id.ValidateNoEmptySegments(input); err != nil { return nil, err } - return &logAnalyticsLinkedStorageAccount, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_linked_storage_account_test.go b/azurerm/internal/services/loganalytics/parse/log_analytics_linked_storage_account_test.go index 6c8697fbe888..12c138d896d1 100644 --- a/azurerm/internal/services/loganalytics/parse/log_analytics_linked_storage_account_test.go +++ b/azurerm/internal/services/loganalytics/parse/log_analytics_linked_storage_account_test.go @@ -1,78 +1,128 @@ 
package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" ) -func TestOperationalinsightsLinkedStorageAccountID(t *testing.T) { +var _ resourceid.Formatter = LogAnalyticsLinkedStorageAccountId{} + +func TestLogAnalyticsLinkedStorageAccountIDFormatter(t *testing.T) { + actual := NewLogAnalyticsLinkedStorageAccountID("12345678-1234-9876-4563-123456789012", "resGroup1", "workspace1", "query").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/linkedStorageAccounts/query" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestLogAnalyticsLinkedStorageAccountID(t *testing.T) { testData := []struct { - Name string Input string + Error bool Expected *LogAnalyticsLinkedStorageAccountId }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + { - Name: "Empty", - Input: "", - Expected: nil, + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, + // missing WorkspaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/", + Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, + // missing value for WorkspaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/", + Error: true, }, + { - Name: "Missing LinkedStorageAccount Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/resourceGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/linkedStorageAccounts", - Expected: nil, + // missing LinkedStorageAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/", + Error: true, }, + { - Name: "Log Analytics Linked Storage Account ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/resourceGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/linkedStorageAccounts/dataSourceType1", + // missing value for LinkedStorageAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/linkedStorageAccounts/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/linkedStorageAccounts/query", Expected: &LogAnalyticsLinkedStorageAccountId{ - ResourceGroup: "resourceGroup1", - WorkspaceName: "workspace1", - WorkspaceID: 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/resourceGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1", - Name: "dataSourceType1", + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + WorkspaceName: "workspace1", + LinkedStorageAccountName: "query", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/resourceGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/LinkedStorageAccounts/dataSourceType1", - Expected: nil, + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.OPERATIONALINSIGHTS/WORKSPACES/WORKSPACE1/LINKEDSTORAGEACCOUNTS/QUERY", + Error: true, }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q..", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := LogAnalyticsLinkedStorageAccountID(v.Input) if err != nil { - if v.Expected == nil { + if v.Error { continue } - t.Fatalf("Expected a value but got an error: %s", err) + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") } + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } if actual.ResourceGroup != v.Expected.ResourceGroup { t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) } - if actual.WorkspaceName != v.Expected.WorkspaceName { t.Fatalf("Expected %q but got %q for WorkspaceName", v.Expected.WorkspaceName, actual.WorkspaceName) } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + if actual.LinkedStorageAccountName != v.Expected.LinkedStorageAccountName { + t.Fatalf("Expected %q but got %q for LinkedStorageAccountName", v.Expected.LinkedStorageAccountName, actual.LinkedStorageAccountName) } } } diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_saved_search.go b/azurerm/internal/services/loganalytics/parse/log_analytics_saved_search.go index 820f3a2c54ee..9d8a18bf9b24 100644 --- a/azurerm/internal/services/loganalytics/parse/log_analytics_saved_search.go +++ b/azurerm/internal/services/loganalytics/parse/log_analytics_saved_search.go @@ -1,32 +1,69 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type LogAnalyticsSavedSearchId struct { - ResourceGroup string - WorkspaceName string - Name string + SubscriptionId string + ResourceGroup string + WorkspaceName string + SavedSearcheName string +} + +func NewLogAnalyticsSavedSearchID(subscriptionId, resourceGroup, workspaceName, savedSearcheName string) LogAnalyticsSavedSearchId { + return LogAnalyticsSavedSearchId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + WorkspaceName: workspaceName, + SavedSearcheName: savedSearcheName, + } } +func (id LogAnalyticsSavedSearchId) String() string { + segments := []string{ + fmt.Sprintf("Saved Searche Name %q", id.SavedSearcheName), + fmt.Sprintf("Workspace Name %q", id.WorkspaceName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Log Analytics Saved Search", segmentsStr) +} + +func (id 
LogAnalyticsSavedSearchId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.OperationalInsights/workspaces/%s/savedSearches/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.WorkspaceName, id.SavedSearcheName) +} + +// LogAnalyticsSavedSearchID parses a LogAnalyticsSavedSearch ID into an LogAnalyticsSavedSearchId struct func LogAnalyticsSavedSearchID(input string) (*LogAnalyticsSavedSearchId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("parsing Log Analytics Saved Search ID %q: %+v", input, err) + return nil, err } - search := LogAnalyticsSavedSearchId{ - ResourceGroup: id.ResourceGroup, + resourceId := LogAnalyticsSavedSearchId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, } - if search.WorkspaceName, err = id.PopSegment("workspaces"); err != nil { - return nil, err + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - if search.Name, err = id.PopSegment("savedSearches"); err != nil { + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.WorkspaceName, err = id.PopSegment("workspaces"); err != nil { + return nil, err + } + if resourceId.SavedSearcheName, err = id.PopSegment("savedSearches"); err != nil { return nil, err } @@ -34,5 +71,5 @@ func LogAnalyticsSavedSearchID(input string) (*LogAnalyticsSavedSearchId, error) return nil, err } - return &search, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_saved_search_test.go b/azurerm/internal/services/loganalytics/parse/log_analytics_saved_search_test.go index 44fcbc86d13a..9cb1e6635be3 100644 --- a/azurerm/internal/services/loganalytics/parse/log_analytics_saved_search_test.go +++ b/azurerm/internal/services/loganalytics/parse/log_analytics_saved_search_test.go @@ -1,55 +1,104 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" ) +var _ resourceid.Formatter = LogAnalyticsSavedSearchId{} + +func TestLogAnalyticsSavedSearchIDFormatter(t *testing.T) { + actual := NewLogAnalyticsSavedSearchID("12345678-1234-9876-4563-123456789012", "resGroup1", "workspace1", "search1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/savedSearches/search1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + func TestLogAnalyticsSavedSearchID(t *testing.T) { testData := []struct { - Name string - Input string - Error bool - Expect *LogAnalyticsSavedSearchId + Input string + Error bool + Expected *LogAnalyticsSavedSearchId }{ + { - Name: "Empty", + // empty Input: "", Error: true, }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", Error: true, }, + { - Name: "No Resource Groups 
Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + // missing WorkspaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/", Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/", + // missing value for WorkspaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/", Error: true, }, + + { + // missing SavedSearcheName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/", + Error: true, + }, + { - Name: "Missing Saved Search Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/savedSearches", + // missing value for SavedSearcheName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/savedSearches/", Error: true, }, + { - Name: "Workspace Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/savedSearches/search1", - Error: false, - Expect: &LogAnalyticsSavedSearchId{ - ResourceGroup: "resGroup1", - WorkspaceName: "workspace1", - Name: "search1", + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/savedSearches/search1", + Expected: &LogAnalyticsSavedSearchId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + WorkspaceName: "workspace1", + SavedSearcheName: "search1", }, }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.OPERATIONALINSIGHTS/WORKSPACES/WORKSPACE1/SAVEDSEARCHES/SEARCH1", + Error: true, + }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := LogAnalyticsSavedSearchID(v.Input) if err != nil { @@ -57,15 +106,23 @@ func TestLogAnalyticsSavedSearchID(t *testing.T) { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.WorkspaceName != v.Expected.WorkspaceName { + t.Fatalf("Expected %q but got %q for WorkspaceName", v.Expected.WorkspaceName, actual.WorkspaceName) + } + if actual.SavedSearcheName != v.Expected.SavedSearcheName { + t.Fatalf("Expected %q but got %q for SavedSearcheName", v.Expected.SavedSearcheName, actual.SavedSearcheName) } } } diff --git 
a/azurerm/internal/services/loganalytics/parse/log_analytics_solution.go b/azurerm/internal/services/loganalytics/parse/log_analytics_solution.go new file mode 100644 index 000000000000..92b92422b52f --- /dev/null +++ b/azurerm/internal/services/loganalytics/parse/log_analytics_solution.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type LogAnalyticsSolutionId struct { + SubscriptionId string + ResourceGroup string + SolutionName string +} + +func NewLogAnalyticsSolutionID(subscriptionId, resourceGroup, solutionName string) LogAnalyticsSolutionId { + return LogAnalyticsSolutionId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + SolutionName: solutionName, + } +} + +func (id LogAnalyticsSolutionId) String() string { + segments := []string{ + fmt.Sprintf("Solution Name %q", id.SolutionName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Log Analytics Solution", segmentsStr) +} + +func (id LogAnalyticsSolutionId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.OperationsManagement/solutions/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.SolutionName) +} + +// LogAnalyticsSolutionID parses a LogAnalyticsSolution ID into an LogAnalyticsSolutionId struct +func LogAnalyticsSolutionID(input string) (*LogAnalyticsSolutionId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := LogAnalyticsSolutionId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.SolutionName, err = id.PopSegment("solutions"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_solution_test.go b/azurerm/internal/services/loganalytics/parse/log_analytics_solution_test.go new file mode 100644 index 000000000000..509d3a9ff2ed --- /dev/null +++ b/azurerm/internal/services/loganalytics/parse/log_analytics_solution_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = LogAnalyticsSolutionId{} + +func TestLogAnalyticsSolutionIDFormatter(t *testing.T) { + actual := NewLogAnalyticsSolutionID("12345678-1234-9876-4563-123456789012", "resGroup1", "solution1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationsManagement/solutions/solution1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestLogAnalyticsSolutionID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *LogAnalyticsSolutionId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + 
Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing SolutionName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationsManagement/", + Error: true, + }, + + { + // missing value for SolutionName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationsManagement/solutions/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationsManagement/solutions/solution1", + Expected: &LogAnalyticsSolutionId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + SolutionName: "solution1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.OPERATIONSMANAGEMENT/SOLUTIONS/SOLUTION1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := LogAnalyticsSolutionID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.SolutionName != v.Expected.SolutionName { + t.Fatalf("Expected %q but got %q for SolutionName", v.Expected.SolutionName, actual.SolutionName) + } + } +} diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insights.go b/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insights.go new file mode 100644 index 000000000000..b3c4399cc201 --- /dev/null +++ b/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insights.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type LogAnalyticsStorageInsightsId struct { + SubscriptionId string + ResourceGroup string + WorkspaceName string + StorageInsightConfigName string +} + +func NewLogAnalyticsStorageInsightsID(subscriptionId, resourceGroup, workspaceName, storageInsightConfigName string) LogAnalyticsStorageInsightsId { + return LogAnalyticsStorageInsightsId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + WorkspaceName: workspaceName, + StorageInsightConfigName: storageInsightConfigName, + } +} + +func (id LogAnalyticsStorageInsightsId) String() string { + segments := []string{ + fmt.Sprintf("Storage Insight Config Name %q", id.StorageInsightConfigName), + fmt.Sprintf("Workspace Name %q", id.WorkspaceName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Log Analytics Storage Insights", 
segmentsStr) +} + +func (id LogAnalyticsStorageInsightsId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.OperationalInsights/workspaces/%s/storageInsightConfigs/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.WorkspaceName, id.StorageInsightConfigName) +} + +// LogAnalyticsStorageInsightsID parses a LogAnalyticsStorageInsights ID into an LogAnalyticsStorageInsightsId struct +func LogAnalyticsStorageInsightsID(input string) (*LogAnalyticsStorageInsightsId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := LogAnalyticsStorageInsightsId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.WorkspaceName, err = id.PopSegment("workspaces"); err != nil { + return nil, err + } + if resourceId.StorageInsightConfigName, err = id.PopSegment("storageInsightConfigs"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insights_test.go b/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insights_test.go new file mode 100644 index 000000000000..0c9c136ae4c2 --- /dev/null +++ b/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insights_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = LogAnalyticsStorageInsightsId{} + +func TestLogAnalyticsStorageInsightsIDFormatter(t *testing.T) { + actual := NewLogAnalyticsStorageInsightsID("12345678-1234-9876-4563-123456789012", "resGroup1", "workspace1", "storageInsight1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/storageInsightConfigs/storageInsight1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestLogAnalyticsStorageInsightsID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *LogAnalyticsStorageInsightsId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing WorkspaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/", + Error: true, + }, + + { + // missing value for WorkspaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/", + Error: true, + }, + + { + // missing StorageInsightConfigName + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/", + Error: true, + }, + + { + // missing value for StorageInsightConfigName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/storageInsightConfigs/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/storageInsightConfigs/storageInsight1", + Expected: &LogAnalyticsStorageInsightsId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + WorkspaceName: "workspace1", + StorageInsightConfigName: "storageInsight1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.OPERATIONALINSIGHTS/WORKSPACES/WORKSPACE1/STORAGEINSIGHTCONFIGS/STORAGEINSIGHT1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := LogAnalyticsStorageInsightsID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.WorkspaceName != v.Expected.WorkspaceName { + t.Fatalf("Expected %q but got %q for WorkspaceName", v.Expected.WorkspaceName, actual.WorkspaceName) + } + if actual.StorageInsightConfigName != v.Expected.StorageInsightConfigName { + t.Fatalf("Expected %q but got %q for StorageInsightConfigName", v.Expected.StorageInsightConfigName, actual.StorageInsightConfigName) + } + } +} diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_workspace.go b/azurerm/internal/services/loganalytics/parse/log_analytics_workspace.go index a9c9b59ac17d..50f4e859f132 100644 --- a/azurerm/internal/services/loganalytics/parse/log_analytics_workspace.go +++ b/azurerm/internal/services/loganalytics/parse/log_analytics_workspace.go @@ -1,39 +1,63 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type LogAnalyticsWorkspaceId struct { - ResourceGroup string - Name string + SubscriptionId string + ResourceGroup string + WorkspaceName string } -func NewLogAnalyticsWorkspaceID(name, resourceGroup string) LogAnalyticsWorkspaceId { +func NewLogAnalyticsWorkspaceID(subscriptionId, resourceGroup, workspaceName string) LogAnalyticsWorkspaceId { return LogAnalyticsWorkspaceId{ - ResourceGroup: resourceGroup, - Name: name, + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + WorkspaceName: workspaceName, } } -func (id LogAnalyticsWorkspaceId) ID(subscriptionId string) string { - // Log Analytics ID ignores casing - return fmt.Sprintf("/subscriptions/%s/resourcegroups/%s/providers/microsoft.operationalinsights/workspaces/%s", subscriptionId, id.ResourceGroup, id.Name) +func (id LogAnalyticsWorkspaceId) String() string { + segments := 
[]string{ + fmt.Sprintf("Workspace Name %q", id.WorkspaceName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Log Analytics Workspace", segmentsStr) } +func (id LogAnalyticsWorkspaceId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.OperationalInsights/workspaces/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.WorkspaceName) +} + +// LogAnalyticsWorkspaceID parses a LogAnalyticsWorkspace ID into an LogAnalyticsWorkspaceId struct func LogAnalyticsWorkspaceID(input string) (*LogAnalyticsWorkspaceId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("parsing Log Analytics Workspace ID %q: %+v", input, err) + return nil, err + } + + resourceId := LogAnalyticsWorkspaceId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - server := LogAnalyticsWorkspaceId{ - ResourceGroup: id.ResourceGroup, + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if server.Name, err = id.PopSegment("workspaces"); err != nil { + if resourceId.WorkspaceName, err = id.PopSegment("workspaces"); err != nil { return nil, err } @@ -41,5 +65,5 @@ func LogAnalyticsWorkspaceID(input string) (*LogAnalyticsWorkspaceId, error) { return nil, err } - return &server, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_workspace_test.go b/azurerm/internal/services/loganalytics/parse/log_analytics_workspace_test.go index 5e3b113394c9..d77416c2adcf 100644 --- a/azurerm/internal/services/loganalytics/parse/log_analytics_workspace_test.go +++ b/azurerm/internal/services/loganalytics/parse/log_analytics_workspace_test.go @@ -1,54 +1,91 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" ) +var _ resourceid.Formatter = LogAnalyticsWorkspaceId{} + +func TestLogAnalyticsWorkspaceIDFormatter(t *testing.T) { + actual := NewLogAnalyticsWorkspaceID("12345678-1234-9876-4563-123456789012", "resGroup1", "workspace1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + func TestLogAnalyticsWorkspaceID(t *testing.T) { testData := []struct { - Name string - Input string - Error bool - Expect *LogAnalyticsWorkspaceId + Input string + Error bool + Expected *LogAnalyticsWorkspaceId }{ + { - Name: "Empty", + // empty Input: "", Error: true, }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + // missing value for SubscriptionId + Input: "/subscriptions/", Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/", + // 
missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", Error: true, }, + { - Name: "Missing Workspace Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces", + // missing WorkspaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/", Error: true, }, + { - Name: "Workspace Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1", + // missing value for WorkspaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/", Error: true, - Expect: &LogAnalyticsWorkspaceId{ - ResourceGroup: "resGroup1", - Name: "workspace1", + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1", + Expected: &LogAnalyticsWorkspaceId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + WorkspaceName: "workspace1", }, }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.OPERATIONALINSIGHTS/WORKSPACES/WORKSPACE1", + Error: true, + }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := LogAnalyticsWorkspaceID(v.Input) if err != nil { @@ -56,15 +93,20 @@ func TestLogAnalyticsWorkspaceID(t *testing.T) { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.WorkspaceName != v.Expected.WorkspaceName { + t.Fatalf("Expected %q but got %q for WorkspaceName", v.Expected.WorkspaceName, actual.WorkspaceName) } } } diff --git a/azurerm/internal/services/loganalytics/registration.go b/azurerm/internal/services/loganalytics/registration.go index 99f9f4b6d956..b13d71b445d0 100644 --- a/azurerm/internal/services/loganalytics/registration.go +++ b/azurerm/internal/services/loganalytics/registration.go @@ -21,19 +21,23 @@ func (r Registration) WebsiteCategories() []string { // SupportedDataSources returns the supported Data Sources supported by this Service func (r Registration) SupportedDataSources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_log_analytics_workspace": dataSourceLogAnalyticsWorkspace()} + "azurerm_log_analytics_workspace": dataSourceLogAnalyticsWorkspace(), + } } // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return 
map[string]*schema.Resource{ - "azurerm_log_analytics_data_export_rule": resourceArmLogAnalyticsDataExport(), - "azurerm_log_analytics_linked_service": resourceArmLogAnalyticsLinkedService(), - "azurerm_log_analytics_linked_storage_account": resourceArmLogAnalyticsLinkedStorageAccount(), - "azurerm_log_analytics_saved_search": resourceArmLogAnalyticsSavedSearch(), - "azurerm_log_analytics_solution": resourceArmLogAnalyticsSolution(), - "azurerm_log_analytics_workspace": resourceArmLogAnalyticsWorkspace(), - "azurerm_log_analytics_datasource_windows_event": resourceArmLogAnalyticsDataSourceWindowsEvent(), - "azurerm_log_analytics_datasource_windows_performance_counter": resourceArmLogAnalyticsDataSourceWindowsPerformanceCounter(), + "azurerm_log_analytics_cluster": resourceLogAnalyticsCluster(), + "azurerm_log_analytics_cluster_customer_managed_key": resourceLogAnalyticsClusterCustomerManagedKey(), + "azurerm_log_analytics_datasource_windows_event": resourceLogAnalyticsDataSourceWindowsEvent(), + "azurerm_log_analytics_datasource_windows_performance_counter": resourceLogAnalyticsDataSourceWindowsPerformanceCounter(), + "azurerm_log_analytics_data_export_rule": resourceLogAnalyticsDataExport(), + "azurerm_log_analytics_linked_service": resourceLogAnalyticsLinkedService(), + "azurerm_log_analytics_linked_storage_account": resourceLogAnalyticsLinkedStorageAccount(), + "azurerm_log_analytics_saved_search": resourceLogAnalyticsSavedSearch(), + "azurerm_log_analytics_solution": resourceLogAnalyticsSolution(), + "azurerm_log_analytics_storage_insights": resourceLogAnalyticsStorageInsights(), + "azurerm_log_analytics_workspace": resourceLogAnalyticsWorkspace(), } } diff --git a/azurerm/internal/services/loganalytics/resourceids.go b/azurerm/internal/services/loganalytics/resourceids.go new file mode 100644 index 000000000000..ff703c49b941 --- /dev/null +++ b/azurerm/internal/services/loganalytics/resourceids.go @@ -0,0 +1,11 @@ +package loganalytics + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=LogAnalyticsCluster -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/clusters/cluster1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=LogAnalyticsDataExport -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/dataexports/dataExport1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=LogAnalyticsDataSourceWindowsEvent -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/dataSources/dataSource1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=LogAnalyticsLinkedService -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/linkedServices/linkedService1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=LogAnalyticsLinkedStorageAccount -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/linkedStorageAccounts/query +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=LogAnalyticsSavedSearch 
-id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/savedSearches/search1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=LogAnalyticsSolution -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationsManagement/solutions/solution1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=LogAnalyticsStorageInsights -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/storageInsightConfigs/storageInsight1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=LogAnalyticsWorkspace -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1 diff --git a/azurerm/internal/services/loganalytics/suppress/log_analytics_cluster.go b/azurerm/internal/services/loganalytics/suppress/log_analytics_cluster.go new file mode 100644 index 000000000000..5feda882b175 --- /dev/null +++ b/azurerm/internal/services/loganalytics/suppress/log_analytics_cluster.go @@ -0,0 +1,27 @@ +package suppress + +import ( + "fmt" + "net" + "net/url" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" +) + +func LogAnalyticsClusterUrl(_, old, new string, _ *schema.ResourceData) bool { + u, err := url.ParseRequestURI(old) + if err != nil || u.Host == "" { + return false + } + + host, _, err := net.SplitHostPort(u.Host) + if err != nil { + host = u.Host + } + + if new == fmt.Sprintf("%s://%s/", u.Scheme, host) { + return true + } + + return false +} diff --git a/azurerm/internal/services/loganalytics/suppress/log_analytics_cluster_test.go b/azurerm/internal/services/loganalytics/suppress/log_analytics_cluster_test.go new file mode 100644 index 000000000000..9d160b7f3767 --- /dev/null +++ b/azurerm/internal/services/loganalytics/suppress/log_analytics_cluster_test.go @@ -0,0 +1,87 @@ +package suppress + +import "testing" + +func TestCaseClusterUrl(t *testing.T) { + cases := []struct { + Name string + ClusterURL string + KeyVaultURL string + Suppress bool + }{ + { + Name: "empty URL", + ClusterURL: "", + KeyVaultURL: "https://flynns.arcade.com/", + Suppress: false, + }, + { + Name: "URL with port and wrong scheme", + ClusterURL: "http://flynns.arcade.com:443", + KeyVaultURL: "https://flynns.arcade.com/", + Suppress: false, + }, + { + Name: "invalid URL scheme", + ClusterURL: "https//flynns.arcade.com", + KeyVaultURL: "https://flynns.arcade.com/", + Suppress: false, + }, + { + Name: "invalid URL character", + ClusterURL: "https://flynns^arcade.com/", + KeyVaultURL: "https://flynns.arcade.com/", + Suppress: false, + }, + { + Name: "invalid URL missing scheme", + ClusterURL: "//flynns.arcade.com/", + KeyVaultURL: "https://flynns.arcade.com/", + Suppress: false, + }, + { + Name: "URL with wrong scheme no port", + ClusterURL: "http://flynns.arcade.com", + KeyVaultURL: "https://flynns.arcade.com/", + Suppress: false, + }, + { + Name: "same URL different case", + ClusterURL: "https://Flynns.Arcade.com/", + KeyVaultURL: "https://flynns.arcade.com/", + Suppress: false, + }, + { + Name: "full URL with username@host/path?query#fragment", + ClusterURL: "https://Creator4983@flynns.arcade.com/ENCOM?games=MatrixBlaster#MCP", + KeyVaultURL: "https://flynns.arcade.com/", + Suppress: true, + }, + { + Name: "full URL with 
username:password@host/path?query#fragment", + ClusterURL: "https://Creator4983:7898@flynns.arcade.com/ENCOM?games=SpaceParanoids&developer=KevinFlynn#MCP", + KeyVaultURL: "https://flynns.arcade.com/", + Suppress: true, + }, + { + Name: "URL missing path separator", + ClusterURL: "https://flynns.arcade.com", + KeyVaultURL: "https://flynns.arcade.com/", + Suppress: true, + }, + { + Name: "same URL", + ClusterURL: "https://flynns.arcade.com/", + KeyVaultURL: "https://flynns.arcade.com/", + Suppress: true, + }, + } + + for _, tc := range cases { + t.Run(tc.Name, func(t *testing.T) { + if LogAnalyticsClusterUrl("test", tc.ClusterURL, tc.KeyVaultURL, nil) != tc.Suppress { + t.Fatalf("Expected LogAnalyticsClusterUrl to return %t for '%q' == '%q'", tc.Suppress, tc.ClusterURL, tc.KeyVaultURL) + } + }) + } +} diff --git a/azurerm/internal/services/loganalytics/tests/log_analytics_data_export_resource_test.go b/azurerm/internal/services/loganalytics/tests/log_analytics_data_export_resource_test.go deleted file mode 100644 index 0f658379043a..000000000000 --- a/azurerm/internal/services/loganalytics/tests/log_analytics_data_export_resource_test.go +++ /dev/null @@ -1,272 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" -) - -func TestAccAzureRMLogAnalyticsDataExportRule_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_data_export_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLogAnalyticsDataExportRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLogAnalyticsDataExportRule_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsDataExportRuleExists(data.ResourceName), - ), - ExpectNonEmptyPlan: true, // Due to API changing case of attributes you need to ignore a non-empty plan for this resource - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMLogAnalyticsDataExportRule_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_data_export_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLogAnalyticsDataExportRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLogAnalyticsDataExportRule_basicLower(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsDataExportRuleExists(data.ResourceName), - ), - ExpectNonEmptyPlan: true, // Due to API changing case of attributes you need to ignore a non-empty plan for this resource - }, - { - Config: testAccAzureRMLogAnalyticsDataExportRule_requiresImport(data), - ExpectNonEmptyPlan: true, // Due to API changing case of attributes you need to ignore a non-empty plan for this resource - ExpectError: acceptance.RequiresImportError("azurerm_log_analytics_data_export_rule"), - }, - }, - }) -} - -func TestAccAzureRMLogAnalyticsDataExportRule_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_data_export_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - Providers: 
acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLogAnalyticsDataExportRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLogAnalyticsDataExportRule_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsDataExportRuleExists(data.ResourceName), - ), - ExpectNonEmptyPlan: true, // Due to API changing case of attributes you need to ignore a non-empty plan for this resource - }, - data.ImportStep(), - { - Config: testAccAzureRMLogAnalyticsDataExportRule_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsDataExportRuleExists(data.ResourceName), - ), - ExpectNonEmptyPlan: true, // Due to API changing case of attributes you need to ignore a non-empty plan for this resource - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMLogAnalyticsDataExportRule_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_data_export_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLogAnalyticsDataExportRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLogAnalyticsDataExportRule_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsDataExportRuleExists(data.ResourceName), - ), - ExpectNonEmptyPlan: true, // Due to API changing case of attributes you need to ignore a non-empty plan for this resource - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMLogAnalyticsDataExportRuleDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).LogAnalytics.DataExportClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_log_analytics_data_export_rule" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - workspace, err := parse.LogAnalyticsWorkspaceID(rs.Primary.Attributes["workspace_resource_id"]) - if err != nil { - return nil - } - - resp, err := conn.Get(ctx, resourceGroup, workspace.Name, name) - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Log Analytics Data Export Rule still exists:\n%#v", resp) - } - } - - return nil -} - -func testCheckAzureRMLogAnalyticsDataExportRuleExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).LogAnalytics.DataExportClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Log Analytics Data Export Rule: %q", name) - } - - workspace, err := parse.LogAnalyticsWorkspaceID(rs.Primary.Attributes["workspace_resource_id"]) - if err != nil { - return fmt.Errorf("Bad: unable to access 'workspace_resource_id' for Log Analytics Data Export Rule: %q", name) - } - - resp, err := conn.Get(ctx, resourceGroup, workspace.Name, name) - if err != nil { - return fmt.Errorf("Bad: Get on Log Analytics Data Export Rule Client: %+v", err) - } - - if 
resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Log Analytics Data Export Rule %q (resource group: %q) does not exist", name, resourceGroup) - } - - return nil - } -} - -func testAccAzureRMLogAnalyticsDataExportRule_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-la-%d" - location = "%s" -} - -resource "azurerm_log_analytics_workspace" "test" { - name = "acctestLAW-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "PerGB2018" -} - -resource "azurerm_storage_account" "test" { - name = "acctestsads%s" - resource_group_name = azurerm_resource_group.test.name - - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomString) -} - -func testAccAzureRMLogAnalyticsDataExportRule_basic(data acceptance.TestData) string { - template := testAccAzureRMLogAnalyticsDataExportRule_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_log_analytics_data_export_rule" "test" { - name = "acctest-DER-%d" - resource_group_name = azurerm_resource_group.test.name - workspace_resource_id = azurerm_log_analytics_workspace.test.id - destination_resource_id = azurerm_storage_account.test.id - table_names = ["Heartbeat"] -} -`, template, data.RandomInteger) -} - -// I have to make this a lower case to get the requiresImport test to pass since the RP lowercases everything when it sends the data back to you -func testAccAzureRMLogAnalyticsDataExportRule_basicLower(data acceptance.TestData) string { - template := testAccAzureRMLogAnalyticsDataExportRule_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_log_analytics_data_export_rule" "test" { - name = "acctest-der-%d" - resource_group_name = azurerm_resource_group.test.name - workspace_resource_id = azurerm_log_analytics_workspace.test.id - destination_resource_id = azurerm_storage_account.test.id - table_names = ["Heartbeat"] -} -`, template, data.RandomInteger) -} - -func testAccAzureRMLogAnalyticsDataExportRule_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMLogAnalyticsDataExportRule_basicLower(data) - return fmt.Sprintf(` -%s - -resource "azurerm_log_analytics_data_export_rule" "import" { - name = azurerm_log_analytics_data_export_rule.test.name - resource_group_name = azurerm_resource_group.test.name - workspace_resource_id = azurerm_log_analytics_workspace.test.id - destination_resource_id = azurerm_storage_account.test.id - table_names = ["Heartbeat"] -} -`, template) -} - -func testAccAzureRMLogAnalyticsDataExportRule_update(data acceptance.TestData) string { - template := testAccAzureRMLogAnalyticsDataExportRule_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_log_analytics_data_export_rule" "test" { - name = "acctest-DER-%d" - resource_group_name = azurerm_resource_group.test.name - workspace_resource_id = azurerm_log_analytics_workspace.test.id - destination_resource_id = azurerm_storage_account.test.id - table_names = ["Heartbeat", "Event"] -} -`, template, data.RandomInteger) -} - -func testAccAzureRMLogAnalyticsDataExportRule_complete(data acceptance.TestData) string { - template := testAccAzureRMLogAnalyticsDataExportRule_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_log_analytics_data_export_rule" "test" { - name = 
"acctest-DER-%d" - resource_group_name = azurerm_resource_group.test.name - workspace_resource_id = azurerm_log_analytics_workspace.test.id - destination_resource_id = azurerm_storage_account.test.id - table_names = ["Heartbeat"] - enabled = true -} -`, template, data.RandomInteger) -} diff --git a/azurerm/internal/services/loganalytics/tests/log_analytics_datasource_windows_event_resource_test.go b/azurerm/internal/services/loganalytics/tests/log_analytics_datasource_windows_event_resource_test.go deleted file mode 100644 index 7560cb163da4..000000000000 --- a/azurerm/internal/services/loganalytics/tests/log_analytics_datasource_windows_event_resource_test.go +++ /dev/null @@ -1,220 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMLogAnalyticsDataSourceWindowsEvent_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_datasource_windows_event", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLogAnalyticsDataSourceWindowsEventDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLogAnalyticsDataSourceWindowsEvent_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsDataSourceWindowsEventExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMLogAnalyticsDataSourceWindowsEvent_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_datasource_windows_event", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLogAnalyticsDataSourceWindowsEventDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLogAnalyticsDataSourceWindowsEvent_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsDataSourceWindowsEventExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMLogAnalyticsDataSourceWindowsEvent_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_datasource_windows_event", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLogAnalyticsDataSourceWindowsEventDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLogAnalyticsDataSourceWindowsEvent_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsDataSourceWindowsEventExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMLogAnalyticsDataSourceWindowsEvent_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsDataSourceWindowsEventExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMLogAnalyticsDataSourceWindowsEvent_basic(data), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMLogAnalyticsDataSourceWindowsEventExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMLogAnalyticsDataSourceWindowsEvent_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_datasource_windows_event", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLogAnalyticsDataSourceWindowsEventDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLogAnalyticsDataSourceWindowsEvent_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsDataSourceWindowsEventExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMLogAnalyticsDataSourceWindowsEvent_requiresImport), - }, - }) -} - -func testCheckAzureRMLogAnalyticsDataSourceWindowsEventExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).LogAnalytics.DataSourcesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Log Analytics Data Source Windows Event not found: %s", resourceName) - } - - id, err := parse.LogAnalyticsDataSourceID(rs.Primary.ID) - if err != nil { - return err - } - - if resp, err := client.Get(ctx, id.ResourceGroup, id.Workspace, id.Name); err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Log Analytics Data Source Windows Event %q (Resource Group %q) does not exist", id.Name, id.ResourceGroup) - } - return fmt.Errorf("failed to get on LogAnalytics.DataSources: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMLogAnalyticsDataSourceWindowsEventDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).LogAnalytics.DataSourcesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_log_analytics_datasource_windows_event" { - continue - } - - id, err := parse.LogAnalyticsDataSourceID(rs.Primary.ID) - if err != nil { - return err - } - - if resp, err := client.Get(ctx, id.ResourceGroup, id.Workspace, id.Name); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("failed to get on LogAnalytics.DataSources: %+v", err) - } - } - - return nil - } - - return nil -} - -func testAccAzureRMLogAnalyticsDataSourceWindowsEvent_basic(data acceptance.TestData) string { - template := testAccAzureRMLogAnalyticsDataSourceWindowsEvent_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_log_analytics_datasource_windows_event" "test" { - name = "acctestLADS-WE-%d" - resource_group_name = azurerm_resource_group.test.name - workspace_name = azurerm_log_analytics_workspace.test.name - event_log_name = "Application" - event_types = ["error"] -} -`, template, data.RandomInteger) -} - -func testAccAzureRMLogAnalyticsDataSourceWindowsEvent_complete(data acceptance.TestData) string { - template := testAccAzureRMLogAnalyticsDataSourceWindowsEvent_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_log_analytics_datasource_windows_event" "test" { - name = "acctestLADS-WE-%d" - resource_group_name = azurerm_resource_group.test.name - workspace_name = azurerm_log_analytics_workspace.test.name - event_log_name = "Application" - event_types = 
["InforMation", "warning", "Error"] -} -`, template, data.RandomInteger) -} - -func testAccAzureRMLogAnalyticsDataSourceWindowsEvent_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMLogAnalyticsDataSourceWindowsEvent_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_log_analytics_datasource_windows_event" "import" { - name = azurerm_log_analytics_datasource_windows_event.test.name - resource_group_name = azurerm_log_analytics_datasource_windows_event.test.resource_group_name - workspace_name = azurerm_log_analytics_datasource_windows_event.test.workspace_name - event_log_name = azurerm_log_analytics_datasource_windows_event.test.event_log_name - event_types = azurerm_log_analytics_datasource_windows_event.test.event_types -} -`, template) -} - -func testAccAzureRMLogAnalyticsDataSourceWindowsEvent_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-la-%d" - location = "%s" -} - -resource "azurerm_log_analytics_workspace" "test" { - name = "acctestLAW-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "PerGB2018" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/loganalytics/tests/log_analytics_datasource_windows_performance_counter_resource_test.go b/azurerm/internal/services/loganalytics/tests/log_analytics_datasource_windows_performance_counter_resource_test.go deleted file mode 100644 index c87bcf91091f..000000000000 --- a/azurerm/internal/services/loganalytics/tests/log_analytics_datasource_windows_performance_counter_resource_test.go +++ /dev/null @@ -1,235 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMLogAnalyticsDataSourceWindowsPerformanceCounter_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_datasource_windows_performance_counter", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLogAnalyticsDataSourceWindowsPerformanceCounterDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLogAnalyticsDataSourceWindowsPerformanceCounter_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsDataSourceWindowsPerformanceCounterExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "object_name", "CPU"), - resource.TestCheckResourceAttr(data.ResourceName, "instance_name", "*"), - resource.TestCheckResourceAttr(data.ResourceName, "counter_name", "CPU"), - resource.TestCheckResourceAttr(data.ResourceName, "interval_seconds", "10"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMLogAnalyticsDataSourceWindowsPerformanceCounter_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_datasource_windows_performance_counter", "test") - - 
resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLogAnalyticsDataSourceWindowsPerformanceCounterDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLogAnalyticsDataSourceWindowsPerformanceCounter_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsDataSourceWindowsPerformanceCounterExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "object_name", "Mem"), - resource.TestCheckResourceAttr(data.ResourceName, "instance_name", "inst1"), - resource.TestCheckResourceAttr(data.ResourceName, "counter_name", "Mem"), - resource.TestCheckResourceAttr(data.ResourceName, "interval_seconds", "20"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMLogAnalyticsDataSourceWindowsPerformanceCounter_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_datasource_windows_performance_counter", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLogAnalyticsDataSourceWindowsPerformanceCounterDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLogAnalyticsDataSourceWindowsPerformanceCounter_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsDataSourceWindowsPerformanceCounterExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "object_name", "CPU"), - resource.TestCheckResourceAttr(data.ResourceName, "instance_name", "*"), - resource.TestCheckResourceAttr(data.ResourceName, "counter_name", "CPU"), - resource.TestCheckResourceAttr(data.ResourceName, "interval_seconds", "10"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMLogAnalyticsDataSourceWindowsPerformanceCounter_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsDataSourceWindowsPerformanceCounterExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "object_name", "Mem"), - resource.TestCheckResourceAttr(data.ResourceName, "instance_name", "inst1"), - resource.TestCheckResourceAttr(data.ResourceName, "counter_name", "Mem"), - resource.TestCheckResourceAttr(data.ResourceName, "interval_seconds", "20"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMLogAnalyticsDataSourceWindowsPerformanceCounter_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_datasource_windows_performance_counter", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLogAnalyticsDataSourceWindowsPerformanceCounterDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLogAnalyticsDataSourceWindowsPerformanceCounter_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsDataSourceWindowsPerformanceCounterExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMLogAnalyticsDataSourceWindowsPerformanceCounter_requiresImport), - }, - }) -} - -func testCheckAzureRMLogAnalyticsDataSourceWindowsPerformanceCounterExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).LogAnalytics.DataSourcesClient - ctx := 
acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Log Analytics Data Source Windows Performance Counter not found: %s", resourceName) - } - - id, err := parse.LogAnalyticsDataSourceID(rs.Primary.ID) - if err != nil { - return err - } - - if resp, err := client.Get(ctx, id.ResourceGroup, id.Workspace, id.Name); err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Log Analytics Data Source Windows Performance Counter %q (Resource Group %q) does not exist", id.Name, id.ResourceGroup) - } - return fmt.Errorf("failed to get on LogAnalytics.DataSourcesClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMLogAnalyticsDataSourceWindowsPerformanceCounterDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).LogAnalytics.DataSourcesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_log_analytics_datasource_windows_performance_counter" { - continue - } - - id, err := parse.LogAnalyticsDataSourceID(rs.Primary.ID) - if err != nil { - return err - } - - if resp, err := client.Get(ctx, id.ResourceGroup, id.Workspace, id.Name); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("failed to get on LogAnalytics.DataSourcesClient: %+v", err) - } - } - - return nil - } - - return nil -} - -func testAccAzureRMLogAnalyticsDataSourceWindowsPerformanceCounter_basic(data acceptance.TestData) string { - template := testAccAzureRMLogAnalyticsDataSourceWindowsPerformanceCounter_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_log_analytics_datasource_windows_performance_counter" "test" { - name = "acctestLADS-WPC-%d" - resource_group_name = azurerm_resource_group.test.name - workspace_name = azurerm_log_analytics_workspace.test.name - object_name = "CPU" - instance_name = "*" - counter_name = "CPU" - interval_seconds = 10 -} -`, template, data.RandomInteger) -} - -func testAccAzureRMLogAnalyticsDataSourceWindowsPerformanceCounter_complete(data acceptance.TestData) string { - template := testAccAzureRMLogAnalyticsDataSourceWindowsPerformanceCounter_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_log_analytics_datasource_windows_performance_counter" "test" { - name = "acctestLADS-WPC-%d" - resource_group_name = azurerm_resource_group.test.name - workspace_name = azurerm_log_analytics_workspace.test.name - object_name = "Mem" - instance_name = "inst1" - counter_name = "Mem" - interval_seconds = 20 -} -`, template, data.RandomInteger) -} - -func testAccAzureRMLogAnalyticsDataSourceWindowsPerformanceCounter_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMLogAnalyticsDataSourceWindowsPerformanceCounter_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_log_analytics_datasource_windows_performance_counter" "import" { - name = azurerm_log_analytics_datasource_windows_performance_counter.test.name - resource_group_name = azurerm_log_analytics_datasource_windows_performance_counter.test.resource_group_name - workspace_name = azurerm_log_analytics_datasource_windows_performance_counter.test.workspace_name - object_name = azurerm_log_analytics_datasource_windows_performance_counter.test.object_name - instance_name = azurerm_log_analytics_datasource_windows_performance_counter.test.instance_name - counter_name = 
azurerm_log_analytics_datasource_windows_performance_counter.test.counter_name - interval_seconds = azurerm_log_analytics_datasource_windows_performance_counter.test.interval_seconds -} -`, template) -} - -func testAccAzureRMLogAnalyticsDataSourceWindowsPerformanceCounter_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-la-%d" - location = "%s" -} - -resource "azurerm_log_analytics_workspace" "test" { - name = "acctestLAW-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "PerGB2018" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/loganalytics/tests/log_analytics_linked_service_resource_test.go b/azurerm/internal/services/loganalytics/tests/log_analytics_linked_service_resource_test.go deleted file mode 100644 index 55add86dcd79..000000000000 --- a/azurerm/internal/services/loganalytics/tests/log_analytics_linked_service_resource_test.go +++ /dev/null @@ -1,214 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMLogAnalyticsLinkedService_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_linked_service", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLogAnalyticsLinkedServiceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLogAnalyticsLinkedService_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsLinkedServiceExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", fmt.Sprintf("acctestlaw-%d/Automation", data.RandomInteger)), - resource.TestCheckResourceAttr(data.ResourceName, "workspace_name", fmt.Sprintf("acctestlaw-%d", data.RandomInteger)), - resource.TestCheckResourceAttr(data.ResourceName, "linked_service_name", "automation"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMLogAnalyticsLinkedService_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_linked_service", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLogAnalyticsLinkedServiceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLogAnalyticsLinkedService_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsLinkedServiceExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", fmt.Sprintf("acctestlaw-%d/Automation", data.RandomInteger)), - resource.TestCheckResourceAttr(data.ResourceName, "workspace_name", fmt.Sprintf("acctestlaw-%d", data.RandomInteger)), - resource.TestCheckResourceAttr(data.ResourceName, "linked_service_name", "automation"), - ), - }, - { - Config: testAccAzureRMLogAnalyticsLinkedService_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_log_analytics_linked_service"), - }, - }, - }) -} - -func 
TestAccAzureRMLogAnalyticsLinkedService_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_linked_service", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLogAnalyticsLinkedServiceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLogAnalyticsLinkedService_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsLinkedServiceExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "linked_service_name", "automation"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMLogAnalyticsLinkedServiceDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).LogAnalytics.LinkedServicesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_log_analytics_linked_service" { - continue - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - workspaceName := rs.Primary.Attributes["workspace_name"] - lsName := rs.Primary.Attributes["linked_service_name"] - - resp, err := conn.Get(ctx, resourceGroup, workspaceName, lsName) - if err != nil { - return nil - } - if resp.ID == nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Log Analytics Linked Service still exists:\n%#v", resp) - } - } - - return nil -} - -func testCheckAzureRMLogAnalyticsLinkedServiceExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).LogAnalytics.LinkedServicesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - workspaceName := rs.Primary.Attributes["workspace_name"] - lsName := rs.Primary.Attributes["linked_service_name"] - name := rs.Primary.Attributes["name"] - - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Log Analytics Linked Service: '%s'", name) - } - - resp, err := conn.Get(ctx, resourceGroup, workspaceName, lsName) - if err != nil { - return fmt.Errorf("Bad: Get on Log Analytics Linked Service Client: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Log Analytics Linked Service '%s' (resource group: '%s') does not exist", name, resourceGroup) - } - - return nil - } -} - -func testAccAzureRMLogAnalyticsLinkedService_basic(data acceptance.TestData) string { - template := testAccAzureRMLogAnalyticsLinkedService_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_log_analytics_linked_service" "test" { - resource_group_name = azurerm_resource_group.test.name - workspace_name = azurerm_log_analytics_workspace.test.name - resource_id = azurerm_automation_account.test.id -} -`, template) -} - -func testAccAzureRMLogAnalyticsLinkedService_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMLogAnalyticsLinkedService_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_log_analytics_linked_service" "import" { - resource_group_name = 
azurerm_log_analytics_linked_service.test.resource_group_name - workspace_name = azurerm_log_analytics_linked_service.test.workspace_name - resource_id = azurerm_log_analytics_linked_service.test.resource_id -} -`, template) -} - -func testAccAzureRMLogAnalyticsLinkedService_complete(data acceptance.TestData) string { - template := testAccAzureRMLogAnalyticsLinkedService_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_log_analytics_linked_service" "test" { - resource_group_name = azurerm_resource_group.test.name - workspace_name = azurerm_log_analytics_workspace.test.name - linked_service_name = "automation" - resource_id = azurerm_automation_account.test.id -} -`, template) -} - -func testAccAzureRMLogAnalyticsLinkedService_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_automation_account" "test" { - name = "acctestAutomation-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku_name = "Basic" - - tags = { - Environment = "Test" - } -} - -resource "azurerm_log_analytics_workspace" "test" { - name = "acctestLAW-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "PerGB2018" - retention_in_days = 30 -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/loganalytics/tests/log_analytics_linked_storage_account_resource_test.go b/azurerm/internal/services/loganalytics/tests/log_analytics_linked_storage_account_resource_test.go deleted file mode 100644 index 87d577df52e1..000000000000 --- a/azurerm/internal/services/loganalytics/tests/log_analytics_linked_storage_account_resource_test.go +++ /dev/null @@ -1,222 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/preview/operationalinsights/mgmt/2020-03-01-preview/operationalinsights" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMlogAnalyticsLinkedStorageAccount_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_linked_storage_account", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMoperationalinsightsLinkedStorageAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMlogAnalyticsLinkedStorageAccount_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMoperationalinsightsLinkedStorageAccountExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMlogAnalyticsLinkedStorageAccount_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_linked_storage_account", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: 
acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMoperationalinsightsLinkedStorageAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMlogAnalyticsLinkedStorageAccount_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMoperationalinsightsLinkedStorageAccountExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMlogAnalyticsLinkedStorageAccount_requiresImport), - }, - }) -} - -func TestAccAzureRMlogAnalyticsLinkedStorageAccount_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_linked_storage_account", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMoperationalinsightsLinkedStorageAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMlogAnalyticsLinkedStorageAccount_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMoperationalinsightsLinkedStorageAccountExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMlogAnalyticsLinkedStorageAccount_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_linked_storage_account", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMoperationalinsightsLinkedStorageAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMlogAnalyticsLinkedStorageAccount_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMoperationalinsightsLinkedStorageAccountExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMlogAnalyticsLinkedStorageAccount_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMoperationalinsightsLinkedStorageAccountExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMlogAnalyticsLinkedStorageAccount_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMoperationalinsightsLinkedStorageAccountExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMoperationalinsightsLinkedStorageAccountExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).LogAnalytics.LinkedStorageAccountClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("operationalinsights LinkedStorageAccount not found: %s", resourceName) - } - id, err := parse.LogAnalyticsLinkedStorageAccountID(rs.Primary.ID) - if err != nil { - return err - } - if resp, err := client.Get(ctx, id.ResourceGroup, id.WorkspaceName, operationalinsights.DataSourceType(id.Name)); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("bad: Operationalinsights LinkedStorageAccount %q does not exist", id.Name) - } - return fmt.Errorf("bad: Get on Operationalinsights.LinkedStorageAccountClient: %+v", err) - } - return nil - } -} - -func testCheckAzureRMoperationalinsightsLinkedStorageAccountDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).LogAnalytics.LinkedStorageAccountClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { 
- if rs.Type != "azurerm_log_analytics_linked_storage_account" { - continue - } - id, err := parse.LogAnalyticsLinkedStorageAccountID(rs.Primary.ID) - if err != nil { - return err - } - if resp, err := client.Get(ctx, id.ResourceGroup, id.WorkspaceName, operationalinsights.DataSourceType(id.Name)); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("bad: Get on LogAnalytics.LinkedStorageAccountClient: %+v", err) - } - } - return nil - } - return nil -} - -func testAccAzureRMlogAnalyticsLinkedStorageAccount_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-la-%d" - location = "%s" -} - -resource "azurerm_log_analytics_workspace" "test" { - name = "acctestLAW-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "PerGB2018" -} - -resource "azurerm_storage_account" "test" { - name = "acctestsap%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "GRS" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomString) -} - -func testAccAzureRMlogAnalyticsLinkedStorageAccount_basic(data acceptance.TestData) string { - template := testAccAzureRMlogAnalyticsLinkedStorageAccount_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_log_analytics_linked_storage_account" "test" { - data_source_type = "customlogs" - resource_group_name = azurerm_resource_group.test.name - workspace_resource_id = azurerm_log_analytics_workspace.test.id - storage_account_ids = [azurerm_storage_account.test.id] -} -`, template) -} - -func testAccAzureRMlogAnalyticsLinkedStorageAccount_requiresImport(data acceptance.TestData) string { - config := testAccAzureRMlogAnalyticsLinkedStorageAccount_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_log_analytics_linked_storage_account" "import" { - data_source_type = azurerm_log_analytics_linked_storage_account.test.data_source_type - resource_group_name = azurerm_log_analytics_linked_storage_account.test.resource_group_name - workspace_resource_id = azurerm_log_analytics_linked_storage_account.test.workspace_resource_id - storage_account_ids = [azurerm_storage_account.test.id] -} -`, config) -} - -func testAccAzureRMlogAnalyticsLinkedStorageAccount_complete(data acceptance.TestData) string { - template := testAccAzureRMlogAnalyticsLinkedStorageAccount_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_storage_account" "test2" { - name = "acctestsas%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "GRS" -} - -resource "azurerm_log_analytics_linked_storage_account" "test" { - data_source_type = "customlogs" - resource_group_name = azurerm_resource_group.test.name - workspace_resource_id = azurerm_log_analytics_workspace.test.id - storage_account_ids = [azurerm_storage_account.test.id, azurerm_storage_account.test2.id] -} -`, template, data.RandomString) -} diff --git a/azurerm/internal/services/loganalytics/tests/log_analytics_saved_search_resource_test.go b/azurerm/internal/services/loganalytics/tests/log_analytics_saved_search_resource_test.go deleted file mode 100644 index 7c3c35470bb8..000000000000 --- 
a/azurerm/internal/services/loganalytics/tests/log_analytics_saved_search_resource_test.go +++ /dev/null @@ -1,207 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" -) - -func TestAccAzureRMLogAnalyticsSavedSearch_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_saved_search", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLogAnalyticsSavedSearchDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLogAnalyticsSavedSearch_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsSavedSearchExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMLogAnalyticsSavedSearch_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_saved_search", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLogAnalyticsSavedSearchDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLogAnalyticsSavedSearch_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsSavedSearchExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMLogAnalyticsSavedSearch_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_saved_search", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLogAnalyticsSavedSearchDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLogAnalyticsSavedSearch_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsSavedSearchExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMLogAnalyticsSavedSearch_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_log_analytics_saved_search"), - }, - }, - }) -} - -func testCheckAzureRMLogAnalyticsSavedSearchDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).LogAnalytics.SavedSearchesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_log_analytics_saved_search" { - continue - } - - id, err := parse.LogAnalyticsSavedSearchID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.WorkspaceName, id.Name) - - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Log Analytics Saved Search still exists:\n%#v", resp) - } - } - - return nil -} - -func testCheckAzureRMLogAnalyticsSavedSearchExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).LogAnalytics.SavedSearchesClient - ctx := 
acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.LogAnalyticsSavedSearchID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.WorkspaceName, id.Name) - if err != nil { - return fmt.Errorf("Bad: Get on Log Analytics Saved Search Client: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("bad: Log Analytics Saved Search %q (Workspace: %q / Resource Group: %q) does not exist", id.Name, id.WorkspaceName, id.ResourceGroup) - } - - return nil - } -} - -func testAccAzureRMLogAnalyticsSavedSearch_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_log_analytics_workspace" "test" { - name = "acctestLAW-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "PerGB2018" -} - -resource "azurerm_log_analytics_saved_search" "test" { - name = "acctestLASS-%d" - log_analytics_workspace_id = azurerm_log_analytics_workspace.test.id - - category = "Saved Search Test Category" - display_name = "Create or Update Saved Search Test" - query = "Heartbeat | summarize Count() by Computer | take a" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMLogAnalyticsSavedSearch_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMLogAnalyticsSavedSearch_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_log_analytics_saved_search" "import" { - name = azurerm_log_analytics_saved_search.test.name - log_analytics_workspace_id = azurerm_log_analytics_saved_search.test.log_analytics_workspace_id - - category = azurerm_log_analytics_saved_search.test.category - display_name = azurerm_log_analytics_saved_search.test.display_name - query = azurerm_log_analytics_saved_search.test.query -} -`, template) -} - -func testAccAzureRMLogAnalyticsSavedSearch_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_log_analytics_workspace" "test" { - name = "acctestLAW-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "PerGB2018" -} - -resource "azurerm_log_analytics_saved_search" "test" { - name = "acctestLASS-%d" - log_analytics_workspace_id = azurerm_log_analytics_workspace.test.id - - category = "Saved Search Test Category" - display_name = "Create or Update Saved Search Test" - query = "Heartbeat | summarize Count() by Computer | take a" - - function_alias = "heartbeat_func" - function_parameters = ["a:int=1"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/loganalytics/tests/log_analytics_solution_resource_test.go b/azurerm/internal/services/loganalytics/tests/log_analytics_solution_resource_test.go deleted file mode 100644 index 72864d6d23eb..000000000000 --- a/azurerm/internal/services/loganalytics/tests/log_analytics_solution_resource_test.go +++ /dev/null @@ -1,211 +0,0 @@ -package tests - -import ( - 
"fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMLogAnalyticsSolution_basicContainerMonitoring(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_solution", "test") - - resource.ParallelTest(t, resource.TestCase{ - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLogAnalyticsSolutionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLogAnalyticsSolution_containerMonitoring(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsSolutionExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMLogAnalyticsSolution_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_solution", "test") - - resource.ParallelTest(t, resource.TestCase{ - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLogAnalyticsSolutionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLogAnalyticsSolution_containerMonitoring(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsSolutionExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMLogAnalyticsSolution_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_log_analytics_solution"), - }, - }, - }) -} - -func TestAccAzureRMLogAnalyticsSolution_basicSecurity(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_solution", "test") - - resource.ParallelTest(t, resource.TestCase{ - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLogAnalyticsSolutionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLogAnalyticsSolution_security(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsSolutionExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMLogAnalyticsSolutionDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).LogAnalytics.SolutionsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_log_analytics_solution" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, name) - - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Log Analytics solution still exists:\n%#v", resp) - } - } - - return nil -} - -func testCheckAzureRMLogAnalyticsSolutionExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).LogAnalytics.SolutionsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Log 
Analytics Workspace: %q", name) - } - - resp, err := conn.Get(ctx, resourceGroup, name) - if err != nil { - return fmt.Errorf("Bad: Get on Log Analytics Solutions Client: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Log Analytics Solutions %q (resource group: %q) does not exist", name, resourceGroup) - } - - return nil - } -} - -func testAccAzureRMLogAnalyticsSolution_containerMonitoring(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_log_analytics_workspace" "test" { - name = "acctestLAW-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "PerGB2018" -} - -resource "azurerm_log_analytics_solution" "test" { - solution_name = "ContainerInsights" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - workspace_resource_id = azurerm_log_analytics_workspace.test.id - workspace_name = azurerm_log_analytics_workspace.test.name - - plan { - publisher = "Microsoft" - product = "OMSGallery/ContainerInsights" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMLogAnalyticsSolution_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMLogAnalyticsSolution_containerMonitoring(data) - return fmt.Sprintf(` -%s - -resource "azurerm_log_analytics_solution" "import" { - solution_name = azurerm_log_analytics_solution.test.solution_name - location = azurerm_log_analytics_solution.test.location - resource_group_name = azurerm_log_analytics_solution.test.resource_group_name - workspace_resource_id = azurerm_log_analytics_solution.test.workspace_resource_id - workspace_name = azurerm_log_analytics_solution.test.workspace_name - - plan { - publisher = "Microsoft" - product = "OMSGallery/ContainerInsights" - } -} -`, template) -} - -func testAccAzureRMLogAnalyticsSolution_security(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_log_analytics_workspace" "test" { - name = "acctestLAW-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "PerGB2018" -} - -resource "azurerm_log_analytics_solution" "test" { - solution_name = "Security" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - workspace_resource_id = azurerm_log_analytics_workspace.test.id - workspace_name = azurerm_log_analytics_workspace.test.name - - plan { - publisher = "Microsoft" - product = "OMSGallery/Security" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/loganalytics/tests/log_analytics_workspace_data_source_test.go b/azurerm/internal/services/loganalytics/tests/log_analytics_workspace_data_source_test.go deleted file mode 100644 index 7d96944ff591..000000000000 --- a/azurerm/internal/services/loganalytics/tests/log_analytics_workspace_data_source_test.go +++ /dev/null @@ -1,73 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func 
TestAccDataSourceAzureRMLogAnalyticsWorkspace_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_log_analytics_workspace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLogAnalyticsWorkspaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMLogAnalyticsWorkspace_basicWithDataSource(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "sku", "pergb2018"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_in_days", "30"), - resource.TestCheckResourceAttr(data.ResourceName, "daily_quota_gb", "-1"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMLogAnalyticsWorkspace_volumeCapWithDataSource(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_log_analytics_workspace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLogAnalyticsWorkspaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMLogAnalyticsWorkspace_volumeCapWithDataSource(data, 4.5), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "sku", "pergb2018"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_in_days", "30"), - resource.TestCheckResourceAttr(data.ResourceName, "daily_quota_gb", "4.5"), - ), - }, - }, - }) -} - -func testAccDataSourceAzureRMLogAnalyticsWorkspace_basicWithDataSource(data acceptance.TestData) string { - config := testAccAzureRMLogAnalyticsWorkspace_complete(data) - return fmt.Sprintf(` -%s - -data "azurerm_log_analytics_workspace" "test" { - name = azurerm_log_analytics_workspace.test.name - resource_group_name = azurerm_log_analytics_workspace.test.resource_group_name -} -`, config) -} - -func testAccDataSourceAzureRMLogAnalyticsWorkspace_volumeCapWithDataSource(data acceptance.TestData, volumeCapGb float64) string { - config := testAccAzureRMLogAnalyticsWorkspace_withVolumeCap(data, volumeCapGb) - return fmt.Sprintf(` -%s - -data "azurerm_log_analytics_workspace" "test" { - name = azurerm_log_analytics_workspace.test.name - resource_group_name = azurerm_log_analytics_workspace.test.resource_group_name -} -`, config) -} diff --git a/azurerm/internal/services/loganalytics/tests/log_analytics_workspace_resource_test.go b/azurerm/internal/services/loganalytics/tests/log_analytics_workspace_resource_test.go deleted file mode 100644 index 87eae97cf316..000000000000 --- a/azurerm/internal/services/loganalytics/tests/log_analytics_workspace_resource_test.go +++ /dev/null @@ -1,354 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" -) - -func TestAccAzureRmLogAnalyticsWorkspaceName_validation(t *testing.T) { - str := acctest.RandString(63) - cases := []struct { - Value string - 
ErrCount int - }{ - { - Value: "abc", - ErrCount: 1, - }, - { - Value: "Ab-c", - ErrCount: 0, - }, - { - Value: "-abc", - ErrCount: 1, - }, - { - Value: "abc-", - ErrCount: 1, - }, - { - Value: str, - ErrCount: 0, - }, - { - Value: str + "a", - ErrCount: 1, - }, - } - - for _, tc := range cases { - _, errors := loganalytics.ValidateAzureRmLogAnalyticsWorkspaceName(tc.Value, "azurerm_log_analytics") - - if len(errors) != tc.ErrCount { - t.Fatalf("Expected the AzureRM Log Analytics Workspace Name to trigger a validation error for '%s'", tc.Value) - } - } -} - -func TestAccAzureRMLogAnalyticsWorkspace_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_workspace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLogAnalyticsWorkspaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLogAnalyticsWorkspace_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsWorkspaceExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMLogAnalyticsWorkspace_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_workspace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLogAnalyticsWorkspaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLogAnalyticsWorkspace_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsWorkspaceExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMLogAnalyticsWorkspace_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_log_analytics_workspace"), - }, - }, - }) -} - -func TestAccAzureRMLogAnalyticsWorkspace_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_workspace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLogAnalyticsWorkspaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLogAnalyticsWorkspace_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsWorkspaceExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMLogAnalyticsWorkspace_freeTier(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_workspace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLogAnalyticsWorkspaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLogAnalyticsWorkspace_freeTier(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsWorkspaceExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMLogAnalyticsWorkspace_withDefaultSku(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_workspace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLogAnalyticsWorkspaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLogAnalyticsWorkspace_withDefaultSku(data), - Check: 
resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsWorkspaceExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMLogAnalyticsWorkspace_withVolumeCap(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_log_analytics_workspace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLogAnalyticsWorkspaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLogAnalyticsWorkspace_withVolumeCap(data, 4.5), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLogAnalyticsWorkspaceExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMLogAnalyticsWorkspaceDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).LogAnalytics.WorkspacesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_log_analytics_workspace" { - continue - } - - id, err := parse.LogAnalyticsWorkspaceID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.Name) - - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Log Analytics Workspace still exists:\n%#v", resp) - } - } - - return nil -} - -func testCheckAzureRMLogAnalyticsWorkspaceExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).LogAnalytics.WorkspacesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.LogAnalyticsWorkspaceID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.Name) - if err != nil { - return fmt.Errorf("Bad: Get on Log Analytics Workspace Client: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Log Analytics Workspace '%s' (resource group: '%s') does not exist", id.Name, id.ResourceGroup) - } - - return nil - } -} -func testAccAzureRMLogAnalyticsWorkspace_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_log_analytics_workspace" "test" { - name = "acctestLAW-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "PerGB2018" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMLogAnalyticsWorkspace_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMLogAnalyticsWorkspace_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_log_analytics_workspace" "import" { - name = azurerm_log_analytics_workspace.test.name - location = azurerm_log_analytics_workspace.test.location - resource_group_name = azurerm_log_analytics_workspace.test.resource_group_name - sku = "PerGB2018" -} -`, template) -} - -func testAccAzureRMLogAnalyticsWorkspace_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource 
"azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_log_analytics_workspace" "test" { - name = "acctestLAW-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "PerGB2018" - retention_in_days = 30 - - tags = { - Environment = "Test" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMLogAnalyticsWorkspace_freeTier(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_log_analytics_workspace" "test" { - name = "acctestLAW-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Free" - retention_in_days = 7 -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMLogAnalyticsWorkspace_withDefaultSku(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_log_analytics_workspace" "test" { - name = "acctestLAW-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMLogAnalyticsWorkspace_withVolumeCap(data acceptance.TestData, volumeCapGb float64) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_log_analytics_workspace" "test" { - name = "acctestLAW-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "PerGB2018" - retention_in_days = 30 - daily_quota_gb = %f - - tags = { - Environment = "Test" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, volumeCapGb) -} diff --git a/azurerm/internal/services/loganalytics/validate/base64_encoded.go b/azurerm/internal/services/loganalytics/validate/base64_encoded.go new file mode 100644 index 000000000000..eb14253d5f63 --- /dev/null +++ b/azurerm/internal/services/loganalytics/validate/base64_encoded.go @@ -0,0 +1,27 @@ +package validate + +import ( + "encoding/base64" + "fmt" + "strings" +) + +func IsBase64Encoded(i interface{}, k string) (warnings []string, errors []error) { + v, ok := i.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", k)) + return + } + + if len(strings.TrimSpace(v)) < 1 { + errors = append(errors, fmt.Errorf("%q must not be an empty string", k)) + return + } + + if _, err := base64.StdEncoding.DecodeString(v); err != nil { + errors = append(errors, fmt.Errorf("expected %q to be a base64 encoded string", k)) + return + } + + return +} diff --git a/azurerm/internal/services/loganalytics/validate/internal.go b/azurerm/internal/services/loganalytics/validate/internal.go new file mode 100644 index 000000000000..24dbd708b144 --- /dev/null +++ b/azurerm/internal/services/loganalytics/validate/internal.go @@ -0,0 +1,27 @@ +package validate + +import ( + "fmt" + "regexp" +) + +func logAnalyticsGenericName(i interface{}, k string) (warnings []string, errors []error) { + v, ok := i.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected type of %s to be string", k)) + 
return + } + if len(v) < 4 { + errors = append(errors, fmt.Errorf("length should be greater than %d, got %q", 4, v)) + return + } + if len(v) > 63 { + errors = append(errors, fmt.Errorf("length should be less than %d, got %q", 63, v)) + return + } + if !regexp.MustCompile(`^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$`).MatchString(v) { + errors = append(errors, fmt.Errorf("expected value of %s does not match regular expression, got %v", k, v)) + return + } + return +} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_cluster_id.go b/azurerm/internal/services/loganalytics/validate/log_analytics_cluster_id.go new file mode 100644 index 000000000000..747d8ede0a12 --- /dev/null +++ b/azurerm/internal/services/loganalytics/validate/log_analytics_cluster_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" +) + +func LogAnalyticsClusterID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.LogAnalyticsClusterID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_cluster_id_test.go b/azurerm/internal/services/loganalytics/validate/log_analytics_cluster_id_test.go new file mode 100644 index 000000000000..54d57267f53e --- /dev/null +++ b/azurerm/internal/services/loganalytics/validate/log_analytics_cluster_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestLogAnalyticsClusterID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/", + Valid: false, + }, + + { + // missing value for ClusterName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/clusters/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/clusters/cluster1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.OPERATIONALINSIGHTS/CLUSTERS/CLUSTER1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := LogAnalyticsClusterID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_cluster_name.go 
b/azurerm/internal/services/loganalytics/validate/log_analytics_cluster_name.go
new file mode 100644
index 000000000000..b8f32236ab74
--- /dev/null
+++ b/azurerm/internal/services/loganalytics/validate/log_analytics_cluster_name.go
@@ -0,0 +1,5 @@
+package validate
+
+func LogAnalyticsClusterName(i interface{}, k string) (warnings []string, errors []error) {
+	return logAnalyticsGenericName(i, k)
+}
diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_cluster_name_test.go b/azurerm/internal/services/loganalytics/validate/log_analytics_cluster_name_test.go
new file mode 100644
index 000000000000..b0d7c490ec99
--- /dev/null
+++ b/azurerm/internal/services/loganalytics/validate/log_analytics_cluster_name_test.go
@@ -0,0 +1,73 @@
+package validate
+
+import (
+	"testing"
+)
+
+func TestLogAnalyticsClusterName(t *testing.T) {
+	testCases := []struct {
+		Name string
+		Input string
+		Expected bool
+	}{
+		{
+			Name: "Too short",
+			Input: "inv",
+			Expected: false,
+		},
+		{
+			Name: "Invalid characters underscores",
+			Input: "invalid_Clusters_Name",
+			Expected: false,
+		},
+		{
+			Name: "Invalid characters space",
+			Input: "invalid Clusters Name",
+			Expected: false,
+		},
+		{
+			Name: "Invalid name starts with hyphen",
+			Input: "-invalidClustersName",
+			Expected: false,
+		},
+		{
+			Name: "Invalid name ends with hyphen",
+			Input: "invalidClustersName-",
+			Expected: false,
+		},
+		{
+			Name: "Invalid name too long",
+			Input: "thisIsToLoooooooooooooooooooooooooooooooooooooongForAClusterName",
+			Expected: false,
+		},
+		{
+			Name: "Valid name",
+			Input: "validClustersName",
+			Expected: true,
+		},
+		{
+			Name: "Valid name with hyphen",
+			Input: "validClustersName-2",
+			Expected: true,
+		},
+		{
+			Name: "Valid name max length",
+			Input: "thisIsTheLooooooooooooooooooooooooongestValidClusterNameThereIs",
+			Expected: true,
+		},
+		{
+			Name: "Valid name min length",
+			Input: "vali",
+			Expected: true,
+		},
+	}
+	for _, v := range testCases {
+		t.Logf("[DEBUG] Testing %q..", v.Name)
+
+		_, errors := LogAnalyticsClusterName(v.Input, "name")
+		result := len(errors) == 0
+		if result != v.Expected {
+			t.Fatalf("Expected the result to be %t but got %t (and %d errors)", v.Expected, result, len(errors))
+		}
+	}
+}
diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_data_export.go b/azurerm/internal/services/loganalytics/validate/log_analytics_data_export.go
deleted file mode 100644
index 5335c90da758..000000000000
--- a/azurerm/internal/services/loganalytics/validate/log_analytics_data_export.go
+++ /dev/null
@@ -1,48 +0,0 @@
-package validate
-
-import (
-	"fmt"
-	"regexp"
-)
-
-func LogAnalyticsDataExportWorkspaceName(i interface{}, k string) (warnings []string, errors []error) {
-	v, ok := i.(string)
-	if !ok {
-		errors = append(errors, fmt.Errorf("expected type of %s to be string", k))
-		return
-	}
-	if len(v) < 4 {
-		errors = append(errors, fmt.Errorf("length should be greater than %d", 4))
-		return
-	}
-	if len(v) > 63 {
-		errors = append(errors, fmt.Errorf("length should be less than %d", 63))
-		return
-	}
-	if !regexp.MustCompile(`^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$`).MatchString(v) {
-		errors = append(errors, fmt.Errorf("expected value of %s not match regular expression, got %v", k, v))
-		return
-	}
-	return
-}
-
-func LogAnalyticsDataExportName(i interface{}, k string) (warnings []string, errors []error) {
-	v, ok := i.(string)
-	if !ok {
-		errors = append(errors, fmt.Errorf("expected type of %s to be string", k))
-		return
-	}
-	if len(v) < 4 {
-		errors = append(errors,
fmt.Errorf("length should be greater than %d", 4)) - return - } - if len(v) > 63 { - errors = append(errors, fmt.Errorf("length should be less than %d", 63)) - return - } - if !regexp.MustCompile(`^[A-Za-z][A-Za-z0-9-]+[A-Za-z0-9]$`).MatchString(v) { - errors = append(errors, fmt.Errorf("expected value of %s not match regular expression, got %v", k, v)) - return - } - return -} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_data_export_id.go b/azurerm/internal/services/loganalytics/validate/log_analytics_data_export_id.go new file mode 100644 index 000000000000..283df56925a0 --- /dev/null +++ b/azurerm/internal/services/loganalytics/validate/log_analytics_data_export_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" +) + +func LogAnalyticsDataExportID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.LogAnalyticsDataExportID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_data_export_id_test.go b/azurerm/internal/services/loganalytics/validate/log_analytics_data_export_id_test.go new file mode 100644 index 000000000000..aac995f9642a --- /dev/null +++ b/azurerm/internal/services/loganalytics/validate/log_analytics_data_export_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestLogAnalyticsDataExportID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing WorkspaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/", + Valid: false, + }, + + { + // missing value for WorkspaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/", + Valid: false, + }, + + { + // missing DataexportName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/", + Valid: false, + }, + + { + // missing value for DataexportName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/dataexports/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/dataexports/dataExport1", + Valid: true, + }, + + { + // upper-cased + Input: 
"/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.OPERATIONALINSIGHTS/WORKSPACES/WORKSPACE1/DATAEXPORTS/DATAEXPORT1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := LogAnalyticsDataExportID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_data_export_name.go b/azurerm/internal/services/loganalytics/validate/log_analytics_data_export_name.go new file mode 100644 index 000000000000..b0f384c552a7 --- /dev/null +++ b/azurerm/internal/services/loganalytics/validate/log_analytics_data_export_name.go @@ -0,0 +1,5 @@ +package validate + +func LogAnalyticsDataExportName(i interface{}, k string) (warnings []string, errors []error) { + return logAnalyticsGenericName(i, k) +} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_data_export_name_test.go b/azurerm/internal/services/loganalytics/validate/log_analytics_data_export_name_test.go new file mode 100644 index 000000000000..a91d9f35c83a --- /dev/null +++ b/azurerm/internal/services/loganalytics/validate/log_analytics_data_export_name_test.go @@ -0,0 +1,73 @@ +package validate + +import ( + "testing" +) + +func TestLogAnalyticsDataExportName(t *testing.T) { + testCases := []struct { + Name string + Input string + Expected bool + }{ + { + Name: "Too short", + Input: "inv", + Expected: false, + }, + { + Name: "Invalid characters underscores", + Input: "invalid_Exports_Name", + Expected: false, + }, + { + Name: "Invalid characters space", + Input: "invalid Exports Name", + Expected: false, + }, + { + Name: "Invalid name starts with hyphen", + Input: "-invalidExportsName", + Expected: false, + }, + { + Name: "Invalid name ends with hyphen", + Input: "invalidExportsName-", + Expected: false, + }, + { + Name: "Invalid name too long", + Input: "thisIsToLooooooooooooooooooooooooooooooooooooooongForAExportName", + Expected: false, + }, + { + Name: "Valid name", + Input: "validExportsName", + Expected: true, + }, + { + Name: "Valid name with hyphen", + Input: "validExportsName-2", + Expected: true, + }, + { + Name: "Valid name max length", + Input: "thisIsTheLoooooooooooooooooooooooooongestValidExportNameThereIs", + Expected: true, + }, + { + Name: "Valid name min length", + Input: "vali", + Expected: true, + }, + } + for _, v := range testCases { + t.Logf("[DEBUG] Testing %q..", v.Name) + + _, errors := LogAnalyticsDataExportName(v.Input, "name") + result := len(errors) == 0 + if result != v.Expected { + t.Fatalf("Expected the result to be %v but got %v (and %d errors)", v.Expected, result, len(errors)) + } + } +} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_data_export_test.go b/azurerm/internal/services/loganalytics/validate/log_analytics_data_export_test.go deleted file mode 100644 index 487a6e4672af..000000000000 --- a/azurerm/internal/services/loganalytics/validate/log_analytics_data_export_test.go +++ /dev/null @@ -1,56 +0,0 @@ -package validate - -import ( - "testing" -) - -func TestLogAnalyticsDataExportName(t *testing.T) { - testCases := []struct { - Input string - Expected bool - }{ - { - Input: "inv", - Expected: false, - }, - { - Input: "invalid_Exports_Name", - Expected: false, - }, - { - Input: "invalid Exports Name", - Expected: false, - }, - { - Input: "-invalidExportsName", - Expected: false, - }, - { - Input: 
"invalidExportsName-", - Expected: false, - }, - { - Input: "validExportsName", - Expected: true, - }, - { - Input: "validExportsName-2", - Expected: true, - }, - { - Input: "thisIsTheLooooooooooooooooooooooooongestValidExportsNameThereIs", - Expected: true, - }, - { - Input: "vali", - Expected: true, - }, - } - for _, v := range testCases { - _, errors := LogAnalyticsDataExportName(v.Input, "name") - result := len(errors) == 0 - if result != v.Expected { - t.Fatalf("Expected the result to be %t but got %t (and %d errors)", v.Expected, result, len(errors)) - } - } -} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_data_source_windows_event_id.go b/azurerm/internal/services/loganalytics/validate/log_analytics_data_source_windows_event_id.go new file mode 100644 index 000000000000..81f8e2e9a0c3 --- /dev/null +++ b/azurerm/internal/services/loganalytics/validate/log_analytics_data_source_windows_event_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" +) + +func LogAnalyticsDataSourceWindowsEventID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.LogAnalyticsDataSourceWindowsEventID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_data_source_windows_event_id_test.go b/azurerm/internal/services/loganalytics/validate/log_analytics_data_source_windows_event_id_test.go new file mode 100644 index 000000000000..55a0f06c9eae --- /dev/null +++ b/azurerm/internal/services/loganalytics/validate/log_analytics_data_source_windows_event_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestLogAnalyticsDataSourceWindowsEventID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing WorkspaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/", + Valid: false, + }, + + { + // missing value for WorkspaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/", + Valid: false, + }, + + { + // missing DataSourceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/", + Valid: false, + }, + + { + // missing value for DataSourceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/dataSources/", + Valid: false, + }, + + { + // valid + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/dataSources/dataSource1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.OPERATIONALINSIGHTS/WORKSPACES/WORKSPACE1/DATASOURCES/DATASOURCE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := LogAnalyticsDataSourceWindowsEventID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_linked_service_id.go b/azurerm/internal/services/loganalytics/validate/log_analytics_linked_service_id.go new file mode 100644 index 000000000000..a4e5f65315a0 --- /dev/null +++ b/azurerm/internal/services/loganalytics/validate/log_analytics_linked_service_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" +) + +func LogAnalyticsLinkedServiceID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.LogAnalyticsLinkedServiceID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_linked_service_id_test.go b/azurerm/internal/services/loganalytics/validate/log_analytics_linked_service_id_test.go new file mode 100644 index 000000000000..0895b011acb8 --- /dev/null +++ b/azurerm/internal/services/loganalytics/validate/log_analytics_linked_service_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestLogAnalyticsLinkedServiceID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing WorkspaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/", + Valid: false, + }, + + { + // missing value for WorkspaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/", + Valid: false, + }, + + { + // missing LinkedServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/", + Valid: false, + }, + + { + // missing value for LinkedServiceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/linkedServices/", + Valid: false, + }, + + { + // valid + 
Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/linkedServices/linkedService1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.OPERATIONALINSIGHTS/WORKSPACES/WORKSPACE1/LINKEDSERVICES/LINKEDSERVICE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := LogAnalyticsLinkedServiceID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_linked_storage_account.go b/azurerm/internal/services/loganalytics/validate/log_analytics_linked_storage_account.go deleted file mode 100644 index 35b6f7a017f7..000000000000 --- a/azurerm/internal/services/loganalytics/validate/log_analytics_linked_storage_account.go +++ /dev/null @@ -1,35 +0,0 @@ -package validate - -import ( - "fmt" - "regexp" -) - -func LogAnalyticsLinkedStorageAccountWorkspaceName(i interface{}, k string) (warnings []string, errors []error) { - return logAnalyticsGenericName(i, k) -} - -func LogAnalyticsLinkedStorageAccountName(i interface{}, k string) (warnings []string, errors []error) { - return logAnalyticsGenericName(i, k) -} - -func logAnalyticsGenericName(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %s to be string", k)) - return - } - if len(v) < 4 { - errors = append(errors, fmt.Errorf("length should be greater than %d", 4)) - return - } - if len(v) > 63 { - errors = append(errors, fmt.Errorf("length should be less than %d", 63)) - return - } - if !regexp.MustCompile(`^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$`).MatchString(v) { - errors = append(errors, fmt.Errorf("expected value of %s does not match regular expression, got %v", k, v)) - return - } - return -} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_linked_storage_account_id.go b/azurerm/internal/services/loganalytics/validate/log_analytics_linked_storage_account_id.go new file mode 100644 index 000000000000..6164151c25a0 --- /dev/null +++ b/azurerm/internal/services/loganalytics/validate/log_analytics_linked_storage_account_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" +) + +func LogAnalyticsLinkedStorageAccountID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.LogAnalyticsLinkedStorageAccountID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_linked_storage_account_id_test.go b/azurerm/internal/services/loganalytics/validate/log_analytics_linked_storage_account_id_test.go new file mode 100644 index 000000000000..634da80088e6 --- /dev/null +++ b/azurerm/internal/services/loganalytics/validate/log_analytics_linked_storage_account_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + 
+import "testing" + +func TestLogAnalyticsLinkedStorageAccountID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing WorkspaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/", + Valid: false, + }, + + { + // missing value for WorkspaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/", + Valid: false, + }, + + { + // missing LinkedStorageAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/", + Valid: false, + }, + + { + // missing value for LinkedStorageAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/linkedStorageAccounts/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/linkedStorageAccounts/query", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.OPERATIONALINSIGHTS/WORKSPACES/WORKSPACE1/LINKEDSTORAGEACCOUNTS/QUERY", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := LogAnalyticsLinkedStorageAccountID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_linked_storage_account_workspace_name.go b/azurerm/internal/services/loganalytics/validate/log_analytics_linked_storage_account_workspace_name.go new file mode 100644 index 000000000000..c0edf32ba729 --- /dev/null +++ b/azurerm/internal/services/loganalytics/validate/log_analytics_linked_storage_account_workspace_name.go @@ -0,0 +1,5 @@ +package validate + +func LogAnalyticsLinkedStorageAccountWorkspaceName(i interface{}, k string) (warnings []string, errors []error) { + return logAnalyticsGenericName(i, k) +} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_linked_storage_account_test.go b/azurerm/internal/services/loganalytics/validate/log_analytics_linked_storage_account_workspace_name_test.go similarity index 100% rename from azurerm/internal/services/loganalytics/validate/log_analytics_linked_storage_account_test.go rename to azurerm/internal/services/loganalytics/validate/log_analytics_linked_storage_account_workspace_name_test.go diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_saved_search_id.go b/azurerm/internal/services/loganalytics/validate/log_analytics_saved_search_id.go new file mode 100644 index 000000000000..cdd150c34734 --- /dev/null +++ 
b/azurerm/internal/services/loganalytics/validate/log_analytics_saved_search_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" +) + +func LogAnalyticsSavedSearchID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.LogAnalyticsSavedSearchID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_saved_search_id_test.go b/azurerm/internal/services/loganalytics/validate/log_analytics_saved_search_id_test.go new file mode 100644 index 000000000000..85c0144f3c93 --- /dev/null +++ b/azurerm/internal/services/loganalytics/validate/log_analytics_saved_search_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestLogAnalyticsSavedSearchID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing WorkspaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/", + Valid: false, + }, + + { + // missing value for WorkspaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/", + Valid: false, + }, + + { + // missing SavedSearcheName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/", + Valid: false, + }, + + { + // missing value for SavedSearcheName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/savedSearches/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/savedSearches/search1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.OPERATIONALINSIGHTS/WORKSPACES/WORKSPACE1/SAVEDSEARCHES/SEARCH1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := LogAnalyticsSavedSearchID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_solution_id.go b/azurerm/internal/services/loganalytics/validate/log_analytics_solution_id.go new file mode 100644 index 000000000000..19ced3da2f66 --- /dev/null +++ 
b/azurerm/internal/services/loganalytics/validate/log_analytics_solution_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" +) + +func LogAnalyticsSolutionID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.LogAnalyticsSolutionID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_solution_id_test.go b/azurerm/internal/services/loganalytics/validate/log_analytics_solution_id_test.go new file mode 100644 index 000000000000..07ab5d185b88 --- /dev/null +++ b/azurerm/internal/services/loganalytics/validate/log_analytics_solution_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestLogAnalyticsSolutionID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing SolutionName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationsManagement/", + Valid: false, + }, + + { + // missing value for SolutionName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationsManagement/solutions/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationsManagement/solutions/solution1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.OPERATIONSMANAGEMENT/SOLUTIONS/SOLUTION1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := LogAnalyticsSolutionID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insights_id.go b/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insights_id.go new file mode 100644 index 000000000000..1f26cc00f1a3 --- /dev/null +++ b/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insights_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" +) + +func LogAnalyticsStorageInsightsID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q 
to be a string", key)) + return + } + + if _, err := parse.LogAnalyticsStorageInsightsID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insights_id_test.go b/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insights_id_test.go new file mode 100644 index 000000000000..6b879f1869ad --- /dev/null +++ b/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insights_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestLogAnalyticsStorageInsightsID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing WorkspaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/", + Valid: false, + }, + + { + // missing value for WorkspaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/", + Valid: false, + }, + + { + // missing StorageInsightConfigName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/", + Valid: false, + }, + + { + // missing value for StorageInsightConfigName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/storageInsightConfigs/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/storageInsightConfigs/storageInsight1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.OPERATIONALINSIGHTS/WORKSPACES/WORKSPACE1/STORAGEINSIGHTCONFIGS/STORAGEINSIGHT1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := LogAnalyticsStorageInsightsID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insights_name.go b/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insights_name.go new file mode 100644 index 000000000000..a4bc74fc1737 --- /dev/null +++ b/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insights_name.go @@ -0,0 +1,5 @@ +package validate + +func LogAnalyticsStorageInsightsName(i interface{}, k string) (warnings []string, errors []error) { + return logAnalyticsGenericName(i, k) +} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insights_workspace_name.go 
b/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insights_workspace_name.go new file mode 100644 index 000000000000..a522e27bf46b --- /dev/null +++ b/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insights_workspace_name.go @@ -0,0 +1,5 @@ +package validate + +func LogAnalyticsStorageInsightsWorkspaceName(i interface{}, k string) (warnings []string, errors []error) { + return logAnalyticsGenericName(i, k) +} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insights_workspace_name_test.go b/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insights_workspace_name_test.go new file mode 100644 index 000000000000..aaffe53bcc8b --- /dev/null +++ b/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insights_workspace_name_test.go @@ -0,0 +1,73 @@ +package validate + +import ( + "testing" +) + +func TestLogAnalyticsStorageInsightsWorkspaceName(t *testing.T) { + testCases := []struct { + Name string + Input string + Expected bool + }{ + { + Name: "Too short", + Input: "inv", + Expected: false, + }, + { + Name: "Invalid characters underscores", + Input: "invalid_Exports_Name", + Expected: false, + }, + { + Name: "Invalid characters space", + Input: "invalid Storage Insight Config Name Name", + Expected: false, + }, + { + Name: "Invalid name starts with hyphen", + Input: "-invalidStorageInsightConfigName", + Expected: false, + }, + { + Name: "Invalid name ends with hyphen", + Input: "invalidStorageInsightConfigName-", + Expected: false, + }, + { + Name: "Invalid name too long", + Input: "thisIsToLoooooooooooooooooooooongestForAStorageInsightConfigName", + Expected: false, + }, + { + Name: "Valid name", + Input: "validStorageInsightConfigName", + Expected: true, + }, + { + Name: "Valid name with hyphen", + Input: "validStorageInsightConfigName-2", + Expected: true, + }, + { + Name: "Valid name max length", + Input: "thisIsTheLoooooooooooongestValidStorageInsightConfigNameThereIs", + Expected: true, + }, + { + Name: "Valid name min length", + Input: "vali", + Expected: true, + }, + } + for _, v := range testCases { + t.Logf("[DEBUG] Testing %q..", v.Name) + + _, errors := LogAnalyticsStorageInsightsWorkspaceName(v.Input, "workspace_name") + result := len(errors) == 0 + if result != v.Expected { + t.Fatalf("Expected the result to be %v but got %v (and %d errors)", v.Expected, result, len(errors)) + } + } +} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_workspace.go b/azurerm/internal/services/loganalytics/validate/log_analytics_workspace.go deleted file mode 100644 index f40d4b02f424..000000000000 --- a/azurerm/internal/services/loganalytics/validate/log_analytics_workspace.go +++ /dev/null @@ -1,22 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" -) - -func LogAnalyticsWorkspaceID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.LogAnalyticsWorkspaceID(v); err != nil { - errors = append(errors, fmt.Errorf("parsing %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_workspace_id.go b/azurerm/internal/services/loganalytics/validate/log_analytics_workspace_id.go new file mode 100644 index 
000000000000..217489c3f176 --- /dev/null +++ b/azurerm/internal/services/loganalytics/validate/log_analytics_workspace_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse" +) + +func LogAnalyticsWorkspaceID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.LogAnalyticsWorkspaceID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_workspace_id_test.go b/azurerm/internal/services/loganalytics/validate/log_analytics_workspace_id_test.go new file mode 100644 index 000000000000..2fd0b3b2011c --- /dev/null +++ b/azurerm/internal/services/loganalytics/validate/log_analytics_workspace_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestLogAnalyticsWorkspaceID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing WorkspaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/", + Valid: false, + }, + + { + // missing value for WorkspaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.OPERATIONALINSIGHTS/WORKSPACES/WORKSPACE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := LogAnalyticsWorkspaceID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_workspace_name.go b/azurerm/internal/services/loganalytics/validate/log_analytics_workspace_name.go new file mode 100644 index 000000000000..c4787b23b05e --- /dev/null +++ b/azurerm/internal/services/loganalytics/validate/log_analytics_workspace_name.go @@ -0,0 +1,21 @@ +package validate + +import ( + "fmt" + "regexp" +) + +func LogAnalyticsWorkspaceName(v interface{}, _ string) (warnings []string, errors []error) { + value := v.(string) + + if !regexp.MustCompile("^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$").MatchString(value) { + errors = append(errors, fmt.Errorf("Workspace Name can only contain alphabet, number, and '-' character. 
You can not use '-' as the start and end of the name")) + } + + length := len(value) + if length > 63 || 4 > length { + errors = append(errors, fmt.Errorf("Workspace Name can only be between 4 and 63 letters")) + } + + return warnings, errors +} diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_workspace_name_test.go b/azurerm/internal/services/loganalytics/validate/log_analytics_workspace_name_test.go new file mode 100644 index 000000000000..cafd976e56a1 --- /dev/null +++ b/azurerm/internal/services/loganalytics/validate/log_analytics_workspace_name_test.go @@ -0,0 +1,48 @@ +package validate + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/acctest" +) + +func TestAccLogAnalyticsWorkspaceName_validation(t *testing.T) { + str := acctest.RandString(63) + cases := []struct { + Value string + ErrCount int + }{ + { + Value: "abc", + ErrCount: 1, + }, + { + Value: "Ab-c", + ErrCount: 0, + }, + { + Value: "-abc", + ErrCount: 1, + }, + { + Value: "abc-", + ErrCount: 1, + }, + { + Value: str, + ErrCount: 0, + }, + { + Value: str + "a", + ErrCount: 1, + }, + } + + for _, tc := range cases { + _, errors := LogAnalyticsWorkspaceName(tc.Value, "azurerm_log_analytics") + + if len(errors) != tc.ErrCount { + t.Fatalf("Expected the Log Analytics Workspace Name to trigger a validation error for '%s'", tc.Value) + } + } +} diff --git a/azurerm/internal/services/logic/integration_service_environment.go b/azurerm/internal/services/logic/integration_service_environment.go index 087443f1a0a5..dc9062268c32 100644 --- a/azurerm/internal/services/logic/integration_service_environment.go +++ b/azurerm/internal/services/logic/integration_service_environment.go @@ -21,17 +21,18 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/logic/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/logic/validate" networkParse "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" + networkValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmIntegrationServiceEnvironment() *schema.Resource { +func resourceIntegrationServiceEnvironment() *schema.Resource { return &schema.Resource{ - Create: resourceArmIntegrationServiceEnvironmentCreateUpdate, - Read: resourceArmIntegrationServiceEnvironmentRead, - Update: resourceArmIntegrationServiceEnvironmentCreateUpdate, - Delete: resourceArmIntegrationServiceEnvironmentDelete, + Create: resourceIntegrationServiceEnvironmentCreateUpdate, + Read: resourceIntegrationServiceEnvironmentRead, + Update: resourceIntegrationServiceEnvironmentCreateUpdate, + Delete: resourceIntegrationServiceEnvironmentDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -93,7 +94,7 @@ func resourceArmIntegrationServiceEnvironment() *schema.Resource { ForceNew: true, // The network configuration subnets cannot be updated after integration service environment is created. 
Elem: &schema.Schema{ Type: schema.TypeString, - ValidateFunc: validate.ValidateSubnetID, + ValidateFunc: networkValidate.SubnetID, }, MinItems: 4, MaxItems: 4, @@ -137,7 +138,7 @@ func resourceArmIntegrationServiceEnvironment() *schema.Resource { } } -func resourceArmIntegrationServiceEnvironmentCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceIntegrationServiceEnvironmentCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Logic.IntegrationServiceEnvironmentClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -205,10 +206,10 @@ func resourceArmIntegrationServiceEnvironmentCreateUpdate(d *schema.ResourceData d.SetId(*resp.ID) - return resourceArmIntegrationServiceEnvironmentRead(d, meta) + return resourceIntegrationServiceEnvironmentRead(d, meta) } -func resourceArmIntegrationServiceEnvironmentRead(d *schema.ResourceData, meta interface{}) error { +func resourceIntegrationServiceEnvironmentRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Logic.IntegrationServiceEnvironmentClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -267,7 +268,7 @@ func resourceArmIntegrationServiceEnvironmentRead(d *schema.ResourceData, meta i return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmIntegrationServiceEnvironmentDelete(d *schema.ResourceData, meta interface{}) error { +func resourceIntegrationServiceEnvironmentDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Logic.IntegrationServiceEnvironmentClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() @@ -291,7 +292,7 @@ func resourceArmIntegrationServiceEnvironmentDelete(d *schema.ResourceData, meta // Get subnet IDs before delete subnetIDs := getSubnetIDs(&resp) - // Not optimal behavior for now + // Not optimal behaviour for now // It deletes synchronously and resource is not available anymore after return from delete operation // Next, after return - delete operation is still in progress in the background and is still occupying subnets. 
// As workaround we are checking on all involved subnets presence of serviceAssociationLink and resourceNavigationLink @@ -457,7 +458,6 @@ func serviceAssociationLinkExists(ctx context.Context, client *network.ServiceAs } resp, err := client.List(ctx, id.ResourceGroup, id.VirtualNetworkName, id.Name) - if err != nil { if utils.ResponseWasNotFound(resp.Response) { return false, nil @@ -486,7 +486,6 @@ func resourceNavigationLinkExists(ctx context.Context, client *network.ResourceN } resp, err := client.List(ctx, id.ResourceGroup, id.VirtualNetworkName, id.Name) - if err != nil { if utils.ResponseWasNotFound(resp.Response) { return false, nil diff --git a/azurerm/internal/services/logic/integration_service_environment_resource_test.go b/azurerm/internal/services/logic/integration_service_environment_resource_test.go new file mode 100644 index 000000000000..ac33af580592 --- /dev/null +++ b/azurerm/internal/services/logic/integration_service_environment_resource_test.go @@ -0,0 +1,358 @@ +package logic_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/logic/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type IntegrationServiceEnvironmentResource struct { +} + +func TestAccIntegrationServiceEnvironment_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_integration_service_environment", "test") + r := IntegrationServiceEnvironmentResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue(fmt.Sprintf("acctestRG-logic-%d", data.RandomInteger)), + check.That(data.ResourceName).Key("location").HasValue(data.Locations.Primary), + check.That(data.ResourceName).Key("resource_group_name").HasValue(fmt.Sprintf("acctestRG-logic-%d", data.RandomInteger)), + check.That(data.ResourceName).Key("sku_name").HasValue("Premium_0"), + check.That(data.ResourceName).Key("access_endpoint_type").HasValue("Internal"), + check.That(data.ResourceName).Key("virtual_network_subnet_ids.#").HasValue("4"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + check.That(data.ResourceName).Key("connector_endpoint_ip_addresses.#").Exists(), + check.That(data.ResourceName).Key("connector_outbound_ip_addresses.#").Exists(), + check.That(data.ResourceName).Key("workflow_endpoint_ip_addresses.#").Exists(), + check.That(data.ResourceName).Key("workflow_outbound_ip_addresses.#").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIntegrationServiceEnvironment_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_integration_service_environment", "test") + r := IntegrationServiceEnvironmentResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue(fmt.Sprintf("acctestRG-logic-%d", data.RandomInteger)), + 
check.That(data.ResourceName).Key("location").HasValue(data.Locations.Primary), + check.That(data.ResourceName).Key("resource_group_name").HasValue(fmt.Sprintf("acctestRG-logic-%d", data.RandomInteger)), + check.That(data.ResourceName).Key("sku_name").HasValue("Premium_0"), + check.That(data.ResourceName).Key("access_endpoint_type").HasValue("Internal"), + check.That(data.ResourceName).Key("virtual_network_subnet_ids.#").HasValue("4"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.environment").HasValue("development"), + check.That(data.ResourceName).Key("connector_endpoint_ip_addresses.#").Exists(), + check.That(data.ResourceName).Key("connector_outbound_ip_addresses.#").Exists(), + check.That(data.ResourceName).Key("workflow_endpoint_ip_addresses.#").Exists(), + check.That(data.ResourceName).Key("workflow_outbound_ip_addresses.#").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIntegrationServiceEnvironment_developer(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_integration_service_environment", "test") + r := IntegrationServiceEnvironmentResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.developer(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue(fmt.Sprintf("acctestRG-logic-%d", data.RandomInteger)), + check.That(data.ResourceName).Key("location").HasValue(data.Locations.Primary), + check.That(data.ResourceName).Key("resource_group_name").HasValue(fmt.Sprintf("acctestRG-logic-%d", data.RandomInteger)), + check.That(data.ResourceName).Key("sku_name").HasValue("Developer_0"), + check.That(data.ResourceName).Key("access_endpoint_type").HasValue("Internal"), + check.That(data.ResourceName).Key("virtual_network_subnet_ids.#").HasValue("4"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.environment").HasValue("development"), + check.That(data.ResourceName).Key("connector_endpoint_ip_addresses.#").Exists(), + check.That(data.ResourceName).Key("connector_outbound_ip_addresses.#").Exists(), + check.That(data.ResourceName).Key("workflow_endpoint_ip_addresses.#").Exists(), + check.That(data.ResourceName).Key("workflow_outbound_ip_addresses.#").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIntegrationServiceEnvironment_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_integration_service_environment", "test") + r := IntegrationServiceEnvironmentResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue(fmt.Sprintf("acctestRG-logic-%d", data.RandomInteger)), + check.That(data.ResourceName).Key("location").HasValue(data.Locations.Primary), + check.That(data.ResourceName).Key("resource_group_name").HasValue(fmt.Sprintf("acctestRG-logic-%d", data.RandomInteger)), + check.That(data.ResourceName).Key("sku_name").HasValue("Premium_0"), + check.That(data.ResourceName).Key("access_endpoint_type").HasValue("Internal"), + check.That(data.ResourceName).Key("virtual_network_subnet_ids.#").HasValue("4"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + check.That(data.ResourceName).Key("connector_endpoint_ip_addresses.#").Exists(), + check.That(data.ResourceName).Key("connector_outbound_ip_addresses.#").Exists(), + 
check.That(data.ResourceName).Key("workflow_endpoint_ip_addresses.#").Exists(), + check.That(data.ResourceName).Key("workflow_outbound_ip_addresses.#").Exists(), + ), + }, + data.ImportStep(), + { + Config: r.skuName(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue(fmt.Sprintf("acctestRG-logic-%d", data.RandomInteger)), + check.That(data.ResourceName).Key("location").HasValue(data.Locations.Primary), + check.That(data.ResourceName).Key("resource_group_name").HasValue(fmt.Sprintf("acctestRG-logic-%d", data.RandomInteger)), + check.That(data.ResourceName).Key("sku_name").HasValue("Premium_1"), + check.That(data.ResourceName).Key("access_endpoint_type").HasValue("Internal"), + check.That(data.ResourceName).Key("virtual_network_subnet_ids.#").HasValue("4"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + check.That(data.ResourceName).Key("connector_endpoint_ip_addresses.#").Exists(), + check.That(data.ResourceName).Key("connector_outbound_ip_addresses.#").Exists(), + check.That(data.ResourceName).Key("workflow_endpoint_ip_addresses.#").Exists(), + check.That(data.ResourceName).Key("workflow_outbound_ip_addresses.#").Exists(), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue(fmt.Sprintf("acctestRG-logic-%d", data.RandomInteger)), + check.That(data.ResourceName).Key("location").HasValue(data.Locations.Primary), + check.That(data.ResourceName).Key("resource_group_name").HasValue(fmt.Sprintf("acctestRG-logic-%d", data.RandomInteger)), + check.That(data.ResourceName).Key("sku_name").HasValue("Premium_0"), + check.That(data.ResourceName).Key("access_endpoint_type").HasValue("Internal"), + check.That(data.ResourceName).Key("virtual_network_subnet_ids.#").HasValue("4"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + check.That(data.ResourceName).Key("connector_endpoint_ip_addresses.#").Exists(), + check.That(data.ResourceName).Key("connector_outbound_ip_addresses.#").Exists(), + check.That(data.ResourceName).Key("workflow_endpoint_ip_addresses.#").Exists(), + check.That(data.ResourceName).Key("workflow_outbound_ip_addresses.#").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccIntegrationServiceEnvironment_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_integration_service_environment", "test") + r := IntegrationServiceEnvironmentResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue(fmt.Sprintf("acctestRG-logic-%d", data.RandomInteger)), + check.That(data.ResourceName).Key("location").HasValue(data.Locations.Primary), + check.That(data.ResourceName).Key("resource_group_name").HasValue(fmt.Sprintf("acctestRG-logic-%d", data.RandomInteger)), + check.That(data.ResourceName).Key("sku_name").HasValue("Premium_0"), + check.That(data.ResourceName).Key("access_endpoint_type").HasValue("Internal"), + check.That(data.ResourceName).Key("virtual_network_subnet_ids.#").HasValue("4"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.environment").HasValue("development"), + check.That(data.ResourceName).Key("connector_endpoint_ip_addresses.#").Exists(), + 
check.That(data.ResourceName).Key("connector_outbound_ip_addresses.#").Exists(), + check.That(data.ResourceName).Key("workflow_endpoint_ip_addresses.#").Exists(), + check.That(data.ResourceName).Key("workflow_outbound_ip_addresses.#").Exists(), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (IntegrationServiceEnvironmentResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.IntegrationServiceEnvironmentID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Logic.IntegrationServiceEnvironmentClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Integration Service Environment %s (resource group: %s): %v", id.Name, id.ResourceGroup, err) + } + + return utils.Bool(resp.Properties != nil), nil +} + +func (IntegrationServiceEnvironmentResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-logic-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctest-vnet-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + address_space = ["10.0.0.0/22"] +} + +resource "azurerm_subnet" "isesubnet1" { + name = "isesubnet1" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefixes = ["10.0.1.0/26"] + + delegation { + name = "integrationServiceEnvironments" + service_delegation { + name = "Microsoft.Logic/integrationServiceEnvironments" + actions = ["Microsoft.Network/virtualNetworks/subnets/action"] + } + } +} + +resource "azurerm_subnet" "isesubnet2" { + name = "isesubnet2" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefixes = ["10.0.1.64/26"] +} + +resource "azurerm_subnet" "isesubnet3" { + name = "isesubnet3" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefixes = ["10.0.1.128/26"] +} + +resource "azurerm_subnet" "isesubnet4" { + name = "isesubnet4" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefixes = ["10.0.1.192/26"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r IntegrationServiceEnvironmentResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_integration_service_environment" "test" { + name = "acctestRG-logic-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "Premium_0" + access_endpoint_type = "Internal" + virtual_network_subnet_ids = [ + azurerm_subnet.isesubnet1.id, + azurerm_subnet.isesubnet2.id, + azurerm_subnet.isesubnet3.id, + azurerm_subnet.isesubnet4.id + ] +} +`, r.template(data), data.RandomInteger) +} + +func (r IntegrationServiceEnvironmentResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_integration_service_environment" "test" { + name = "acctestRG-logic-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "Premium_0" + access_endpoint_type = "Internal" + virtual_network_subnet_ids = [ + 
azurerm_subnet.isesubnet1.id, + azurerm_subnet.isesubnet2.id, + azurerm_subnet.isesubnet3.id, + azurerm_subnet.isesubnet4.id + ] + tags = { + environment = "development" + } +} +`, r.template(data), data.RandomInteger) +} + +func (r IntegrationServiceEnvironmentResource) developer(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_integration_service_environment" "test" { + name = "acctestRG-logic-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "Developer_0" + access_endpoint_type = "Internal" + virtual_network_subnet_ids = [ + azurerm_subnet.isesubnet1.id, + azurerm_subnet.isesubnet2.id, + azurerm_subnet.isesubnet3.id, + azurerm_subnet.isesubnet4.id + ] + tags = { + environment = "development" + } +} +`, r.template(data), data.RandomInteger) +} + +func (r IntegrationServiceEnvironmentResource) skuName(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_integration_service_environment" "test" { + name = "acctestRG-logic-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "Premium_1" + access_endpoint_type = "Internal" + virtual_network_subnet_ids = [ + azurerm_subnet.isesubnet1.id, + azurerm_subnet.isesubnet2.id, + azurerm_subnet.isesubnet3.id, + azurerm_subnet.isesubnet4.id + ] +} + `, r.template(data), data.RandomInteger) +} + +func (r IntegrationServiceEnvironmentResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_integration_service_environment" "import" { + name = azurerm_integration_service_environment.test.name + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = azurerm_integration_service_environment.test.sku_name + access_endpoint_type = azurerm_integration_service_environment.test.access_endpoint_type + virtual_network_subnet_ids = azurerm_integration_service_environment.test.virtual_network_subnet_ids + tags = azurerm_integration_service_environment.test.tags +} +`, r.basic(data)) +} diff --git a/azurerm/internal/services/logic/logic_app_action_custom_resource.go b/azurerm/internal/services/logic/logic_app_action_custom_resource.go index fb8058077b64..0c5506359e8f 100644 --- a/azurerm/internal/services/logic/logic_app_action_custom_resource.go +++ b/azurerm/internal/services/logic/logic_app_action_custom_resource.go @@ -12,12 +12,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) -func resourceArmLogicAppActionCustom() *schema.Resource { +func resourceLogicAppActionCustom() *schema.Resource { return &schema.Resource{ - Create: resourceArmLogicAppActionCustomCreateUpdate, - Read: resourceArmLogicAppActionCustomRead, - Update: resourceArmLogicAppActionCustomCreateUpdate, - Delete: resourceArmLogicAppActionCustomDelete, + Create: resourceLogicAppActionCustomCreateUpdate, + Read: resourceLogicAppActionCustomRead, + Update: resourceLogicAppActionCustomCreateUpdate, + Delete: resourceLogicAppActionCustomDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -53,7 +53,7 @@ func resourceArmLogicAppActionCustom() *schema.Resource { } } -func resourceArmLogicAppActionCustomCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceLogicAppActionCustomCreateUpdate(d *schema.ResourceData, meta interface{}) error { logicAppId := d.Get("logic_app_id").(string) name 
:= d.Get("name").(string) bodyRaw := d.Get("body").(string) @@ -67,10 +67,10 @@ func resourceArmLogicAppActionCustomCreateUpdate(d *schema.ResourceData, meta in return err } - return resourceArmLogicAppActionCustomRead(d, meta) + return resourceLogicAppActionCustomRead(d, meta) } -func resourceArmLogicAppActionCustomRead(d *schema.ResourceData, meta interface{}) error { +func resourceLogicAppActionCustomRead(d *schema.ResourceData, meta interface{}) error { id, err := azure.ParseAzureResourceID(d.Id()) if err != nil { return err @@ -108,7 +108,7 @@ func resourceArmLogicAppActionCustomRead(d *schema.ResourceData, meta interface{ return nil } -func resourceArmLogicAppActionCustomDelete(d *schema.ResourceData, meta interface{}) error { +func resourceLogicAppActionCustomDelete(d *schema.ResourceData, meta interface{}) error { id, err := azure.ParseAzureResourceID(d.Id()) if err != nil { return err diff --git a/azurerm/internal/services/logic/logic_app_action_custom_resource_test.go b/azurerm/internal/services/logic/logic_app_action_custom_resource_test.go new file mode 100644 index 000000000000..49a407d64465 --- /dev/null +++ b/azurerm/internal/services/logic/logic_app_action_custom_resource_test.go @@ -0,0 +1,113 @@ +package logic_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" +) + +type LogicAppActionCustomResource struct { +} + +func TestAccLogicAppActionCustom_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_logic_app_action_custom", "test") + r := LogicAppActionCustomResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccLogicAppActionCustom_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_logic_app_action_custom", "test") + r := LogicAppActionCustomResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_logic_app_action_custom"), + }, + }) +} + +func (LogicAppActionCustomResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + return actionExists(ctx, clients, state) +} + +func (r LogicAppActionCustomResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_logic_app_action_custom" "test" { + name = "action%d" + logic_app_id = azurerm_logic_app_workflow.test.id + + body = < 0 { - return fmt.Errorf("maintenance assignment (Dedicated Host ID: %q) still exists", id.DedicatedHostIdRaw) - } - - return nil - } - - return nil -} - -func testCheckAzureRMMaintenanceAssignmentDedicatedHostExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Maintenance.ConfigurationAssignmentsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we 
have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("not found: %s", resourceName) - } - - id, err := parse.MaintenanceAssignmentDedicatedHostID(rs.Primary.ID) - if err != nil { - return err - } - - listResp, err := conn.ListParent(ctx, id.DedicatedHostId.ResourceGroup, "Microsoft.Compute", "hostGroups", id.DedicatedHostId.HostGroup, "hosts", id.DedicatedHostId.Name) - if err != nil { - return fmt.Errorf("bad: list on ConfigurationAssignmentsClient: %+v", err) - } - if listResp.Value == nil || len(*listResp.Value) == 0 { - return fmt.Errorf("could not find Maintenance Assignment (target resource id: %q)", id.DedicatedHostIdRaw) - } - - return nil - } -} - -func testAccAzureRMMaintenanceAssignmentDedicatedHost_basic(data acceptance.TestData) string { - template := testAccAzureRMMaintenanceAssignmentDedicatedHost_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_maintenance_assignment_dedicated_host" "test" { - location = azurerm_resource_group.test.location - maintenance_configuration_id = azurerm_maintenance_configuration.test.id - dedicated_host_id = azurerm_dedicated_host.test.id -} -`, template) -} - -func testAccAzureRMMaintenanceAssignmentDedicatedHost_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMMaintenanceAssignmentDedicatedHost_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_maintenance_assignment_dedicated_host" "import" { - location = azurerm_maintenance_assignment_dedicated_host.test.location - maintenance_configuration_id = azurerm_maintenance_assignment_dedicated_host.test.maintenance_configuration_id - dedicated_host_id = azurerm_maintenance_assignment_dedicated_host.test.dedicated_host_id -} -`, template) -} - -func testAccAzureRMMaintenanceAssignmentDedicatedHost_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-maint-%[1]d" - location = "%[2]s" -} - -resource "azurerm_maintenance_configuration" "test" { - name = "acctest-MC%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - scope = "All" -} - -resource "azurerm_dedicated_host_group" "test" { - name = "acctest-DHG-%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - platform_fault_domain_count = 2 -} - -resource "azurerm_dedicated_host" "test" { - name = "acctest-DH-%[1]d" - location = azurerm_resource_group.test.location - dedicated_host_group_id = azurerm_dedicated_host_group.test.id - sku_name = "DSv3-Type1" - platform_fault_domain = 1 -} -`, data.RandomInteger, data.Locations.Primary) -} diff --git a/azurerm/internal/services/maintenance/tests/maintenance_assignment_virtual_machine_resource_test.go b/azurerm/internal/services/maintenance/tests/maintenance_assignment_virtual_machine_resource_test.go deleted file mode 100644 index f059f6db73f4..000000000000 --- a/azurerm/internal/services/maintenance/tests/maintenance_assignment_virtual_machine_resource_test.go +++ /dev/null @@ -1,210 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/maintenance/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMMaintenanceAssignmentVirtualMachine_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_maintenance_assignment_virtual_machine", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMaintenanceAssignmentVirtualMachineDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMaintenanceAssignmentVirtualMachine_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMaintenanceAssignmentVirtualMachineExists(data.ResourceName), - ), - }, - // location not returned by list rest api - data.ImportStep("location"), - }, - }) -} - -func TestAccAzureRMMaintenanceAssignmentVirtualMachine_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_maintenance_assignment_virtual_machine", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMaintenanceAssignmentVirtualMachineDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMaintenanceAssignmentVirtualMachine_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMaintenanceAssignmentVirtualMachineExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMMaintenanceAssignmentVirtualMachine_requiresImport), - }, - }) -} - -func testCheckAzureRMMaintenanceAssignmentVirtualMachineDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Maintenance.ConfigurationAssignmentsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_maintenance_assignment_virtual_machine" { - continue - } - - id, err := parse.MaintenanceAssignmentVirtualMachineID(rs.Primary.ID) - if err != nil { - return err - } - - listResp, err := conn.List(ctx, id.VirtualMachineId.ResourceGroup, "Microsoft.Compute", "virtualMachines", id.VirtualMachineId.Name) - if err != nil { - if !utils.ResponseWasNotFound(listResp.Response) { - return err - } - return nil - } - if listResp.Value != nil && len(*listResp.Value) > 0 { - return fmt.Errorf("maintenance assignment (Virtual Machine id: %q) still exists", id.VirtualMachineIdRaw) - } - - return nil - } - - return nil -} - -func testCheckAzureRMMaintenanceAssignmentVirtualMachineExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Maintenance.ConfigurationAssignmentsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("not found: %s", resourceName) - } - - id, err := parse.MaintenanceAssignmentVirtualMachineID(rs.Primary.ID) - if err != nil { - return err - } - - listResp, err := conn.List(ctx, id.VirtualMachineId.ResourceGroup, "Microsoft.Compute", "virtualMachines", id.VirtualMachineId.Name) - if err != nil { - return fmt.Errorf("bad: list on ConfigurationAssignmentsClient: %+v", err) - } - if listResp.Value == nil || len(*listResp.Value) == 0 { - return fmt.Errorf("could not 
find Maintenance Assignment (Virtual Machine id: %q)", id.VirtualMachineIdRaw) - } - - return nil - } -} - -func testAccAzureRMMaintenanceAssignmentVirtualMachine_basic(data acceptance.TestData) string { - template := testAccAzureRMMaintenanceAssignmentVirtualMachine_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_maintenance_assignment_virtual_machine" "test" { - location = azurerm_resource_group.test.location - maintenance_configuration_id = azurerm_maintenance_configuration.test.id - virtual_machine_id = azurerm_linux_virtual_machine.test.id -} -`, template) -} - -func testAccAzureRMMaintenanceAssignmentVirtualMachine_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMMaintenanceAssignmentVirtualMachine_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_maintenance_assignment_virtual_machine" "import" { - location = azurerm_maintenance_assignment_virtual_machine.test.location - maintenance_configuration_id = azurerm_maintenance_assignment_virtual_machine.test.maintenance_configuration_id - virtual_machine_id = azurerm_maintenance_assignment_virtual_machine.test.virtual_machine_id -} -`, template) -} - -func testAccAzureRMMaintenanceAssignmentVirtualMachine_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-maint-%[1]d" - location = "%[2]s" -} - -resource "azurerm_maintenance_configuration" "test" { - name = "acctest-MC%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - scope = "All" -} - -resource "azurerm_virtual_network" "test" { - name = "acctestnw-%[1]d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "internal" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.2.0/24" -} - -resource "azurerm_network_interface" "test" { - name = "acctni-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - ip_configuration { - name = "testconfiguration1" - subnet_id = azurerm_subnet.test.id - private_ip_address_allocation = "Dynamic" - } -} - -resource "azurerm_linux_virtual_machine" "test" { - name = "acctestVM-%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - size = "Standard_D15_v2" - admin_username = "adminuser" - admin_password = "P@$$w0rd1234!" 
- - disable_password_authentication = false - - network_interface_ids = [ - azurerm_network_interface.test.id, - ] - - os_disk { - caching = "ReadWrite" - storage_account_type = "Standard_LRS" - } - - source_image_reference { - publisher = "Canonical" - offer = "UbuntuServer" - sku = "16.04-LTS" - version = "latest" - } -} -`, data.RandomInteger, data.Locations.Primary) -} diff --git a/azurerm/internal/services/maintenance/tests/maintenance_configuration_data_source_test.go b/azurerm/internal/services/maintenance/tests/maintenance_configuration_data_source_test.go deleted file mode 100644 index 268f228a7d2b..000000000000 --- a/azurerm/internal/services/maintenance/tests/maintenance_configuration_data_source_test.go +++ /dev/null @@ -1,41 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMMaintenanceConfiguration_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_maintenance_configuration", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMaintenanceConfigurationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceMaintenanceConfiguration_complete(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "scope", "Host"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.env", "TesT"), - ), - }, - }, - }) -} - -func testAccDataSourceMaintenanceConfiguration_complete(data acceptance.TestData) string { - template := testAccAzureRMMaintenanceConfiguration_complete(data) - return fmt.Sprintf(` -%s - -data "azurerm_maintenance_configuration" "test" { - name = azurerm_maintenance_configuration.test.name - resource_group_name = azurerm_maintenance_configuration.test.resource_group_name -} -`, template) -} diff --git a/azurerm/internal/services/maintenance/tests/maintenance_configuration_resource_test.go b/azurerm/internal/services/maintenance/tests/maintenance_configuration_resource_test.go deleted file mode 100644 index 130d7489c7c5..000000000000 --- a/azurerm/internal/services/maintenance/tests/maintenance_configuration_resource_test.go +++ /dev/null @@ -1,229 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/maintenance/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMMaintenanceConfiguration_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_maintenance_configuration", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMaintenanceConfigurationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMaintenanceConfiguration_basic(data), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMMaintenanceConfigurationExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "scope", "All"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMaintenanceConfiguration_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_maintenance_configuration", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMaintenanceConfigurationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMaintenanceConfiguration_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMaintenanceConfigurationExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMMaintenanceConfiguration_requiresImport), - }, - }) -} - -func TestAccAzureRMMaintenanceConfiguration_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_maintenance_configuration", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMaintenanceConfigurationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMaintenanceConfiguration_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMaintenanceConfigurationExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "scope", "Host"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.env", "TesT"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMaintenanceConfiguration_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_maintenance_configuration", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMaintenanceConfigurationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMaintenanceConfiguration_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMaintenanceConfigurationExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "scope", "All"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMMaintenanceConfiguration_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMaintenanceConfigurationExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "scope", "Host"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.env", "TesT"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMMaintenanceConfiguration_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMaintenanceConfigurationExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "scope", "All"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMMaintenanceConfigurationDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Maintenance.ConfigurationsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_maintenance_configuration" { - continue - } 
- - id, err := parse.MaintenanceConfigurationID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.Name) - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return err - } - } - - return nil - } - - return nil -} - -func testCheckAzureRMMaintenanceConfigurationExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Maintenance.ConfigurationsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.MaintenanceConfigurationID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := conn.Get(ctx, id.ResourceGroup, id.Name) - if err != nil { - return fmt.Errorf("Bad: Get on maintenanceConfigurationsClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Maintenance Configuration %q (resource group: %q) does not exist", id.Name, id.ResourceGroup) - } - - return nil - } -} - -func testAccAzureRMMaintenanceConfiguration_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-maint-%d" - location = "%s" -} - -resource "azurerm_maintenance_configuration" "test" { - name = "acctest-MC%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - scope = "All" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMMaintenanceConfiguration_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMMaintenanceConfiguration_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_maintenance_configuration" "import" { - name = azurerm_maintenance_configuration.test.name - resource_group_name = azurerm_maintenance_configuration.test.resource_group_name - location = azurerm_maintenance_configuration.test.location - scope = azurerm_maintenance_configuration.test.scope -} -`, template) -} - -func testAccAzureRMMaintenanceConfiguration_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-maint-%d" - location = "%s" -} - -resource "azurerm_maintenance_configuration" "test" { - name = "acctest-MC%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - scope = "Host" - - tags = { - env = "TesT" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/maintenance/validate/maintenance_configuration.go b/azurerm/internal/services/maintenance/validate/maintenance_configuration_id.go similarity index 100% rename from azurerm/internal/services/maintenance/validate/maintenance_configuration.go rename to azurerm/internal/services/maintenance/validate/maintenance_configuration_id.go diff --git a/azurerm/internal/services/maintenance/validate/maintenance.go b/azurerm/internal/services/maintenance/validate/tags.go similarity index 100% rename from azurerm/internal/services/maintenance/validate/maintenance.go rename to azurerm/internal/services/maintenance/validate/tags.go diff --git 
a/azurerm/internal/services/maintenance/validate/maintenance_test.go b/azurerm/internal/services/maintenance/validate/tags_test.go similarity index 100% rename from azurerm/internal/services/maintenance/validate/maintenance_test.go rename to azurerm/internal/services/maintenance/validate/tags_test.go diff --git a/azurerm/internal/services/managedapplications/managed_application_definition_data_source.go b/azurerm/internal/services/managedapplications/managed_application_definition_data_source.go new file mode 100644 index 000000000000..5ea1c5cd9efd --- /dev/null +++ b/azurerm/internal/services/managedapplications/managed_application_definition_data_source.go @@ -0,0 +1,64 @@ +package managedapplications + +import ( + "fmt" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/managedapplications/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func dataSourceManagedApplicationDefinition() *schema.Resource { + return &schema.Resource{ + Read: dataSourceManagedApplicationDefinitionRead, + + Timeouts: &schema.ResourceTimeout{ + Read: schema.DefaultTimeout(5 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.ApplicationDefinitionName, + }, + + "resource_group_name": azure.SchemaResourceGroupNameForDataSource(), + + "location": azure.SchemaLocationForDataSource(), + }, + } +} + +func dataSourceManagedApplicationDefinitionRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).ManagedApplication.ApplicationDefinitionClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + + resp, err := client.Get(ctx, resourceGroup, name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Managed Application Definition (Managed Application Definition Name %q / Resource Group %q) was not found", name, resourceGroup) + } + return fmt.Errorf("failed to read Managed Application Definition (Managed Application Definition Name %q / Resource Group %q): %+v", name, resourceGroup, err) + } + + d.Set("name", name) + d.Set("resource_group_name", resourceGroup) + d.Set("location", location.NormalizeNilable(resp.Location)) + + if resp.ID == nil || *resp.ID == "" { + return fmt.Errorf("API returns a nil/empty id on Managed Application Definition %q (Resource Group %q): %+v", name, resourceGroup, err) + } + d.SetId(*resp.ID) + + return nil +} diff --git a/azurerm/internal/services/managedapplications/managed_application_definition_data_source_test.go b/azurerm/internal/services/managedapplications/managed_application_definition_data_source_test.go new file mode 100644 index 000000000000..3dfbec28f3ee --- /dev/null +++ b/azurerm/internal/services/managedapplications/managed_application_definition_data_source_test.go @@ -0,0 +1,38 @@ +package managedapplications_test + +import ( + "fmt" + "testing" + + 
"github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type ManagedApplicationDefinitionDataSource struct { +} + +func TestAccManagedApplicationDefinitionDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_managed_application_definition", "test") + r := ManagedApplicationDefinitionDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("name").Exists(), + ), + }, + }) +} + +func (ManagedApplicationDefinitionDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_managed_application_definition" "test" { + name = azurerm_managed_application_definition.test.name + resource_group_name = azurerm_managed_application_definition.test.resource_group_name +} +`, ManagedApplicationDefinitionResource{}.basic(data)) +} diff --git a/azurerm/internal/services/managedapplications/managed_application_definition_resource.go b/azurerm/internal/services/managedapplications/managed_application_definition_resource.go index 2d298a97a97d..5922670020c7 100644 --- a/azurerm/internal/services/managedapplications/managed_application_definition_resource.go +++ b/azurerm/internal/services/managedapplications/managed_application_definition_resource.go @@ -29,7 +29,7 @@ func resourceManagedApplicationDefinition() *schema.Resource { Delete: resourceManagedApplicationDefinitionDelete, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.ManagedApplicationDefinitionID(id) + _, err := parse.ApplicationDefinitionID(id) return err }), @@ -45,7 +45,7 @@ func resourceManagedApplicationDefinition() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.ManagedApplicationDefinitionName, + ValidateFunc: validate.ApplicationDefinitionName, }, "resource_group_name": azure.SchemaResourceGroupName(), @@ -55,7 +55,7 @@ func resourceManagedApplicationDefinition() *schema.Resource { "display_name": { Type: schema.TypeString, Required: true, - ValidateFunc: validate.ManagedApplicationDefinitionDisplayName, + ValidateFunc: validate.ApplicationDefinitionDisplayName, }, "lock_level": { @@ -100,7 +100,7 @@ func resourceManagedApplicationDefinition() *schema.Resource { "description": { Type: schema.TypeString, Optional: true, - ValidateFunc: validate.ManagedApplicationDefinitionDescription, + ValidateFunc: validate.ApplicationDefinitionDescription, }, "main_template": { @@ -201,7 +201,7 @@ func resourceManagedApplicationDefinitionRead(d *schema.ResourceData, meta inter ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.ManagedApplicationDefinitionID(d.Id()) + id, err := parse.ApplicationDefinitionID(d.Id()) if err != nil { return err } @@ -249,7 +249,7 @@ func resourceManagedApplicationDefinitionDelete(d *schema.ResourceData, meta int ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.ManagedApplicationDefinitionID(d.Id()) + id, err := parse.ApplicationDefinitionID(d.Id()) if err != nil { return err } diff --git a/azurerm/internal/services/managedapplications/managed_application_definition_resource_test.go 
b/azurerm/internal/services/managedapplications/managed_application_definition_resource_test.go new file mode 100644 index 000000000000..69c130718fed --- /dev/null +++ b/azurerm/internal/services/managedapplications/managed_application_definition_resource_test.go @@ -0,0 +1,288 @@ +package managedapplications_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/managedapplications/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ManagedApplicationDefinitionResource struct { +} + +func TestAccManagedApplicationDefinition_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_managed_application_definition", "test") + r := ManagedApplicationDefinitionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("package_file_uri"), + }) +} + +func TestAccManagedApplicationDefinition_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_managed_application_definition", "test") + r := ManagedApplicationDefinitionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccManagedApplicationDefinition_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_managed_application_definition", "test") + r := ManagedApplicationDefinitionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("create_ui_definition", "main_template"), + }) +} + +func TestAccManagedApplicationDefinition_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_managed_application_definition", "test") + r := ManagedApplicationDefinitionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("display_name").HasValue("TestManagedApplicationDefinition"), + check.That(data.ResourceName).Key("description").HasValue("Test Managed Application Definition"), + check.That(data.ResourceName).Key("package_enabled").HasValue("false"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + check.That(data.ResourceName).Key("package_file_uri").Exists(), + ), + }, + data.ImportStep("package_file_uri"), + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("display_name").HasValue("UpdatedTestManagedApplicationDefinition"), + check.That(data.ResourceName).Key("description").HasValue("Updated Test Managed Application Definition"), + check.That(data.ResourceName).Key("package_enabled").HasValue("true"), + 
check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.ENV").HasValue("Test"), + check.That(data.ResourceName).Key("create_ui_definition").Exists(), + check.That(data.ResourceName).Key("main_template").Exists(), + ), + }, + data.ImportStep("create_ui_definition", "main_template", "package_file_uri"), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("display_name").HasValue("TestManagedApplicationDefinition"), + check.That(data.ResourceName).Key("description").HasValue("Test Managed Application Definition"), + check.That(data.ResourceName).Key("package_enabled").HasValue("false"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + check.That(data.ResourceName).Key("package_file_uri").Exists(), + ), + }, + data.ImportStep("create_ui_definition", "main_template", "package_file_uri"), + }) +} + +func (ManagedApplicationDefinitionResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ApplicationDefinitionID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.ManagedApplication.ApplicationDefinitionClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Managed Definition %s (resource group: %s): %v", id.Name, id.ResourceGroup, err) + } + + return utils.Bool(resp.ApplicationDefinitionProperties != nil), nil +} + +func (r ManagedApplicationDefinitionResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_managed_application_definition" "test" { + name = "acctestAppDef%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + lock_level = "None" + package_file_uri = "https://github.com/Azure/azure-managedapp-samples/raw/master/Managed Application Sample Packages/201-managed-storage-account/managedstorage.zip" + display_name = "TestManagedApplicationDefinition" + description = "Test Managed Application Definition" + package_enabled = false +} +`, r.template(data), data.RandomInteger) +} + +func (r ManagedApplicationDefinitionResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_managed_application_definition" "import" { + name = azurerm_managed_application_definition.test.name + location = azurerm_managed_application_definition.test.location + resource_group_name = azurerm_managed_application_definition.test.resource_group_name + display_name = azurerm_managed_application_definition.test.display_name + lock_level = azurerm_managed_application_definition.test.lock_level +} +`, r.basic(data)) +} + +func (r ManagedApplicationDefinitionResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_managed_application_definition" "test" { + name = "acctestAppDef%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + lock_level = "ReadOnly" + display_name = "UpdatedTestManagedApplicationDefinition" + description = "Updated Test Managed Application Definition" + package_enabled = true + + create_ui_definition = < 200 { + errors = append(errors, fmt.Errorf("%q should not exceed 200 characters in length.", k)) + } + + return warnings, errors +} diff --git a/azurerm/internal/services/managedapplications/validate/application_definition_description_test.go 
b/azurerm/internal/services/managedapplications/validate/application_definition_description_test.go new file mode 100644 index 000000000000..77ce58f7c09e --- /dev/null +++ b/azurerm/internal/services/managedapplications/validate/application_definition_description_test.go @@ -0,0 +1,148 @@ +package validate + +import ( + "strings" + "testing" +) + +func TestManagedApplicationDefinitionName(t *testing.T) { + testData := []struct { + input string + expected bool + }{ + { + input: "", + expected: false, + }, + { + input: "h", + expected: false, + }, + { + input: "he", + expected: false, + }, + { + input: "hel", + expected: true, + }, + { + input: "hel2", + expected: true, + }, + { + input: "_hello", + expected: false, + }, + { + input: "hello-", + expected: false, + }, + { + input: "malcolm-in!the-middle", + expected: false, + }, + { + input: strings.Repeat("a", 63), + expected: true, + }, + { + input: strings.Repeat("a", 64), + expected: true, + }, + { + input: strings.Repeat("a", 65), + expected: false, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q..", v.input) + + _, errors := ApplicationDefinitionName(v.input, "name") + actual := len(errors) == 0 + if v.expected != actual { + t.Fatalf("Expected %t but got %t", v.expected, actual) + } + } +} + +func TestManagedApplicationDefinitionDisplayName(t *testing.T) { + testData := []struct { + input string + expected bool + }{ + { + input: "", + expected: false, + }, + { + input: "hel", + expected: false, + }, + { + input: "hell", + expected: true, + }, + { + input: "hello", + expected: true, + }, + { + input: strings.Repeat("a", 59), + expected: true, + }, + { + input: strings.Repeat("a", 60), + expected: true, + }, + { + input: strings.Repeat("a", 61), + expected: false, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q..", v.input) + + _, errors := ApplicationDefinitionDisplayName(v.input, "display_name") + actual := len(errors) == 0 + if v.expected != actual { + t.Fatalf("Expected %t but got %t", v.expected, actual) + } + } +} + +func TestManagedApplicationDefinitionDescription(t *testing.T) { + testData := []struct { + input string + expected bool + }{ + { + input: "", + expected: true, + }, + { + input: strings.Repeat("a", 199), + expected: true, + }, + { + input: strings.Repeat("a", 200), + expected: true, + }, + { + input: strings.Repeat("a", 201), + expected: false, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q..", v.input) + + _, errors := ApplicationDefinitionDescription(v.input, "description") + actual := len(errors) == 0 + if v.expected != actual { + t.Fatalf("Expected %t but got %t", v.expected, actual) + } + } +} diff --git a/azurerm/internal/services/managedapplications/validate/application_definition_display_name.go b/azurerm/internal/services/managedapplications/validate/application_definition_display_name.go new file mode 100644 index 000000000000..53631cf01c7d --- /dev/null +++ b/azurerm/internal/services/managedapplications/validate/application_definition_display_name.go @@ -0,0 +1,13 @@ +package validate + +import "fmt" + +func ApplicationDefinitionDisplayName(v interface{}, k string) (warnings []string, errors []error) { + value := v.(string) + + if len(value) < 4 || len(value) > 60 { + errors = append(errors, fmt.Errorf("%q must be between 4 and 60 characters in length.", k)) + } + + return warnings, errors +} diff --git a/azurerm/internal/services/managedapplications/validate/application_definition_id.go 
b/azurerm/internal/services/managedapplications/validate/application_definition_id.go new file mode 100644 index 000000000000..397e0200609a --- /dev/null +++ b/azurerm/internal/services/managedapplications/validate/application_definition_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/managedapplications/parse" +) + +func ApplicationDefinitionID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ApplicationDefinitionID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/managedapplications/validate/application_definition_id_test.go b/azurerm/internal/services/managedapplications/validate/application_definition_id_test.go new file mode 100644 index 000000000000..f5006ffbc389 --- /dev/null +++ b/azurerm/internal/services/managedapplications/validate/application_definition_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestApplicationDefinitionID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Solutions/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Solutions/applicationDefinitions/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Solutions/applicationDefinitions/definition1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.SOLUTIONS/APPLICATIONDEFINITIONS/DEFINITION1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ApplicationDefinitionID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/managedapplications/validate/application_definition_name.go b/azurerm/internal/services/managedapplications/validate/application_definition_name.go new file mode 100644 index 000000000000..49e823a795ea --- /dev/null +++ b/azurerm/internal/services/managedapplications/validate/application_definition_name.go @@ -0,0 +1,16 @@ +package validate + +import ( + "fmt" + "regexp" +) + +func ApplicationDefinitionName(v interface{}, k string) (warnings []string, errors []error) { + value := v.(string) + + if !regexp.MustCompile(`^[^\W_]{3,64}$`).MatchString(value) { + errors = 
append(errors, fmt.Errorf("%q must be between 3 and 64 characters in length and contains only letters or numbers.", k)) + } + + return warnings, errors +} diff --git a/azurerm/internal/services/managedapplications/validate/application_id.go b/azurerm/internal/services/managedapplications/validate/application_id.go new file mode 100644 index 000000000000..d4507a9f8aaf --- /dev/null +++ b/azurerm/internal/services/managedapplications/validate/application_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/managedapplications/parse" +) + +func ApplicationID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ApplicationID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/managedapplications/validate/application_id_test.go b/azurerm/internal/services/managedapplications/validate/application_id_test.go new file mode 100644 index 000000000000..d05f21866cc5 --- /dev/null +++ b/azurerm/internal/services/managedapplications/validate/application_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestApplicationID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Solutions/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Solutions/applications/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Solutions/applications/app1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.SOLUTIONS/APPLICATIONS/APP1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ApplicationID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/managedapplications/validate/application_name.go b/azurerm/internal/services/managedapplications/validate/application_name.go new file mode 100644 index 000000000000..568d49613062 --- /dev/null +++ b/azurerm/internal/services/managedapplications/validate/application_name.go @@ -0,0 +1,16 @@ +package validate + +import ( + "fmt" + "regexp" +) + +func ApplicationName(v interface{}, k string) (warnings []string, errors []error) { + value := v.(string) + + if 
!regexp.MustCompile(`^[-\da-zA-Z]{3,64}$`).MatchString(value) { + errors = append(errors, fmt.Errorf("%q must be between 3 and 64 characters in length and contains only letters, numbers or hyphens.", k)) + } + + return warnings, errors +} diff --git a/azurerm/internal/services/managedapplications/validate/application_name_test.go b/azurerm/internal/services/managedapplications/validate/application_name_test.go new file mode 100644 index 000000000000..996a906059da --- /dev/null +++ b/azurerm/internal/services/managedapplications/validate/application_name_test.go @@ -0,0 +1,68 @@ +package validate + +import ( + "strings" + "testing" +) + +func TestApplicationName(t *testing.T) { + testData := []struct { + input string + expected bool + }{ + { + input: "", + expected: false, + }, + { + input: "h", + expected: false, + }, + { + input: "he", + expected: false, + }, + { + input: "hel", + expected: true, + }, + { + input: "hel2", + expected: true, + }, + { + input: "_hello", + expected: false, + }, + { + input: "hello-", + expected: true, + }, + { + input: "malcolm-in!the-middle", + expected: false, + }, + { + input: strings.Repeat("a", 63), + expected: true, + }, + { + input: strings.Repeat("a", 64), + expected: true, + }, + { + input: strings.Repeat("a", 65), + expected: false, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q..", v.input) + + _, errors := ApplicationName(v.input, "name") + actual := len(errors) == 0 + if v.expected != actual { + t.Fatalf("Expected %t but got %t", v.expected, actual) + } + } +} diff --git a/azurerm/internal/services/managedapplications/validate/managed_application.go b/azurerm/internal/services/managedapplications/validate/managed_application.go deleted file mode 100644 index 1609ba5ab76d..000000000000 --- a/azurerm/internal/services/managedapplications/validate/managed_application.go +++ /dev/null @@ -1,33 +0,0 @@ -package validate - -import ( - "fmt" - "regexp" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/managedapplications/parse" -) - -func ManagedApplicationID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.ManagedApplicationID(v); err != nil { - errors = append(errors, fmt.Errorf("cannot parse %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} - -func ManagedApplicationName(v interface{}, k string) (warnings []string, errors []error) { - value := v.(string) - - if !regexp.MustCompile(`^[-\da-zA-Z]{3,64}$`).MatchString(value) { - errors = append(errors, fmt.Errorf("%q must be between 3 and 64 characters in length and contains only letters, numbers or hyphens.", k)) - } - - return warnings, errors -} diff --git a/azurerm/internal/services/managedapplications/validate/managed_application_definition.go b/azurerm/internal/services/managedapplications/validate/managed_application_definition.go deleted file mode 100644 index 57b698fea7ef..000000000000 --- a/azurerm/internal/services/managedapplications/validate/managed_application_definition.go +++ /dev/null @@ -1,53 +0,0 @@ -package validate - -import ( - "fmt" - "regexp" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/managedapplications/parse" -) - -func ManagedApplicationDefinitionID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, 
fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.ManagedApplicationDefinitionID(v); err != nil { - errors = append(errors, fmt.Errorf("cannot parse %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} - -func ManagedApplicationDefinitionName(v interface{}, k string) (warnings []string, errors []error) { - value := v.(string) - - if !regexp.MustCompile(`^[^\W_]{3,64}$`).MatchString(value) { - errors = append(errors, fmt.Errorf("%q must be between 3 and 64 characters in length and contains only letters or numbers.", k)) - } - - return warnings, errors -} - -func ManagedApplicationDefinitionDisplayName(v interface{}, k string) (warnings []string, errors []error) { - value := v.(string) - - if len(value) < 4 || len(value) > 60 { - errors = append(errors, fmt.Errorf("%q must be between 4 and 60 characters in length.", k)) - } - - return warnings, errors -} - -func ManagedApplicationDefinitionDescription(v interface{}, k string) (warnings []string, errors []error) { - value := v.(string) - - if len(value) > 200 { - errors = append(errors, fmt.Errorf("%q should not exceed 200 characters in length.", k)) - } - - return warnings, errors -} diff --git a/azurerm/internal/services/managedapplications/validate/managed_application_definition_test.go b/azurerm/internal/services/managedapplications/validate/managed_application_definition_test.go deleted file mode 100644 index 2950b57c45d9..000000000000 --- a/azurerm/internal/services/managedapplications/validate/managed_application_definition_test.go +++ /dev/null @@ -1,148 +0,0 @@ -package validate - -import ( - "strings" - "testing" -) - -func TestManagedApplicationDefinitionName(t *testing.T) { - testData := []struct { - input string - expected bool - }{ - { - input: "", - expected: false, - }, - { - input: "h", - expected: false, - }, - { - input: "he", - expected: false, - }, - { - input: "hel", - expected: true, - }, - { - input: "hel2", - expected: true, - }, - { - input: "_hello", - expected: false, - }, - { - input: "hello-", - expected: false, - }, - { - input: "malcolm-in!the-middle", - expected: false, - }, - { - input: strings.Repeat("a", 63), - expected: true, - }, - { - input: strings.Repeat("a", 64), - expected: true, - }, - { - input: strings.Repeat("a", 65), - expected: false, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q..", v.input) - - _, errors := ManagedApplicationDefinitionName(v.input, "name") - actual := len(errors) == 0 - if v.expected != actual { - t.Fatalf("Expected %t but got %t", v.expected, actual) - } - } -} - -func TestManagedApplicationDefinitionDisplayName(t *testing.T) { - testData := []struct { - input string - expected bool - }{ - { - input: "", - expected: false, - }, - { - input: "hel", - expected: false, - }, - { - input: "hell", - expected: true, - }, - { - input: "hello", - expected: true, - }, - { - input: strings.Repeat("a", 59), - expected: true, - }, - { - input: strings.Repeat("a", 60), - expected: true, - }, - { - input: strings.Repeat("a", 61), - expected: false, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q..", v.input) - - _, errors := ManagedApplicationDefinitionDisplayName(v.input, "display_name") - actual := len(errors) == 0 - if v.expected != actual { - t.Fatalf("Expected %t but got %t", v.expected, actual) - } - } -} - -func TestManagedApplicationDefinitionDescription(t *testing.T) { - testData := []struct { - input string - expected bool - }{ - { - input: "", - expected: true, - }, - { 
- input: strings.Repeat("a", 199), - expected: true, - }, - { - input: strings.Repeat("a", 200), - expected: true, - }, - { - input: strings.Repeat("a", 201), - expected: false, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q..", v.input) - - _, errors := ManagedApplicationDefinitionDescription(v.input, "description") - actual := len(errors) == 0 - if v.expected != actual { - t.Fatalf("Expected %t but got %t", v.expected, actual) - } - } -} diff --git a/azurerm/internal/services/managedapplications/validate/managed_application_test.go b/azurerm/internal/services/managedapplications/validate/managed_application_test.go deleted file mode 100644 index 4e38db233201..000000000000 --- a/azurerm/internal/services/managedapplications/validate/managed_application_test.go +++ /dev/null @@ -1,68 +0,0 @@ -package validate - -import ( - "strings" - "testing" -) - -func TestManagedApplicationName(t *testing.T) { - testData := []struct { - input string - expected bool - }{ - { - input: "", - expected: false, - }, - { - input: "h", - expected: false, - }, - { - input: "he", - expected: false, - }, - { - input: "hel", - expected: true, - }, - { - input: "hel2", - expected: true, - }, - { - input: "_hello", - expected: false, - }, - { - input: "hello-", - expected: true, - }, - { - input: "malcolm-in!the-middle", - expected: false, - }, - { - input: strings.Repeat("a", 63), - expected: true, - }, - { - input: strings.Repeat("a", 64), - expected: true, - }, - { - input: strings.Repeat("a", 65), - expected: false, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q..", v.input) - - _, errors := ManagedApplicationName(v.input, "name") - actual := len(errors) == 0 - if v.expected != actual { - t.Fatalf("Expected %t but got %t", v.expected, actual) - } - } -} diff --git a/azurerm/internal/services/managementgroup/management_group_data_source.go b/azurerm/internal/services/managementgroup/management_group_data_source.go index 38633857f81e..2f7de802428e 100644 --- a/azurerm/internal/services/managementgroup/management_group_data_source.go +++ b/azurerm/internal/services/managementgroup/management_group_data_source.go @@ -13,9 +13,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmManagementGroup() *schema.Resource { +func dataSourceManagementGroup() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmManagementGroupRead, + Read: dataSourceManagementGroupRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -61,7 +61,7 @@ func dataSourceArmManagementGroup() *schema.Resource { } } -func dataSourceArmManagementGroupRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceManagementGroupRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ManagementGroups.GroupsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -105,7 +105,7 @@ func dataSourceArmManagementGroupRead(d *schema.ResourceData, meta interface{}) if props := resp.Properties; props != nil { d.Set("display_name", props.DisplayName) - subscriptionIds, err := flattenArmManagementGroupDataSourceSubscriptionIds(props.Children) + subscriptionIds, err := flattenManagementGroupDataSourceSubscriptionIds(props.Children) if err != nil { return fmt.Errorf("Error flattening `subscription_ids`: %+v", err) } @@ -156,7 +156,7 @@ func getManagementGroupNameByDisplayName(ctx context.Context, client *management return results[0], nil } 
-func flattenArmManagementGroupDataSourceSubscriptionIds(input *[]managementgroups.ChildInfo) (*schema.Set, error) { +func flattenManagementGroupDataSourceSubscriptionIds(input *[]managementgroups.ChildInfo) (*schema.Set, error) { subscriptionIds := &schema.Set{F: schema.HashString} if input == nil { return subscriptionIds, nil diff --git a/azurerm/internal/services/managementgroup/management_group_data_source_test.go b/azurerm/internal/services/managementgroup/management_group_data_source_test.go new file mode 100644 index 000000000000..52a57ad298c6 --- /dev/null +++ b/azurerm/internal/services/managementgroup/management_group_data_source_test.go @@ -0,0 +1,75 @@ +package managementgroup_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type ManagementGroupDataSource struct { +} + +func TestAccManagementGroupDataSource_basicByName(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_management_group", "test") + r := ManagementGroupDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basicByName(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("display_name").HasValue(fmt.Sprintf("acctestmg-%d", data.RandomInteger)), + check.That(data.ResourceName).Key("subscription_ids.#").HasValue("0"), + ), + }, + }) +} + +func TestAccManagementGroupDataSource_basicByDisplayName(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_management_group", "test") + r := ManagementGroupDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basicByDisplayName(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("display_name").HasValue(fmt.Sprintf("acctest Management Group %d", data.RandomInteger)), + check.That(data.ResourceName).Key("subscription_ids.#").HasValue("0"), + ), + }, + }) +} + +func (ManagementGroupDataSource) basicByName(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_management_group" "test" { + display_name = "acctestmg-%d" +} + +data "azurerm_management_group" "test" { + name = azurerm_management_group.test.name +} +`, data.RandomInteger) +} + +func (ManagementGroupDataSource) basicByDisplayName(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_management_group" "test" { + display_name = "acctest Management Group %d" +} + +data "azurerm_management_group" "test" { + display_name = azurerm_management_group.test.display_name +} +`, data.RandomInteger) +} diff --git a/azurerm/internal/services/managementgroup/management_group_resource.go b/azurerm/internal/services/managementgroup/management_group_resource.go index 8ef3aaa6dadd..5d8744e3939e 100644 --- a/azurerm/internal/services/managementgroup/management_group_resource.go +++ b/azurerm/internal/services/managementgroup/management_group_resource.go @@ -23,12 +23,12 @@ import ( var managementGroupCacheControl = "no-cache" -func resourceArmManagementGroup() *schema.Resource { +func resourceManagementGroup() *schema.Resource { return &schema.Resource{ - Create: resourceArmManagementGroupCreateUpdate, - Update: resourceArmManagementGroupCreateUpdate, - Read: resourceArmManagementGroupRead, - Delete: resourceArmManagementGroupDelete, + 
Create: resourceManagementGroupCreateUpdate, + Update: resourceManagementGroupCreateUpdate, + Read: resourceManagementGroupRead, + Delete: resourceManagementGroupDelete, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { _, err := parse.ManagementGroupID(id) @@ -85,7 +85,7 @@ func resourceArmManagementGroup() *schema.Resource { } } -func resourceArmManagementGroupCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceManagementGroupCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ManagementGroups.GroupsClient subscriptionsClient := meta.(*clients.Client).ManagementGroups.SubscriptionClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) @@ -207,10 +207,10 @@ func resourceArmManagementGroupCreateUpdate(d *schema.ResourceData, meta interfa } } - return resourceArmManagementGroupRead(d, meta) + return resourceManagementGroupRead(d, meta) } -func resourceArmManagementGroupRead(d *schema.ResourceData, meta interface{}) error { +func resourceManagementGroupRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ManagementGroups.GroupsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -220,8 +220,8 @@ func resourceArmManagementGroupRead(d *schema.ResourceData, meta interface{}) er return err } - recurse := true - resp, err := client.Get(ctx, id.Name, "children", &recurse, "", managementGroupCacheControl) + recurse := utils.Bool(true) + resp, err := client.Get(ctx, id.Name, "children", recurse, "", managementGroupCacheControl) if err != nil { if utils.ResponseWasForbidden(resp.Response) || utils.ResponseWasNotFound(resp.Response) { log.Printf("[INFO] Management Group %q doesn't exist - removing from state", d.Id()) @@ -238,7 +238,7 @@ func resourceArmManagementGroupRead(d *schema.ResourceData, meta interface{}) er if props := resp.Properties; props != nil { d.Set("display_name", props.DisplayName) - subscriptionIds, err := flattenArmManagementGroupSubscriptionIds(props.Children) + subscriptionIds, err := flattenManagementGroupSubscriptionIds(props.Children) if err != nil { return fmt.Errorf("unable to flatten `subscription_ids`: %+v", err) } @@ -258,7 +258,7 @@ func resourceArmManagementGroupRead(d *schema.ResourceData, meta interface{}) er return nil } -func resourceArmManagementGroupDelete(d *schema.ResourceData, meta interface{}) error { +func resourceManagementGroupDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).ManagementGroups.GroupsClient subscriptionsClient := meta.(*clients.Client).ManagementGroups.SubscriptionClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) @@ -329,7 +329,7 @@ func expandManagementGroupSubscriptionIds(input *schema.Set) []string { return output } -func flattenArmManagementGroupSubscriptionIds(input *[]managementgroups.ChildInfo) (*schema.Set, error) { +func flattenManagementGroupSubscriptionIds(input *[]managementgroups.ChildInfo) (*schema.Set, error) { subscriptionIds := &schema.Set{F: schema.HashString} if input == nil { return subscriptionIds, nil diff --git a/azurerm/internal/services/managementgroup/management_group_resource_test.go b/azurerm/internal/services/managementgroup/management_group_resource_test.go new file mode 100644 index 000000000000..3d1dc078be9f --- /dev/null +++ b/azurerm/internal/services/managementgroup/management_group_resource_test.go @@ -0,0 +1,285 @@ +package 
managementgroup_test + +import ( + "context" + "fmt" + "os" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/managementgroup/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ManagementGroupResource struct { +} + +func TestAcc_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_management_group", "test") + r := ManagementGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccManagementGroup_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_management_group", "test") + r := ManagementGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(), + ExpectError: acceptance.RequiresImportError("azurerm_management_group"), + }, + }) +} + +func TestAccManagementGroup_nested(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_management_group", "parent") + r := ManagementGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.nested(), + Check: resource.ComposeTestCheckFunc( + check.That("azurerm_management_group.parent").ExistsInAzure(r), + check.That("azurerm_management_group.child").ExistsInAzure(r), + ), + }, + data.ImportStep(), + data.ImportStepFor("azurerm_management_group.child"), + }) +} + +func TestAccManagementGroup_multiLevel(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_management_group", "parent") + r := ManagementGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.multiLevel(), + Check: resource.ComposeTestCheckFunc( + check.That("azurerm_management_group.grandparent").ExistsInAzure(r), + check.That("azurerm_management_group.parent").ExistsInAzure(r), + check.That("azurerm_management_group.child").ExistsInAzure(r), + ), + }, + data.ImportStepFor("azurerm_management_group.grandparent"), + data.ImportStep(), + data.ImportStepFor("azurerm_management_group.child"), + }) +} + +func TestAccManagementGroup_multiLevelUpdated(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_management_group", "parent") + r := ManagementGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.nested(), + Check: resource.ComposeTestCheckFunc( + check.That("azurerm_management_group.parent").ExistsInAzure(r), + check.That("azurerm_management_group.child").ExistsInAzure(r), + ), + }, + data.ImportStep(), + data.ImportStepFor("azurerm_management_group.child"), + { + Config: r.multiLevel(), + Check: resource.ComposeTestCheckFunc( + check.That("azurerm_management_group.grandparent").ExistsInAzure(r), + check.That("azurerm_management_group.parent").ExistsInAzure(r), + check.That("azurerm_management_group.child").ExistsInAzure(r), + ), + }, + data.ImportStepFor("azurerm_management_group.grandparent"), + data.ImportStep(), + 
data.ImportStepFor("azurerm_management_group.child"), + }) +} + +func TestAccManagementGroup_withName(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_management_group", "test") + r := ManagementGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withName(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccManagementGroup_updateName(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_management_group", "test") + r := ManagementGroupResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.withName(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").HasValue(fmt.Sprintf("acctestmg-%d", data.RandomInteger)), + ), + }, + data.ImportStep(), + }) +} + +func TestAccManagementGroup_withSubscriptions(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_management_group", "test") + r := ManagementGroupResource{} + subscriptionID := os.Getenv("ARM_SUBSCRIPTION_ID") + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("subscription_ids.#").HasValue("0"), + ), + }, + { + Config: r.withSubscriptions(subscriptionID), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("subscription_ids.#").HasValue("1"), + ), + }, + { + Config: r.basic(), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("subscription_ids.#").HasValue("0"), + ), + }, + }) +} + +func (ManagementGroupResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ManagementGroupID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.ManagementGroups.GroupsClient.Get(ctx, id.Name, "children", utils.Bool(true), "", "no-cache") + if err != nil { + return nil, fmt.Errorf("retrieving Management Group %s: %v", id.Name, err) + } + + return utils.Bool(resp.Properties != nil), nil +} + +func (r ManagementGroupResource) basic() string { + return ` +provider "azurerm" { + features {} +} + +resource "azurerm_management_group" "test" { +} +` +} + +func (r ManagementGroupResource) requiresImport() string { + return ` +provider "azurerm" { + features {} +} + +resource "azurerm_management_group" "test" { +} + +resource "azurerm_management_group" "import" { + name = azurerm_management_group.test.name +} +` +} + +func (r ManagementGroupResource) nested() string { + return ` +provider "azurerm" { + features {} +} + +resource "azurerm_management_group" "parent" { +} + +resource "azurerm_management_group" "child" { + parent_management_group_id = azurerm_management_group.parent.id +} +` +} + +func (r ManagementGroupResource) multiLevel() string { + return ` +provider "azurerm" { + features {} +} + +resource "azurerm_management_group" "grandparent" { +} + +resource "azurerm_management_group" "parent" { + parent_management_group_id = azurerm_management_group.grandparent.id +} + +resource "azurerm_management_group" "child" { + parent_management_group_id = azurerm_management_group.parent.id +} +` 
+} + +func (ManagementGroupResource) withName(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_management_group" "test" { + name = "acctestmg-%d" + display_name = "accTestMG-%d" +} +`, data.RandomInteger, data.RandomInteger) +} + +// TODO: switch this out for dynamically creating a subscription once that's supported in the future +func (r ManagementGroupResource) withSubscriptions(subscriptionID string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_management_group" "test" { + subscription_ids = [ + "%s", + ] +} +`, subscriptionID) +} diff --git a/azurerm/internal/services/managementgroup/registration.go b/azurerm/internal/services/managementgroup/registration.go index 05adf764bd91..a9ae21ffdf4e 100644 --- a/azurerm/internal/services/managementgroup/registration.go +++ b/azurerm/internal/services/managementgroup/registration.go @@ -21,13 +21,13 @@ func (r Registration) WebsiteCategories() []string { // SupportedDataSources returns the supported Data Sources supported by this Service func (r Registration) SupportedDataSources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_management_group": dataSourceArmManagementGroup(), + "azurerm_management_group": dataSourceManagementGroup(), } } // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_management_group": resourceArmManagementGroup(), + "azurerm_management_group": resourceManagementGroup(), } } diff --git a/azurerm/internal/services/managementgroup/tests/management_group_data_source_test.go b/azurerm/internal/services/managementgroup/tests/management_group_data_source_test.go deleted file mode 100644 index 809014b8a49a..000000000000 --- a/azurerm/internal/services/managementgroup/tests/management_group_data_source_test.go +++ /dev/null @@ -1,77 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceArmManagementGroup_basicByName(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_management_group", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceArmManagementGroup_basicByName(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "display_name", fmt.Sprintf("acctestmg-%d", data.RandomInteger)), - resource.TestCheckResourceAttr(data.ResourceName, "subscription_ids.#", "0"), - ), - }, - }, - }) -} - -func TestAccDataSourceArmManagementGroup_basicByDisplayName(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_management_group", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceArmManagementGroup_basicByDisplayName(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "display_name", fmt.Sprintf("acctest Management Group %d", data.RandomInteger)), - resource.TestCheckResourceAttr(data.ResourceName, "subscription_ids.#", "0"), - ), - }, - }, - }) 
-} - -func testAccDataSourceArmManagementGroup_basicByName(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_management_group" "test" { - display_name = "acctestmg-%d" -} - -data "azurerm_management_group" "test" { - name = azurerm_management_group.test.name -} -`, data.RandomInteger) -} - -func testAccDataSourceArmManagementGroup_basicByDisplayName(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_management_group" "test" { - display_name = "acctest Management Group %d" -} - -data "azurerm_management_group" "test" { - display_name = azurerm_management_group.test.display_name -} -`, data.RandomInteger) -} diff --git a/azurerm/internal/services/managementgroup/tests/management_group_resource_test.go b/azurerm/internal/services/managementgroup/tests/management_group_resource_test.go deleted file mode 100644 index 25ab059fcb1c..000000000000 --- a/azurerm/internal/services/managementgroup/tests/management_group_resource_test.go +++ /dev/null @@ -1,342 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "os" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMManagementGroup_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_management_group", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMManagementGroupDestroy, - Steps: []resource.TestStep{ - { - Config: testAzureRMManagementGroup_basic(), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMManagementGroupExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMManagementGroup_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_management_group", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMManagementGroupDestroy, - Steps: []resource.TestStep{ - { - Config: testAzureRMManagementGroup_basic(), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMManagementGroupExists(data.ResourceName), - ), - }, - { - Config: testAzureRMManagementGroup_requiresImport(), - ExpectError: acceptance.RequiresImportError("azurerm_management_group"), - }, - }, - }) -} - -func TestAccAzureRMManagementGroup_nested(t *testing.T) { - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMManagementGroupDestroy, - Steps: []resource.TestStep{ - { - Config: testAzureRMManagementGroup_nested(), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMManagementGroupExists("azurerm_management_group.parent"), - testCheckAzureRMManagementGroupExists("azurerm_management_group.child"), - ), - }, - { - ResourceName: "azurerm_management_group.child", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func TestAccAzureRMManagementGroup_multiLevel(t *testing.T) { - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: 
testCheckAzureRMManagementGroupDestroy, - Steps: []resource.TestStep{ - { - Config: testAzureRMManagementGroup_multiLevel(), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMManagementGroupExists("azurerm_management_group.grandparent"), - testCheckAzureRMManagementGroupExists("azurerm_management_group.parent"), - testCheckAzureRMManagementGroupExists("azurerm_management_group.child"), - ), - }, - { - ResourceName: "azurerm_management_group.child", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func TestAccAzureRMManagementGroup_multiLevelUpdated(t *testing.T) { - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMManagementGroupDestroy, - Steps: []resource.TestStep{ - { - Config: testAzureRMManagementGroup_nested(), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMManagementGroupExists("azurerm_management_group.parent"), - testCheckAzureRMManagementGroupExists("azurerm_management_group.child"), - ), - }, - { - Config: testAzureRMManagementGroup_multiLevel(), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMManagementGroupExists("azurerm_management_group.grandparent"), - testCheckAzureRMManagementGroupExists("azurerm_management_group.parent"), - testCheckAzureRMManagementGroupExists("azurerm_management_group.child"), - ), - }, - }, - }) -} - -func TestAccAzureRMManagementGroup_withName(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_management_group", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMManagementGroupDestroy, - Steps: []resource.TestStep{ - { - Config: testAzureRMManagementGroup_withName(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMManagementGroupExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMManagementGroup_updateName(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_management_group", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMManagementGroupDestroy, - Steps: []resource.TestStep{ - { - Config: testAzureRMManagementGroup_basic(), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMManagementGroupExists(data.ResourceName), - ), - }, - { - Config: testAzureRMManagementGroup_withName(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMManagementGroupExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", fmt.Sprintf("acctestmg-%d", data.RandomInteger)), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMManagementGroup_withSubscriptions(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_management_group", "test") - subscriptionID := os.Getenv("ARM_SUBSCRIPTION_ID") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMManagementGroupDestroy, - Steps: []resource.TestStep{ - { - Config: testAzureRMManagementGroup_basic(), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMManagementGroupExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "subscription_ids.#", "0"), - ), - }, - { - Config: testAzureRMManagementGroup_withSubscriptions(subscriptionID), - Check: 
resource.ComposeTestCheckFunc( - testCheckAzureRMManagementGroupExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "subscription_ids.#", "1"), - ), - }, - { - Config: testAzureRMManagementGroup_basic(), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMManagementGroupExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "subscription_ids.#", "0"), - ), - }, - }, - }) -} - -func testCheckAzureRMManagementGroupExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ManagementGroups.GroupsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("not found: %s", resourceName) - } - - groupName := rs.Primary.Attributes["group_id"] - - recurse := false - resp, err := client.Get(ctx, groupName, "", &recurse, "", "no-cache") - if err != nil { - return fmt.Errorf("Bad: Get on managementGroupsClient: %s", err) - } - - if resp.StatusCode == http.StatusForbidden { - return fmt.Errorf("Management Group does not exist or you do not have proper permissions: %s", groupName) - } - - return nil - } -} - -func testCheckAzureRMManagementGroupDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).ManagementGroups.GroupsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_management_group" { - continue - } - - name := rs.Primary.Attributes["group_id"] - recurse := false - resp, err := client.Get(ctx, name, "", &recurse, "", "no-cache") - - if err != nil { - return nil - } - - if resp.StatusCode == http.StatusAccepted { - return fmt.Errorf("Management Group still exists: %s", *resp.Name) - } - } - - return nil -} - -func testAzureRMManagementGroup_basic() string { - return ` -provider "azurerm" { - features {} -} - -resource "azurerm_management_group" "test" { -} -` -} - -func testAzureRMManagementGroup_requiresImport() string { - return ` -provider "azurerm" { - features {} -} - -resource "azurerm_management_group" "test" { -} - -resource "azurerm_management_group" "import" { - name = azurerm_management_group.test.name -} -` -} - -func testAzureRMManagementGroup_nested() string { - return ` -provider "azurerm" { - features {} -} - -resource "azurerm_management_group" "parent" { -} - -resource "azurerm_management_group" "child" { - parent_management_group_id = azurerm_management_group.parent.id -} -` -} - -func testAzureRMManagementGroup_multiLevel() string { - return ` -provider "azurerm" { - features {} -} - -resource "azurerm_management_group" "grandparent" { -} - -resource "azurerm_management_group" "parent" { - parent_management_group_id = azurerm_management_group.grandparent.id -} - -resource "azurerm_management_group" "child" { - parent_management_group_id = azurerm_management_group.parent.id -} -` -} - -func testAzureRMManagementGroup_withName(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_management_group" "test" { - name = "acctestmg-%d" - display_name = "accTestMG-%d" -} -`, data.RandomInteger, data.RandomInteger) -} - -// TODO: switch this out for dynamically creating a subscription once that's supported in the future -func testAzureRMManagementGroup_withSubscriptions(subscriptionID string) string { - return fmt.Sprintf(` 
-provider "azurerm" { - features {} -} - -resource "azurerm_management_group" "test" { - subscription_ids = [ - "%s", - ] -} -`, subscriptionID) -} diff --git a/azurerm/internal/services/managementgroup/validate/management_group.go b/azurerm/internal/services/managementgroup/validate/management_group.go deleted file mode 100644 index 63f5697ec33a..000000000000 --- a/azurerm/internal/services/managementgroup/validate/management_group.go +++ /dev/null @@ -1,37 +0,0 @@ -package validate - -import ( - "fmt" - "regexp" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/managementgroup/parse" -) - -func ManagementGroupName(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %s to be string", k)) - return - } - - // portal says: The name can only be an ASCII letter, digit, -, _, (, ), . and have a maximum length constraint of 90 - if matched := regexp.MustCompile(`^[a-zA-Z0-9_().-]{1,90}$`).Match([]byte(v)); !matched { - errors = append(errors, fmt.Errorf("%s can only consist of ASCII letters, digits, -, _, (, ), . , and cannot exceed the maximum length of 90", k)) - } - return -} - -func ManagementGroupID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.ManagementGroupID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a management group id: %v", k, err)) - return - } - - return -} diff --git a/azurerm/internal/services/managementgroup/validate/management_group_id.go b/azurerm/internal/services/managementgroup/validate/management_group_id.go new file mode 100644 index 000000000000..6c2fc9667beb --- /dev/null +++ b/azurerm/internal/services/managementgroup/validate/management_group_id.go @@ -0,0 +1,22 @@ +package validate + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/managementgroup/parse" +) + +func ManagementGroupID(i interface{}, k string) (warnings []string, errors []error) { + v, ok := i.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) + return + } + + if _, err := parse.ManagementGroupID(v); err != nil { + errors = append(errors, fmt.Errorf("Can not parse %q as a management group id: %v", k, err)) + return + } + + return +} diff --git a/azurerm/internal/services/managementgroup/validate/management_group_name.go b/azurerm/internal/services/managementgroup/validate/management_group_name.go new file mode 100644 index 000000000000..17a746e004ac --- /dev/null +++ b/azurerm/internal/services/managementgroup/validate/management_group_name.go @@ -0,0 +1,20 @@ +package validate + +import ( + "fmt" + "regexp" +) + +func ManagementGroupName(i interface{}, k string) (warnings []string, errors []error) { + v, ok := i.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected type of %s to be string", k)) + return + } + + // portal says: The name can only be an ASCII letter, digit, -, _, (, ), . and have a maximum length constraint of 90 + if matched := regexp.MustCompile(`^[a-zA-Z0-9_().-]{1,90}$`).Match([]byte(v)); !matched { + errors = append(errors, fmt.Errorf("%s can only consist of ASCII letters, digits, -, _, (, ), . 
, and cannot exceed the maximum length of 90", k)) + } + return +} diff --git a/azurerm/internal/services/managementgroup/validate/management_group_test.go b/azurerm/internal/services/managementgroup/validate/management_group_name_test.go similarity index 100% rename from azurerm/internal/services/managementgroup/validate/management_group_test.go rename to azurerm/internal/services/managementgroup/validate/management_group_name_test.go diff --git a/azurerm/internal/services/maps/maps_account_data_source.go b/azurerm/internal/services/maps/maps_account_data_source.go index 507a8edc6136..4693bf11211b 100644 --- a/azurerm/internal/services/maps/maps_account_data_source.go +++ b/azurerm/internal/services/maps/maps_account_data_source.go @@ -7,12 +7,13 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/maps/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmMapsAccount() *schema.Resource { +func dataSourceMapsAccount() *schema.Resource { return &schema.Resource{ Read: dataSourceMapsAccountRead, @@ -24,7 +25,7 @@ func dataSourceArmMapsAccount() *schema.Resource { "name": { Type: schema.TypeString, Required: true, - ValidateFunc: ValidateName(), + ValidateFunc: validate.AccountName(), }, "resource_group_name": azure.SchemaResourceGroupName(), diff --git a/azurerm/internal/services/maps/maps_account_data_source_test.go b/azurerm/internal/services/maps/maps_account_data_source_test.go new file mode 100644 index 000000000000..253180fe7532 --- /dev/null +++ b/azurerm/internal/services/maps/maps_account_data_source_test.go @@ -0,0 +1,46 @@ +package maps_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type MapsAccountDataSource struct { +} + +func TestAccMapsAccountDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_maps_account", "test") + r := MapsAccountDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("id").Exists(), + check.That(data.ResourceName).Key("name").Exists(), + check.That(data.ResourceName).Key("resource_group_name").Exists(), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.environment").HasValue("testing"), + check.That(data.ResourceName).Key("sku_name").HasValue("S0"), + check.That(data.ResourceName).Key("x_ms_client_id").Exists(), + check.That(data.ResourceName).Key("primary_access_key").Exists(), + check.That(data.ResourceName).Key("secondary_access_key").Exists(), + ), + }, + }) +} + +func (MapsAccountDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_maps_account" "test" { + name = azurerm_maps_account.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, MapsAccountResource{}.tags(data)) +} 
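For reference on the validators touched above (ManagementGroupName here, and the maps data source switching to validate.AccountName()), the standalone sketch below shows how a SchemaValidateFunc of this shape behaves when called directly; the sample values and the main package are illustrative assumptions, not part of the changeset:

package main

import (
	"fmt"

	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/managementgroup/validate"
)

func main() {
	// A SchemaValidateFunc returns any warnings and errors for a candidate value.
	for _, name := range []string{"acctestmg-123", "not/valid"} {
		warnings, errors := validate.ManagementGroupName(name, "name")
		fmt.Printf("%q -> %d warning(s), %d error(s)\n", name, len(warnings), len(errors))
	}
}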
diff --git a/azurerm/internal/services/maps/maps_account_resource.go b/azurerm/internal/services/maps/maps_account_resource.go index e5e7b8e2880a..cc0a0081248a 100644 --- a/azurerm/internal/services/maps/maps_account_resource.go +++ b/azurerm/internal/services/maps/maps_account_resource.go @@ -12,18 +12,19 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/maps/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/maps/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmMapsAccount() *schema.Resource { +func resourceMapsAccount() *schema.Resource { return &schema.Resource{ - Create: resourceArmMapsAccountCreateUpdate, - Read: resourceArmMapsAccountRead, - Update: resourceArmMapsAccountCreateUpdate, - Delete: resourceArmMapsAccountDelete, + Create: resourceMapsAccountCreateUpdate, + Read: resourceMapsAccountRead, + Update: resourceMapsAccountCreateUpdate, + Delete: resourceMapsAccountDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -33,7 +34,7 @@ func resourceArmMapsAccount() *schema.Resource { }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.MapsAccountID(id) + _, err := parse.AccountID(id) return err }), @@ -42,7 +43,7 @@ func resourceArmMapsAccount() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: ValidateName(), + ValidateFunc: validate.AccountName(), }, "resource_group_name": azure.SchemaResourceGroupName(), @@ -79,7 +80,7 @@ func resourceArmMapsAccount() *schema.Resource { } } -func resourceArmMapsAccountCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceMapsAccountCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Maps.AccountsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -127,15 +128,15 @@ func resourceArmMapsAccountCreateUpdate(d *schema.ResourceData, meta interface{} d.SetId(*read.ID) - return resourceArmMapsAccountRead(d, meta) + return resourceMapsAccountRead(d, meta) } -func resourceArmMapsAccountRead(d *schema.ResourceData, meta interface{}) error { +func resourceMapsAccountRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Maps.AccountsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.MapsAccountID(d.Id()) + id, err := parse.AccountID(d.Id()) if err != nil { return err } @@ -169,12 +170,12 @@ func resourceArmMapsAccountRead(d *schema.ResourceData, meta interface{}) error return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmMapsAccountDelete(d *schema.ResourceData, meta interface{}) error { +func resourceMapsAccountDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Maps.AccountsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.MapsAccountID(d.Id()) + id, err 
:= parse.AccountID(d.Id()) if err != nil { return err } diff --git a/azurerm/internal/services/maps/maps_account_resource_test.go b/azurerm/internal/services/maps/maps_account_resource_test.go new file mode 100644 index 000000000000..2d959faa437c --- /dev/null +++ b/azurerm/internal/services/maps/maps_account_resource_test.go @@ -0,0 +1,156 @@ +package maps_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/maps/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MapsAccountResource struct { +} + +func TestAccMapsAccount_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_maps_account", "test") + r := MapsAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("name").Exists(), + check.That(data.ResourceName).Key("x_ms_client_id").Exists(), + check.That(data.ResourceName).Key("primary_access_key").Exists(), + check.That(data.ResourceName).Key("secondary_access_key").Exists(), + check.That(data.ResourceName).Key("sku_name").HasValue("S0"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMapsAccount_sku(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_maps_account", "test") + r := MapsAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.sku(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("name").Exists(), + check.That(data.ResourceName).Key("x_ms_client_id").Exists(), + check.That(data.ResourceName).Key("primary_access_key").Exists(), + check.That(data.ResourceName).Key("secondary_access_key").Exists(), + check.That(data.ResourceName).Key("sku_name").HasValue("S1"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMapsAccount_tags(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_maps_account", "test") + r := MapsAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), + }, + data.ImportStep(), + { + Config: r.tags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.environment").HasValue("testing"), + ), + }, + data.ImportStep(), + }) +} + +func (MapsAccountResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.AccountID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Maps.AccountsClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Maps Account %s (resource group: %s): %v", id.Name, id.ResourceGroup, err) + } + + return utils.Bool(resp.Properties != nil), nil +} + +func (MapsAccountResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider 
"azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_maps_account" "test" { + name = "accMapsAccount-%d" + resource_group_name = azurerm_resource_group.test.name + sku_name = "S0" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (MapsAccountResource) sku(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_maps_account" "test" { + name = "accMapsAccount-%d" + resource_group_name = azurerm_resource_group.test.name + sku_name = "S1" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (MapsAccountResource) tags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_maps_account" "test" { + name = "accMapsAccount-%d" + resource_group_name = azurerm_resource_group.test.name + sku_name = "S0" + + tags = { + environment = "testing" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/maps/parse/account.go b/azurerm/internal/services/maps/parse/account.go new file mode 100644 index 000000000000..4c510d4844c5 --- /dev/null +++ b/azurerm/internal/services/maps/parse/account.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type AccountId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewAccountID(subscriptionId, resourceGroup, name string) AccountId { + return AccountId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id AccountId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Account", segmentsStr) +} + +func (id AccountId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Maps/accounts/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// AccountID parses a Account ID into an AccountId struct +func AccountID(input string) (*AccountId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := AccountId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("accounts"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/maps/parse/account_test.go b/azurerm/internal/services/maps/parse/account_test.go new file mode 100644 index 000000000000..c21ffd99542a --- /dev/null +++ b/azurerm/internal/services/maps/parse/account_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is 
generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = AccountId{} + +func TestAccountIDFormatter(t *testing.T) { + actual := NewAccountID("12345678-1234-9876-4563-123456789012", "resGroup1", "account1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Maps/accounts/account1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestAccountID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *AccountId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Maps/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Maps/accounts/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Maps/accounts/account1", + Expected: &AccountId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "account1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.MAPS/ACCOUNTS/ACCOUNT1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := AccountID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/maps/parse/maps_account.go b/azurerm/internal/services/maps/parse/maps_account.go deleted file mode 100644 index 52920eb4fffa..000000000000 --- a/azurerm/internal/services/maps/parse/maps_account.go +++ /dev/null @@ -1,33 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type MapsAccountId struct { - ResourceGroup string - Name string -} - -func MapsAccountID(input string) (*MapsAccountId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Maps Account ID %q: %+v", input, err) - } - - account := MapsAccountId{ - ResourceGroup: id.ResourceGroup, - } - - if account.Name, err = id.PopSegment("accounts"); err != nil { 
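To make the generated ID helpers above (parse/account.go and its test) concrete, here is a minimal round-trip sketch: build an ID with NewAccountID, render it with ID(), and parse it back with parse.AccountID. The standalone main package and the sample names are illustrative only, not part of the changeset:

package main

import (
	"fmt"

	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/maps/parse"
)

func main() {
	id := parse.NewAccountID("12345678-1234-9876-4563-123456789012", "example-resources", "example-account")
	raw := id.ID()
	fmt.Println(raw)
	// /subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/example-resources/providers/Microsoft.Maps/accounts/example-account

	parsed, err := parse.AccountID(raw)
	if err != nil {
		panic(err)
	}
	fmt.Println(parsed.ResourceGroup, parsed.Name) // example-resources example-account
}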
- return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &account, nil -} diff --git a/azurerm/internal/services/maps/parse/maps_account_test.go b/azurerm/internal/services/maps/parse/maps_account_test.go deleted file mode 100644 index 5b3470f327fe..000000000000 --- a/azurerm/internal/services/maps/parse/maps_account_test.go +++ /dev/null @@ -1,73 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestMapsAccountID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *MapsAccountId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Accounts Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Maps/accounts/", - Expected: nil, - }, - { - Name: "Maps Account ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Maps/accounts/Account1", - Expected: &MapsAccountId{ - Name: "Account1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Maps/Accounts/Account1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := MapsAccountID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/maps/registration.go b/azurerm/internal/services/maps/registration.go index 35de16e03c77..5ca142aa1924 100644 --- a/azurerm/internal/services/maps/registration.go +++ b/azurerm/internal/services/maps/registration.go @@ -21,13 +21,13 @@ func (r Registration) WebsiteCategories() []string { // SupportedDataSources returns the supported Data Sources supported by this Service func (r Registration) SupportedDataSources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_maps_account": dataSourceArmMapsAccount(), + "azurerm_maps_account": dataSourceMapsAccount(), } } // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_maps_account": resourceArmMapsAccount(), + "azurerm_maps_account": resourceMapsAccount(), } } diff --git a/azurerm/internal/services/maps/resourceids.go b/azurerm/internal/services/maps/resourceids.go new file mode 100644 index 000000000000..5451cc107e85 --- /dev/null +++ b/azurerm/internal/services/maps/resourceids.go @@ -0,0 +1,3 @@ +package maps + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Account 
-id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Maps/accounts/account1 diff --git a/azurerm/internal/services/maps/tests/maps_account_data_source_test.go b/azurerm/internal/services/maps/tests/maps_account_data_source_test.go deleted file mode 100644 index efd647e6721f..000000000000 --- a/azurerm/internal/services/maps/tests/maps_account_data_source_test.go +++ /dev/null @@ -1,46 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMMapsAccount_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_maps_account", "test") - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMMapsAccount_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "resource_group_name"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.environment", "testing"), - resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "S0"), - resource.TestCheckResourceAttrSet(data.ResourceName, "x_ms_client_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_access_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_access_key"), - ), - }, - }, - }) -} - -func testAccDataSourceAzureRMMapsAccount_basic(data acceptance.TestData) string { - template := testAccAzureRMMapsAccount_tags(data) - return fmt.Sprintf(` -%s - -data "azurerm_maps_account" "test" { - name = azurerm_maps_account.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, template) -} diff --git a/azurerm/internal/services/maps/tests/maps_account_resource_test.go b/azurerm/internal/services/maps/tests/maps_account_resource_test.go deleted file mode 100644 index ca695ad2aefe..000000000000 --- a/azurerm/internal/services/maps/tests/maps_account_resource_test.go +++ /dev/null @@ -1,206 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/maps/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMMapsAccount_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_maps_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMapsAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMapsAccount_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "x_ms_client_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, 
"primary_access_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_access_key"), - resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "S0"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMapsAccount_sku(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_maps_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMapsAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMapsAccount_sku(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "x_ms_client_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_access_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_access_key"), - resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "S1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMapsAccount_tags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_maps_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMapsAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMapsAccount_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMapsAccountExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMMapsAccount_tags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMapsAccountExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.environment", "testing"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMMapsAccountExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Maps.AccountsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.MapsAccountID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.Name) - if err != nil { - return fmt.Errorf("Bad: Get on MapsAccountClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Maps Account %q (resource group: %q) does not exist", id.Name, id.ResourceGroup) - } - - return nil - } -} - -func testCheckAzureRMMapsAccountDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Maps.AccountsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_maps_account" { - continue - } - - id, err := parse.MapsAccountID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.Name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return fmt.Errorf("Error retrieving Maps Account %q (Resource Group %q): %s", id.Name, id.ResourceGroup, err) 
- } - } - - return nil -} - -func testAccAzureRMMapsAccount_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_maps_account" "test" { - name = "accMapsAccount-%d" - resource_group_name = azurerm_resource_group.test.name - sku_name = "S0" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMMapsAccount_sku(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_maps_account" "test" { - name = "accMapsAccount-%d" - resource_group_name = azurerm_resource_group.test.name - sku_name = "S1" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMMapsAccount_tags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_maps_account" "test" { - name = "accMapsAccount-%d" - resource_group_name = azurerm_resource_group.test.name - sku_name = "S0" - - tags = { - environment = "testing" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/maps/validate/account_id.go b/azurerm/internal/services/maps/validate/account_id.go new file mode 100644 index 000000000000..2b745c27df60 --- /dev/null +++ b/azurerm/internal/services/maps/validate/account_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/maps/parse" +) + +func AccountID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.AccountID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/maps/validate/account_id_test.go b/azurerm/internal/services/maps/validate/account_id_test.go new file mode 100644 index 000000000000..ae5b493e304c --- /dev/null +++ b/azurerm/internal/services/maps/validate/account_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestAccountID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Maps/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Maps/accounts/", + Valid: false, + }, + + { + // 
valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Maps/accounts/account1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.MAPS/ACCOUNTS/ACCOUNT1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := AccountID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/maps/validate/account_name.go b/azurerm/internal/services/maps/validate/account_name.go new file mode 100644 index 000000000000..a769954758bd --- /dev/null +++ b/azurerm/internal/services/maps/validate/account_name.go @@ -0,0 +1,14 @@ +package validate + +import ( + "regexp" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" +) + +func AccountName() schema.SchemaValidateFunc { + return validation.StringMatch( + regexp.MustCompile(`^[A-Za-z0-9]{1}[A-Za-z0-9._-]{1,}$`), + "First character must be alphanumeric. Subsequent character(s) must be any combination of alphanumeric, underscore (_), period (.), or hyphen (-).") +} diff --git a/azurerm/internal/services/maps/validate/account_name_test.go b/azurerm/internal/services/maps/validate/account_name_test.go new file mode 100644 index 000000000000..821db917cf57 --- /dev/null +++ b/azurerm/internal/services/maps/validate/account_name_test.go @@ -0,0 +1,69 @@ +package validate + +import "testing" + +func TestAccountName(t *testing.T) { + testData := []struct { + Name string + Expected bool + }{ + { + Name: "", + Expected: false, + }, + { + Name: "hello", + Expected: true, + }, + { + Name: "Hello", + Expected: true, + }, + { + Name: "1hello", + Expected: true, + }, + { + Name: "1he-llo", + Expected: true, + }, + { + Name: "he-llo1", + Expected: true, + }, + { + Name: "he_llo1", + Expected: true, + }, + { + Name: ".hello1", + Expected: false, + }, + { + Name: "_hello1", + Expected: false, + }, + { + Name: "he.llo1", + Expected: true, + }, + { + Name: "he-llo!", + Expected: false, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Name) + + warnings, errors := AccountName()(v.Name, "name") + if len(warnings) != 0 { + t.Fatalf("Expected no warnings but got %d", len(warnings)) + } + + actual := len(errors) == 0 + if v.Expected != actual { + t.Fatalf("Expected %t but got %t for %q: %s", v.Expected, actual, v.Name, errors) + } + } +} diff --git a/azurerm/internal/services/maps/validation.go b/azurerm/internal/services/maps/validation.go deleted file mode 100644 index 531d899aa397..000000000000 --- a/azurerm/internal/services/maps/validation.go +++ /dev/null @@ -1,14 +0,0 @@ -package maps - -import ( - "regexp" - - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" -) - -func ValidateName() schema.SchemaValidateFunc { - return validation.StringMatch( - regexp.MustCompile(`^[A-Za-z0-9]{1}[A-Za-z0-9._-]{1,}$`), - "First character must be alphanumeric. 
Subsequent character(s) must be any combination of alphanumeric, underscore (_), period (.), or hyphen (-).") -} diff --git a/azurerm/internal/services/maps/validation_test.go b/azurerm/internal/services/maps/validation_test.go deleted file mode 100644 index dcfdc9eb52c7..000000000000 --- a/azurerm/internal/services/maps/validation_test.go +++ /dev/null @@ -1,69 +0,0 @@ -package maps - -import "testing" - -func TestValidateName(t *testing.T) { - testData := []struct { - Name string - Expected bool - }{ - { - Name: "", - Expected: false, - }, - { - Name: "hello", - Expected: true, - }, - { - Name: "Hello", - Expected: true, - }, - { - Name: "1hello", - Expected: true, - }, - { - Name: "1he-llo", - Expected: true, - }, - { - Name: "he-llo1", - Expected: true, - }, - { - Name: "he_llo1", - Expected: true, - }, - { - Name: ".hello1", - Expected: false, - }, - { - Name: "_hello1", - Expected: false, - }, - { - Name: "he.llo1", - Expected: true, - }, - { - Name: "he-llo!", - Expected: false, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - warnings, errors := ValidateName()(v.Name, "name") - if len(warnings) != 0 { - t.Fatalf("Expected no warnings but got %d", len(warnings)) - } - - actual := len(errors) == 0 - if v.Expected != actual { - t.Fatalf("Expected %t but got %t for %q: %s", v.Expected, actual, v.Name, errors) - } - } -} diff --git a/azurerm/internal/services/mariadb/mariadb_configuration_resource.go b/azurerm/internal/services/mariadb/mariadb_configuration_resource.go index 1d3ba4770dbe..e32167d79336 100644 --- a/azurerm/internal/services/mariadb/mariadb_configuration_resource.go +++ b/azurerm/internal/services/mariadb/mariadb_configuration_resource.go @@ -15,11 +15,11 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmMariaDbConfiguration() *schema.Resource { +func resourceMariaDbConfiguration() *schema.Resource { return &schema.Resource{ - Create: resourceArmMariaDbConfigurationCreateUpdate, - Read: resourceArmMariaDbConfigurationRead, - Delete: resourceArmMariaDbConfigurationDelete, + Create: resourceMariaDbConfigurationCreateUpdate, + Read: resourceMariaDbConfigurationRead, + Delete: resourceMariaDbConfigurationDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -46,7 +46,7 @@ func resourceArmMariaDbConfiguration() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.MariaDbServerServerName, + ValidateFunc: validate.ServerName, }, "value": { @@ -58,7 +58,7 @@ func resourceArmMariaDbConfiguration() *schema.Resource { } } -func resourceArmMariaDbConfigurationCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceMariaDbConfigurationCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MariaDB.ConfigurationsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -95,10 +95,10 @@ func resourceArmMariaDbConfigurationCreateUpdate(d *schema.ResourceData, meta in d.SetId(*read.ID) - return resourceArmMariaDbConfigurationRead(d, meta) + return resourceMariaDbConfigurationRead(d, meta) } -func resourceArmMariaDbConfigurationRead(d *schema.ResourceData, meta interface{}) error { +func resourceMariaDbConfigurationRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MariaDB.ConfigurationsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ 
-130,7 +130,7 @@ func resourceArmMariaDbConfigurationRead(d *schema.ResourceData, meta interface{ return nil } -func resourceArmMariaDbConfigurationDelete(d *schema.ResourceData, meta interface{}) error { +func resourceMariaDbConfigurationDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MariaDB.ConfigurationsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/mariadb/mariadb_configuration_resource_test.go b/azurerm/internal/services/mariadb/mariadb_configuration_resource_test.go new file mode 100644 index 000000000000..6875cfc1589d --- /dev/null +++ b/azurerm/internal/services/mariadb/mariadb_configuration_resource_test.go @@ -0,0 +1,216 @@ +package mariadb_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mariadb/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MariaDbConfigurationResource struct { +} + +func TestAccMariaDbConfiguration_characterSetServer(t *testing.T) { + srv := acceptance.BuildTestData(t, "azurerm_mariadb_server", "test") + data := acceptance.BuildTestData(t, "azurerm_mariadb_configuration", "test") + r := MariaDbConfigurationResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.characterSetServer(data), + Check: resource.ComposeTestCheckFunc( + data.CheckWithClient(checkValueIs("hebrew")), + ), + }, + data.ImportStep(), + { + Config: r.empty(data), + Check: resource.ComposeTestCheckFunc( + // "delete" resets back to the default value + srv.CheckWithClient(checkValueIsReset("character_set_server")), + ), + }, + }) +} + +func TestAccMariaDbConfiguration_interactiveTimeout(t *testing.T) { + srv := acceptance.BuildTestData(t, "azurerm_mariadb_server", "test") + data := acceptance.BuildTestData(t, "azurerm_mariadb_configuration", "test") + r := MariaDbConfigurationResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.interactiveTimeout(data), + Check: resource.ComposeTestCheckFunc( + data.CheckWithClient(checkValueIs("30")), + ), + }, + data.ImportStep(), + { + Config: r.empty(data), + Check: resource.ComposeTestCheckFunc( + // "delete" resets back to the default value + srv.CheckWithClient(checkValueIsReset("interactive_timeout")), + ), + }, + }) +} + +func TestAccMariaDbConfiguration_logSlowAdminStatements(t *testing.T) { + srv := acceptance.BuildTestData(t, "azurerm_mariadb_server", "test") + data := acceptance.BuildTestData(t, "azurerm_mariadb_configuration", "test") + r := MariaDbConfigurationResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.logSlowAdminStatements(data), + Check: resource.ComposeTestCheckFunc( + data.CheckWithClient(checkValueIs("On")), + ), + }, + data.ImportStep(), + { + Config: r.empty(data), + Check: resource.ComposeTestCheckFunc( + // "delete" resets back to the default value + srv.CheckWithClient(checkValueIsReset("log_slow_admin_statements")), + ), + }, + }) +} + +func (MariaDbConfigurationResource) Exists(ctx context.Context, clients *clients.Client, 
state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + serverName := id.Path["servers"] + name := id.Path["configurations"] + + resp, err := clients.MariaDB.ConfigurationsClient.Get(ctx, id.ResourceGroup, serverName, name) + if err != nil { + return nil, fmt.Errorf("retrieving MariaDB Configuration %q (Server %q / Resource Group %q): %v", name, serverName, id.ResourceGroup, err) + } + + return utils.Bool(resp.ConfigurationProperties != nil), nil +} + +func checkValueIs(value string) acceptance.ClientCheckFunc { + return func(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) error { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return err + } + + serverName := id.Path["servers"] + name := id.Path["configurations"] + + resp, err := clients.MariaDB.ConfigurationsClient.Get(ctx, id.ResourceGroup, serverName, name) + if err != nil { + return fmt.Errorf("retrieving MariaDB Configuration %q (Server %q / Resource Group %q): %v", name, serverName, id.ResourceGroup, err) + } + + if resp.Value == nil { + return fmt.Errorf("MariaDB Configuration %q (Server %q / Resource Group %q) Value is nil", name, serverName, id.ResourceGroup) + } + + actualValue := *resp.Value + + if value != actualValue { + return fmt.Errorf("MariaDB Configuration %q (Server %q / Resource Group %q) Value (%s) != expected (%s)", name, serverName, id.ResourceGroup, actualValue, value) + } + + return nil + } +} + +func checkValueIsReset(configurationName string) acceptance.ClientCheckFunc { + return func(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) error { + id, err := parse.ServerID(state.ID) + if err != nil { + return err + } + + resp, err := clients.MariaDB.ConfigurationsClient.Get(ctx, id.ResourceGroup, id.Name, configurationName) + if err != nil { + return fmt.Errorf("retrieving MariaDB Configuration %q (Server %q / Resource Group %q): %v", configurationName, id.Name, id.ResourceGroup, err) + } + + if resp.Value == nil { + return fmt.Errorf("MariaDB Configuration %q (Server %q / Resource Group %q) Value is nil", configurationName, id.Name, id.ResourceGroup) + } + + if resp.DefaultValue == nil { + return fmt.Errorf("MariaDB Configuration %q (Server %q / Resource Group %q) Default Value is nil", configurationName, id.Name, id.ResourceGroup) + } + actualValue := *resp.Value + defaultValue := *resp.DefaultValue + + if defaultValue != actualValue { + return fmt.Errorf("MariaDB Configuration %q (Server %q / Resource Group %q) Value (%s) != Default (%s)", configurationName, id.Name, id.ResourceGroup, actualValue, defaultValue) + } + + return nil + } +} + +func (r MariaDbConfigurationResource) characterSetServer(data acceptance.TestData) string { + return r.template(data, "character_set_server", "hebrew") +} + +func (r MariaDbConfigurationResource) interactiveTimeout(data acceptance.TestData) string { + return r.template(data, "interactive_timeout", "30") +} + +func (r MariaDbConfigurationResource) logSlowAdminStatements(data acceptance.TestData) string { + return r.template(data, "log_slow_admin_statements", "on") +} + +func (r MariaDbConfigurationResource) template(data acceptance.TestData, name string, value string) string { + server := r.empty(data) + config := fmt.Sprintf(` +resource "azurerm_mariadb_configuration" "test" { + name = "%s" + resource_group_name = "${azurerm_resource_group.test.name}" + server_name = "${azurerm_mariadb_server.test.name}" + value = 
"%s" +} +`, name, value) + return server + config +} + +func (MariaDbConfigurationResource) empty(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_mariadb_server" "test" { + name = "acctestmariadbsvr-%d" + location = "${azurerm_resource_group.test.location}" + resource_group_name = "${azurerm_resource_group.test.name}" + sku_name = "GP_Gen5_2" + version = "10.2" + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + backup_retention_days = 7 + geo_redundant_backup_enabled = false + ssl_enforcement_enabled = true + storage_mb = 51200 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/mariadb/mariadb_database_resource.go b/azurerm/internal/services/mariadb/mariadb_database_resource.go index ca912803f07a..14831a7ccba8 100644 --- a/azurerm/internal/services/mariadb/mariadb_database_resource.go +++ b/azurerm/internal/services/mariadb/mariadb_database_resource.go @@ -18,11 +18,11 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmMariaDbDatabase() *schema.Resource { +func resourceMariaDbDatabase() *schema.Resource { return &schema.Resource{ - Create: resourceArmMariaDbDatabaseCreateUpdate, - Read: resourceArmMariaDbDatabaseRead, - Delete: resourceArmMariaDbDatabaseDelete, + Create: resourceMariaDbDatabaseCreateUpdate, + Read: resourceMariaDbDatabaseRead, + Delete: resourceMariaDbDatabaseDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -51,7 +51,7 @@ func resourceArmMariaDbDatabase() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.MariaDbServerServerName, + ValidateFunc: validate.ServerName, }, "charset": { @@ -77,7 +77,7 @@ func resourceArmMariaDbDatabase() *schema.Resource { } } -func resourceArmMariaDbDatabaseCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceMariaDbDatabaseCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MariaDB.DatabasesClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -130,10 +130,10 @@ func resourceArmMariaDbDatabaseCreateUpdate(d *schema.ResourceData, meta interfa d.SetId(*read.ID) - return resourceArmMariaDbDatabaseRead(d, meta) + return resourceMariaDbDatabaseRead(d, meta) } -func resourceArmMariaDbDatabaseRead(d *schema.ResourceData, meta interface{}) error { +func resourceMariaDbDatabaseRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MariaDB.DatabasesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -169,7 +169,7 @@ func resourceArmMariaDbDatabaseRead(d *schema.ResourceData, meta interface{}) er return nil } -func resourceArmMariaDbDatabaseDelete(d *schema.ResourceData, meta interface{}) error { +func resourceMariaDbDatabaseDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MariaDB.DatabasesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/mariadb/mariadb_database_resource_test.go b/azurerm/internal/services/mariadb/mariadb_database_resource_test.go new file mode 100644 index 000000000000..cf11151c37cc --- /dev/null +++ 
b/azurerm/internal/services/mariadb/mariadb_database_resource_test.go @@ -0,0 +1,124 @@ +package mariadb_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MariaDbDatabaseResource struct { +} + +func TestAccMariaDbDatabase_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mariadb_database", "test") + r := MariaDbDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("charset").HasValue("utf8"), + check.That(data.ResourceName).Key("collation").HasValue("utf8_general_ci"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMariaDbDatabase_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mariadb_database", "test") + r := MariaDbDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_mariadb_database"), + }, + }) +} + +func (MariaDbDatabaseResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + serverName := id.Path["servers"] + name := id.Path["databases"] + + resp, err := clients.MariaDB.DatabasesClient.Get(ctx, id.ResourceGroup, serverName, name) + if err != nil { + return nil, fmt.Errorf("retrieving MariaDB Database %q (Server %q / Resource Group %q): %v", name, serverName, id.ResourceGroup, err) + } + + return utils.Bool(resp.DatabaseProperties != nil), nil +} + +func (MariaDbDatabaseResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = %q +} + +resource "azurerm_mariadb_server" "test" { + name = "acctestmariadbsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku_name = "B_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" 
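+  # NOTE: this fixture still provisions the server with the older storage_profile
+  # block; the top-level storage_mb, backup_retention_days and
+  # geo_redundant_backup_enabled arguments used by the server resource tests later
+  # in this diff are the replacement for it.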
+ version = "10.2" + ssl_enforcement_enabled = true +} + +resource "azurerm_mariadb_database" "test" { + name = "acctestmariadb_%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_mariadb_server.test.name + charset = "utf8" + collation = "utf8_general_ci" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r MariaDbDatabaseResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_mariadb_database" "import" { + name = azurerm_mariadb_database.test.name + resource_group_name = azurerm_mariadb_database.test.resource_group_name + server_name = azurerm_mariadb_database.test.server_name + charset = azurerm_mariadb_database.test.charset + collation = azurerm_mariadb_database.test.collation +} +`, r.basic(data)) +} diff --git a/azurerm/internal/services/mariadb/mariadb_firewall_rule_resource.go b/azurerm/internal/services/mariadb/mariadb_firewall_rule_resource.go index 9c761157f495..c4c249567869 100644 --- a/azurerm/internal/services/mariadb/mariadb_firewall_rule_resource.go +++ b/azurerm/internal/services/mariadb/mariadb_firewall_rule_resource.go @@ -52,7 +52,7 @@ func resourceArmMariaDBFirewallRule() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.MariaDbServerServerName, + ValidateFunc: validate.ServerName, }, "start_ip_address": { diff --git a/azurerm/internal/services/mariadb/mariadb_firewall_rule_resource_test.go b/azurerm/internal/services/mariadb/mariadb_firewall_rule_resource_test.go new file mode 100644 index 000000000000..7bcf9482ef4c --- /dev/null +++ b/azurerm/internal/services/mariadb/mariadb_firewall_rule_resource_test.go @@ -0,0 +1,122 @@ +package mariadb_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MariaDbFirewallRuleResource struct { +} + +func TestAccMariaDbFirewallRule_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mariadb_firewall_rule", "test") + r := MariaDbFirewallRuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMariaDbFirewallRule_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mariadb_firewall_rule", "test") + r := MariaDbFirewallRuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_mariadb_firewall_rule"), + }, + }) +} + +func (MariaDbFirewallRuleResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + serverName := 
id.Path["servers"] + name := id.Path["firewallRules"] + + resp, err := clients.MariaDB.FirewallRulesClient.Get(ctx, id.ResourceGroup, serverName, name) + if err != nil { + return nil, fmt.Errorf("retrieving MariaDB Firewall Rule %q (Server %q / Resource Group %q): %v", name, serverName, id.ResourceGroup, err) + } + + return utils.Bool(resp.FirewallRuleProperties != nil), nil +} + +func (MariaDbFirewallRuleResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_mariadb_server" "test" { + name = "acctestmariadbsvr-%d" + location = "${azurerm_resource_group.test.location}" + resource_group_name = "${azurerm_resource_group.test.name}" + + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + version = "10.2" + ssl_enforcement_enabled = true +} + +resource "azurerm_mariadb_firewall_rule" "test" { + name = "acctestfwrule-%d" + resource_group_name = "${azurerm_resource_group.test.name}" + server_name = "${azurerm_mariadb_server.test.name}" + start_ip_address = "0.0.0.0" + end_ip_address = "255.255.255.255" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r MariaDbFirewallRuleResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_mariadb_firewall_rule" "import" { + name = azurerm_mariadb_firewall_rule.test.name + resource_group_name = azurerm_mariadb_firewall_rule.test.resource_group_name + server_name = azurerm_mariadb_firewall_rule.test.server_name + start_ip_address = azurerm_mariadb_firewall_rule.test.start_ip_address + end_ip_address = azurerm_mariadb_firewall_rule.test.end_ip_address +} +`, r.basic(data)) +} diff --git a/azurerm/internal/services/mariadb/mariadb_server_data_source_test.go b/azurerm/internal/services/mariadb/mariadb_server_data_source_test.go new file mode 100644 index 000000000000..979146809dbd --- /dev/null +++ b/azurerm/internal/services/mariadb/mariadb_server_data_source_test.go @@ -0,0 +1,66 @@ +package mariadb_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type MariaDbServerDataSource struct { +} + +func TestAccMariaDbServerDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_mariadb_server", "test") + r := MariaDbServerDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("administrator_login").HasValue("acctestun"), + check.That(data.ResourceName).Key("version").HasValue("10.2"), + check.That(data.ResourceName).Key("ssl_enforcement").HasValue("Enabled"), + ), + }, + }) +} + +func (MariaDbServerDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-maria-%d" + location = "%s" +} + +resource "azurerm_mariadb_server" "test" { + name = "acctestmariadbsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = 
azurerm_resource_group.test.name + + sku_name = "B_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + version = "10.2" + ssl_enforcement_enabled = true +} + +data "azurerm_mariadb_server" "test" { + name = azurerm_mariadb_server.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/mariadb/mariadb_server_resource.go b/azurerm/internal/services/mariadb/mariadb_server_resource.go index f9094cb5dc50..510e905a74f6 100644 --- a/azurerm/internal/services/mariadb/mariadb_server_resource.go +++ b/azurerm/internal/services/mariadb/mariadb_server_resource.go @@ -23,16 +23,16 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmMariaDbServer() *schema.Resource { +func resourceMariaDbServer() *schema.Resource { return &schema.Resource{ - Create: resourceArmMariaDbServerCreate, - Read: resourceArmMariaDbServerRead, - Update: resourceArmMariaDbServerUpdate, - Delete: resourceArmMariaDbServerDelete, + Create: resourceMariaDbServerCreate, + Read: resourceMariaDbServerRead, + Update: resourceMariaDbServerUpdate, + Delete: resourceMariaDbServerDelete, Importer: &schema.ResourceImporter{ State: func(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { - if _, err := parse.MariaDbServerServerID(d.Id()); err != nil { + if _, err := parse.ServerID(d.Id()); err != nil { return []*schema.ResourceData{d}, err } @@ -57,7 +57,7 @@ func resourceArmMariaDbServer() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.MariaDbServerServerName, + ValidateFunc: validate.ServerName, }, "administrator_login": { @@ -103,7 +103,7 @@ func resourceArmMariaDbServer() *schema.Resource { "creation_source_server_id": { Type: schema.TypeString, Optional: true, - ValidateFunc: validate.MariaDbServerServerID, + ValidateFunc: validate.ServerID, }, "fqdn": { @@ -255,7 +255,7 @@ func resourceArmMariaDbServer() *schema.Resource { } } -func resourceArmMariaDbServerCreate(d *schema.ResourceData, meta interface{}) error { +func resourceMariaDbServerCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MariaDB.ServersClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -390,17 +390,17 @@ func resourceArmMariaDbServerCreate(d *schema.ResourceData, meta interface{}) er d.SetId(*read.ID) - return resourceArmMariaDbServerRead(d, meta) + return resourceMariaDbServerRead(d, meta) } -func resourceArmMariaDbServerUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceMariaDbServerUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MariaDB.ServersClient ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) defer cancel() log.Printf("[INFO] preparing arguments for AzureRM MariaDB Server update.") - id, err := parse.MariaDbServerServerID(d.Id()) + id, err := parse.ServerID(d.Id()) if err != nil { return fmt.Errorf("parsing MariaDB Server ID : %v", err) } @@ -454,15 +454,15 @@ func resourceArmMariaDbServerUpdate(d *schema.ResourceData, meta interface{}) er d.SetId(*read.ID) - return resourceArmMariaDbServerRead(d, meta) + return resourceMariaDbServerRead(d, meta) } -func resourceArmMariaDbServerRead(d 
*schema.ResourceData, meta interface{}) error { +func resourceMariaDbServerRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MariaDB.ServersClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.MariaDbServerServerID(d.Id()) + id, err := parse.ServerID(d.Id()) if err != nil { return fmt.Errorf("parsing MariaDB Server ID : %v", err) } @@ -514,12 +514,12 @@ func resourceArmMariaDbServerRead(d *schema.ResourceData, meta interface{}) erro return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmMariaDbServerDelete(d *schema.ResourceData, meta interface{}) error { +func resourceMariaDbServerDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MariaDB.ServersClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.MariaDbServerServerID(d.Id()) + id, err := parse.ServerID(d.Id()) if err != nil { return fmt.Errorf("parsing MariaDB Server ID : %v", err) } diff --git a/azurerm/internal/services/mariadb/mariadb_server_resource_test.go b/azurerm/internal/services/mariadb/mariadb_server_resource_test.go new file mode 100644 index 000000000000..179cc564c5be --- /dev/null +++ b/azurerm/internal/services/mariadb/mariadb_server_resource_test.go @@ -0,0 +1,518 @@ +package mariadb_test + +import ( + "context" + "fmt" + "testing" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mariadb/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MariaDbServerResource struct { +} + +func TestAccMariaDbServer_basicTenTwo(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mariadb_server", "test") + r := MariaDbServerResource{} + version := "10.2" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, version), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("version").HasValue(version), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + }) +} + +func TestAccMariaDbServer_basicTenTwoDeprecated(t *testing.T) { // remove in v3.0 + data := acceptance.BuildTestData(t, "azurerm_mariadb_server", "test") + r := MariaDbServerResource{} + version := "10.2" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicDeprecated(data, version), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("version").HasValue(version), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + }) +} + +func TestAccMariaDbServer_basicTenThree(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mariadb_server", "test") + r := MariaDbServerResource{} + version := "10.3" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, version), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("version").HasValue(version), + 
), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + }) +} + +func TestAccMariaDbServer_autogrowOnly(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mariadb_server", "test") + r := MariaDbServerResource{} + version := "10.3" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.autogrow(data, version), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + }) +} + +func TestAccMariaDbServer_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mariadb_server", "test") + r := MariaDbServerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "10.3"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccMariaDbServer_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mariadb_server", "test") + r := MariaDbServerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data, "10.3"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + }) +} + +func TestAccMariaDbServer_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mariadb_server", "test") + r := MariaDbServerResource{} + version := "10.3" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, version), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + { + Config: r.complete(data, version), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + { + Config: r.basic(data, version), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + }) +} + +func TestAccMariaDbServer_completeDeprecatedMigrate(t *testing.T) { // remove in v3.0 + data := acceptance.BuildTestData(t, "azurerm_mariadb_server", "test") + r := MariaDbServerResource{} + version := "10.3" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.completeDeprecated(data, version), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + { + Config: r.complete(data, version), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + }) +} + +func TestAccMariaDbServer_updateDeprecated(t *testing.T) { // remove in v3.0 + data := acceptance.BuildTestData(t, "azurerm_mariadb_server", "test") + r := MariaDbServerResource{} + version := "10.2" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicDeprecated(data, version), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + { + Config: r.completeDeprecated(data, version), + Check: 
resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		data.ImportStep("administrator_login_password"), // not returned as sensitive
+		{
+			Config: r.basicDeprecated(data, version),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		data.ImportStep("administrator_login_password"), // not returned as sensitive
+	})
+}
+
+func TestAccMariaDbServer_updateSKU(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_mariadb_server", "test")
+	r := MariaDbServerResource{}
+
+	data.ResourceTest(t, r, []resource.TestStep{
+		{
+			Config: r.sku(data, "GP_Gen5_32"),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		data.ImportStep("administrator_login_password"), // not returned as sensitive
+		{
+			Config: r.sku(data, "MO_Gen5_16"),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		data.ImportStep("administrator_login_password"), // not returned as sensitive
+	})
+}
+
+func TestAccMariaDbServer_createReplica(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_mariadb_server", "test")
+	r := MariaDbServerResource{}
+	version := "10.3"
+
+	data.ResourceTest(t, r, []resource.TestStep{
+		{
+			Config: r.basic(data, version),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		data.ImportStep("administrator_login_password"), // not returned as sensitive
+		{
+			Config: r.createReplica(data, version),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+				check.That("azurerm_mariadb_server.replica").ExistsInAzure(r),
+			),
+		},
+		data.ImportStep("administrator_login_password"), // not returned as sensitive
+	})
+}
+
+func TestAccMariaDbServer_createPointInTimeRestore(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_mariadb_server", "test")
+	r := MariaDbServerResource{}
+	restoreTime := time.Now().Add(11 * time.Minute)
+	version := "10.3"
+
+	data.ResourceTest(t, r, []resource.TestStep{
+		{
+			Config: r.basic(data, version),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		data.ImportStep("administrator_login_password"), // not returned as sensitive
+		{
+			PreConfig: func() { time.Sleep(restoreTime.Sub(time.Now().Add(-7 * time.Minute))) },
+			Config:    r.createPointInTimeRestore(data, version, restoreTime.Format(time.RFC3339)),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+				check.That("azurerm_mariadb_server.restore").ExistsInAzure(r),
+			),
+		},
+		data.ImportStep("administrator_login_password"), // not returned as sensitive
+	})
+}
+
+func (MariaDbServerResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) {
+	id, err := parse.ServerID(state.ID)
+	if err != nil {
+		return nil, err
+	}
+
+	resp, err := clients.MariaDB.ServersClient.Get(ctx, id.ResourceGroup, id.Name)
+	if err != nil {
+		return nil, fmt.Errorf("retrieving MariaDB Server %q (Resource Group %q): %v", id.Name, id.ResourceGroup, err)
+	}
+
+	return utils.Bool(resp.ServerProperties != nil), nil
+}
+
+func (MariaDbServerResource) basic(data acceptance.TestData, version string) string {
+	return fmt.Sprintf(`
+provider "azurerm" {
+  features {}
+}
+
+resource "azurerm_resource_group" "test" {
+  name     = "acctestRG-%d"
+  location = "%s"
+}
+
+resource "azurerm_mariadb_server" "test" {
+  name                = "acctestmariadbsvr-%d"
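+  # sku_name below follows the tier_family_cores convention used by Azure Database
+  # for MariaDB, e.g. B_Gen5_2 = Basic tier, Gen5 hardware, 2 vCores.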
+ location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "B_Gen5_2" + version = "%s" + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + ssl_enforcement_enabled = true + storage_mb = 51200 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, version) +} + +func (MariaDbServerResource) basicDeprecated(data acceptance.TestData, version string) string { // remove in v3.0 + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_mariadb_server" "test" { + name = "acctestmariadbsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "B_Gen5_2" + version = "%s" + + storage_profile { + storage_mb = 51200 + } + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + ssl_enforcement_enabled = true +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, version) +} + +func (MariaDbServerResource) complete(data acceptance.TestData, version string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_mariadb_server" "test" { + name = "acctestmariadbsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "B_Gen5_2" + version = "%s" + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + auto_grow_enabled = true + backup_retention_days = 14 + create_mode = "Default" + geo_redundant_backup_enabled = false + ssl_enforcement_enabled = true + storage_mb = 51200 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, version) +} + +func (MariaDbServerResource) completeDeprecated(data acceptance.TestData, version string) string { // remove in v3.0 + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_mariadb_server" "test" { + name = "acctestmariadbsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "B_Gen5_2" + version = "%s" + + storage_profile { + auto_grow = "Enabled" + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + storage_mb = 51200 + } + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + create_mode = "Default" + ssl_enforcement_enabled = true +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, version) +} + +func (MariaDbServerResource) autogrow(data acceptance.TestData, version string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_mariadb_server" "test" { + name = "acctestmariadbsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "B_Gen5_2" + version = "%s" + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" 
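+  # auto_grow_enabled is the top-level replacement for the deprecated
+  # storage_profile { auto_grow = "Enabled" } syntax exercised by the
+  # *Deprecated test configurations above.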
+ auto_grow_enabled = true + backup_retention_days = 7 + geo_redundant_backup_enabled = false + ssl_enforcement_enabled = true + storage_mb = 51200 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, version) +} + +func (r MariaDbServerResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_mariadb_server" "import" { + name = azurerm_mariadb_server.test.name + location = azurerm_mariadb_server.test.location + resource_group_name = azurerm_mariadb_server.test.resource_group_name + sku_name = "B_Gen5_2" + version = "10.3" + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + backup_retention_days = 7 + geo_redundant_backup_enabled = false + ssl_enforcement_enabled = true + storage_mb = 51200 +} +`, r.basic(data, "10.3")) +} + +func (MariaDbServerResource) sku(data acceptance.TestData, sku string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_mariadb_server" "test" { + name = "acctestmariadbsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "%s" + version = "10.2" + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + backup_retention_days = 7 + geo_redundant_backup_enabled = false + ssl_enforcement_enabled = true + storage_mb = 640000 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, sku) +} + +func (r MariaDbServerResource) createReplica(data acceptance.TestData, version string) string { + return fmt.Sprintf(` +%s + +resource "azurerm_mariadb_server" "replica" { + name = "acctestmariadbsvr-%d-replica" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "B_Gen5_2" + version = "%s" + create_mode = "Replica" + creation_source_server_id = azurerm_mariadb_server.test.id + ssl_enforcement_enabled = true + storage_mb = 51200 +} +`, r.basic(data, version), data.RandomInteger, version) +} + +func (r MariaDbServerResource) createPointInTimeRestore(data acceptance.TestData, version, restoreTime string) string { + return fmt.Sprintf(` +%s + +resource "azurerm_mariadb_server" "restore" { + name = "acctestmariadbsvr-%d-restore" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "B_Gen5_2" + version = "%s" + create_mode = "PointInTimeRestore" + creation_source_server_id = azurerm_mariadb_server.test.id + restore_point_in_time = "%s" + ssl_enforcement_enabled = true + storage_mb = 51200 +} +`, r.basic(data, version), data.RandomInteger, version, restoreTime) +} diff --git a/azurerm/internal/services/mariadb/mariadb_virtual_network_rule_resource.go b/azurerm/internal/services/mariadb/mariadb_virtual_network_rule_resource.go index cf4f586a51d3..5b6aacca6ac9 100644 --- a/azurerm/internal/services/mariadb/mariadb_virtual_network_rule_resource.go +++ b/azurerm/internal/services/mariadb/mariadb_virtual_network_rule_resource.go @@ -19,12 +19,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmMariaDbVirtualNetworkRule() *schema.Resource { +func resourceMariaDbVirtualNetworkRule() *schema.Resource { return &schema.Resource{ - Create: resourceArmMariaDbVirtualNetworkRuleCreateUpdate, - Read: resourceArmMariaDbVirtualNetworkRuleRead, - Update: 
resourceArmMariaDbVirtualNetworkRuleCreateUpdate, - Delete: resourceArmMariaDbVirtualNetworkRuleDelete, + Create: resourceMariaDbVirtualNetworkRuleCreateUpdate, + Read: resourceMariaDbVirtualNetworkRuleRead, + Update: resourceMariaDbVirtualNetworkRuleCreateUpdate, + Delete: resourceMariaDbVirtualNetworkRuleDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -50,7 +50,7 @@ func resourceArmMariaDbVirtualNetworkRule() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.MariaDbServerServerName, + ValidateFunc: validate.ServerName, }, "subnet_id": { @@ -62,7 +62,7 @@ func resourceArmMariaDbVirtualNetworkRule() *schema.Resource { } } -func resourceArmMariaDbVirtualNetworkRuleCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceMariaDbVirtualNetworkRuleCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MariaDB.VirtualNetworkRulesClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -122,10 +122,10 @@ func resourceArmMariaDbVirtualNetworkRuleCreateUpdate(d *schema.ResourceData, me d.SetId(*resp.ID) - return resourceArmMariaDbVirtualNetworkRuleRead(d, meta) + return resourceMariaDbVirtualNetworkRuleRead(d, meta) } -func resourceArmMariaDbVirtualNetworkRuleRead(d *schema.ResourceData, meta interface{}) error { +func resourceMariaDbVirtualNetworkRuleRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MariaDB.VirtualNetworkRulesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -161,7 +161,7 @@ func resourceArmMariaDbVirtualNetworkRuleRead(d *schema.ResourceData, meta inter return nil } -func resourceArmMariaDbVirtualNetworkRuleDelete(d *schema.ResourceData, meta interface{}) error { +func resourceMariaDbVirtualNetworkRuleDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MariaDB.VirtualNetworkRulesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() @@ -196,7 +196,6 @@ func resourceArmMariaDbVirtualNetworkRuleDelete(d *schema.ResourceData, meta int func mariaDbVirtualNetworkStateStatusCodeRefreshFunc(ctx context.Context, client *mariadb.VirtualNetworkRulesClient, resourceGroup string, serverName string, name string) resource.StateRefreshFunc { return func() (interface{}, string, error) { resp, err := client.Get(ctx, resourceGroup, serverName, name) - if err != nil { if utils.ResponseWasNotFound(resp.Response) { log.Printf("[DEBUG] Retrieving MariaDb Virtual Network Rule %q (MariaDb Server: %q, Resource Group: %q) returned 404.", resourceGroup, serverName, name) diff --git a/azurerm/internal/services/mariadb/mariadb_virtual_network_rule_resource_test.go b/azurerm/internal/services/mariadb/mariadb_virtual_network_rule_resource_test.go new file mode 100644 index 000000000000..c15787ae2865 --- /dev/null +++ b/azurerm/internal/services/mariadb/mariadb_virtual_network_rule_resource_test.go @@ -0,0 +1,384 @@ +package mariadb_test + +import ( + "context" + "fmt" + "regexp" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MariaDbVirtualNetworkRuleResource struct { +} + +func TestAccMariaDbVirtualNetworkRule_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mariadb_virtual_network_rule", "test") + r := MariaDbVirtualNetworkRuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + }) +} + +func TestAccMariaDbVirtualNetworkRule_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mariadb_virtual_network_rule", "test") + r := MariaDbVirtualNetworkRuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_mariadb_virtual_network_rule"), + }, + }) +} + +func TestAccMariaDbVirtualNetworkRule_switchSubnets(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mariadb_virtual_network_rule", "test") + r := MariaDbVirtualNetworkRuleResource{} + + // Create regex strings that will ensure that one subnet name exists, but not the other + preConfigRegex := regexp.MustCompile(fmt.Sprintf("(subnet1%d)$|(subnet[^2]%d)$", data.RandomInteger, data.RandomInteger)) // subnet 1 but not 2 + postConfigRegex := regexp.MustCompile(fmt.Sprintf("(subnet2%d)$|(subnet[^1]%d)$", data.RandomInteger, data.RandomInteger)) // subnet 2 but not 1 + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.subnetSwitchPre(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + resource.TestMatchResourceAttr(data.ResourceName, "subnet_id", preConfigRegex), + ), + }, + { + Config: r.subnetSwitchPost(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + resource.TestMatchResourceAttr(data.ResourceName, "subnet_id", postConfigRegex), + ), + }, + }) +} + +func TestAccMariaDbVirtualNetworkRule_multipleSubnets(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mariadb_virtual_network_rule", "rule1") + r := MariaDbVirtualNetworkRuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.multipleSubnets(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That("azurerm_mariadb_virtual_network_rule.rule2").ExistsInAzure(r), + check.That("azurerm_mariadb_virtual_network_rule.rule3").ExistsInAzure(r), + ), + }, + }) +} + +func (MariaDbVirtualNetworkRuleResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + serverName := id.Path["servers"] + name := id.Path["virtualNetworkRules"] + + resp, err := clients.MariaDB.VirtualNetworkRulesClient.Get(ctx, id.ResourceGroup, serverName, name) + if err != nil { + return nil, fmt.Errorf("retrieving MariaDB Virtual Network Rule %q (Server %q / Resource Group %q): %v", name, serverName, id.ResourceGroup, err) + } + + return utils.Bool(resp.VirtualNetworkRuleProperties != nil), nil +} + +func (MariaDbVirtualNetworkRuleResource) 
basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestvnet%d" + address_space = ["10.7.29.0/29"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctestsubnet%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.7.29.0/29" + service_endpoints = ["Microsoft.Sql"] +} + +resource "azurerm_mariadb_server" "test" { + name = "acctestmariadbsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + version = "10.2" + ssl_enforcement_enabled = true + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } +} + +resource "azurerm_mariadb_virtual_network_rule" "test" { + name = "acctestmariadbvnetrule%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_mariadb_server.test.name + subnet_id = azurerm_subnet.test.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r MariaDbVirtualNetworkRuleResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_mariadb_virtual_network_rule" "import" { + name = azurerm_mariadb_virtual_network_rule.test.name + resource_group_name = azurerm_mariadb_virtual_network_rule.test.resource_group_name + server_name = azurerm_mariadb_virtual_network_rule.test.server_name + subnet_id = azurerm_mariadb_virtual_network_rule.test.subnet_id +} +`, r.basic(data)) +} + +func (MariaDbVirtualNetworkRuleResource) subnetSwitchPre(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestvnet%d" + address_space = ["10.7.29.0/24"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test1" { + name = "subnet1%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.7.29.0/25" + service_endpoints = ["Microsoft.Sql"] +} + +resource "azurerm_subnet" "test2" { + name = "subnet2%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.7.29.128/25" + service_endpoints = ["Microsoft.Sql"] +} + +resource "azurerm_mariadb_server" "test" { + name = "acctestmariadbsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" 
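+  # NOTE: the subnets above enable the Microsoft.Sql service endpoint, which the
+  # MariaDB virtual network rules created below rely on.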
+ version = "10.2" + ssl_enforcement_enabled = true + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } +} + +resource "azurerm_mariadb_virtual_network_rule" "test" { + name = "acctestmariadbvnetrule%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_mariadb_server.test.name + subnet_id = azurerm_subnet.test1.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (MariaDbVirtualNetworkRuleResource) subnetSwitchPost(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestvnet%d" + address_space = ["10.7.29.0/24"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test1" { + name = "subnet1%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.7.29.0/25" + service_endpoints = ["Microsoft.Sql"] +} + +resource "azurerm_subnet" "test2" { + name = "subnet2%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.7.29.128/25" + service_endpoints = ["Microsoft.Sql"] +} + +resource "azurerm_mariadb_server" "test" { + name = "acctestmariadbsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" 
+ version = "10.2" + ssl_enforcement_enabled = true + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } +} + +resource "azurerm_mariadb_virtual_network_rule" "test" { + name = "acctestmariadbvnetrule%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_mariadb_server.test.name + subnet_id = azurerm_subnet.test2.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (MariaDbVirtualNetworkRuleResource) multipleSubnets(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "vnet1" { + name = "acctestvnet1%d" + address_space = ["10.7.29.0/24"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_virtual_network" "vnet2" { + name = "acctestvnet2%d" + address_space = ["10.1.29.0/29"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "vnet1_subnet1" { + name = "acctestsubnet1%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.vnet1.name + address_prefix = "10.7.29.0/29" + service_endpoints = ["Microsoft.Sql"] +} + +resource "azurerm_subnet" "vnet1_subnet2" { + name = "acctestsubnet2%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.vnet1.name + address_prefix = "10.7.29.128/29" + service_endpoints = ["Microsoft.Sql"] +} + +resource "azurerm_subnet" "vnet2_subnet1" { + name = "acctestsubnet3%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.vnet2.name + address_prefix = "10.1.29.0/29" + service_endpoints = ["Microsoft.Sql"] +} + +resource "azurerm_mariadb_server" "test" { + name = "acctestmariadbsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" 
+ version = "10.2" + ssl_enforcement_enabled = true + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } +} + +resource "azurerm_mariadb_virtual_network_rule" "rule1" { + name = "acctestmariadbvnetrule1%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_mariadb_server.test.name + subnet_id = azurerm_subnet.vnet1_subnet1.id +} + +resource "azurerm_mariadb_virtual_network_rule" "rule2" { + name = "acctestmariadbvnetrule2%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_mariadb_server.test.name + subnet_id = azurerm_subnet.vnet1_subnet2.id +} + +resource "azurerm_mariadb_virtual_network_rule" "rule3" { + name = "acctestmariadbvnetrule3%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_mariadb_server.test.name + subnet_id = azurerm_subnet.vnet2_subnet1.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/mariadb/parse/mariadb.go b/azurerm/internal/services/mariadb/parse/mariadb.go deleted file mode 100644 index f13fbfb352e3..000000000000 --- a/azurerm/internal/services/mariadb/parse/mariadb.go +++ /dev/null @@ -1,33 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type MariaDbServerServerId struct { - ResourceGroup string - Name string -} - -func MariaDbServerServerID(input string) (*MariaDbServerServerId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("unable to parse MariaDB Server ID %q: %+v", input, err) - } - - server := MariaDbServerServerId{ - ResourceGroup: id.ResourceGroup, - } - - if server.Name, err = id.PopSegment("servers"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &server, nil -} diff --git a/azurerm/internal/services/mariadb/parse/mariadb_test.go b/azurerm/internal/services/mariadb/parse/mariadb_test.go deleted file mode 100644 index a8bfac754831..000000000000 --- a/azurerm/internal/services/mariadb/parse/mariadb_test.go +++ /dev/null @@ -1,68 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestValidateMariaDbServerServerID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *MariaDbServerServerId - }{ - { - Name: "Empty resource ID", - Input: "", - Expected: nil, - }, - { - Name: "No resourceGroups segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No resource group name", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource group", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/test-rg/", - Expected: nil, - }, - { - Name: "Missing server name", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/test-rg/providers/Microsoft.DBforMariaDB/servers/", - Expected: nil, - }, - { - Name: "Valid", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/test-rg/providers/Microsoft.DBforMariaDB/servers/test-mariadb", - Expected: &MariaDbServerServerId{ - Name: "test-mariadb", - ResourceGroup: "test-rg", - }, - }, - } - - for 
_, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := MariaDbServerServerID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/mariadb/parse/server.go b/azurerm/internal/services/mariadb/parse/server.go new file mode 100644 index 000000000000..2f5b768e25be --- /dev/null +++ b/azurerm/internal/services/mariadb/parse/server.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ServerId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewServerID(subscriptionId, resourceGroup, name string) ServerId { + return ServerId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id ServerId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Server", segmentsStr) +} + +func (id ServerId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DBforMariaDB/servers/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// ServerID parses a Server ID into an ServerId struct +func ServerID(input string) (*ServerId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ServerId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("servers"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/mariadb/parse/server_test.go b/azurerm/internal/services/mariadb/parse/server_test.go new file mode 100644 index 000000000000..b7800ebf6aa8 --- /dev/null +++ b/azurerm/internal/services/mariadb/parse/server_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ServerId{} + +func TestServerIDFormatter(t *testing.T) { + actual := NewServerID("12345678-1234-9876-4563-123456789012", "resGroup1", "server1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforMariaDB/servers/server1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestServerID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ServerId + }{ + + { + // empty + Input: "", + 
Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforMariaDB/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforMariaDB/servers/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforMariaDB/servers/server1", + Expected: &ServerId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "server1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DBFORMARIADB/SERVERS/SERVER1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ServerID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/mariadb/registration.go b/azurerm/internal/services/mariadb/registration.go index 0b7789a86de3..25f7bbf93964 100644 --- a/azurerm/internal/services/mariadb/registration.go +++ b/azurerm/internal/services/mariadb/registration.go @@ -28,9 +28,10 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource { // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_mariadb_configuration": resourceArmMariaDbConfiguration(), - "azurerm_mariadb_database": resourceArmMariaDbDatabase(), + "azurerm_mariadb_configuration": resourceMariaDbConfiguration(), + "azurerm_mariadb_database": resourceMariaDbDatabase(), "azurerm_mariadb_firewall_rule": resourceArmMariaDBFirewallRule(), - "azurerm_mariadb_server": resourceArmMariaDbServer(), - "azurerm_mariadb_virtual_network_rule": resourceArmMariaDbVirtualNetworkRule()} + "azurerm_mariadb_server": resourceMariaDbServer(), + "azurerm_mariadb_virtual_network_rule": resourceMariaDbVirtualNetworkRule(), + } } diff --git a/azurerm/internal/services/mariadb/resourceids.go b/azurerm/internal/services/mariadb/resourceids.go new file mode 100644 index 000000000000..3a1811e6dc45 --- /dev/null +++ b/azurerm/internal/services/mariadb/resourceids.go @@ -0,0 +1,3 @@ +package mariadb + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Server 
-id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforMariaDB/servers/server1 diff --git a/azurerm/internal/services/mariadb/tests/mariadb_configuration_resource_test.go b/azurerm/internal/services/mariadb/tests/mariadb_configuration_resource_test.go deleted file mode 100644 index ac2c0ab8d3ba..000000000000 --- a/azurerm/internal/services/mariadb/tests/mariadb_configuration_resource_test.go +++ /dev/null @@ -1,232 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMMariaDbConfiguration_characterSetServer(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mariadb_configuration", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMariaDbConfigurationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMariaDbConfiguration_characterSetServer(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbConfigurationValue(data.ResourceName, "hebrew"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMMariaDbConfiguration_empty(data), - Check: resource.ComposeTestCheckFunc( - // "delete" resets back to the default value - testCheckAzureRMMariaDbConfigurationValueReset(data.RandomInteger, "character_set_server"), - ), - }, - }, - }) -} - -func TestAccAzureRMMariaDbConfiguration_interactiveTimeout(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mariadb_configuration", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMariaDbConfigurationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMariaDbConfiguration_interactiveTimeout(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbConfigurationValue(data.ResourceName, "30"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMMariaDbConfiguration_empty(data), - Check: resource.ComposeTestCheckFunc( - // "delete" resets back to the default value - testCheckAzureRMMariaDbConfigurationValueReset(data.RandomInteger, "interactive_timeout"), - ), - }, - }, - }) -} - -func TestAccAzureRMMariaDbConfiguration_logSlowAdminStatements(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mariadb_configuration", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMariaDbConfigurationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMariaDbConfiguration_logSlowAdminStatements(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbConfigurationValue(data.ResourceName, "on"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMMariaDbConfiguration_empty(data), - Check: resource.ComposeTestCheckFunc( - // "delete" resets back to the default value - testCheckAzureRMMariaDbConfigurationValueReset(data.RandomInteger, "log_slow_admin_statements"), - ), - }, - }, - }) -} - -func 
testCheckAzureRMMariaDbConfigurationValue(resourceName string, value string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MariaDB.ConfigurationsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - serverName := rs.Primary.Attributes["server_name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for MariaDb Configuration: %s", name) - } - - resp, err := client.Get(ctx, resourceGroup, serverName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: MariaDb Configuration %q (server %q resource group: %q) does not exist", name, serverName, resourceGroup) - } - - return fmt.Errorf("Bad: Get on mariadbConfigurationsClient: %+v", err) - } - - if *resp.Value != value { - return fmt.Errorf("MariaDb Configuration wasn't set. Expected '%s' - got '%s': \n%+v", value, *resp.Value, resp) - } - - return nil - } -} - -func testCheckAzureRMMariaDbConfigurationValueReset(rInt int, configurationName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MariaDB.ConfigurationsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - resourceGroup := fmt.Sprintf("acctestRG-%d", rInt) - serverName := fmt.Sprintf("acctestmariadbsvr-%d", rInt) - - resp, err := client.Get(ctx, resourceGroup, serverName, configurationName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: MariaDb Configuration %q (server %q resource group: %q) does not exist", configurationName, serverName, resourceGroup) - } - return fmt.Errorf("Bad: Get on mariadbConfigurationsClient: %+v", err) - } - - actualValue := *resp.Value - defaultValue := *resp.DefaultValue - - if defaultValue != actualValue { - return fmt.Errorf("MariaDb Configuration wasn't set to the default value. 
Expected '%s' - got '%s': \n%+v", defaultValue, actualValue, resp) - } - - return nil - } -} - -func testCheckAzureRMMariaDbConfigurationDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MariaDB.ConfigurationsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_mariadb_configuration" { - continue - } - - name := rs.Primary.Attributes["name"] - serverName := rs.Primary.Attributes["server_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, serverName, name) - - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - } - - return nil -} - -func testAccAzureRMMariaDbConfiguration_characterSetServer(data acceptance.TestData) string { - return testAccAzureRMMariaDbConfiguration_template(data, "character_set_server", "hebrew") -} - -func testAccAzureRMMariaDbConfiguration_interactiveTimeout(data acceptance.TestData) string { - return testAccAzureRMMariaDbConfiguration_template(data, "interactive_timeout", "30") -} - -func testAccAzureRMMariaDbConfiguration_logSlowAdminStatements(data acceptance.TestData) string { - return testAccAzureRMMariaDbConfiguration_template(data, "log_slow_admin_statements", "on") -} - -func testAccAzureRMMariaDbConfiguration_template(data acceptance.TestData, name string, value string) string { - server := testAccAzureRMMariaDbConfiguration_empty(data) - config := fmt.Sprintf(` -resource "azurerm_mariadb_configuration" "test" { - name = "%s" - resource_group_name = "${azurerm_resource_group.test.name}" - server_name = "${azurerm_mariadb_server.test.name}" - value = "%s" -} -`, name, value) - return server + config -} - -func testAccAzureRMMariaDbConfiguration_empty(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_mariadb_server" "test" { - name = "acctestmariadbsvr-%d" - location = "${azurerm_resource_group.test.location}" - resource_group_name = "${azurerm_resource_group.test.name}" - sku_name = "GP_Gen5_2" - version = "10.2" - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- backup_retention_days = 7 - geo_redundant_backup_enabled = false - ssl_enforcement_enabled = true - storage_mb = 51200 -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/mariadb/tests/mariadb_database_resource_test.go b/azurerm/internal/services/mariadb/tests/mariadb_database_resource_test.go deleted file mode 100644 index 8721bd4f86eb..000000000000 --- a/azurerm/internal/services/mariadb/tests/mariadb_database_resource_test.go +++ /dev/null @@ -1,166 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMMariaDbDatabase_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mariadb_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMariaDbDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMariaDbDatabase_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "charset", "utf8"), - resource.TestCheckResourceAttr(data.ResourceName, "collation", "utf8_general_ci"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMariaDbDatabase_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mariadb_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMariaDbDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMariaDbDatabase_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbDatabaseExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMMariaDbDatabase_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_mariadb_database"), - }, - }, - }) -} - -func testCheckAzureRMMariaDbDatabaseExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MariaDB.DatabasesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("not found: %q", resourceName) - } - - name := rs.Primary.Attributes["name"] - serverName := rs.Primary.Attributes["server_name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("bad: no resource group found in state for MariaDB database: %q", name) - } - - resp, err := client.Get(ctx, resourceGroup, serverName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("bad: MariaDB database %q (Server %q Resource Group: %q) does not exist", name, serverName, resourceGroup) - } - return fmt.Errorf("bad: get on mariadbDatabasesClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMMariaDbDatabaseDestroy(s 
*terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MariaDB.DatabasesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_mariadb_database" { - continue - } - - name := rs.Primary.Attributes["name"] - serverName := rs.Primary.Attributes["server_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, serverName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - return fmt.Errorf("error MariaDB database %q (Resource Group %q) still exists:\n%+v", name, resourceGroup, err) - } - return fmt.Errorf("MariaDB database %q (Resource Group %q) still exists:\n%#+v", name, resourceGroup, resp) - } - - return nil -} - -func testAccAzureRMMariaDbDatabase_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = %q -} - -resource "azurerm_mariadb_server" "test" { - name = "acctestmariadbsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku_name = "B_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" - version = "10.2" - ssl_enforcement_enabled = true -} - -resource "azurerm_mariadb_database" "test" { - name = "acctestmariadb_%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_mariadb_server.test.name - charset = "utf8" - collation = "utf8_general_ci" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMMariaDbDatabase_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMMariaDbDatabase_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mariadb_database" "import" { - name = azurerm_mariadb_database.test.name - resource_group_name = azurerm_mariadb_database.test.resource_group_name - server_name = azurerm_mariadb_database.test.server_name - charset = azurerm_mariadb_database.test.charset - collation = azurerm_mariadb_database.test.collation -} -`, template) -} diff --git a/azurerm/internal/services/mariadb/tests/mariadb_firewall_rule_resource_test.go b/azurerm/internal/services/mariadb/tests/mariadb_firewall_rule_resource_test.go deleted file mode 100644 index 4422f8c84f66..000000000000 --- a/azurerm/internal/services/mariadb/tests/mariadb_firewall_rule_resource_test.go +++ /dev/null @@ -1,164 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMMariaDbFirewallRule_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mariadb_firewall_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMariaDbFirewallRuleDestroy, - Steps: []resource.TestStep{ - { - Config: 
testAccAzureRMMariaDbFirewallRule_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbFirewallRuleExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMariaDbFirewallRule_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mariadb_firewall_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMariaDbFirewallRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMariaDbFirewallRule_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbFirewallRuleExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMMariaDbFirewallRule_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_mariadb_firewall_rule"), - }, - }, - }) -} - -func testCheckAzureRMMariaDbFirewallRuleExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MariaDB.FirewallRulesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - serverName := rs.Primary.Attributes["server_name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for MariaDB Firewall Rule: %s", name) - } - - resp, err := client.Get(ctx, resourceGroup, serverName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: MariaDB Firewall Rule %q (server %q resource group: %q) does not exist", name, serverName, resourceGroup) - } - return fmt.Errorf("Bad: Get on mariadbFirewallRulesClient: %s", err) - } - - return nil - } -} - -func testCheckAzureRMMariaDbFirewallRuleDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MariaDB.DatabasesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_mariadb_firewall_rule" { - continue - } - - name := rs.Primary.Attributes["name"] - serverName := rs.Primary.Attributes["server_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, serverName, name) - - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return fmt.Errorf("MariaDB Firewall Rule still exists:\n%#v", resp) - } - } - - return nil -} - -func testAccAzureRMMariaDbFirewallRule_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_mariadb_server" "test" { - name = "acctestmariadbsvr-%d" - location = "${azurerm_resource_group.test.location}" - resource_group_name = "${azurerm_resource_group.test.name}" - - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- version = "10.2" - ssl_enforcement_enabled = true -} - -resource "azurerm_mariadb_firewall_rule" "test" { - name = "acctestfwrule-%d" - resource_group_name = "${azurerm_resource_group.test.name}" - server_name = "${azurerm_mariadb_server.test.name}" - start_ip_address = "0.0.0.0" - end_ip_address = "255.255.255.255" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMMariaDbFirewallRule_requiresImport(data acceptance.TestData) string { - return fmt.Sprintf(` -%s - -resource "azurerm_mariadb_firewall_rule" "import" { - name = azurerm_mariadb_firewall_rule.test.name - resource_group_name = azurerm_mariadb_firewall_rule.test.resource_group_name - server_name = azurerm_mariadb_firewall_rule.test.server_name - start_ip_address = azurerm_mariadb_firewall_rule.test.start_ip_address - end_ip_address = azurerm_mariadb_firewall_rule.test.end_ip_address -} -`, testAccAzureRMMariaDbFirewallRule_basic(data)) -} diff --git a/azurerm/internal/services/mariadb/tests/mariadb_server_data_source_test.go b/azurerm/internal/services/mariadb/tests/mariadb_server_data_source_test.go deleted file mode 100644 index 6746ab4e0009..000000000000 --- a/azurerm/internal/services/mariadb/tests/mariadb_server_data_source_test.go +++ /dev/null @@ -1,67 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMMariaDbServer_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_mariadb_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMariaDbServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMMariaDbServer_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbServerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "administrator_login", "acctestun"), - resource.TestCheckResourceAttr(data.ResourceName, "version", "10.2"), - resource.TestCheckResourceAttr(data.ResourceName, "ssl_enforcement", "Enabled"), - ), - }, - }, - }) -} - -func testAccDataSourceAzureRMMariaDbServer_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-maria-%d" - location = "%s" -} - -resource "azurerm_mariadb_server" "test" { - name = "acctestmariadbsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku_name = "B_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- version = "10.2" - ssl_enforcement_enabled = true -} - -data "azurerm_mariadb_server" "test" { - name = azurerm_mariadb_server.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/mariadb/tests/mariadb_server_resource_test.go b/azurerm/internal/services/mariadb/tests/mariadb_server_resource_test.go deleted file mode 100644 index 2249eb49c52d..000000000000 --- a/azurerm/internal/services/mariadb/tests/mariadb_server_resource_test.go +++ /dev/null @@ -1,604 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - "time" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMMariaDbServer_basicTenTwo(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mariadb_server", "test") - version := "10.2" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMariaDbServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMariaDbServer_basic(data, version), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbServerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "version", version), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - }, - }) -} - -func TestAccAzureRMMariaDbServer_basicTenTwoDeprecated(t *testing.T) { // remove in v3.0 - data := acceptance.BuildTestData(t, "azurerm_mariadb_server", "test") - version := "10.2" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMariaDbServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMariaDbServer_basicDeprecated(data, version), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbServerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "version", version), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - }, - }) -} - -func TestAccAzureRMMariaDbServer_basicTenThree(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mariadb_server", "test") - version := "10.3" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMariaDbServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMariaDbServer_basic(data, version), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbServerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "version", version), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - }, - }) -} - -func TestAccAzureRMMariaDbServer_autogrowOnly(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mariadb_server", "test") - version := "10.3" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: 
testCheckAzureRMMariaDbServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMariaDbServer_autogrow(data, version), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - }, - }) -} - -func TestAccAzureRMMariaDbServer_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mariadb_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMariaDbServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMariaDbServer_basic(data, "10.3"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbServerExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMMariaDbServer_requiresImport), - }, - }) -} - -func TestAccAzureRMMariaDbServer_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mariadb_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMariaDbServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMariaDbServer_complete(data, "10.3"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - }, - }) -} - -func TestAccAzureRMMariaDbServer_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mariadb_server", "test") - version := "10.3" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMariaDbServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMariaDbServer_basic(data, version), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - { - Config: testAccAzureRMMariaDbServer_complete(data, version), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - { - Config: testAccAzureRMMariaDbServer_basic(data, version), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - }, - }) -} - -func TestAccAzureRMMariaDbServer_completeDeprecatedMigrate(t *testing.T) { // remove in v3.0 - data := acceptance.BuildTestData(t, "azurerm_mariadb_server", "test") - version := "10.3" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMariaDbServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMariaDbServer_completeDeprecated(data, version), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - { - Config: testAccAzureRMMariaDbServer_complete(data, version), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMMariaDbServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - }, - }) -} - -func TestAccAzureRMMariaDbServer_updateDeprecated(t *testing.T) { // remove in v3.0 - data := acceptance.BuildTestData(t, "azurerm_mariadb_server", "test") - version := "10.2" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMariaDbServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMariaDbServer_basicDeprecated(data, version), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - { - Config: testAccAzureRMMariaDbServer_completeDeprecated(data, version), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - { - Config: testAccAzureRMMariaDbServer_basicDeprecated(data, version), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - }, - }) -} - -func TestAccAzureRMMariaDbServer_updateSKU(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mariadb_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMariaDbServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMariaDbServer_sku(data, "GP_Gen5_32"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - { - Config: testAccAzureRMMariaDbServer_sku(data, "MO_Gen5_16"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - }, - }) -} - -func TestAccAzureRMMariaDbServer_createReplica(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mariadb_server", "test") - version := "10.3" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMariaDbServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMariaDbServer_basic(data, version), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - { - Config: testAccAzureRMMariaDbServer_createReplica(data, version), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbServerExists(data.ResourceName), - testCheckAzureRMMariaDbServerExists("azurerm_mariadb_server.replica"), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - }, - }) -} - -func TestAccAzureRMMariaDbServer_createPointInTimeRestore(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mariadb_server", "test") - restoreTime := time.Now().Add(11 * time.Minute) - version := "10.3" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: 
acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMariaDbServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMariaDbServer_basic(data, version), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - { - PreConfig: func() { time.Sleep(restoreTime.Sub(time.Now().Add(-7 * time.Minute))) }, - Config: testAccAzureRMMariaDbServer_createPointInTimeRestore(data, version, restoreTime.Format(time.RFC3339)), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbServerExists(data.ResourceName), - testCheckAzureRMMariaDbServerExists("azurerm_mariadb_server.restore"), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - }, - }) -} - -func testCheckAzureRMMariaDbServerExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MariaDB.ServersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for MariaDB Server: %s", name) - } - - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: MariaDB Server %q (resource group: %q) does not exist", name, resourceGroup) - } - - return fmt.Errorf("Bad: Get on mariadbServersClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMMariaDbServerDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MariaDB.ServersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_mariadb_server" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - - return fmt.Errorf("MariaDB Server still exists:\n%#v", resp) - } - - return nil -} - -func testAccAzureRMMariaDbServer_basic(data acceptance.TestData, version string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_mariadb_server" "test" { - name = "acctestmariadbsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku_name = "B_Gen5_2" - version = "%s" - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- ssl_enforcement_enabled = true - storage_mb = 51200 -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, version) -} - -func testAccAzureRMMariaDbServer_basicDeprecated(data acceptance.TestData, version string) string { // remove in v3.0 - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_mariadb_server" "test" { - name = "acctestmariadbsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku_name = "B_Gen5_2" - version = "%s" - - storage_profile { - storage_mb = 51200 - } - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" - ssl_enforcement_enabled = true -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, version) -} - -func testAccAzureRMMariaDbServer_complete(data acceptance.TestData, version string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_mariadb_server" "test" { - name = "acctestmariadbsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku_name = "B_Gen5_2" - version = "%s" - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" - auto_grow_enabled = true - backup_retention_days = 14 - create_mode = "Default" - geo_redundant_backup_enabled = false - ssl_enforcement_enabled = true - storage_mb = 51200 -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, version) -} - -func testAccAzureRMMariaDbServer_completeDeprecated(data acceptance.TestData, version string) string { // remove in v3.0 - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_mariadb_server" "test" { - name = "acctestmariadbsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku_name = "B_Gen5_2" - version = "%s" - - storage_profile { - auto_grow = "Enabled" - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - storage_mb = 51200 - } - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" - create_mode = "Default" - ssl_enforcement_enabled = true -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, version) -} - -func testAccAzureRMMariaDbServer_autogrow(data acceptance.TestData, version string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_mariadb_server" "test" { - name = "acctestmariadbsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku_name = "B_Gen5_2" - version = "%s" - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- auto_grow_enabled = true - backup_retention_days = 7 - geo_redundant_backup_enabled = false - ssl_enforcement_enabled = true - storage_mb = 51200 -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, version) -} - -func testAccAzureRMMariaDbServer_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMMariaDbServer_basic(data, "10.3") - return fmt.Sprintf(` -%s - -resource "azurerm_mariadb_server" "import" { - name = azurerm_mariadb_server.test.name - location = azurerm_mariadb_server.test.location - resource_group_name = azurerm_mariadb_server.test.resource_group_name - sku_name = "B_Gen5_2" - version = "10.3" - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" - backup_retention_days = 7 - geo_redundant_backup_enabled = false - ssl_enforcement_enabled = true - storage_mb = 51200 -} -`, template) -} - -func testAccAzureRMMariaDbServer_sku(data acceptance.TestData, sku string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_mariadb_server" "test" { - name = "acctestmariadbsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku_name = "%s" - version = "10.2" - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" - backup_retention_days = 7 - geo_redundant_backup_enabled = false - ssl_enforcement_enabled = true - storage_mb = 640000 -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, sku) -} - -func testAccAzureRMMariaDbServer_createReplica(data acceptance.TestData, version string) string { - return fmt.Sprintf(` -%s - -resource "azurerm_mariadb_server" "replica" { - name = "acctestmariadbsvr-%d-replica" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku_name = "B_Gen5_2" - version = "%s" - create_mode = "Replica" - creation_source_server_id = azurerm_mariadb_server.test.id - ssl_enforcement_enabled = true - storage_mb = 51200 -} -`, testAccAzureRMMariaDbServer_basic(data, version), data.RandomInteger, version) -} - -func testAccAzureRMMariaDbServer_createPointInTimeRestore(data acceptance.TestData, version, restoreTime string) string { - return fmt.Sprintf(` -%s - -resource "azurerm_mariadb_server" "restore" { - name = "acctestmariadbsvr-%d-restore" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku_name = "B_Gen5_2" - version = "%s" - create_mode = "PointInTimeRestore" - creation_source_server_id = azurerm_mariadb_server.test.id - restore_point_in_time = "%s" - ssl_enforcement_enabled = true - storage_mb = 51200 -} -`, testAccAzureRMMariaDbServer_basic(data, version), data.RandomInteger, version, restoreTime) -} diff --git a/azurerm/internal/services/mariadb/tests/mariadb_virtual_network_rule_resource_test.go b/azurerm/internal/services/mariadb/tests/mariadb_virtual_network_rule_resource_test.go deleted file mode 100644 index 43e73b322a9a..000000000000 --- a/azurerm/internal/services/mariadb/tests/mariadb_virtual_network_rule_resource_test.go +++ /dev/null @@ -1,494 +0,0 @@ -package tests - -import ( - "fmt" - "regexp" - "testing" - - "github.com/hashicorp/go-azure-helpers/response" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMMariaDbVirtualNetworkRule_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mariadb_virtual_network_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMariaDbVirtualNetworkRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMariaDbVirtualNetworkRule_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbVirtualNetworkRuleExists(data.ResourceName), - ), - }, - }, - }) -} - -func TestAccAzureRMMariaDbVirtualNetworkRule_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mariadb_virtual_network_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMariaDbVirtualNetworkRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMariaDbVirtualNetworkRule_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbVirtualNetworkRuleExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMMariaDbVirtualNetworkRule_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_mariadb_virtual_network_rule"), - }, - }, - }) -} - -func TestAccAzureRMMariaDbVirtualNetworkRule_switchSubnets(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mariadb_virtual_network_rule", "test") - - // Create regex strings that will ensure that one subnet name exists, but not the other - preConfigRegex := regexp.MustCompile(fmt.Sprintf("(subnet1%d)$|(subnet[^2]%d)$", data.RandomInteger, data.RandomInteger)) // subnet 1 but not 2 - postConfigRegex := regexp.MustCompile(fmt.Sprintf("(subnet2%d)$|(subnet[^1]%d)$", data.RandomInteger, data.RandomInteger)) // subnet 2 but not 1 - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMariaDbVirtualNetworkRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMariaDbVirtualNetworkRule_subnetSwitchPre(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbVirtualNetworkRuleExists(data.ResourceName), - resource.TestMatchResourceAttr(data.ResourceName, "subnet_id", preConfigRegex), - ), - }, - { - Config: testAccAzureRMMariaDbVirtualNetworkRule_subnetSwitchPost(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbVirtualNetworkRuleExists(data.ResourceName), - resource.TestMatchResourceAttr(data.ResourceName, "subnet_id", postConfigRegex), - ), - }, - }, - }) -} - -func TestAccAzureRMMariaDbVirtualNetworkRule_disappears(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mariadb_virtual_network_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMariaDbVirtualNetworkRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMariaDbVirtualNetworkRule_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbVirtualNetworkRuleExists(data.ResourceName), - 
testCheckAzureRMMariaDbVirtualNetworkRuleDisappears(data.ResourceName), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func TestAccAzureRMMariaDbVirtualNetworkRule_multipleSubnets(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mariadb_virtual_network_rule", "rule1") - resourceName2 := "azurerm_mariadb_virtual_network_rule.rule2" - resourceName3 := "azurerm_mariadb_virtual_network_rule.rule3" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMariaDbVirtualNetworkRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMariaDbVirtualNetworkRule_multipleSubnets(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMariaDbVirtualNetworkRuleExists(data.ResourceName), - testCheckAzureRMMariaDbVirtualNetworkRuleExists(resourceName2), - testCheckAzureRMMariaDbVirtualNetworkRuleExists(resourceName3), - ), - }, - }, - }) -} - -func testCheckAzureRMMariaDbVirtualNetworkRuleExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MariaDB.VirtualNetworkRulesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serverName := rs.Primary.Attributes["server_name"] - ruleName := rs.Primary.Attributes["name"] - - resp, err := client.Get(ctx, resourceGroup, serverName, ruleName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: MariaDB Virtual Network Rule %q (Server %q / Resource Group %q) was not found", ruleName, serverName, resourceGroup) - } - - return err - } - - return nil - } -} - -func testCheckAzureRMMariaDbVirtualNetworkRuleDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MariaDB.VirtualNetworkRulesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_mariadb_virtual_network_rule" { - continue - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serverName := rs.Primary.Attributes["server_name"] - ruleName := rs.Primary.Attributes["name"] - - resp, err := client.Get(ctx, resourceGroup, serverName, ruleName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - - return fmt.Errorf("Bad: MariaDB Firewall Rule %q (Server %q / Resource Group %q) still exists: %+v", ruleName, serverName, resourceGroup, resp) - } - - return nil -} - -func testCheckAzureRMMariaDbVirtualNetworkRuleDisappears(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MariaDB.VirtualNetworkRulesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serverName := rs.Primary.Attributes["server_name"] - ruleName := rs.Primary.Attributes["name"] - - future, err := client.Delete(ctx, resourceGroup, serverName, ruleName) - if err != nil { - // If 
the error is that the resource we want to delete does not exist in the first - // place (404), then just return with no error. - if response.WasNotFound(future.Response()) { - return nil - } - - return fmt.Errorf("Error deleting MariaDB Virtual Network Rule: %+v", err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - // Same deal as before. Just in case. - if response.WasNotFound(future.Response()) { - return nil - } - - return fmt.Errorf("Error deleting MariaDB Virtual Network Rule: %+v", err) - } - - return nil - } -} - -func testAccAzureRMMariaDbVirtualNetworkRule_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctestvnet%d" - address_space = ["10.7.29.0/29"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctestsubnet%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.7.29.0/29" - service_endpoints = ["Microsoft.Sql"] -} - -resource "azurerm_mariadb_server" "test" { - name = "acctestmariadbsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" - version = "10.2" - ssl_enforcement_enabled = true - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } -} - -resource "azurerm_mariadb_virtual_network_rule" "test" { - name = "acctestmariadbvnetrule%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_mariadb_server.test.name - subnet_id = azurerm_subnet.test.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMMariaDbVirtualNetworkRule_requiresImport(data acceptance.TestData) string { - return fmt.Sprintf(` -%s - -resource "azurerm_mariadb_virtual_network_rule" "import" { - name = azurerm_mariadb_virtual_network_rule.test.name - resource_group_name = azurerm_mariadb_virtual_network_rule.test.resource_group_name - server_name = azurerm_mariadb_virtual_network_rule.test.server_name - subnet_id = azurerm_mariadb_virtual_network_rule.test.subnet_id -} -`, testAccAzureRMMariaDbVirtualNetworkRule_basic(data)) -} - -func testAccAzureRMMariaDbVirtualNetworkRule_subnetSwitchPre(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctestvnet%d" - address_space = ["10.7.29.0/24"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test1" { - name = "subnet1%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.7.29.0/25" - service_endpoints = ["Microsoft.Sql"] -} - -resource "azurerm_subnet" "test2" { - name = "subnet2%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = 
"10.7.29.128/25" - service_endpoints = ["Microsoft.Sql"] -} - -resource "azurerm_mariadb_server" "test" { - name = "acctestmariadbsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" - version = "10.2" - ssl_enforcement_enabled = true - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } -} - -resource "azurerm_mariadb_virtual_network_rule" "test" { - name = "acctestmariadbvnetrule%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_mariadb_server.test.name - subnet_id = azurerm_subnet.test1.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMMariaDbVirtualNetworkRule_subnetSwitchPost(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctestvnet%d" - address_space = ["10.7.29.0/24"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test1" { - name = "subnet1%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.7.29.0/25" - service_endpoints = ["Microsoft.Sql"] -} - -resource "azurerm_subnet" "test2" { - name = "subnet2%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.7.29.128/25" - service_endpoints = ["Microsoft.Sql"] -} - -resource "azurerm_mariadb_server" "test" { - name = "acctestmariadbsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- version = "10.2" - ssl_enforcement_enabled = true - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } -} - -resource "azurerm_mariadb_virtual_network_rule" "test" { - name = "acctestmariadbvnetrule%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_mariadb_server.test.name - subnet_id = azurerm_subnet.test2.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMMariaDbVirtualNetworkRule_multipleSubnets(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "vnet1" { - name = "acctestvnet1%d" - address_space = ["10.7.29.0/24"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_virtual_network" "vnet2" { - name = "acctestvnet2%d" - address_space = ["10.1.29.0/29"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "vnet1_subnet1" { - name = "acctestsubnet1%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.vnet1.name - address_prefix = "10.7.29.0/29" - service_endpoints = ["Microsoft.Sql"] -} - -resource "azurerm_subnet" "vnet1_subnet2" { - name = "acctestsubnet2%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.vnet1.name - address_prefix = "10.7.29.128/29" - service_endpoints = ["Microsoft.Sql"] -} - -resource "azurerm_subnet" "vnet2_subnet1" { - name = "acctestsubnet3%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.vnet2.name - address_prefix = "10.1.29.0/29" - service_endpoints = ["Microsoft.Sql"] -} - -resource "azurerm_mariadb_server" "test" { - name = "acctestmariadbsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- version = "10.2" - ssl_enforcement_enabled = true - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } -} - -resource "azurerm_mariadb_virtual_network_rule" "rule1" { - name = "acctestmariadbvnetrule1%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_mariadb_server.test.name - subnet_id = azurerm_subnet.vnet1_subnet1.id -} - -resource "azurerm_mariadb_virtual_network_rule" "rule2" { - name = "acctestmariadbvnetrule2%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_mariadb_server.test.name - subnet_id = azurerm_subnet.vnet1_subnet2.id -} - -resource "azurerm_mariadb_virtual_network_rule" "rule3" { - name = "acctestmariadbvnetrule3%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_mariadb_server.test.name - subnet_id = azurerm_subnet.vnet2_subnet1.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/mariadb/validate/mariadb.go b/azurerm/internal/services/mariadb/validate/mariadb.go deleted file mode 100644 index a7aadae3947c..000000000000 --- a/azurerm/internal/services/mariadb/validate/mariadb.go +++ /dev/null @@ -1,30 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mariadb/parse" -) - -func MariaDbServerServerID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return warnings, errors - } - - if _, err := parse.MariaDbServerServerID(v); err != nil { - errors = append(errors, fmt.Errorf("cannot parse %q as a MariaDB Server resource id: %v", k, err)) - } - - return warnings, errors -} - -func MariaDbServerServerName(i interface{}, k string) (_ []string, errors []error) { - if m, regexErrs := validate.RegExHelper(i, k, `^[0-9a-z][-0-9a-z]{1,61}[0-9a-z]$`); !m { - return nil, append(regexErrs, fmt.Errorf("%q can contain only lowercase letters, numbers, and '-', but can't start or end with '-', and must be at least 3 characters and no more than 63 characters long.", k)) - } - - return nil, nil -} diff --git a/azurerm/internal/services/mariadb/validate/mariadb_test.go b/azurerm/internal/services/mariadb/validate/mariadb_test.go deleted file mode 100644 index 481d3320fafa..000000000000 --- a/azurerm/internal/services/mariadb/validate/mariadb_test.go +++ /dev/null @@ -1,106 +0,0 @@ -package validate - -import ( - "testing" -) - -func TestValidateMariaDbServerServerID(t *testing.T) { - testData := []struct { - input string - expected bool - }{ - { - // empty - input: "", - expected: false, - }, - { - // invalid - input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/test-rg", - expected: false, - }, - { - // valid - input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/test-rg/providers/Microsoft.DBforMariaDB/servers/test-mariadb", - expected: true, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q..", v.input) - - _, errors := MariaDbServerServerID(v.input, "name") - actual := len(errors) == 0 - if v.expected != actual { - 
t.Fatalf("Expected %t but got %t", v.expected, actual) - } - } -} - -func TestValidateMariaDbServerServerName(t *testing.T) { - testData := []struct { - input string - expected bool - }{ - { - // empty - input: "", - expected: false, - }, - { - // basic example - input: "ab-c", - expected: true, - }, - { - // can't contain upper case letter - input: "AbcD", - expected: false, - }, - { - // can't start with a hyphen - input: "-abc", - expected: false, - }, - { - // can't contain underscore - input: "ab_c", - expected: false, - }, - { - // can't end with hyphen - input: "abc-", - expected: false, - }, - { - // can not be shorter than 3 characters - input: "ab", - expected: false, - }, - { - // can not be shorter than 3 characters (catching bad regex) - input: "a", - expected: false, - }, - { - // 63 chars - input: "abcdefghijklmnopqrstuvwxyzabcdefabcdefghijklmnopqrstuvwxyzabcde", - expected: true, - }, - { - // 64 chars - input: "abcdefghijklmnopqrstuvwxyzabcdefabcdefghijklmnopqrstuvwxyzabcdef", - expected: false, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q..", v.input) - - _, errors := MariaDbServerServerName(v.input, "name") - actual := len(errors) == 0 - if v.expected != actual { - t.Fatalf("Expected %t but got %t", v.expected, actual) - } - } -} diff --git a/azurerm/internal/services/mariadb/validate/server_id.go b/azurerm/internal/services/mariadb/validate/server_id.go new file mode 100644 index 000000000000..ef38d981ae9d --- /dev/null +++ b/azurerm/internal/services/mariadb/validate/server_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mariadb/parse" +) + +func ServerID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ServerID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/mariadb/validate/server_id_test.go b/azurerm/internal/services/mariadb/validate/server_id_test.go new file mode 100644 index 000000000000..2f1d7e91db3e --- /dev/null +++ b/azurerm/internal/services/mariadb/validate/server_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestServerID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforMariaDB/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforMariaDB/servers/", + Valid: false, + }, + + { + // valid + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforMariaDB/servers/server1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DBFORMARIADB/SERVERS/SERVER1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ServerID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/mariadb/validate/server_name.go b/azurerm/internal/services/mariadb/validate/server_name.go new file mode 100644 index 000000000000..75d51248bb67 --- /dev/null +++ b/azurerm/internal/services/mariadb/validate/server_name.go @@ -0,0 +1,15 @@ +package validate + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" +) + +func ServerName(i interface{}, k string) (_ []string, errors []error) { + if m, regexErrs := validate.RegExHelper(i, k, `^[0-9a-z][-0-9a-z]{1,61}[0-9a-z]$`); !m { + return nil, append(regexErrs, fmt.Errorf("%q can contain only lowercase letters, numbers, and '-', but can't start or end with '-', and must be at least 3 characters and no more than 63 characters long.", k)) + } + + return nil, nil +} diff --git a/azurerm/internal/services/mariadb/validate/server_name_test.go b/azurerm/internal/services/mariadb/validate/server_name_test.go new file mode 100644 index 000000000000..80126b27b209 --- /dev/null +++ b/azurerm/internal/services/mariadb/validate/server_name_test.go @@ -0,0 +1,73 @@ +package validate + +import ( + "testing" +) + +func TestValidateServerName(t *testing.T) { + testData := []struct { + input string + expected bool + }{ + { + // empty + input: "", + expected: false, + }, + { + // basic example + input: "ab-c", + expected: true, + }, + { + // can't contain upper case letter + input: "AbcD", + expected: false, + }, + { + // can't start with a hyphen + input: "-abc", + expected: false, + }, + { + // can't contain underscore + input: "ab_c", + expected: false, + }, + { + // can't end with hyphen + input: "abc-", + expected: false, + }, + { + // can not be shorter than 3 characters + input: "ab", + expected: false, + }, + { + // can not be shorter than 3 characters (catching bad regex) + input: "a", + expected: false, + }, + { + // 63 chars + input: "abcdefghijklmnopqrstuvwxyzabcdefabcdefghijklmnopqrstuvwxyzabcde", + expected: true, + }, + { + // 64 chars + input: "abcdefghijklmnopqrstuvwxyzabcdefabcdefghijklmnopqrstuvwxyzabcdef", + expected: false, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q..", v.input) + + _, errors := ServerName(v.input, "name") + actual := len(errors) == 0 + if v.expected != actual { + t.Fatalf("Expected %t but got %t", v.expected, actual) + } + } +} diff --git a/azurerm/internal/services/media/client/client.go b/azurerm/internal/services/media/client/client.go index e86fae7fc4c9..8c827eb0767a 100644 --- a/azurerm/internal/services/media/client/client.go +++ b/azurerm/internal/services/media/client/client.go @@ -1,19 +1,44 @@ package client import ( - "github.com/Azure/azure-sdk-for-go/services/mediaservices/mgmt/2018-07-01/media" + "github.com/Azure/azure-sdk-for-go/services/mediaservices/mgmt/2020-05-01/media" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/common" ) type Client struct { - ServicesClient *media.MediaservicesClient + 
ServicesClient *media.MediaservicesClient + AssetsClient *media.AssetsClient + TransformsClient *media.TransformsClient + StreamingEndpointsClient *media.StreamingEndpointsClient + JobsClient *media.JobsClient + StreamingLocatorsClient *media.StreamingLocatorsClient } func NewClient(o *common.ClientOptions) *Client { ServicesClient := media.NewMediaservicesClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) o.ConfigureClient(&ServicesClient.Client, o.ResourceManagerAuthorizer) + AssetsClient := media.NewAssetsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + o.ConfigureClient(&AssetsClient.Client, o.ResourceManagerAuthorizer) + + TransformsClient := media.NewTransformsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + o.ConfigureClient(&TransformsClient.Client, o.ResourceManagerAuthorizer) + + StreamingEndpointsClient := media.NewStreamingEndpointsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + o.ConfigureClient(&StreamingEndpointsClient.Client, o.ResourceManagerAuthorizer) + + JobsClient := media.NewJobsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + o.ConfigureClient(&JobsClient.Client, o.ResourceManagerAuthorizer) + + StreamingLocatorsClient := media.NewStreamingLocatorsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + o.ConfigureClient(&StreamingLocatorsClient.Client, o.ResourceManagerAuthorizer) + return &Client{ - ServicesClient: &ServicesClient, + ServicesClient: &ServicesClient, + AssetsClient: &AssetsClient, + TransformsClient: &TransformsClient, + StreamingEndpointsClient: &StreamingEndpointsClient, + JobsClient: &JobsClient, + StreamingLocatorsClient: &StreamingLocatorsClient, } } diff --git a/azurerm/internal/services/media/media_asset_resource.go b/azurerm/internal/services/media/media_asset_resource.go new file mode 100644 index 000000000000..5d1c4de7e01e --- /dev/null +++ b/azurerm/internal/services/media/media_asset_resource.go @@ -0,0 +1,198 @@ +package media + +import ( + "fmt" + "log" + "regexp" + "time" + + "github.com/Azure/azure-sdk-for-go/services/mediaservices/mgmt/2020-05-01/media" + "github.com/hashicorp/go-azure-helpers/response" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/media/parse" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceMediaAsset() *schema.Resource { + return &schema.Resource{ + Create: resourceMediaAssetCreateUpdate, + Read: resourceMediaAssetRead, + Update: resourceMediaAssetCreateUpdate, + Delete: resourceAssetDelete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error 
{ + _, err := parse.AssetID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringMatch( + regexp.MustCompile("^[-a-zA-Z0-9]{1,128}$"), + "Asset name must be 1 - 128 characters long, contain only letters, hyphen and numbers.", + ), + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "media_services_account_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringMatch( + regexp.MustCompile("^[-a-z0-9]{3,24}$"), + "Media Services Account name must be 3 - 24 characters long, contain only lowercase letters and numbers.", + ), + }, + + "alternate_id": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "container": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Computed: true, + ValidateFunc: validate.StorageContainerName, + }, + + "description": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "storage_account_name": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Computed: true, + ValidateFunc: validation.StringMatch( + regexp.MustCompile("^([a-z0-9]{3,24})$"), + "Storage Account Name can only consist of lowercase letters and numbers, and must be between 3 and 24 characters long.", + ), + }, + }, + } +} + +func resourceMediaAssetCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Media.AssetsClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + resourceId := parse.NewAssetID(subscriptionId, d.Get("resource_group_name").(string), d.Get("media_services_account_name").(string), d.Get("name").(string)) + if d.IsNewResource() { + existing, err := client.Get(ctx, resourceId.ResourceGroup, resourceId.MediaserviceName, resourceId.Name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for existing %s: %+v", resourceId, err) + } + } + + if !utils.ResponseWasNotFound(existing.Response) { + return tf.ImportAsExistsError("azurerm_media_asset", resourceId.ID()) + } + } + + parameters := media.Asset{ + AssetProperties: &media.AssetProperties{ + Description: utils.String(d.Get("description").(string)), + }, + } + + if v, ok := d.GetOk("container"); ok { + parameters.Container = utils.String(v.(string)) + } + + if v, ok := d.GetOk("alternate_id"); ok { + parameters.AlternateID = utils.String(v.(string)) + } + + if v, ok := d.GetOk("storage_account_name"); ok { + parameters.StorageAccountName = utils.String(v.(string)) + } + + if _, err := client.CreateOrUpdate(ctx, resourceId.ResourceGroup, resourceId.MediaserviceName, resourceId.Name, parameters); err != nil { + return fmt.Errorf("creating %s: %+v", resourceId, err) + } + + d.SetId(resourceId.ID()) + return resourceMediaAssetRead(d, meta) +} + +func resourceMediaAssetRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Media.AssetsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.AssetID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.MediaserviceName, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[INFO] %s was not found - removing 
from state", id) + d.SetId("") + return nil + } + + return fmt.Errorf("retrieving %s: %+v", id, err) + } + + d.Set("name", id.Name) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("media_services_account_name", id.MediaserviceName) + + if props := resp.AssetProperties; props != nil { + d.Set("description", props.Description) + d.Set("alternate_id", props.AlternateID) + d.Set("container", props.Container) + d.Set("storage_account_name", props.StorageAccountName) + } + + return nil +} + +func resourceAssetDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Media.AssetsClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.AssetID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Delete(ctx, id.ResourceGroup, id.MediaserviceName, id.Name) + if err != nil { + if !response.WasNotFound(resp.Response) { + return fmt.Errorf("deleting %s: %+v", id, err) + } + } + + return nil +} diff --git a/azurerm/internal/services/media/media_asset_resource_test.go b/azurerm/internal/services/media/media_asset_resource_test.go new file mode 100644 index 000000000000..295e262a3cd3 --- /dev/null +++ b/azurerm/internal/services/media/media_asset_resource_test.go @@ -0,0 +1,192 @@ +package media_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/media/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MediaAssetResource struct { +} + +func TestAccMediaAsset_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_media_asset", "test") + r := MediaAssetResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("name").HasValue("Asset-Content1"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMediaAsset_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_media_asset", "test") + r := MediaAssetResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("name").HasValue("Asset-Content1"), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestMediaAccAsset_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_media_asset", "test") + r := MediaAssetResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("alternate_id").HasValue("Asset-alternateid"), + check.That(data.ResourceName).Key("storage_account_name").HasValue(fmt.Sprintf("acctestsa1%s", data.RandomString)), + check.That(data.ResourceName).Key("container").HasValue("asset-container"), + check.That(data.ResourceName).Key("description").HasValue("Asset description"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMediaAsset_update(t *testing.T) { + data := acceptance.BuildTestData(t, 
"azurerm_media_asset", "test") + r := MediaAssetResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("name").HasValue("Asset-Content1"), + ), + }, + data.ImportStep(), + { + Config: r.complete(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("alternate_id").HasValue("Asset-alternateid"), + check.That(data.ResourceName).Key("storage_account_name").HasValue(fmt.Sprintf("acctestsa1%s", data.RandomString)), + check.That(data.ResourceName).Key("container").HasValue("asset-container"), + check.That(data.ResourceName).Key("description").HasValue("Asset description"), + ), + }, + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("name").HasValue("Asset-Content1"), + check.That(data.ResourceName).Key("description").HasValue(""), + check.That(data.ResourceName).Key("alternate_id").HasValue(""), + ), + }, + data.ImportStep(), + data.ImportStep(), + }) +} + +func (MediaAssetResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.AssetID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Media.AssetsClient.Get(ctx, id.ResourceGroup, id.MediaserviceName, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Asset %s (Media Services Account %s) (resource group: %s): %v", id.Name, id.MediaserviceName, id.ResourceGroup, err) + } + + return utils.Bool(resp.AssetProperties != nil), nil +} + +func (r MediaAssetResource) basic(data acceptance.TestData) string { + template := r.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_media_asset" "test" { + name = "Asset-Content1" + resource_group_name = azurerm_resource_group.test.name + media_services_account_name = azurerm_media_services_account.test.name +} + +`, template) +} + +func (r MediaAssetResource) requiresImport(data acceptance.TestData) string { + template := r.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_media_asset" "import" { + name = azurerm_media_asset.test.name + resource_group_name = azurerm_media_asset.test.resource_group_name + media_services_account_name = azurerm_media_asset.test.media_services_account_name +} + +`, template) +} + +func (MediaAssetResource) complete(data acceptance.TestData) string { + template := MediaAssetResource{}.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_media_asset" "test" { + name = "Asset-Content1" + description = "Asset description" + resource_group_name = azurerm_resource_group.test.name + media_services_account_name = azurerm_media_services_account.test.name + storage_account_name = azurerm_storage_account.test.name + alternate_id = "Asset-alternateid" + container = "asset-container" +} + +`, template) +} + +func (MediaAssetResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-media-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestsa1%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "GRS" +} + +resource "azurerm_media_services_account" "test" { + name = "acctestmsa%s" + location = azurerm_resource_group.test.location + resource_group_name = 
azurerm_resource_group.test.name + + storage_account { + id = azurerm_storage_account.test.id + is_primary = true + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) +} diff --git a/azurerm/internal/services/media/media_job_resource.go b/azurerm/internal/services/media/media_job_resource.go new file mode 100644 index 000000000000..6d0748ba1c9e --- /dev/null +++ b/azurerm/internal/services/media/media_job_resource.go @@ -0,0 +1,396 @@ +package media + +import ( + "fmt" + "log" + "regexp" + "time" + + "github.com/Azure/azure-sdk-for-go/services/mediaservices/mgmt/2020-05-01/media" + "github.com/hashicorp/go-azure-helpers/response" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azuread/azuread/helpers/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/media/parse" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceMediaJob() *schema.Resource { + return &schema.Resource{ + Create: resourceMediaJobCreate, + Read: resourceMediaJobRead, + Update: resourceMediaJobUpdate, + Delete: resourceMediaJobDelete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.JobID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringMatch( + regexp.MustCompile("^[-a-zA-Z0-9(_)]{1,128}$"), + "Job name must be 1 - 128 characters long, can contain letters, numbers, underscores, and hyphens (but the first and last character must be a letter or number).", + ), + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "media_services_account_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringMatch( + regexp.MustCompile("^[-a-z0-9]{3,24}$"), + "Media Services Account name must be 3 - 24 characters long, contain only lowercase letters and numbers.", + ), + }, + + "transform_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringMatch( + regexp.MustCompile("^[-a-zA-Z0-9(_)]{1,128}$"), + "Transform name must be 1 - 128 characters long, can contain letters, numbers, underscores, and hyphens (but the first and last character must be a letter or number).", + ), + }, + + "input_asset": { + Type: schema.TypeList, + Required: true, + ForceNew: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringMatch( + regexp.MustCompile("^[-a-zA-Z0-9]{1,128}$"), + "Asset name must be 1 - 128 characters long, 
contain only letters, hyphen and numbers.", + ), + }, + "label": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + }, + }, + + "output_asset": { + Type: schema.TypeList, + Required: true, + ForceNew: true, + MinItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringMatch( + regexp.MustCompile("^[-a-zA-Z0-9]{1,128}$"), + "Asset name must be 1 - 128 characters long, contain only letters, hyphen and numbers.", + ), + }, + "label": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + }, + }, + + "priority": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.StringInSlice([]string{ + string(media.High), string(media.Normal), string(media.Low), + }, false), + Default: string(media.Normal), + }, + + "description": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validate.NoEmptyStrings, + }, + }, + } +} + +func resourceMediaJobCreate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Media.JobsClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + resourceId := parse.NewJobID(subscriptionId, d.Get("resource_group_name").(string), d.Get("media_services_account_name").(string), d.Get("transform_name").(string), d.Get("name").(string)) + if d.IsNewResource() { + existing, err := client.Get(ctx, resourceId.ResourceGroup, resourceId.MediaserviceName, resourceId.TransformName, resourceId.Name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("Error checking for presence of existing Media Job %q (Media Service account %q) (ResourceGroup %q): %s", resourceId.ResourceGroup, resourceId.MediaserviceName, resourceId.Name, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_media_job", *existing.ID) + } + } + + parameters := media.Job{ + JobProperties: &media.JobProperties{ + Description: utils.String(d.Get("description").(string)), + }, + } + + if v, ok := d.GetOk("priority"); ok { + parameters.Priority = media.Priority(v.(string)) + } + + if v, ok := d.GetOk("input_asset"); ok { + parameters.JobProperties.Input = expandInputAsset(v.([]interface{})) + } + + if v, ok := d.GetOk("output_asset"); ok { + outputAssets, err := expandOutputAssets(v.([]interface{})) + if err != nil { + return err + } + parameters.JobProperties.Outputs = outputAssets + } + + if _, err := client.Create(ctx, resourceId.ResourceGroup, resourceId.MediaserviceName, resourceId.TransformName, resourceId.Name, parameters); err != nil { + return fmt.Errorf("creating %s: %+v", resourceId, err) + } + + d.SetId(resourceId.ID()) + + return resourceMediaJobRead(d, meta) +} + +func resourceMediaJobRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Media.JobsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.JobID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.MediaserviceName, id.TransformName, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[INFO] %s was not found - removing from state", id) + 
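// A 404 here means the job no longer exists in Azure, so the ID is cleared to drop the
// resource from state rather than returning an error; the next plan will then propose
// recreating it.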
d.SetId("") + return nil + } + return fmt.Errorf("retrieving %s: %+v", id, err) + } + + d.Set("name", id.Name) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("media_services_account_name", id.MediaserviceName) + d.Set("transform_name", id.TransformName) + + if props := resp.JobProperties; props != nil { + d.Set("description", props.Description) + d.Set("priority", string(props.Priority)) + + inputAsset, err := flattenInputAsset(props.Input) + if err != nil { + return err + } + if err = d.Set("input_asset", inputAsset); err != nil { + return fmt.Errorf("Error flattening `input_asset`: %s", err) + } + + outputAssets, err := flattenOutputAssets(props.Outputs) + if err != nil { + return err + } + if err = d.Set("output_asset", outputAssets); err != nil { + return fmt.Errorf("Error flattening `output_asset`: %s", err) + } + } + return nil +} + +func resourceMediaJobUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Media.JobsClient + ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.JobID(d.Id()) + if err != nil { + return err + } + description := d.Get("description").(string) + + parameters := media.Job{ + JobProperties: &media.JobProperties{ + Description: utils.String(description), + }, + } + + if v, ok := d.GetOk("priority"); ok { + parameters.Priority = media.Priority(v.(string)) + } + + if v, ok := d.GetOk("input_asset"); ok { + inputAsset := expandInputAsset(v.([]interface{})) + parameters.JobProperties.Input = inputAsset + } + + if v, ok := d.GetOk("output_asset"); ok { + outputAssets, err := expandOutputAssets(v.([]interface{})) + if err != nil { + return err + } + parameters.JobProperties.Outputs = outputAssets + } + + if _, err := client.Update(ctx, id.ResourceGroup, id.MediaserviceName, id.TransformName, id.Name, parameters); err != nil { + return fmt.Errorf("updating %s: %+v", id, err) + } + + return resourceMediaJobRead(d, meta) +} + +func resourceMediaJobDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Media.JobsClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.JobID(d.Id()) + if err != nil { + return err + } + + // Cancel the job before we attempt to delete it. + _, err = client.CancelJob(ctx, id.ResourceGroup, id.MediaserviceName, id.TransformName, id.Name) + if err != nil { + return fmt.Errorf("could not cancel Media Job %q (reource group %q) for delete: %+v", id.Name, id.ResourceGroup, err) + } + + resp, err := client.Delete(ctx, id.ResourceGroup, id.MediaserviceName, id.TransformName, id.Name) + if err != nil { + if response.WasNotFound(resp.Response) { + return nil + } + return fmt.Errorf("deleting %s: %+v", id, err) + } + + return nil +} + +func expandInputAsset(input []interface{}) media.BasicJobInput { + inputAsset := input[0].(map[string]interface{}) + assetName := inputAsset["name"].(string) + label := inputAsset["label"].(string) + return &media.JobInputAsset{ + AssetName: utils.String(assetName), + Label: utils.String(label), + } +} + +func flattenInputAsset(input media.BasicJobInput) ([]interface{}, error) { + if input == nil { + return make([]interface{}, 0), nil + } + + asset, ok := input.AsJobInputAsset() + if !ok { + return nil, fmt.Errorf("Unexpected type for Input Asset. 
Currently only JobInputAsset is supported.") + } + assetName := "" + if asset.AssetName != nil { + assetName = *asset.AssetName + } + + label := "" + if asset.Label != nil { + label = *asset.Label + } + + return []interface{}{ + map[string]interface{}{ + "name": assetName, + "label": label, + }, + }, nil +} + +func expandOutputAssets(input []interface{}) (*[]media.BasicJobOutput, error) { + if len(input) == 0 { + return nil, fmt.Errorf("Job must contain at least one output_asset.") + } + outputAssets := make([]media.BasicJobOutput, len(input)) + for index, output := range input { + outputAsset := output.(map[string]interface{}) + assetName := outputAsset["name"].(string) + label := outputAsset["label"].(string) + jobOutputAsset := media.JobOutputAsset{ + AssetName: utils.String(assetName), + Label: utils.String(label), + } + outputAssets[index] = jobOutputAsset + } + + return &outputAssets, nil +} + +func flattenOutputAssets(input *[]media.BasicJobOutput) ([]interface{}, error) { + if input == nil || len(*input) == 0 { + return []interface{}{}, nil + } + + outputAssets := make([]interface{}, len(*input)) + for i, output := range *input { + outputAssetJob, ok := output.AsJobOutputAsset() + if !ok { + return nil, fmt.Errorf("unexpected type for output_asset. Currently only JobOutputAsset is supported.") + } + assetName := "" + if outputAssetJob.AssetName != nil { + assetName = *outputAssetJob.AssetName + } + + label := "" + if outputAssetJob.Label != nil { + label = *outputAssetJob.Label + } + + outputAssets[i] = map[string]interface{}{ + "name": assetName, + "label": label, + } + } + return outputAssets, nil +} diff --git a/azurerm/internal/services/media/media_job_resource_test.go b/azurerm/internal/services/media/media_job_resource_test.go new file mode 100644 index 000000000000..bde6a46b2d7d --- /dev/null +++ b/azurerm/internal/services/media/media_job_resource_test.go @@ -0,0 +1,243 @@ +package media_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/media/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MediaJobResource struct { +} + +func TestAccMediaJob_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_media_job", "test") + r := MediaJobResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("name").HasValue("Job-1"), + check.That(data.ResourceName).Key("input_asset.0.name").HasValue("inputAsset"), + check.That(data.ResourceName).Key("output_asset.0.name").HasValue("outputAsset"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMediaJob_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_media_job", "test") + r := MediaJobResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("name").HasValue("Job-1"), + check.That(data.ResourceName).Key("input_asset.0.name").HasValue("inputAsset"), + 
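// The RequiresImportErrorStep that follows applies r.requiresImport, which points a second
// azurerm_media_job block at the same job name; the provider is expected to reject it with
// the import-as-exists error raised by tf.ImportAsExistsError in resourceMediaJobCreate.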
check.That(data.ResourceName).Key("output_asset.0.name").HasValue("outputAsset"), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccMediaJob_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_media_job", "test") + r := MediaJobResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("description").HasValue("Job description"), + check.That(data.ResourceName).Key("priority").HasValue("Normal"), + check.That(data.ResourceName).Key("input_asset.0.label").HasValue("Input"), + check.That(data.ResourceName).Key("output_asset.0.label").HasValue("Output"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMediaJob_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_media_job", "test") + r := MediaJobResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("name").HasValue("Job-1"), + check.That(data.ResourceName).Key("input_asset.0.name").HasValue("inputAsset"), + check.That(data.ResourceName).Key("output_asset.0.name").HasValue("outputAsset"), + ), + }, + data.ImportStep(), + { + Config: r.complete(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("description").HasValue("Job description"), + check.That(data.ResourceName).Key("priority").HasValue("Normal"), + check.That(data.ResourceName).Key("input_asset.0.label").HasValue("Input"), + check.That(data.ResourceName).Key("output_asset.0.label").HasValue("Output"), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("name").HasValue("Job-1"), + check.That(data.ResourceName).Key("input_asset.0.name").HasValue("inputAsset"), + check.That(data.ResourceName).Key("output_asset.0.name").HasValue("outputAsset"), + ), + }, + data.ImportStep(), + }) +} + +func (MediaJobResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.JobID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Media.JobsClient.Get(ctx, id.ResourceGroup, id.MediaserviceName, id.TransformName, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Job %s (Media Services Account %s) (resource group: %s): %v", id.Name, id.MediaserviceName, id.ResourceGroup, err) + } + + return utils.Bool(resp.JobProperties != nil), nil +} + +func (r MediaJobResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_media_job" "test" { + name = "Job-1" + resource_group_name = azurerm_resource_group.test.name + media_services_account_name = azurerm_media_services_account.test.name + transform_name = azurerm_media_transform.test.name + input_asset { + name = azurerm_media_asset.input.name + } + output_asset { + name = azurerm_media_asset.output.name + } +} + +`, r.template(data)) +} + +func (r MediaJobResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_media_job" "import" { + name = azurerm_media_job.test.name + resource_group_name = azurerm_media_job.test.resource_group_name + media_services_account_name = azurerm_media_job.test.media_services_account_name + transform_name = azurerm_media_job.test.transform_name + input_asset { + name = 
azurerm_media_job.test.input_asset[0].name + } + output_asset { + name = azurerm_media_job.test.output_asset[0].name + } +} + +`, r.basic(data)) +} + +func (r MediaJobResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_media_job" "test" { + name = "Job-1" + resource_group_name = azurerm_resource_group.test.name + media_services_account_name = azurerm_media_services_account.test.name + transform_name = azurerm_media_transform.test.name + description = "Job description" + priority = "Normal" + input_asset { + name = azurerm_media_asset.input.name + label = "Input" + } + output_asset { + name = azurerm_media_asset.output.name + label = "Output" + } +} + +`, r.template(data)) +} + +func (MediaJobResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-media-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestsa1%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "GRS" +} + +resource "azurerm_media_services_account" "test" { + name = "acctestmsa%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + storage_account { + id = azurerm_storage_account.test.id + is_primary = true + } +} + +resource "azurerm_media_transform" "test" { + name = "transform1" + resource_group_name = azurerm_resource_group.test.name + media_services_account_name = azurerm_media_services_account.test.name + output { + relative_priority = "Normal" + on_error_action = "ContinueJob" + builtin_preset { + preset_name = "AACGoodQualityAudio" + } + } +} + +resource "azurerm_media_asset" "input" { + name = "inputAsset" + resource_group_name = azurerm_resource_group.test.name + media_services_account_name = azurerm_media_services_account.test.name + description = "Input Asset description" +} + +resource "azurerm_media_asset" "output" { + name = "outputAsset" + resource_group_name = azurerm_resource_group.test.name + media_services_account_name = azurerm_media_services_account.test.name + description = "Output Asset description" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) +} diff --git a/azurerm/internal/services/media/media_services_account_resource.go b/azurerm/internal/services/media/media_services_account_resource.go index 95421fa25d50..b74693ae0942 100644 --- a/azurerm/internal/services/media/media_services_account_resource.go +++ b/azurerm/internal/services/media/media_services_account_resource.go @@ -6,24 +6,27 @@ import ( "regexp" "time" - "github.com/Azure/azure-sdk-for-go/services/mediaservices/mgmt/2018-07-01/media" + "github.com/Azure/azure-sdk-for-go/services/mediaservices/mgmt/2020-05-01/media" "github.com/hashicorp/go-azure-helpers/response" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/media/parse" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmMediaServicesAccount() *schema.Resource { +func resourceMediaServicesAccount() *schema.Resource { return &schema.Resource{ - Create: resourceArmMediaServicesAccountCreateUpdate, - Read: resourceArmMediaServicesAccountRead, - Update: resourceArmMediaServicesAccountCreateUpdate, - Delete: resourceArmMediaServicesAccountDelete, + Create: resourceMediaServicesAccountCreateUpdate, + Read: resourceMediaServicesAccountRead, + Update: resourceMediaServicesAccountCreateUpdate, + Delete: resourceMediaServicesAccountDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -33,7 +36,7 @@ func resourceArmMediaServicesAccount() *schema.Resource { }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.MediaServicesAccountID(id) + _, err := parse.MediaServiceID(id) return err }), @@ -72,54 +75,107 @@ func resourceArmMediaServicesAccount() *schema.Resource { }, }, - // TODO: support Tags when this bug is fixed: - // https://github.com/Azure/azure-rest-api-specs/issues/5249 - // "tags": tags.Schema(), + "identity": { + Type: schema.TypeList, + Optional: true, + Computed: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "principal_id": { + Type: schema.TypeString, + Computed: true, + }, + + "tenant_id": { + Type: schema.TypeString, + Computed: true, + }, + + "type": { + Type: schema.TypeString, + Optional: true, + DiffSuppressFunc: suppress.CaseDifference, + ValidateFunc: validation.StringInSlice([]string{ + "SystemAssigned", + }, true), + }, + }, + }, + }, + + "storage_authentication_type": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ValidateFunc: validation.StringInSlice([]string{ + "ManagedIdentity", + }, true), + }, + + "tags": tags.Schema(), }, } } -func resourceArmMediaServicesAccountCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceMediaServicesAccountCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Media.ServicesClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - accountName := d.Get("name").(string) + resourceId := parse.NewMediaServiceID(subscriptionId, d.Get("resource_group_name").(string), d.Get("name").(string)) + if d.IsNewResource() { + existing, err := client.Get(ctx, resourceId.ResourceGroup, resourceId.Name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for existing %s: %+v", resourceId, err) + } + } + + if !utils.ResponseWasNotFound(existing.Response) { + return tf.ImportAsExistsError("azurerm_media_services_account", resourceId.ID()) + } + } + location := azure.NormalizeLocation(d.Get("location").(string)) - resourceGroup := d.Get("resource_group_name").(string) + t := d.Get("tags").(map[string]interface{}) storageAccountsRaw := d.Get("storage_account").(*schema.Set).List() storageAccounts, err := expandMediaServicesAccountStorageAccounts(storageAccountsRaw) if err != nil { return err } - parameters := media.Service{ ServiceProperties: &media.ServiceProperties{ 
StorageAccounts: storageAccounts, }, Location: utils.String(location), + Tags: tags.Expand(t), } - if _, e := client.CreateOrUpdate(ctx, resourceGroup, accountName, parameters); e != nil { - return fmt.Errorf("Error creating Media Service Account %q (Resource Group %q): %+v", accountName, resourceGroup, e) + if _, ok := d.GetOk("identity"); ok { + parameters.Identity = expandAzureRmMediaServiceIdentity(d) } - service, err := client.Get(ctx, resourceGroup, accountName) - if err != nil { - return fmt.Errorf("Error retrieving Media Service Account %q (Resource Group %q): %+v", accountName, resourceGroup, err) + if v, ok := d.GetOk("storage_authentication_type"); ok { + parameters.StorageAuthentication = media.StorageAuthentication(v.(string)) + } + + if _, err := client.CreateOrUpdate(ctx, resourceId.ResourceGroup, resourceId.Name, parameters); err != nil { + return fmt.Errorf("creating %s: %+v", resourceId, err) } - d.SetId(*service.ID) - return resourceArmMediaServicesAccountRead(d, meta) + d.SetId(resourceId.ID()) + return resourceMediaServicesAccountRead(d, meta) } -func resourceArmMediaServicesAccountRead(d *schema.ResourceData, meta interface{}) error { +func resourceMediaServicesAccountRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Media.ServicesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.MediaServicesAccountID(d.Id()) + id, err := parse.MediaServiceID(d.Id()) if err != nil { return err } @@ -132,7 +188,7 @@ func resourceArmMediaServicesAccountRead(d *schema.ResourceData, meta interface{ return nil } - return fmt.Errorf("Error retrieving Media Services Account %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("retrieving Media Services Account %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } d.Set("name", id.Name) @@ -141,25 +197,28 @@ func resourceArmMediaServicesAccountRead(d *schema.ResourceData, meta interface{ d.Set("location", azure.NormalizeLocation(*location)) } - if props := resp.ServiceProperties; props != nil { + props := resp.ServiceProperties + if props != nil { accounts := flattenMediaServicesAccountStorageAccounts(props.StorageAccounts) if e := d.Set("storage_account", accounts); e != nil { - return fmt.Errorf("Error flattening `storage_account`: %s", e) + return fmt.Errorf("flattening `storage_account`: %s", e) } + d.Set("storage_authentication_type", string(props.StorageAuthentication)) } - // TODO: support Tags when this bug is fixed: - // https://github.com/Azure/azure-rest-api-specs/issues/5249 - // return tags.FlattenAndSet(d, resp.Tags) - return nil + if err := d.Set("identity", flattenAzureRmMediaServicedentity(resp.Identity)); err != nil { + return fmt.Errorf("flattening `identity`: %s", err) + } + + return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmMediaServicesAccountDelete(d *schema.ResourceData, meta interface{}) error { +func resourceMediaServicesAccountDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Media.ServicesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.MediaServicesAccountID(d.Id()) + id, err := parse.MediaServiceID(d.Id()) if err != nil { return err } @@ -169,7 +228,7 @@ func resourceArmMediaServicesAccountDelete(d *schema.ResourceData, meta interfac if response.WasNotFound(resp.Response) { return nil } - return fmt.Errorf("Error issuing AzureRM delete request for Media Services 
Account '%s': %+v", id.Name, err) + return fmt.Errorf("issuing AzureRM delete request for Media Services Account '%s': %+v", id.Name, err) } return nil @@ -225,3 +284,29 @@ func flattenMediaServicesAccountStorageAccounts(input *[]media.StorageAccount) [ return results } + +func expandAzureRmMediaServiceIdentity(d *schema.ResourceData) *media.ServiceIdentity { + identities := d.Get("identity").([]interface{}) + identity := identities[0].(map[string]interface{}) + identityType := identity["type"].(string) + return &media.ServiceIdentity{ + Type: media.ManagedIdentityType(identityType), + } +} + +func flattenAzureRmMediaServicedentity(identity *media.ServiceIdentity) []interface{} { + if identity == nil { + return make([]interface{}, 0) + } + + result := make(map[string]interface{}) + result["type"] = string(identity.Type) + if identity.PrincipalID != nil { + result["principal_id"] = *identity.PrincipalID + } + if identity.TenantID != nil { + result["tenant_id"] = *identity.TenantID + } + + return []interface{}{result} +} diff --git a/azurerm/internal/services/media/media_services_account_resource_test.go b/azurerm/internal/services/media/media_services_account_resource_test.go new file mode 100644 index 000000000000..ebf92a487130 --- /dev/null +++ b/azurerm/internal/services/media/media_services_account_resource_test.go @@ -0,0 +1,291 @@ +package media_test + +import ( + "context" + "fmt" + "regexp" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/media/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MediaServicesAccountResource struct { +} + +func TestAccMediaServicesAccount_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_media_services_account", "test") + r := MediaServicesAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("storage_account.#").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMediaServicesAccount_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_media_services_account", "test") + r := MediaServicesAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("storage_account.#").HasValue("1"), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccMediaServicesAccount_multipleAccounts(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_media_services_account", "test") + r := MediaServicesAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.multipleAccounts(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("storage_account.#").HasValue("2"), + ), + }, + data.ImportStep(), + { + Config: r.multipleAccountsUpdated(data), + PlanOnly: true, + }, + data.ImportStep(), + }) +} + +func TestAccMediaServicesAccount_multiplePrimaries(t 
*testing.T) { + data := acceptance.BuildTestData(t, "azurerm_media_services_account", "test") + r := MediaServicesAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.multiplePrimaries(data), + ExpectError: regexp.MustCompile("Only one Storage Account can be set as Primary"), + }, + }) +} + +func TestAccMediaServicesAccount_identitySystemAssigned(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_media_services_account", "test") + r := MediaServicesAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.identitySystemAssigned(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("identity.0.type").HasValue("SystemAssigned"), + ), + }, + data.ImportStep(), + }) +} + +func (MediaServicesAccountResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.MediaServiceID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Media.ServicesClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Media Services Account %s (resource group: %s): %v", id.Name, id.ResourceGroup, err) + } + + return utils.Bool(resp.ServiceProperties != nil), nil +} + +func (r MediaServicesAccountResource) basic(data acceptance.TestData) string { + template := r.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_media_services_account" "test" { + name = "acctestmsa%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + storage_account { + id = azurerm_storage_account.first.id + is_primary = true + } + + tags = { + environment = "staging" + } +} +`, template, data.RandomString) +} + +func (r MediaServicesAccountResource) requiresImport(data acceptance.TestData) string { + template := r.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_media_services_account" "import" { + name = azurerm_media_services_account.test.name + location = azurerm_media_services_account.test.location + resource_group_name = azurerm_media_services_account.test.resource_group_name + + storage_account { + id = azurerm_storage_account.first.id + is_primary = true + } + + tags = { + environment = "staging" + } +} +`, template) +} + +func (MediaServicesAccountResource) multipleAccounts(data acceptance.TestData) string { + template := MediaServicesAccountResource{}.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_storage_account" "second" { + name = "acctestsa2%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "GRS" +} + +resource "azurerm_media_services_account" "test" { + name = "acctestmsa%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + storage_account { + id = azurerm_storage_account.first.id + is_primary = true + } + + storage_account { + id = azurerm_storage_account.second.id + is_primary = false + } +} +`, template, data.RandomString, data.RandomString) +} + +func (MediaServicesAccountResource) multipleAccountsUpdated(data acceptance.TestData) string { + template := MediaServicesAccountResource{}.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_storage_account" "second" { + name = "acctestsa2%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = 
"Standard" + account_replication_type = "GRS" +} + +resource "azurerm_media_services_account" "test" { + name = "acctestmsa%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + storage_account { + id = azurerm_storage_account.second.id + is_primary = false + } + + storage_account { + id = azurerm_storage_account.first.id + is_primary = true + } +} +`, template, data.RandomString, data.RandomString) +} + +func (MediaServicesAccountResource) multiplePrimaries(data acceptance.TestData) string { + template := MediaServicesAccountResource{}.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_storage_account" "second" { + name = "acctestsa2%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "GRS" +} + +resource "azurerm_media_services_account" "test" { + name = "acctestmsa%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + storage_account { + id = azurerm_storage_account.first.id + is_primary = true + } + + storage_account { + id = azurerm_storage_account.second.id + is_primary = true + } +} +`, template, data.RandomString, data.RandomString) +} + +func (MediaServicesAccountResource) identitySystemAssigned(data acceptance.TestData) string { + template := MediaServicesAccountResource{}.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_media_services_account" "test" { + name = "acctestmsa%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + storage_account { + id = azurerm_storage_account.first.id + is_primary = true + } + + identity { + type = "SystemAssigned" + } +} +`, template, data.RandomString) +} + +func (MediaServicesAccountResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-media-%d" + location = "%s" +} + +resource "azurerm_storage_account" "first" { + name = "acctestsa1%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "GRS" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} diff --git a/azurerm/internal/services/media/media_streaming_endpoint_resource.go b/azurerm/internal/services/media/media_streaming_endpoint_resource.go new file mode 100644 index 000000000000..e278633322dd --- /dev/null +++ b/azurerm/internal/services/media/media_streaming_endpoint_resource.go @@ -0,0 +1,613 @@ +package media + +import ( + "fmt" + "log" + "regexp" + "time" + + "github.com/Azure/azure-sdk-for-go/services/mediaservices/mgmt/2020-05-01/media" + "github.com/Azure/go-autorest/autorest/date" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/media/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceMediaStreamingEndpoint() *schema.Resource { + return &schema.Resource{ + Create: resourceMediaStreamingEndpointCreate, + Read: resourceMediaStreamingEndpointRead, + Update: resourceMediaStreamingEndpointUpdate, + Delete: resourceMediaStreamingEndpointDelete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.StreamingEndpointID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: ValidateStreamingEnpointName, + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "media_services_account_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: ValidateMediaServicesAccountName, + }, + + "auto_start_enabled": { + Type: schema.TypeBool, + Optional: true, + Computed: true, + }, + + "location": azure.SchemaLocation(), + + "scale_units": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validation.IntBetween(1, 10), + }, + + "access_control": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "akamai_signature_header_authentication_key": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "base64_key": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringIsBase64, + }, + "expiration": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.IsRFC3339Time, + }, + "identifier": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + }, + }, + "ip_allow": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "address": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "name": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "subnet_prefix_length": { + Type: schema.TypeInt, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + }, + }, + }, + }, + }, + + "cdn_enabled": { + Type: schema.TypeBool, + Optional: true, + }, + + "cdn_profile": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ValidateFunc: validation.StringMatch( + regexp.MustCompile("^[-A-Za-z0-9]{1,120}$"), + "CDN profile must be 1 - 120 characters long, can contain only letters, numbers, and hyphens.", + ), + }, + + "cdn_provider": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ValidateFunc: validation.StringInSlice([]string{ + "StandardVerizon", "PremiumVerizon", "StandardAkamai", + }, false), + }, + + "cross_site_access_policy": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "client_access_policy": { + Type: schema.TypeString, + Computed: true, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + 
"cross_domain_policy": { + Type: schema.TypeString, + Computed: true, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + }, + }, + + "custom_host_names": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + + "description": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "max_cache_age_seconds": { + Type: schema.TypeInt, + Optional: true, + ValidateFunc: validation.IntBetween(1, 2147483647), + }, + + "tags": tags.Schema(), + }, + } +} + +func resourceMediaStreamingEndpointCreate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Media.StreamingEndpointsClient + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + streamingEndpointName := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + accountName := d.Get("media_services_account_name").(string) + location := azure.NormalizeLocation(d.Get("location").(string)) + scaleUnits := d.Get("scale_units").(int) + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + resourceId := parse.NewStreamingEndpointID(subscriptionId, d.Get("resource_group_name").(string), d.Get("media_services_account_name").(string), d.Get("name").(string)) + existing, err := client.Get(ctx, resourceId.ResourceGroup, resourceId.MediaserviceName, resourceId.Name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for presence of %s: %+v", resourceId, err) + } + } + if !utils.ResponseWasNotFound(existing.Response) { + return tf.ImportAsExistsError("azurerm_media_streaming_endpoint", resourceId.ID()) + } + + parameters := media.StreamingEndpoint{ + StreamingEndpointProperties: &media.StreamingEndpointProperties{ + ScaleUnits: utils.Int32(int32(scaleUnits)), + }, + Location: utils.String(location), + } + + autoStart := utils.Bool(false) + if _, ok := d.GetOk("auto_start_enabled"); ok { + autoStart = utils.Bool(d.Get("auto_start_enabled").(bool)) + } + if _, ok := d.GetOk("access_control"); ok { + accessControl, err := expandAccessControl(d) + if err != nil { + return err + } + parameters.StreamingEndpointProperties.AccessControl = accessControl + } + if cdnEnabled, ok := d.GetOk("cdn_enabled"); ok { + parameters.StreamingEndpointProperties.CdnEnabled = utils.Bool(cdnEnabled.(bool)) + } + + if cdnProfile, ok := d.GetOk("cdn_profile"); ok { + parameters.StreamingEndpointProperties.CdnProfile = utils.String(cdnProfile.(string)) + } + + if cdnProvider, ok := d.GetOk("cdn_provider"); ok { + parameters.StreamingEndpointProperties.CdnProvider = utils.String(cdnProvider.(string)) + } + + if crossSite, ok := d.GetOk("cross_site_access_policy"); ok { + parameters.StreamingEndpointProperties.CrossSiteAccessPolicies = expandCrossSiteAccessPolicies(crossSite.([]interface{})) + } + + if _, ok := d.GetOk("custom_host_names"); ok { + customHostNames := d.Get("custom_host_names").([]interface{}) + parameters.StreamingEndpointProperties.CustomHostNames = utils.ExpandStringSlice(customHostNames) + } + + if description, ok := d.GetOk("description"); ok { + parameters.StreamingEndpointProperties.Description = utils.String(description.(string)) + } + + if maxCacheAge, ok := d.GetOk("max_cache_age_seconds"); ok { + parameters.StreamingEndpointProperties.MaxCacheAge = utils.Int64(int64(maxCacheAge.(int))) + } + + future, err := client.Create(ctx, resourceGroup, 
accountName, streamingEndpointName, parameters, autoStart) + if err != nil { + return fmt.Errorf("Error creating Streaming Endpoint %q in Media Services Account %q (Resource Group %q): %+v", streamingEndpointName, accountName, resourceGroup, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error waiting for creation of Streaming Endpoint %q in Media Services Account %q (Resource Group %q): %+v", streamingEndpointName, accountName, resourceGroup, err) + } + + d.SetId(resourceId.ID()) + + return resourceMediaStreamingEndpointRead(d, meta) +} + +func resourceMediaStreamingEndpointUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Media.StreamingEndpointsClient + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.StreamingEndpointID(d.Id()) + if err != nil { + return err + } + location := azure.NormalizeLocation(d.Get("location").(string)) + scaleUnits := d.Get("scale_units").(int) + + parameters := media.StreamingEndpoint{ + StreamingEndpointProperties: &media.StreamingEndpointProperties{ + ScaleUnits: utils.Int32(int32(scaleUnits)), + }, + Location: utils.String(location), + } + + if d.HasChange("scale_units") { + scaleParamaters := media.StreamingEntityScaleUnit{ + ScaleUnit: utils.Int32(int32(scaleUnits)), + } + + future, err := client.Scale(ctx, id.ResourceGroup, id.MediaserviceName, id.Name, scaleParamaters) + if err != nil { + return fmt.Errorf("Error scaling units in Streaming Endpoint %q in Media Services Account %q (Resource Group %q): %+v", id.Name, id.MediaserviceName, id.ResourceGroup, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error waiting for scaling of Streaming Endpoint %q in Media Services Account %q (Resource Group %q): %+v", id.Name, id.MediaserviceName, id.ResourceGroup, err) + } + } + + if _, ok := d.GetOk("access_control"); ok { + accessControl, err := expandAccessControl(d) + if err != nil { + return err + } + parameters.StreamingEndpointProperties.AccessControl = accessControl + } + + if cdnEnabled, ok := d.GetOk("cdn_enabled"); ok { + parameters.StreamingEndpointProperties.CdnEnabled = utils.Bool(cdnEnabled.(bool)) + } + + if cdnProfile, ok := d.GetOk("cdn_profile"); ok { + parameters.StreamingEndpointProperties.CdnProfile = utils.String(cdnProfile.(string)) + } + + if cdnProvider, ok := d.GetOk("cdn_provider"); ok { + parameters.StreamingEndpointProperties.CdnProvider = utils.String(cdnProvider.(string)) + } + + if crossSitePolicies, ok := d.GetOk("cross_site_access_policy"); ok { + parameters.StreamingEndpointProperties.CrossSiteAccessPolicies = expandCrossSiteAccessPolicies(crossSitePolicies.([]interface{})) + } + + if _, ok := d.GetOk("custom_host_names"); ok { + customHostNames := d.Get("custom_host_names").([]interface{}) + parameters.StreamingEndpointProperties.CustomHostNames = utils.ExpandStringSlice(customHostNames) + } + + if description, ok := d.GetOk("description"); ok { + parameters.StreamingEndpointProperties.Description = utils.String(description.(string)) + } + + if maxCacheAge, ok := d.GetOk("max_cache_age_seconds"); ok { + parameters.StreamingEndpointProperties.MaxCacheAge = utils.Int64(int64(maxCacheAge.(int))) + } + + future, err := client.Update(ctx, id.ResourceGroup, id.MediaserviceName, id.Name, parameters) + if err != nil { + return fmt.Errorf("Error creating Streaming Endpoint %q in Media Services Account %q (Resource Group 
%q): %+v", id.Name, id.MediaserviceName, id.ResourceGroup, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error waiting for creation of Streaming Endpoint %q in Media Services Account %q (Resource Group %q): %+v", id.Name, id.MediaserviceName, id.ResourceGroup, err) + } + + return resourceMediaStreamingEndpointRead(d, meta) +} + +func resourceMediaStreamingEndpointRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Media.StreamingEndpointsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.StreamingEndpointID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.MediaserviceName, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[INFO] Streaming Endpoint %q was not found in Media Services Account %q and Resource Group %q - removing from state", id.Name, id.MediaserviceName, id.ResourceGroup) + d.SetId("") + return nil + } + + return fmt.Errorf("Error retrieving Streaming Endpoint %q in Media Services Account %q (Resource Group %q): %+v", id.Name, id.MediaserviceName, id.ResourceGroup, err) + } + + d.Set("name", id.Name) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("media_services_account_name", id.MediaserviceName) + if location := resp.Location; location != nil { + d.Set("location", azure.NormalizeLocation(*location)) + } + + if props := resp.StreamingEndpointProperties; props != nil { + if scaleUnits := props.ScaleUnits; scaleUnits != nil { + d.Set("scale_units", scaleUnits) + } + + accessControl := flattenAccessControl(props.AccessControl) + if err := d.Set("access_control", accessControl); err != nil { + return fmt.Errorf("Error flattening `access_control`: %s", err) + } + + d.Set("cdn_enabled", props.CdnEnabled) + d.Set("cdn_profile", props.CdnProfile) + d.Set("cdn_provider", props.CdnProvider) + + crossSiteAccessPolicies := flattenCrossSiteAccessPolicies(resp.CrossSiteAccessPolicies) + if err := d.Set("cross_site_access_policy", crossSiteAccessPolicies); err != nil { + return fmt.Errorf("Error flattening `cross_site_access_policy`: %s", err) + } + + d.Set("custom_host_names", props.CustomHostNames) + d.Set("description", props.Description) + + maxCacheAge := 0 + if props.MaxCacheAge != nil { + maxCacheAge = int(*props.MaxCacheAge) + } + d.Set("max_cache_age_seconds", maxCacheAge) + } + + return nil +} + +func resourceMediaStreamingEndpointDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Media.StreamingEndpointsClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.StreamingEndpointID(d.Id()) + if err != nil { + return err + } + + future, err := client.Delete(ctx, id.ResourceGroup, id.MediaserviceName, id.Name) + if err != nil { + return fmt.Errorf("Error deleting Streaming Endpoint %q in Media Services Account %q (Resource Group %q): %+v", id.Name, id.MediaserviceName, id.ResourceGroup, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error waiting for deletion of Streaming Endpoint %q in Media Services Account %q (Resource Group %q): %+v", id.Name, id.MediaserviceName, id.ResourceGroup, err) + } + + return nil +} + +func expandAccessControl(d *schema.ResourceData) (*media.StreamingEndpointAccessControl, error) { + accessControls := d.Get("access_control").([]interface{}) 
+ if len(accessControls) == 0 { + return nil, nil + } + accessControlResult := new(media.StreamingEndpointAccessControl) + accessControl := accessControls[0].(map[string]interface{}) + // Get IP information + if raw, ok := accessControl["ip_allow"]; ok { + ipAllowsList := raw.([]interface{}) + ipRanges := make([]media.IPRange, len(ipAllowsList)) + for index, ipAllow := range ipAllowsList { + allow := ipAllow.(map[string]interface{}) + address := allow["address"].(string) + name := allow["name"].(string) + + ipRange := media.IPRange{ + Name: utils.String(name), + Address: utils.String(address), + } + subnetPrefixLengthRaw := allow["subnet_prefix_length"] + if subnetPrefixLengthRaw != "" { + ipRange.SubnetPrefixLength = utils.Int32(int32(subnetPrefixLengthRaw.(int))) + } + ipRanges[index] = ipRange + } + accessControlResult.IP = &media.IPAccessControl{ + Allow: &ipRanges, + } + } + // Get Akamai information + if raw, ok := accessControl["akamai_signature_header_authentication_key"]; ok { + akamaiSignatureKeyList := raw.([]interface{}) + akamaiSignatureHeaderAuthenticationKeyList := make([]media.AkamaiSignatureHeaderAuthenticationKey, len(akamaiSignatureKeyList)) + for index, akamaiSignatureKey := range akamaiSignatureKeyList { + akamaiKey := akamaiSignatureKey.(map[string]interface{}) + base64Key := akamaiKey["base64_key"].(string) + expirationRaw := akamaiKey["expiration"].(string) + identifier := akamaiKey["identifier"].(string) + + akamaiSignatureHeaderAuthenticationKey := media.AkamaiSignatureHeaderAuthenticationKey{ + Base64Key: utils.String(base64Key), + Identifier: utils.String(identifier), + } + if expirationRaw != "" { + expiration, err := date.ParseTime(time.RFC3339, expirationRaw) + if err != nil { + return nil, err + } + akamaiSignatureHeaderAuthenticationKey.Expiration = &date.Time{ + Time: expiration, + } + } + akamaiSignatureHeaderAuthenticationKeyList[index] = akamaiSignatureHeaderAuthenticationKey + accessControlResult.Akamai = &media.AkamaiAccessControl{ + AkamaiSignatureHeaderAuthenticationKeyList: &akamaiSignatureHeaderAuthenticationKeyList, + } + } + } + + return accessControlResult, nil +} + +func flattenAccessControl(input *media.StreamingEndpointAccessControl) []interface{} { + if input == nil { + return make([]interface{}, 0) + } + + ipAllowRules := make([]interface{}, 0) + if input.IP != nil && input.IP.Allow != nil { + for _, v := range *input.IP.Allow { + name := "" + if v.Name != nil { + name = *v.Name + } + + address := "" + if v.Address != nil { + address = *v.Address + } + + var subnetPrefixLength int32 + if v.SubnetPrefixLength != nil { + subnetPrefixLength = *v.SubnetPrefixLength + } + + ipAllowRules = append(ipAllowRules, map[string]interface{}{ + "name": name, + "address": address, + "subnet_prefix_length": subnetPrefixLength, + }) + } + } + + akamaiRules := make([]interface{}, 0) + if input.Akamai != nil && input.Akamai.AkamaiSignatureHeaderAuthenticationKeyList != nil { + for _, v := range *input.Akamai.AkamaiSignatureHeaderAuthenticationKeyList { + base64Key := "" + if v.Base64Key != nil { + base64Key = *v.Base64Key + } + + expiration := "" + if v.Expiration != nil { + expiration = v.Expiration.Format(time.RFC3339) + } + + identifier := "" + if v.Identifier != nil { + identifier = *v.Identifier + } + + akamaiRules = append(akamaiRules, map[string]interface{}{ + "base64_key": base64Key, + "expiration": expiration, + "identifier": identifier, + }) + } + } + + return []interface{}{ + map[string]interface{}{ + "akamai_signature_header_authentication_key": 
akamaiRules, + "ip_allow": ipAllowRules, + }, + } +} + +func expandCrossSiteAccessPolicies(input []interface{}) *media.CrossSiteAccessPolicies { + if len(input) == 0 { + return nil + } + + crossSiteAccessPolicy := input[0].(map[string]interface{}) + clientAccessPolicy := crossSiteAccessPolicy["client_access_policy"].(string) + crossDomainPolicy := crossSiteAccessPolicy["cross_domain_policy"].(string) + return &media.CrossSiteAccessPolicies{ + ClientAccessPolicy: &clientAccessPolicy, + CrossDomainPolicy: &crossDomainPolicy, + } +} + +func flattenCrossSiteAccessPolicies(input *media.CrossSiteAccessPolicies) []interface{} { + if input == nil { + return make([]interface{}, 0) + } + + clientAccessPolicy := "" + if input.ClientAccessPolicy != nil { + clientAccessPolicy = *input.ClientAccessPolicy + } + + crossDomainPolicy := "" + if input.CrossDomainPolicy != nil { + crossDomainPolicy = *input.CrossDomainPolicy + } + + return []interface{}{ + map[string]interface{}{ + "client_access_policy": clientAccessPolicy, + "cross_domain_policy": crossDomainPolicy, + }, + } +} diff --git a/azurerm/internal/services/media/media_streaming_endpoint_resource_test.go b/azurerm/internal/services/media/media_streaming_endpoint_resource_test.go new file mode 100644 index 000000000000..d73d88208a84 --- /dev/null +++ b/azurerm/internal/services/media/media_streaming_endpoint_resource_test.go @@ -0,0 +1,173 @@ +package media_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/media/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MediaStreamingEndpointResource struct { +} + +func TestAccMediaStreamingEndpoint_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_media_streaming_endpoint", "test") + r := MediaStreamingEndpointResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("scale_units").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMediaStreamingEndpoint_CDN(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_media_streaming_endpoint", "test") + r := MediaStreamingEndpointResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.CDN(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("cdn_profile").HasValue("MyCDNProfile"), + check.That(data.ResourceName).Key("cdn_provider").HasValue("StandardVerizon"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMediaStreamingEndpoint_MaxCacheAge(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_media_streaming_endpoint", "test") + r := MediaStreamingEndpointResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.maxCacheAge(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("max_cache_age_seconds").HasValue("60"), + ), + }, + data.ImportStep(), + }) +} + +func (MediaStreamingEndpointResource) Exists(ctx context.Context, clients *clients.Client, state 
*terraform.InstanceState) (*bool, error) { + id, err := parse.StreamingEndpointID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Media.StreamingEndpointsClient.Get(ctx, id.ResourceGroup, id.MediaserviceName, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving %s: %v", id.String(), err) + } + + return utils.Bool(resp.StreamingEndpointProperties != nil), nil +} + +func (r MediaStreamingEndpointResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s +resource "azurerm_media_streaming_endpoint" "test" { + name = "endpoint1" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + media_services_account_name = azurerm_media_services_account.test.name + scale_units = 1 +} +`, r.template(data)) +} + +func (r MediaStreamingEndpointResource) CDN(data acceptance.TestData) string { + return fmt.Sprintf(` +%s +resource "azurerm_media_streaming_endpoint" "test" { + name = "endpoint1" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + media_services_account_name = azurerm_media_services_account.test.name + scale_units = 1 + cdn_enabled = true + cdn_provider = "StandardVerizon" + cdn_profile = "MyCDNProfile" +} +`, r.template(data)) +} + +func (r MediaStreamingEndpointResource) maxCacheAge(data acceptance.TestData) string { + return fmt.Sprintf(` +%s +resource "azurerm_media_streaming_endpoint" "test" { + name = "endpoint1" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + media_services_account_name = azurerm_media_services_account.test.name + scale_units = 1 + access_control { + ip_allow { + name = "AllowedIP" + address = "192.168.1.1" + } + + ip_allow { + name = "AnotherIp" + address = "192.168.1.2" + } + + akamai_signature_header_authentication_key { + identifier = "id1" + expiration = "2030-12-31T16:00:00Z" + base64_key = "dGVzdGlkMQ==" + } + + akamai_signature_header_authentication_key { + identifier = "id2" + expiration = "2032-01-28T16:00:00Z" + base64_key = "dGVzdGlkMQ==" + } + } + max_cache_age_seconds = 60 + +} +`, r.template(data)) +} + +func (MediaStreamingEndpointResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} +resource "azurerm_resource_group" "test" { + name = "acctestRG-media-%d" + location = "%s" +} +resource "azurerm_storage_account" "test" { + name = "acctestsa1%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "GRS" +} +resource "azurerm_media_services_account" "test" { + name = "acctestmsa%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + storage_account { + id = azurerm_storage_account.test.id + is_primary = true + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) +} diff --git a/azurerm/internal/services/media/media_streaming_locator_resource.go b/azurerm/internal/services/media/media_streaming_locator_resource.go new file mode 100644 index 000000000000..6424ee70d0e7 --- /dev/null +++ b/azurerm/internal/services/media/media_streaming_locator_resource.go @@ -0,0 +1,389 @@ +package media + +import ( + "fmt" + "log" + "regexp" + "time" + + "github.com/Azure/azure-sdk-for-go/services/mediaservices/mgmt/2020-05-01/media" + "github.com/Azure/go-autorest/autorest/date" + 
"github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + uuid "github.com/satori/go.uuid" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/media/parse" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceMediaStreamingLocator() *schema.Resource { + return &schema.Resource{ + Create: resourceMediaStreamingLocatorCreate, + Read: resourceMediaStreamingLocatorRead, + Delete: resourceMediaStreamingLocatorDelete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.StreamingLocatorID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringMatch( + regexp.MustCompile("^[-a-zA-Z0-9(_)]{1,128}$"), + "Steraming Locator name must be 1 - 128 characters long, can contain letters, numbers, underscores, and hyphens (but the first and last character must be a letter or number).", + ), + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "media_services_account_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: ValidateMediaServicesAccountName, + }, + + "asset_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringMatch( + regexp.MustCompile("^[-a-zA-Z0-9]{1,128}$"), + "Asset name must be 1 - 128 characters long, contain only letters, hyphen and numbers.", + ), + }, + + "streaming_policy_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "alternative_media_id": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "content_key": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "content_key_id": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.IsUUID, + }, + + "label_reference_in_streaming_policy": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "policy_name": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "type": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.StringInSlice([]string{ + string(media.StreamingLocatorContentKeyTypeCommonEncryptionCbcs), + string(media.StreamingLocatorContentKeyTypeCommonEncryptionCenc), + string(media.StreamingLocatorContentKeyTypeEnvelopeEncryption), + }, false), + }, + + "value": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + }, + }, + + 
"default_content_key_policy_name": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "end_time": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + ValidateFunc: validation.IsRFC3339Time, + }, + + "start_time": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.IsRFC3339Time, + }, + + "streaming_locator_id": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + ValidateFunc: validation.IsUUID, + }, + }, + } +} + +func resourceMediaStreamingLocatorCreate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Media.StreamingLocatorsClient + subscriptionID := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + resourceID := parse.NewStreamingLocatorID(subscriptionID, d.Get("resource_group_name").(string), d.Get("media_services_account_name").(string), d.Get("name").(string)) + if d.IsNewResource() { + existing, err := client.Get(ctx, resourceID.ResourceGroup, resourceID.MediaserviceName, resourceID.Name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("Error checking for presence of existing Media Job %q (Media Service account %q) (ResourceGroup %q): %s", resourceID.Name, resourceID.MediaserviceName, resourceID.ResourceGroup, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_media_streaming_locator", *existing.ID) + } + } + + parameters := media.StreamingLocator{ + StreamingLocatorProperties: &media.StreamingLocatorProperties{ + AssetName: utils.String(d.Get("asset_name").(string)), + StreamingPolicyName: utils.String(d.Get("streaming_policy_name").(string)), + }, + } + + if alternativeMediaID, ok := d.GetOk("alternative_media_id"); ok { + parameters.StreamingLocatorProperties.AlternativeMediaID = utils.String(alternativeMediaID.(string)) + } + + if contentKeys, ok := d.GetOk("content_key"); ok { + parameters.StreamingLocatorProperties.ContentKeys = expandContentKeys(contentKeys.([]interface{})) + } + + if defaultContentKeyPolicyName, ok := d.GetOk("default_content_key_policy_name"); ok { + parameters.StreamingLocatorProperties.DefaultContentKeyPolicyName = utils.String(defaultContentKeyPolicyName.(string)) + } + + if endTimeRaw, ok := d.GetOk("end_time"); ok { + if endTimeRaw.(string) != "" { + endTime, err := date.ParseTime(time.RFC3339, endTimeRaw.(string)) + if err != nil { + return err + } + parameters.StreamingLocatorProperties.EndTime = &date.Time{ + Time: endTime, + } + } + } + + if startTimeRaw, ok := d.GetOk("start_time"); ok { + if startTimeRaw.(string) != "" { + startTime, err := date.ParseTime(time.RFC3339, startTimeRaw.(string)) + if err != nil { + return err + } + parameters.StreamingLocatorProperties.StartTime = &date.Time{ + Time: startTime, + } + } + } + + if idRaw, ok := d.GetOk("streaming_locator_id"); ok { + id := uuid.FromStringOrNil(idRaw.(string)) + parameters.StreamingLocatorProperties.StreamingLocatorID = &id + } + + if _, err := client.Create(ctx, resourceID.ResourceGroup, resourceID.MediaserviceName, resourceID.Name, parameters); err != nil { + return fmt.Errorf("Error creating Streaming Locator %q in Media Services Account %q (Resource Group %q): %+v", resourceID.Name, resourceID.MediaserviceName, resourceID.ResourceGroup, err) + } + + d.SetId(resourceID.ID()) + + return 
resourceMediaStreamingLocatorRead(d, meta) +} + +func resourceMediaStreamingLocatorRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Media.StreamingLocatorsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.StreamingLocatorID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.MediaserviceName, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[INFO] Streaming Locator %q was not found in Media Services Account %q and Resource Group %q - removing from state", id.Name, id.MediaserviceName, id.ResourceGroup) + d.SetId("") + return nil + } + + return fmt.Errorf("Error retrieving Streaming Locator %q in Media Services Account %q (Resource Group %q): %+v", id.Name, id.MediaserviceName, id.ResourceGroup, err) + } + + d.Set("name", id.Name) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("media_services_account_name", id.MediaserviceName) + + if props := resp.StreamingLocatorProperties; props != nil { + d.Set("asset_name", props.AssetName) + d.Set("streaming_policy_name", props.StreamingPolicyName) + d.Set("alternative_media_id", props.AlternativeMediaID) + d.Set("default_content_key_policy_name", props.DefaultContentKeyPolicyName) + + contentKeys := flattenContentKeys(resp.ContentKeys) + if err := d.Set("content_key", contentKeys); err != nil { + return fmt.Errorf("Error flattening `content_key`: %s", err) + } + + endTime := "" + if props.EndTime != nil { + endTime = props.EndTime.Format(time.RFC3339) + } + d.Set("end_time", endTime) + + startTime := "" + if props.StartTime != nil { + startTime = props.StartTime.Format(time.RFC3339) + } + d.Set("start_time", startTime) + + id := "" + if props.StreamingLocatorID != nil { + id = props.StreamingLocatorID.String() + } + d.Set("streaming_locator_id", id) + } + + return nil +} + +func resourceMediaStreamingLocatorDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Media.StreamingLocatorsClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.StreamingLocatorID(d.Id()) + if err != nil { + return err + } + + if _, err = client.Delete(ctx, id.ResourceGroup, id.MediaserviceName, id.Name); err != nil { + return fmt.Errorf("Error deleting Streaming Locator %q in Media Services Account %q (Resource Group %q): %+v", id.Name, id.MediaserviceName, id.ResourceGroup, err) + } + + return nil +} + +func expandContentKeys(input []interface{}) *[]media.StreamingLocatorContentKey { + results := make([]media.StreamingLocatorContentKey, 0) + + for _, contentKeyRaw := range input { + contentKey := contentKeyRaw.(map[string]interface{}) + + streamingLocatorContentKey := media.StreamingLocatorContentKey{} + + if contentKey["content_key_id"] != nil { + id := uuid.FromStringOrNil(contentKey["content_key_id"].(string)) + streamingLocatorContentKey.ID = &id + } + + if contentKey["label_reference_in_streaming_policy"] != nil { + streamingLocatorContentKey.LabelReferenceInStreamingPolicy = utils.String(contentKey["label_reference_in_streaming_policy"].(string)) + } + + if contentKey["policy_name"] != nil { + streamingLocatorContentKey.PolicyName = utils.String(contentKey["policy_name"].(string)) + } + + if contentKey["type"] != nil { + streamingLocatorContentKey.Type = media.StreamingLocatorContentKeyType(contentKey["type"].(string)) + } + + if contentKey["value"] != nil { + 
streamingLocatorContentKey.Value = utils.String(contentKey["value"].(string)) + } + + results = append(results, streamingLocatorContentKey) + } + + return &results +} + +func flattenContentKeys(input *[]media.StreamingLocatorContentKey) []interface{} { + if input == nil { + return []interface{}{} + } + + results := make([]interface{}, 0) + for _, contentKey := range *input { + id := "" + if contentKey.ID != nil { + id = contentKey.ID.String() + } + + labelReferenceInStreamingPolicy := "" + if contentKey.LabelReferenceInStreamingPolicy != nil { + labelReferenceInStreamingPolicy = *contentKey.LabelReferenceInStreamingPolicy + } + + policyName := "" + if contentKey.PolicyName != nil { + policyName = *contentKey.PolicyName + } + + value := "" + if contentKey.Value != nil { + value = *contentKey.Value + } + + results = append(results, map[string]interface{}{ + "content_key_id": id, + "label_reference_in_streaming_policy": labelReferenceInStreamingPolicy, + "policy_name": policyName, + "type": string(contentKey.Type), + "value": value, + }) + } + + return results +} diff --git a/azurerm/internal/services/media/media_streaming_locator_resource_test.go b/azurerm/internal/services/media/media_streaming_locator_resource_test.go new file mode 100644 index 000000000000..db6dbd572c67 --- /dev/null +++ b/azurerm/internal/services/media/media_streaming_locator_resource_test.go @@ -0,0 +1,198 @@ +package media_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/media/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type StreamingLocatorResource struct{} + +func TestAccStreamingLocator_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_media_streaming_locator", "test") + r := StreamingLocatorResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("name").HasValue("Locator-1"), + check.That(data.ResourceName).Key("asset_name").HasValue("test"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccStreamingLocator_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_media_streaming_locator", "test") + r := StreamingLocatorResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("name").HasValue("Locator-1"), + check.That(data.ResourceName).Key("asset_name").HasValue("test"), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccStreamingLocator_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_media_streaming_locator", "test") + r := StreamingLocatorResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("start_time").HasValue("2018-03-01T00:00:00Z"), + check.That(data.ResourceName).Key("end_time").HasValue("2028-12-31T23:59:59Z"), + ), + }, + 
data.ImportStep(), + }) +} + +func TestAccStreamingLocator_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_media_streaming_locator", "test") + r := StreamingLocatorResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("name").HasValue("Locator-1"), + check.That(data.ResourceName).Key("asset_name").HasValue("test"), + ), + }, + data.ImportStep(), + { + Config: r.complete(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("start_time").HasValue("2018-03-01T00:00:00Z"), + check.That(data.ResourceName).Key("end_time").HasValue("2028-12-31T23:59:59Z"), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("name").HasValue("Locator-1"), + check.That(data.ResourceName).Key("asset_name").HasValue("test"), + ), + }, + data.ImportStep(), + }) +} + +func (StreamingLocatorResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.StreamingLocatorID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Media.StreamingLocatorsClient.Get(ctx, id.ResourceGroup, id.MediaserviceName, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Content Key Policy %s (Media Services Account %s) (resource group: %s): %v", id.Name, id.MediaserviceName, id.ResourceGroup, err) + } + + return utils.Bool(resp.StreamingLocatorProperties != nil), nil +} + +func (r StreamingLocatorResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_media_streaming_locator" "test" { + name = "Locator-1" + resource_group_name = azurerm_resource_group.test.name + media_services_account_name = azurerm_media_services_account.test.name + streaming_policy_name = "Predefined_ClearStreamingOnly" + asset_name = azurerm_media_asset.test.name +} + +`, r.template(data)) +} + +func (r StreamingLocatorResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_media_streaming_locator" "import" { + name = azurerm_media_streaming_locator.test.name + resource_group_name = azurerm_media_streaming_locator.test.resource_group_name + media_services_account_name = azurerm_media_streaming_locator.test.media_services_account_name + streaming_policy_name = "Predefined_ClearStreamingOnly" + asset_name = azurerm_media_asset.test.name +} + +`, r.basic(data)) +} + +func (r StreamingLocatorResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_media_streaming_locator" "test" { + name = "Job-1" + resource_group_name = azurerm_resource_group.test.name + media_services_account_name = azurerm_media_services_account.test.name + streaming_policy_name = "Predefined_DownloadOnly" + asset_name = azurerm_media_asset.test.name + start_time = "2018-03-01T00:00:00Z" + end_time = "2028-12-31T23:59:59Z" + streaming_locator_id = "90000000-0000-0000-0000-000000000000" + alternative_media_id = "my-Alternate-MediaID" +} + +`, r.template(data)) +} + +func (StreamingLocatorResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-media-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestsa1%s" + resource_group_name = 
azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "GRS" +} + +resource "azurerm_media_services_account" "test" { + name = "acctestmsa%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + storage_account { + id = azurerm_storage_account.test.id + is_primary = true + } +} + +resource "azurerm_media_asset" "test" { + name = "test" + resource_group_name = azurerm_resource_group.test.name + media_services_account_name = azurerm_media_services_account.test.name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) +} diff --git a/azurerm/internal/services/media/media_transform_resource.go b/azurerm/internal/services/media/media_transform_resource.go new file mode 100644 index 000000000000..e1476394e69b --- /dev/null +++ b/azurerm/internal/services/media/media_transform_resource.go @@ -0,0 +1,505 @@ +package media + +import ( + "fmt" + "log" + "regexp" + "time" + + "github.com/Azure/azure-sdk-for-go/services/mediaservices/mgmt/2020-05-01/media" + "github.com/hashicorp/go-azure-helpers/response" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azuread/azuread/helpers/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/media/parse" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceMediaTransform() *schema.Resource { + return &schema.Resource{ + Create: resourceMediaTransformCreateUpdate, + Read: resourceMediaTransformRead, + Update: resourceMediaTransformCreateUpdate, + Delete: resourceMediaTransformDelete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.TransformID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringMatch( + regexp.MustCompile("^[-a-zA-Z0-9(_)]{1,128}$"), + "Transform name must be 1 - 128 characters long, can contain letters, numbers, underscores, and hyphens (but the first and last character must be a letter or number).", + ), + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "media_services_account_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringMatch( + regexp.MustCompile("^[-a-z0-9]{3,24}$"), + "Media Services Account name must be 3 - 24 characters long, contain only lowercase letters and numbers.", + ), + }, + + "description": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validate.NoEmptyStrings, + }, + + "output": { + Type: schema.TypeList, + Optional: 
true, + MinItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "on_error_action": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{ + string(media.ContinueJob), + string(media.StopProcessingJob), + }, false), + }, + "builtin_preset": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "preset_name": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{ + string(media.AACGoodQualityAudio), + string(media.AdaptiveStreaming), + string(media.ContentAwareEncoding), + string(media.ContentAwareEncodingExperimental), + string(media.CopyAllBitrateNonInterleaved), + string(media.H264MultipleBitrate1080p), + string(media.H264MultipleBitrate720p), + string(media.H264MultipleBitrateSD), + string(media.H264SingleBitrate1080p), + string(media.H264SingleBitrate720p), + string(media.H264MultipleBitrateSD), + }, false), + }, + }, + }, + }, + "audio_analyzer_preset": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "audio_language": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{ + "ar-EG", + "ar-SY", + "de-DE", + "en-AU", + "en-GB", + "en-US", + "es-ES", + "es-MX", + "fr-FR", + "hi-IN", + "it-IT", + "ja-JP", + "ko-KR", + "pt-BR", + "ru-RU", + "zh-CN", + }, false), + }, + "audio_analysis_mode": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{ + string(media.Basic), + string(media.Standard), + }, false), + }, + }, + }, + }, + "video_analyzer_preset": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "audio_language": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{ + "ar-EG", + "ar-SY", + "de-DE", + "en-AU", + "en-GB", + "en-US", + "es-ES", + "es-MX", + "fr-FR", + "hi-IN", + "it-IT", + "ja-JP", + "ko-KR", + "pt-BR", + "ru-RU", + "zh-CN", + }, false), + }, + "audio_analysis_mode": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{ + string(media.Basic), + string(media.Standard), + }, false), + }, + "insights_type": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{ + string(media.AllInsights), + string(media.AudioInsightsOnly), + string(media.VideoInsightsOnly), + }, false), + }, + }, + }, + }, + "face_detector_preset": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "analysis_resolution": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{ + string(media.SourceResolution), + string(media.StandardDefinition), + }, false), + }, + }, + }, + }, + "relative_priority": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{ + string(media.High), + string(media.Normal), + string(media.Low), + }, false), + }, + }, + }, + }, + }, + } +} + +func resourceMediaTransformCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Media.TransformsClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + resourceId := 
parse.NewTransformID(subscriptionId, d.Get("resource_group_name").(string), d.Get("media_services_account_name").(string), d.Get("name").(string)) + if d.IsNewResource() { + existing, err := client.Get(ctx, resourceId.ResourceGroup, resourceId.MediaserviceName, resourceId.Name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for existing %s: %+v", resourceId, err) + } + } + + if !utils.ResponseWasNotFound(existing.Response) { + return tf.ImportAsExistsError("azurerm_media_transform", resourceId.ID()) + } + } + + parameters := media.Transform{ + TransformProperties: &media.TransformProperties{ + Description: utils.String(d.Get("description").(string)), + }, + } + + if v, ok := d.GetOk("output"); ok { + transformOutput, err := expandTransformOuputs(v.([]interface{})) + if err != nil { + return err + } + parameters.Outputs = transformOutput + } + + if _, err := client.CreateOrUpdate(ctx, resourceId.ResourceGroup, resourceId.MediaserviceName, resourceId.Name, parameters); err != nil { + return fmt.Errorf("creating/updating %s: %+v", resourceId, err) + } + + d.SetId(resourceId.ID()) + return resourceMediaTransformRead(d, meta) +} + +func resourceMediaTransformRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Media.TransformsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.TransformID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.MediaserviceName, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[INFO] Transform %q was not found in Media Services Account %q and Resource Group %q - removing from state", id.Name, id.MediaserviceName, id.ResourceGroup) + d.SetId("") + return nil + } + + return fmt.Errorf("Error retrieving Transform %q in Media Services Account %q (Resource Group %q): %+v", id.Name, id.MediaserviceName, id.ResourceGroup, err) + } + + d.Set("name", id.Name) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("media_services_account_name", id.MediaserviceName) + + if props := resp.TransformProperties; props != nil { + if description := props.Description; description != nil { + d.Set("description", description) + } + + outputs := flattenTransformOutputs(props.Outputs) + if err := d.Set("output", outputs); err != nil { + return fmt.Errorf("Error flattening `output`: %s", err) + } + } + + return nil +} + +func resourceMediaTransformDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Media.TransformsClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.TransformID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Delete(ctx, id.ResourceGroup, id.MediaserviceName, id.Name) + if err != nil { + if response.WasNotFound(resp.Response) { + return nil + } + return fmt.Errorf("Error deleting Transform %q in Media Services Account %q (Resource Group %q): %+v", id.Name, id.MediaserviceName, id.ResourceGroup, err) + } + + return nil +} + +func expandTransformOuputs(input []interface{}) (*[]media.TransformOutput, error) { + results := make([]media.TransformOutput, 0) + + for _, transformOuputRaw := range input { + transform := transformOuputRaw.(map[string]interface{}) + + preset, err := expandPreset(transform) + if err != nil { + return nil, err + } + + transformOuput := media.TransformOutput{ + Preset: preset, + } + + if 
transform["on_error_action"] != nil { + transformOuput.OnError = media.OnErrorType(transform["on_error_action"].(string)) + } + + if transform["relative_priority"] != nil { + transformOuput.RelativePriority = media.Priority(transform["relative_priority"].(string)) + } + + results = append(results, transformOuput) + } + + return &results, nil +} + +func flattenTransformOutputs(input *[]media.TransformOutput) []interface{} { + if input == nil { + return []interface{}{} + } + + results := make([]interface{}, 0) + for _, transformOuput := range *input { + output := make(map[string]interface{}) + output["on_error_action"] = string(transformOuput.OnError) + output["relative_priority"] = string(transformOuput.RelativePriority) + attribute, preset := flattenPreset(transformOuput.Preset) + if attribute != "" { + output[attribute] = preset + } + results = append(results, output) + } + + return results +} + +func expandPreset(transform map[string]interface{}) (media.BasicPreset, error) { + presetsCount := 0 + presetType := "" + if transform["builtin_preset"] != nil && len(transform["builtin_preset"].([]interface{})) > 0 { + presetsCount++ + presetType = string(media.OdataTypeMicrosoftMediaBuiltInStandardEncoderPreset) + } + if transform["audio_analyzer_preset"] != nil && len(transform["audio_analyzer_preset"].([]interface{})) > 0 { + presetsCount++ + presetType = string(media.OdataTypeMicrosoftMediaAudioAnalyzerPreset) + } + if transform["video_analyzer_preset"] != nil && len(transform["video_analyzer_preset"].([]interface{})) > 0 { + presetsCount++ + presetType = string(media.OdataTypeMicrosoftMediaVideoAnalyzerPreset) + } + if transform["face_detector_preset"] != nil && len(transform["face_detector_preset"].([]interface{})) > 0 { + presetsCount++ + presetType = string(media.OdataTypeMicrosoftMediaFaceDetectorPreset) + } + + if presetsCount == 0 { + return nil, fmt.Errorf("output must contain at least one type of preset: builtin_preset,face_detector_preset,video_analyzer_preset or audio_analyzer_preset.") + } + + if presetsCount > 1 { + return nil, fmt.Errorf("more than one type of preset in the same output is not allowed.") + } + + switch presetType { + case string(media.OdataTypeMicrosoftMediaBuiltInStandardEncoderPreset): + presets := transform["builtin_preset"].([]interface{}) + preset := presets[0].(map[string]interface{}) + if preset["preset_name"] == nil { + return nil, fmt.Errorf("preset_name is required for BuiltInStandardEncoderPreset") + } + presetName := preset["preset_name"].(string) + builtInPreset := &media.BuiltInStandardEncoderPreset{ + PresetName: media.EncoderNamedPreset(presetName), + OdataType: media.OdataTypeMicrosoftMediaBuiltInStandardEncoderPreset, + } + return builtInPreset, nil + case string(media.OdataTypeMicrosoftMediaAudioAnalyzerPreset): + presets := transform["audio_analyzer_preset"].([]interface{}) + preset := presets[0].(map[string]interface{}) + audioAnalyzerPreset := &media.AudioAnalyzerPreset{ + OdataType: media.OdataTypeMicrosoftMediaAudioAnalyzerPreset, + } + if preset["audio_language"] != nil && preset["audio_language"].(string) != "" { + audioAnalyzerPreset.AudioLanguage = utils.String(preset["audio_language"].(string)) + } + if preset["audio_analysis_mode"] != nil { + audioAnalyzerPreset.Mode = media.AudioAnalysisMode(preset["audio_analysis_mode"].(string)) + } + return audioAnalyzerPreset, nil + case string(media.OdataTypeMicrosoftMediaFaceDetectorPreset): + presets := transform["face_detector_preset"].([]interface{}) + preset := 
presets[0].(map[string]interface{}) + faceDetectorPreset := &media.FaceDetectorPreset{ + OdataType: media.OdataTypeMicrosoftMediaFaceDetectorPreset, + } + if preset["analysis_resolution"] != nil { + faceDetectorPreset.Resolution = media.AnalysisResolution(preset["analysis_resolution"].(string)) + } + return faceDetectorPreset, nil + case string(media.OdataTypeMicrosoftMediaVideoAnalyzerPreset): + presets := transform["video_analyzer_preset"].([]interface{}) + preset := presets[0].(map[string]interface{}) + videoAnalyzerPreset := &media.VideoAnalyzerPreset{ + OdataType: media.OdataTypeMicrosoftMediaVideoAnalyzerPreset, + } + if preset["audio_language"] != nil { + videoAnalyzerPreset.AudioLanguage = utils.String(preset["audio_language"].(string)) + } + if preset["audio_analysis_mode"] != nil { + videoAnalyzerPreset.Mode = media.AudioAnalysisMode(preset["audio_analysis_mode"].(string)) + } + if preset["insights_type"] != nil { + videoAnalyzerPreset.InsightsToExtract = media.InsightsType(preset["insights_type"].(string)) + } + return videoAnalyzerPreset, nil + default: + return nil, fmt.Errorf("output must contain at least one type of preset: builtin_preset,face_detector_preset,video_analyzer_preset or audio_analyzer_preset") + } +} + +func flattenPreset(preset media.BasicPreset) (string, []interface{}) { + if preset == nil { + return "", []interface{}{} + } + + results := make([]interface{}, 0) + result := make(map[string]interface{}) + switch preset.(type) { + case media.AudioAnalyzerPreset: + mediaAudioAnalyzerPreset, _ := preset.AsAudioAnalyzerPreset() + result["audio_analysis_mode"] = string(mediaAudioAnalyzerPreset.Mode) + if mediaAudioAnalyzerPreset.AudioLanguage != nil { + result["audio_language"] = mediaAudioAnalyzerPreset.AudioLanguage + } + results = append(results, result) + return "audio_analyzer_preset", results + case media.BuiltInStandardEncoderPreset: + builtInStandardEncoderPreset, _ := preset.AsBuiltInStandardEncoderPreset() + result["preset_name"] = string(builtInStandardEncoderPreset.PresetName) + results = append(results, result) + return "builtin_preset", results + case media.FaceDetectorPreset: + faceDetectorPreset, _ := preset.AsFaceDetectorPreset() + result["analysis_resolution"] = string(faceDetectorPreset.Resolution) + results = append(results, result) + return "face_detector_preset", results + case media.VideoAnalyzerPreset: + videoAnalyzerPreset, _ := preset.AsVideoAnalyzerPreset() + result["audio_analysis_mode"] = string(videoAnalyzerPreset.Mode) + result["insights_type"] = string(videoAnalyzerPreset.InsightsToExtract) + if videoAnalyzerPreset.AudioLanguage != nil { + result["audio_language"] = videoAnalyzerPreset.AudioLanguage + } + results = append(results, result) + return "video_analyzer_preset", results + } + + return "", results +} diff --git a/azurerm/internal/services/media/media_transform_resource_test.go b/azurerm/internal/services/media/media_transform_resource_test.go new file mode 100644 index 000000000000..cc51b022b9eb --- /dev/null +++ b/azurerm/internal/services/media/media_transform_resource_test.go @@ -0,0 +1,235 @@ +package media_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/media/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MediaTransformResource struct { +} + +func TestAccMediaTransform_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_media_transform", "test") + r := MediaTransformResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("name").HasValue("Transform-1"), + check.That(data.ResourceName).Key("output.#").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMediaTransform_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_media_transform", "test") + r := MediaTransformResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("name").HasValue("Transform-1"), + check.That(data.ResourceName).Key("output.#").HasValue("1"), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccMediaTransform_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_media_transform", "test") + r := MediaTransformResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("description").HasValue("Transform description"), + check.That(data.ResourceName).Key("output.#").HasValue("4"), + check.That(data.ResourceName).Key("name").HasValue("Transform-1"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMediaTransform_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_media_transform", "test") + r := MediaTransformResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("name").HasValue("Transform-1"), + check.That(data.ResourceName).Key("output.#").HasValue("1"), + ), + }, + data.ImportStep(), + { + Config: r.complete(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("description").HasValue("Transform description"), + check.That(data.ResourceName).Key("output.#").HasValue("4"), + check.That(data.ResourceName).Key("name").HasValue("Transform-1"), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).Key("name").HasValue("Transform-1"), + check.That(data.ResourceName).Key("output.#").HasValue("1"), + check.That(data.ResourceName).Key("description").HasValue(""), + ), + }, + data.ImportStep(), + }) +} + +func (r MediaTransformResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.TransformID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Media.TransformsClient.Get(ctx, id.ResourceGroup, id.MediaserviceName, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Transform %s (Media Account %s) (resource group: %s): %v", id.Name, id.MediaserviceName, id.ResourceGroup, err) + } + + return utils.Bool(resp.TransformProperties != nil), nil +} + +func (r MediaTransformResource) basic(data acceptance.TestData) string { + template := 
r.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_media_transform" "test" { + name = "Transform-1" + resource_group_name = azurerm_resource_group.test.name + media_services_account_name = azurerm_media_services_account.test.name + output { + relative_priority = "High" + on_error_action = "ContinueJob" + builtin_preset { + preset_name = "AACGoodQualityAudio" + } + } +} +`, template) +} + +func (r MediaTransformResource) requiresImport(data acceptance.TestData) string { + template := r.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_media_transform" "import" { + name = azurerm_media_transform.test.name + resource_group_name = azurerm_media_transform.test.resource_group_name + media_services_account_name = azurerm_media_transform.test.media_services_account_name + + output { + relative_priority = "High" + on_error_action = "ContinueJob" + builtin_preset { + preset_name = "AACGoodQualityAudio" + } + } +} +`, template) +} + +func (r MediaTransformResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_media_transform" "test" { + name = "Transform-1" + resource_group_name = azurerm_resource_group.test.name + media_services_account_name = azurerm_media_services_account.test.name + description = "Transform description" + output { + relative_priority = "High" + on_error_action = "ContinueJob" + builtin_preset { + preset_name = "AACGoodQualityAudio" + } + } + + output { + relative_priority = "High" + on_error_action = "StopProcessingJob" + audio_analyzer_preset { + audio_language = "en-US" + audio_analysis_mode = "Basic" + } + } + + output { + relative_priority = "Low" + on_error_action = "StopProcessingJob" + face_detector_preset { + analysis_resolution = "StandardDefinition" + } + } + + output { + relative_priority = "Normal" + on_error_action = "StopProcessingJob" + video_analyzer_preset { + audio_language = "en-US" + audio_analysis_mode = "Basic" + insights_type = "AllInsights" + } + } +} +`, r.template(data)) +} + +func (r MediaTransformResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-media-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestsa1%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "GRS" +} + +resource "azurerm_media_services_account" "test" { + name = "acctestmsa%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + storage_account { + id = azurerm_storage_account.test.id + is_primary = true + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomString) +} diff --git a/azurerm/internal/services/media/parse/asset.go b/azurerm/internal/services/media/parse/asset.go new file mode 100644 index 000000000000..ebcba0171b17 --- /dev/null +++ b/azurerm/internal/services/media/parse/asset.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type AssetId struct { + SubscriptionId string + ResourceGroup string + MediaserviceName string + Name string +} + +func NewAssetID(subscriptionId, resourceGroup, mediaserviceName, name string) AssetId { + return AssetId{ 
+ SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + MediaserviceName: mediaserviceName, + Name: name, + } +} + +func (id AssetId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Mediaservice Name %q", id.MediaserviceName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Asset", segmentsStr) +} + +func (id AssetId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Media/mediaservices/%s/assets/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.MediaserviceName, id.Name) +} + +// AssetID parses a Asset ID into an AssetId struct +func AssetID(input string) (*AssetId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := AssetId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.MediaserviceName, err = id.PopSegment("mediaservices"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("assets"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/media/parse/asset_test.go b/azurerm/internal/services/media/parse/asset_test.go new file mode 100644 index 000000000000..5288413600f7 --- /dev/null +++ b/azurerm/internal/services/media/parse/asset_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = AssetId{} + +func TestAssetIDFormatter(t *testing.T) { + actual := NewAssetID("12345678-1234-9876-4563-123456789012", "resGroup1", "account1", "asset1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/assets/asset1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestAssetID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *AssetId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing MediaserviceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/", + Error: true, + }, + + { + // missing value for MediaserviceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/", + Error: true, + }, + + { + // missing Name + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/assets/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/assets/asset1", + Expected: &AssetId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + MediaserviceName: "account1", + Name: "asset1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.MEDIA/MEDIASERVICES/ACCOUNT1/ASSETS/ASSET1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := AssetID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.MediaserviceName != v.Expected.MediaserviceName { + t.Fatalf("Expected %q but got %q for MediaserviceName", v.Expected.MediaserviceName, actual.MediaserviceName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/media/parse/job.go b/azurerm/internal/services/media/parse/job.go new file mode 100644 index 000000000000..25902b21e9c0 --- /dev/null +++ b/azurerm/internal/services/media/parse/job.go @@ -0,0 +1,81 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type JobId struct { + SubscriptionId string + ResourceGroup string + MediaserviceName string + TransformName string + Name string +} + +func NewJobID(subscriptionId, resourceGroup, mediaserviceName, transformName, name string) JobId { + return JobId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + MediaserviceName: mediaserviceName, + TransformName: transformName, + Name: name, + } +} + +func (id JobId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Transform Name %q", id.TransformName), + fmt.Sprintf("Mediaservice Name %q", id.MediaserviceName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Job", segmentsStr) +} + +func (id JobId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Media/mediaservices/%s/transforms/%s/jobs/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.MediaserviceName, id.TransformName, id.Name) +} + +// JobID parses a Job ID into an JobId struct +func JobID(input string) (*JobId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := JobId{ + SubscriptionId: 
id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.MediaserviceName, err = id.PopSegment("mediaservices"); err != nil { + return nil, err + } + if resourceId.TransformName, err = id.PopSegment("transforms"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("jobs"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/media/parse/job_test.go b/azurerm/internal/services/media/parse/job_test.go new file mode 100644 index 000000000000..446583168a76 --- /dev/null +++ b/azurerm/internal/services/media/parse/job_test.go @@ -0,0 +1,144 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = JobId{} + +func TestJobIDFormatter(t *testing.T) { + actual := NewJobID("12345678-1234-9876-4563-123456789012", "resGroup1", "account1", "transform1", "job1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/transforms/transform1/jobs/job1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestJobID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *JobId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing MediaserviceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/", + Error: true, + }, + + { + // missing value for MediaserviceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/", + Error: true, + }, + + { + // missing TransformName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/", + Error: true, + }, + + { + // missing value for TransformName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/transforms/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/transforms/transform1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/transforms/transform1/jobs/", + Error: true, + }, + + { + // valid + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/transforms/transform1/jobs/job1", + Expected: &JobId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + MediaserviceName: "account1", + TransformName: "transform1", + Name: "job1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.MEDIA/MEDIASERVICES/ACCOUNT1/TRANSFORMS/TRANSFORM1/JOBS/JOB1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := JobID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.MediaserviceName != v.Expected.MediaserviceName { + t.Fatalf("Expected %q but got %q for MediaserviceName", v.Expected.MediaserviceName, actual.MediaserviceName) + } + if actual.TransformName != v.Expected.TransformName { + t.Fatalf("Expected %q but got %q for TransformName", v.Expected.TransformName, actual.TransformName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/media/parse/media_service.go b/azurerm/internal/services/media/parse/media_service.go new file mode 100644 index 000000000000..4774d1de98f8 --- /dev/null +++ b/azurerm/internal/services/media/parse/media_service.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type MediaServiceId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewMediaServiceID(subscriptionId, resourceGroup, name string) MediaServiceId { + return MediaServiceId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id MediaServiceId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Media Service", segmentsStr) +} + +func (id MediaServiceId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Media/mediaservices/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// MediaServiceID parses a MediaService ID into an MediaServiceId struct +func MediaServiceID(input string) (*MediaServiceId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := MediaServiceId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = 
id.PopSegment("mediaservices"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/media/parse/media_service_test.go b/azurerm/internal/services/media/parse/media_service_test.go new file mode 100644 index 000000000000..815d1afb4511 --- /dev/null +++ b/azurerm/internal/services/media/parse/media_service_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = MediaServiceId{} + +func TestMediaServiceIDFormatter(t *testing.T) { + actual := NewMediaServiceID("12345678-1234-9876-4563-123456789012", "resGroup1", "account1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestMediaServiceID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *MediaServiceId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1", + Expected: &MediaServiceId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "account1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.MEDIA/MEDIASERVICES/ACCOUNT1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := MediaServiceID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/media/parse/media_services_account.go b/azurerm/internal/services/media/parse/media_services_account.go deleted file mode 100644 index 57d049f1c020..000000000000 --- 
a/azurerm/internal/services/media/parse/media_services_account.go +++ /dev/null @@ -1,33 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type MediaServicesAccountId struct { - ResourceGroup string - Name string -} - -func MediaServicesAccountID(input string) (*MediaServicesAccountId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Media Services Account ID %q: %+v", input, err) - } - - service := MediaServicesAccountId{ - ResourceGroup: id.ResourceGroup, - } - - if service.Name, err = id.PopSegment("mediaservices"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &service, nil -} diff --git a/azurerm/internal/services/media/parse/media_services_account_test.go b/azurerm/internal/services/media/parse/media_services_account_test.go deleted file mode 100644 index 6789c5e7aaed..000000000000 --- a/azurerm/internal/services/media/parse/media_services_account_test.go +++ /dev/null @@ -1,73 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestMediaServicesAccountId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *MediaServicesAccountId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Media Services Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/", - Expected: nil, - }, - { - Name: "Media Services Account ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/Service1", - Expected: &MediaServicesAccountId{ - Name: "Service1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Media/Mediaservices/Service1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := MediaServicesAccountID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/media/parse/streaming_endpoint.go b/azurerm/internal/services/media/parse/streaming_endpoint.go new file mode 100644 index 000000000000..6dc0a3981094 --- /dev/null +++ b/azurerm/internal/services/media/parse/streaming_endpoint.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type StreamingEndpointId struct { + 
SubscriptionId string + ResourceGroup string + MediaserviceName string + Name string +} + +func NewStreamingEndpointID(subscriptionId, resourceGroup, mediaserviceName, name string) StreamingEndpointId { + return StreamingEndpointId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + MediaserviceName: mediaserviceName, + Name: name, + } +} + +func (id StreamingEndpointId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Mediaservice Name %q", id.MediaserviceName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Streaming Endpoint", segmentsStr) +} + +func (id StreamingEndpointId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Media/mediaservices/%s/streamingendpoints/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.MediaserviceName, id.Name) +} + +// StreamingEndpointID parses a StreamingEndpoint ID into an StreamingEndpointId struct +func StreamingEndpointID(input string) (*StreamingEndpointId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := StreamingEndpointId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.MediaserviceName, err = id.PopSegment("mediaservices"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("streamingendpoints"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/media/parse/streaming_endpoint_test.go b/azurerm/internal/services/media/parse/streaming_endpoint_test.go new file mode 100644 index 000000000000..01aa60749f92 --- /dev/null +++ b/azurerm/internal/services/media/parse/streaming_endpoint_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = StreamingEndpointId{} + +func TestStreamingEndpointIDFormatter(t *testing.T) { + actual := NewStreamingEndpointID("12345678-1234-9876-4563-123456789012", "resGroup1", "account1", "endpoint1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/streamingendpoints/endpoint1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestStreamingEndpointID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *StreamingEndpointId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing MediaserviceName + 
Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/", + Error: true, + }, + + { + // missing value for MediaserviceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/streamingendpoints/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/streamingendpoints/endpoint1", + Expected: &StreamingEndpointId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + MediaserviceName: "account1", + Name: "endpoint1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.MEDIA/MEDIASERVICES/ACCOUNT1/STREAMINGENDPOINTS/ENDPOINT1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := StreamingEndpointID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.MediaserviceName != v.Expected.MediaserviceName { + t.Fatalf("Expected %q but got %q for MediaserviceName", v.Expected.MediaserviceName, actual.MediaserviceName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/media/parse/streaming_locator.go b/azurerm/internal/services/media/parse/streaming_locator.go new file mode 100644 index 000000000000..2761f94b3f78 --- /dev/null +++ b/azurerm/internal/services/media/parse/streaming_locator.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type StreamingLocatorId struct { + SubscriptionId string + ResourceGroup string + MediaserviceName string + Name string +} + +func NewStreamingLocatorID(subscriptionId, resourceGroup, mediaserviceName, name string) StreamingLocatorId { + return StreamingLocatorId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + MediaserviceName: mediaserviceName, + Name: name, + } +} + +func (id StreamingLocatorId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Mediaservice Name %q", id.MediaserviceName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Streaming Locator", segmentsStr) +} + +func (id StreamingLocatorId) ID() string { + fmtString := 
"/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Media/mediaservices/%s/streaminglocators/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.MediaserviceName, id.Name) +} + +// StreamingLocatorID parses a StreamingLocator ID into an StreamingLocatorId struct +func StreamingLocatorID(input string) (*StreamingLocatorId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := StreamingLocatorId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.MediaserviceName, err = id.PopSegment("mediaservices"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("streaminglocators"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/media/parse/streaming_locator_test.go b/azurerm/internal/services/media/parse/streaming_locator_test.go new file mode 100644 index 000000000000..081df8a34af3 --- /dev/null +++ b/azurerm/internal/services/media/parse/streaming_locator_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = StreamingLocatorId{} + +func TestStreamingLocatorIDFormatter(t *testing.T) { + actual := NewStreamingLocatorID("12345678-1234-9876-4563-123456789012", "resGroup1", "account1", "locator1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/streaminglocators/locator1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestStreamingLocatorID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *StreamingLocatorId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing MediaserviceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/", + Error: true, + }, + + { + // missing value for MediaserviceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/streaminglocators/", + Error: true, + }, + + { + // valid + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/streaminglocators/locator1", + Expected: &StreamingLocatorId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + MediaserviceName: "account1", + Name: "locator1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.MEDIA/MEDIASERVICES/ACCOUNT1/STREAMINGLOCATORS/LOCATOR1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := StreamingLocatorID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.MediaserviceName != v.Expected.MediaserviceName { + t.Fatalf("Expected %q but got %q for MediaserviceName", v.Expected.MediaserviceName, actual.MediaserviceName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/media/parse/transform.go b/azurerm/internal/services/media/parse/transform.go new file mode 100644 index 000000000000..e24d48f71604 --- /dev/null +++ b/azurerm/internal/services/media/parse/transform.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type TransformId struct { + SubscriptionId string + ResourceGroup string + MediaserviceName string + Name string +} + +func NewTransformID(subscriptionId, resourceGroup, mediaserviceName, name string) TransformId { + return TransformId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + MediaserviceName: mediaserviceName, + Name: name, + } +} + +func (id TransformId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Mediaservice Name %q", id.MediaserviceName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Transform", segmentsStr) +} + +func (id TransformId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Media/mediaservices/%s/transforms/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.MediaserviceName, id.Name) +} + +// TransformID parses a Transform ID into an TransformId struct +func TransformID(input string) (*TransformId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := TransformId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.MediaserviceName, err = id.PopSegment("mediaservices"); err != nil { + 
return nil, err + } + if resourceId.Name, err = id.PopSegment("transforms"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/media/parse/transform_test.go b/azurerm/internal/services/media/parse/transform_test.go new file mode 100644 index 000000000000..de066c5ede45 --- /dev/null +++ b/azurerm/internal/services/media/parse/transform_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = TransformId{} + +func TestTransformIDFormatter(t *testing.T) { + actual := NewTransformID("12345678-1234-9876-4563-123456789012", "resGroup1", "account1", "transform1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/transforms/transform1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestTransformID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *TransformId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing MediaserviceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/", + Error: true, + }, + + { + // missing value for MediaserviceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/transforms/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/transforms/transform1", + Expected: &TransformId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + MediaserviceName: "account1", + Name: "transform1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.MEDIA/MEDIASERVICES/ACCOUNT1/TRANSFORMS/TRANSFORM1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := TransformID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + 
if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.MediaserviceName != v.Expected.MediaserviceName { + t.Fatalf("Expected %q but got %q for MediaserviceName", v.Expected.MediaserviceName, actual.MediaserviceName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/media/registration.go b/azurerm/internal/services/media/registration.go index 6d1002195dde..ec336b9e6960 100644 --- a/azurerm/internal/services/media/registration.go +++ b/azurerm/internal/services/media/registration.go @@ -26,6 +26,11 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource { // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_media_services_account": resourceArmMediaServicesAccount(), + "azurerm_media_services_account": resourceMediaServicesAccount(), + "azurerm_media_asset": resourceMediaAsset(), + "azurerm_media_job": resourceMediaJob(), + "azurerm_media_streaming_endpoint": resourceMediaStreamingEndpoint(), + "azurerm_media_transform": resourceMediaTransform(), + "azurerm_media_streaming_locator": resourceMediaStreamingLocator(), } } diff --git a/azurerm/internal/services/media/resourceids.go b/azurerm/internal/services/media/resourceids.go new file mode 100644 index 000000000000..4d11e9e97446 --- /dev/null +++ b/azurerm/internal/services/media/resourceids.go @@ -0,0 +1,8 @@ +package media + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=MediaService -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Transform -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/transforms/transform1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Asset -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/assets/asset1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=StreamingEndpoint -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/streamingendpoints/endpoint1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Job -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/transforms/transform1/jobs/job1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=StreamingLocator -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/streaminglocators/locator1 diff --git a/azurerm/internal/services/media/tests/media_services_account_resource_test.go b/azurerm/internal/services/media/tests/media_services_account_resource_test.go deleted file mode 100644 index 0ad8c34292a4..000000000000 --- a/azurerm/internal/services/media/tests/media_services_account_resource_test.go +++ /dev/null @@ -1,260 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "regexp" - 
"testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/media/parse" -) - -func TestAccAzureRMMediaServicesAccount_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_media_services_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMediaServicesAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMediaServicesAccount_basic(data), - Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "storage_account.#", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMediaServicesAccount_multipleAccounts(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_media_services_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMediaServicesAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMediaServicesAccount_multipleAccounts(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMMediaServicesAccountExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "storage_account.#", "2"), - ), - }, - { - Config: testAccAzureRMMediaServicesAccount_multipleAccountsUpdated(data), - PlanOnly: true, - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMediaServicesAccount_multiplePrimaries(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_media_services_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMediaServicesAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMediaServicesAccount_multiplePrimaries(data), - ExpectError: regexp.MustCompile("Only one Storage Account can be set as Primary"), - }, - }, - }) -} - -func testCheckAzureRMMediaServicesAccountExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Media.ServicesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Media service not found: %s", resourceName) - } - - id, err := parse.MediaServicesAccountID(rs.Primary.ID) - if err != nil { - return err - } - resp, err := conn.Get(ctx, id.ResourceGroup, id.Name) - if err != nil { - return fmt.Errorf("Bad: Get on mediaServicesClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Media Services Account %q (Resource Group %q) does not exist", id.Name, id.ResourceGroup) - } - - return nil - } -} - -func testCheckAzureRMMediaServicesAccountDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Media.ServicesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range 
s.RootModule().Resources { - if rs.Type != "azurerm_media_services_account" { - continue - } - - id, err := parse.MediaServicesAccountID(rs.Primary.ID) - if err != nil { - return err - } - resp, err := conn.Get(ctx, id.ResourceGroup, id.Name) - - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Media Services Account still exists:\n%#v", resp) - } - } - - return nil -} - -func testAccAzureRMMediaServicesAccount_basic(data acceptance.TestData) string { - template := testAccAzureRMMediaServicesAccount_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_media_services_account" "test" { - name = "acctestmsa%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - storage_account { - id = azurerm_storage_account.first.id - is_primary = true - } -} -`, template, data.RandomString) -} - -func testAccAzureRMMediaServicesAccount_multipleAccounts(data acceptance.TestData) string { - template := testAccAzureRMMediaServicesAccount_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_storage_account" "second" { - name = "acctestsa2%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "GRS" -} - -resource "azurerm_media_services_account" "test" { - name = "acctestmsa%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - storage_account { - id = azurerm_storage_account.first.id - is_primary = true - } - - storage_account { - id = azurerm_storage_account.second.id - is_primary = false - } -} -`, template, data.RandomString, data.RandomString) -} - -func testAccAzureRMMediaServicesAccount_multipleAccountsUpdated(data acceptance.TestData) string { - template := testAccAzureRMMediaServicesAccount_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_storage_account" "second" { - name = "acctestsa2%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "GRS" -} - -resource "azurerm_media_services_account" "test" { - name = "acctestmsa%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - storage_account { - id = azurerm_storage_account.second.id - is_primary = false - } - - storage_account { - id = azurerm_storage_account.first.id - is_primary = true - } -} -`, template, data.RandomString, data.RandomString) -} - -func testAccAzureRMMediaServicesAccount_multiplePrimaries(data acceptance.TestData) string { - template := testAccAzureRMMediaServicesAccount_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_storage_account" "second" { - name = "acctestsa2%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "GRS" -} - -resource "azurerm_media_services_account" "test" { - name = "acctestmsa%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - storage_account { - id = azurerm_storage_account.first.id - is_primary = true - } - - storage_account { - id = azurerm_storage_account.second.id - is_primary = true - } -} -`, template, data.RandomString, data.RandomString) -} - -func testAccAzureRMMediaServicesAccount_template(data acceptance.TestData) string { - 
return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-media-%d" - location = "%s" -} - -resource "azurerm_storage_account" "first" { - name = "acctestsa1%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "GRS" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} diff --git a/azurerm/internal/services/media/validate/asset_id.go b/azurerm/internal/services/media/validate/asset_id.go new file mode 100644 index 000000000000..a0ae9711e7f3 --- /dev/null +++ b/azurerm/internal/services/media/validate/asset_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/media/parse" +) + +func AssetID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.AssetID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/media/validate/asset_id_test.go b/azurerm/internal/services/media/validate/asset_id_test.go new file mode 100644 index 000000000000..817bde1be788 --- /dev/null +++ b/azurerm/internal/services/media/validate/asset_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestAssetID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing MediaserviceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/", + Valid: false, + }, + + { + // missing value for MediaserviceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/assets/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/assets/asset1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.MEDIA/MEDIASERVICES/ACCOUNT1/ASSETS/ASSET1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := AssetID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid 
!= valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/media/validate/job_id.go b/azurerm/internal/services/media/validate/job_id.go new file mode 100644 index 000000000000..8d3956d33150 --- /dev/null +++ b/azurerm/internal/services/media/validate/job_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/media/parse" +) + +func JobID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.JobID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/media/validate/job_id_test.go b/azurerm/internal/services/media/validate/job_id_test.go new file mode 100644 index 000000000000..b2df658a22bc --- /dev/null +++ b/azurerm/internal/services/media/validate/job_id_test.go @@ -0,0 +1,100 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestJobID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing MediaserviceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/", + Valid: false, + }, + + { + // missing value for MediaserviceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/", + Valid: false, + }, + + { + // missing TransformName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/", + Valid: false, + }, + + { + // missing value for TransformName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/transforms/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/transforms/transform1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/transforms/transform1/jobs/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/transforms/transform1/jobs/job1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.MEDIA/MEDIASERVICES/ACCOUNT1/TRANSFORMS/TRANSFORM1/JOBS/JOB1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors 
:= JobID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/media/validate/media_service_id.go b/azurerm/internal/services/media/validate/media_service_id.go new file mode 100644 index 000000000000..020332355447 --- /dev/null +++ b/azurerm/internal/services/media/validate/media_service_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/media/parse" +) + +func MediaServiceID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.MediaServiceID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/media/validate/media_service_id_test.go b/azurerm/internal/services/media/validate/media_service_id_test.go new file mode 100644 index 000000000000..550ecfcaec72 --- /dev/null +++ b/azurerm/internal/services/media/validate/media_service_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestMediaServiceID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.MEDIA/MEDIASERVICES/ACCOUNT1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := MediaServiceID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/media/validate/streaming_endpoint_id.go b/azurerm/internal/services/media/validate/streaming_endpoint_id.go new file mode 100644 index 000000000000..11998bf61e33 --- /dev/null +++ b/azurerm/internal/services/media/validate/streaming_endpoint_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/media/parse" +) + +func StreamingEndpointID(input interface{}, key string) (warnings 
[]string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.StreamingEndpointID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/media/validate/streaming_endpoint_id_test.go b/azurerm/internal/services/media/validate/streaming_endpoint_id_test.go new file mode 100644 index 000000000000..710f37df77de --- /dev/null +++ b/azurerm/internal/services/media/validate/streaming_endpoint_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestStreamingEndpointID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing MediaserviceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/", + Valid: false, + }, + + { + // missing value for MediaserviceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/streamingendpoints/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/streamingendpoints/endpoint1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.MEDIA/MEDIASERVICES/ACCOUNT1/STREAMINGENDPOINTS/ENDPOINT1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := StreamingEndpointID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/media/validate/streaming_locator_id.go b/azurerm/internal/services/media/validate/streaming_locator_id.go new file mode 100644 index 000000000000..d5ef2300ae98 --- /dev/null +++ b/azurerm/internal/services/media/validate/streaming_locator_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/media/parse" +) + +func StreamingLocatorID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.StreamingLocatorID(v); err != nil { + errors = 
append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/media/validate/streaming_locator_id_test.go b/azurerm/internal/services/media/validate/streaming_locator_id_test.go new file mode 100644 index 000000000000..3f069520c444 --- /dev/null +++ b/azurerm/internal/services/media/validate/streaming_locator_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestStreamingLocatorID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing MediaserviceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/", + Valid: false, + }, + + { + // missing value for MediaserviceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/streaminglocators/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/streaminglocators/locator1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.MEDIA/MEDIASERVICES/ACCOUNT1/STREAMINGLOCATORS/LOCATOR1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := StreamingLocatorID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/media/validate/transform_id.go b/azurerm/internal/services/media/validate/transform_id.go new file mode 100644 index 000000000000..1755cc5f65ad --- /dev/null +++ b/azurerm/internal/services/media/validate/transform_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/media/parse" +) + +func TransformID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.TransformID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/media/validate/transform_id_test.go b/azurerm/internal/services/media/validate/transform_id_test.go new file mode 100644 index 000000000000..478094b07966 --- /dev/null +++ 
b/azurerm/internal/services/media/validate/transform_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestTransformID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing MediaserviceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/", + Valid: false, + }, + + { + // missing value for MediaserviceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/transforms/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Media/mediaservices/account1/transforms/transform1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.MEDIA/MEDIASERVICES/ACCOUNT1/TRANSFORMS/TRANSFORM1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := TransformID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/media/validation.go b/azurerm/internal/services/media/validation.go new file mode 100644 index 000000000000..2b2ef2379e71 --- /dev/null +++ b/azurerm/internal/services/media/validation.go @@ -0,0 +1,53 @@ +package media + +import ( + "fmt" + "regexp" + "strings" +) + +func ValidateMediaServicesAccountName(i interface{}, k string) (warnings []string, errors []error) { + v, ok := i.(string) + if !ok { + errors = append(errors, fmt.Errorf("Expected %q to be a string but it wasn't!", k)) + return + } + + // The value must not be empty. + if strings.TrimSpace(v) == "" { + errors = append(errors, fmt.Errorf("%q must not be empty", k)) + return + } + + if matched := regexp.MustCompile(`^[-a-z0-9]{3,24}$`).Match([]byte(v)); !matched { + errors = append(errors, fmt.Errorf("%q must be 3 - 24 characters long, contain only lowercase letters and numbers.", k)) + } + + return warnings, errors +} + +func ValidateStreamingEnpointName(i interface{}, k string) (warnings []string, errors []error) { + v, ok := i.(string) + if !ok { + errors = append(errors, fmt.Errorf("Expected %q to be a string but it wasn't!", k)) + return + } + + // The value must not be empty. 
+ if strings.TrimSpace(v) == "" { + errors = append(errors, fmt.Errorf("%q must not be empty", k)) + return + } + + const maxLength = 32 + // Streaming endpoint name can be 1-32 characters in length + if len(v) > maxLength { + errors = append(errors, fmt.Errorf("%q cannot exceed 32 characters.", k)) + } + + if matched := regexp.MustCompile(`^[a-zA-Z0-9]+(-*[a-zA-Z0-9])*$`).Match([]byte(v)); !matched { + errors = append(errors, fmt.Errorf("%q can only contain alphanumeric characters and hyphens. Must not begin or end with hyphen.", k)) + } + + return warnings, errors +} diff --git a/azurerm/internal/services/mixedreality/parse/spatial_anchors_account.go b/azurerm/internal/services/mixedreality/parse/spatial_anchors_account.go index 08c34cf44763..ee45ce714b77 100644 --- a/azurerm/internal/services/mixedreality/parse/spatial_anchors_account.go +++ b/azurerm/internal/services/mixedreality/parse/spatial_anchors_account.go @@ -1,27 +1,63 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type SpatialAnchorsAccountId struct { - ResourceGroup string - Name string + SubscriptionId string + ResourceGroup string + Name string +} + +func NewSpatialAnchorsAccountID(subscriptionId, resourceGroup, name string) SpatialAnchorsAccountId { + return SpatialAnchorsAccountId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id SpatialAnchorsAccountId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Spatial Anchors Account", segmentsStr) } +func (id SpatialAnchorsAccountId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.MixedReality/spatialAnchorsAccounts/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// SpatialAnchorsAccountID parses a SpatialAnchorsAccount ID into an SpatialAnchorsAccountId struct func SpatialAnchorsAccountID(input string) (*SpatialAnchorsAccountId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Search Service ID %q: %+v", input, err) + return nil, err + } + + resourceId := SpatialAnchorsAccountId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - service := SpatialAnchorsAccountId{ - ResourceGroup: id.ResourceGroup, + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if service.Name, err = id.PopSegment("spatialAnchorsAccounts"); err != nil { + if resourceId.Name, err = id.PopSegment("spatialAnchorsAccounts"); err != nil { return nil, err } @@ -29,5 +65,5 @@ func SpatialAnchorsAccountID(input string) (*SpatialAnchorsAccountId, error) { return nil, err } - return &service, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/mixedreality/parse/spatial_anchors_account_test.go b/azurerm/internal/services/mixedreality/parse/spatial_anchors_account_test.go index c3edd765240e..864fb2427204 100644 --- a/azurerm/internal/services/mixedreality/parse/spatial_anchors_account_test.go +++ 
b/azurerm/internal/services/mixedreality/parse/spatial_anchors_account_test.go @@ -1,73 +1,112 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" ) -func TestSearchServiceId(t *testing.T) { +var _ resourceid.Formatter = SpatialAnchorsAccountId{} + +func TestSpatialAnchorsAccountIDFormatter(t *testing.T) { + actual := NewSpatialAnchorsAccountID("12345678-1234-9876-4563-123456789012", "resGroup1", "Account1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.MixedReality/spatialAnchorsAccounts/Account1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestSpatialAnchorsAccountID(t *testing.T) { testData := []struct { - Name string Input string + Error bool Expected *SpatialAnchorsAccountId }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + { - Name: "Empty", - Input: "", - Expected: nil, + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.MixedReality/", + Error: true, }, + { - Name: "Missing Search Services Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.MixedReality/spatialAnchorsAccounts/", - Expected: nil, + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.MixedReality/spatialAnchorsAccounts/", + Error: true, }, + { - Name: "Search Service ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.MixedReality/spatialAnchorsAccounts/Account1", + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.MixedReality/spatialAnchorsAccounts/Account1", Expected: &SpatialAnchorsAccountId{ - Name: "Account1", - ResourceGroup: "resGroup1", + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "Account1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.MixedReality/SpatialAnchorsAccounts/Service1", - Expected: nil, + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.MIXEDREALITY/SPATIALANCHORSACCOUNTS/ACCOUNT1", + Error: true, }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := SpatialAnchorsAccountID(v.Input) if err != nil { - if 
v.Expected == nil { + if v.Error { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + if v.Error { + t.Fatal("Expect an error but didn't get one") } + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) } } } diff --git a/azurerm/internal/services/mixedreality/registration.go b/azurerm/internal/services/mixedreality/registration.go index 424e761692e4..601c13c1b447 100644 --- a/azurerm/internal/services/mixedreality/registration.go +++ b/azurerm/internal/services/mixedreality/registration.go @@ -26,6 +26,6 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource { // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_spatial_anchors_account": resourceArmSpatialAnchorsAccount(), + "azurerm_spatial_anchors_account": resourceSpatialAnchorsAccount(), } } diff --git a/azurerm/internal/services/mixedreality/resourceids.go b/azurerm/internal/services/mixedreality/resourceids.go new file mode 100644 index 000000000000..e3a9873453fb --- /dev/null +++ b/azurerm/internal/services/mixedreality/resourceids.go @@ -0,0 +1,3 @@ +package mixedreality + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=SpatialAnchorsAccount -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.MixedReality/spatialAnchorsAccounts/Account1 diff --git a/azurerm/internal/services/mixedreality/spatial_anchors_account_resource.go b/azurerm/internal/services/mixedreality/spatial_anchors_account_resource.go index ca3cad855041..6dfc62ac843d 100644 --- a/azurerm/internal/services/mixedreality/spatial_anchors_account_resource.go +++ b/azurerm/internal/services/mixedreality/spatial_anchors_account_resource.go @@ -18,11 +18,11 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmSpatialAnchorsAccount() *schema.Resource { +func resourceSpatialAnchorsAccount() *schema.Resource { return &schema.Resource{ - Create: resourceArmSpatialAnchorsAccountCreate, - Read: resourceArmSpatialAnchorsAccountRead, - Delete: resourceArmSpatialAnchorsAccountDelete, + Create: resourceSpatialAnchorsAccountCreate, + Read: resourceSpatialAnchorsAccountRead, + Delete: resourceSpatialAnchorsAccountDelete, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { _, err := parse.SpatialAnchorsAccountID(id) return err @@ -55,7 +55,7 @@ func resourceArmSpatialAnchorsAccount() *schema.Resource { } } -func resourceArmSpatialAnchorsAccountCreate(d *schema.ResourceData, meta interface{}) error { +func resourceSpatialAnchorsAccountCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MixedReality.SpatialAnchorsAccountClient ctx, cancel := 
timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -98,10 +98,10 @@ func resourceArmSpatialAnchorsAccountCreate(d *schema.ResourceData, meta interfa d.SetId(*resp.ID) - return resourceArmSpatialAnchorsAccountRead(d, meta) + return resourceSpatialAnchorsAccountRead(d, meta) } -func resourceArmSpatialAnchorsAccountRead(d *schema.ResourceData, meta interface{}) error { +func resourceSpatialAnchorsAccountRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MixedReality.SpatialAnchorsAccountClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -130,7 +130,7 @@ func resourceArmSpatialAnchorsAccountRead(d *schema.ResourceData, meta interface return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmSpatialAnchorsAccountDelete(d *schema.ResourceData, meta interface{}) error { +func resourceSpatialAnchorsAccountDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MixedReality.SpatialAnchorsAccountClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/mixedreality/spatial_anchors_account_resource_test.go b/azurerm/internal/services/mixedreality/spatial_anchors_account_resource_test.go new file mode 100644 index 000000000000..2405c6be6942 --- /dev/null +++ b/azurerm/internal/services/mixedreality/spatial_anchors_account_resource_test.go @@ -0,0 +1,106 @@ +package mixedreality_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mixedreality/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type SpatialAnchorsAccountResource struct { +} + +func TestAccSpatialAnchorsAccount_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_spatial_anchors_account", "test") + r := SpatialAnchorsAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccSpatialAnchorsAccount_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_spatial_anchors_account", "test") + r := SpatialAnchorsAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.Environment").HasValue("Production"), + ), + }, + data.ImportStep(), + }) +} + +func (SpatialAnchorsAccountResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.SpatialAnchorsAccountID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.MixedReality.SpatialAnchorsAccountClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving Spatial Anchors Account %s (resource group: %s): %v", 
id.Name, id.ResourceGroup, err) + } + + return utils.Bool(resp.SpatialAnchorsAccountProperties != nil), nil +} + +func (SpatialAnchorsAccountResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-mr-%d" + location = "%s" +} + +resource "azurerm_spatial_anchors_account" "test" { + name = "accTEst_saa%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} +`, data.RandomInteger, data.Locations.Secondary, data.RandomInteger) +} + +func (SpatialAnchorsAccountResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-mr-%d" + location = "%s" +} + +resource "azurerm_spatial_anchors_account" "test" { + name = "acCTestdf%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + tags = { + Environment = "Production" + } +} +`, data.RandomInteger, data.Locations.Secondary, data.RandomInteger) +} diff --git a/azurerm/internal/services/mixedreality/tests/spatial_anchors_account_resource_test.go b/azurerm/internal/services/mixedreality/tests/spatial_anchors_account_resource_test.go deleted file mode 100644 index 0d109a6368e5..000000000000 --- a/azurerm/internal/services/mixedreality/tests/spatial_anchors_account_resource_test.go +++ /dev/null @@ -1,152 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mixedreality/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMSpatialAnchorsAccount_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_spatial_anchors_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMSpatialAnchorsAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMSpatialAnchorsAccount_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMSpatialAnchorsAccountExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMSpatialAnchorsAccount_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_spatial_anchors_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMSpatialAnchorsAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMSpatialAnchorsAccount_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMSpatialAnchorsAccountExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.Environment", "Production"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMSpatialAnchorsAccountExists(name string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client 
:= acceptance.AzureProvider.Meta().(*clients.Client).MixedReality.SpatialAnchorsAccountClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[name] - if !ok { - return fmt.Errorf("Not found: %s", name) - } - - id, err := parse.SpatialAnchorsAccountID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.Name) - if err != nil { - return fmt.Errorf("Bad: Get on spatialAnchorsAccountClient: %+v", err) - } - - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Spatial Anchors Account %q (resource group: %q) does not exist", id.Name, id.ResourceGroup) - } - - return nil - } -} - -func testCheckAzureRMSpatialAnchorsAccountDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MixedReality.SpatialAnchorsAccountClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_spatial_anchors_account" { - continue - } - - id, err := parse.SpatialAnchorsAccountID(rs.Primary.ID) - if err != nil { - return err - } - resp, err := client.Get(ctx, id.ResourceGroup, id.Name) - - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Spatial Anchors Account still exists: %q", id.Name) - } - } - - return nil -} - -func testAccAzureRMSpatialAnchorsAccount_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-mr-%d" - location = "%s" -} - -resource "azurerm_spatial_anchors_account" "test" { - name = "accTEst_saa%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} -`, data.RandomInteger, data.Locations.Secondary, data.RandomInteger) -} - -func testAccAzureRMSpatialAnchorsAccount_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-mr-%d" - location = "%s" -} - -resource "azurerm_spatial_anchors_account" "test" { - name = "acCTestdf%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - tags = { - Environment = "Production" - } -} -`, data.RandomInteger, data.Locations.Secondary, data.RandomInteger) -} diff --git a/azurerm/internal/services/mixedreality/validate/spatial_anchors_account_id.go b/azurerm/internal/services/mixedreality/validate/spatial_anchors_account_id.go new file mode 100644 index 000000000000..018f9398a29a --- /dev/null +++ b/azurerm/internal/services/mixedreality/validate/spatial_anchors_account_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mixedreality/parse" +) + +func SpatialAnchorsAccountID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.SpatialAnchorsAccountID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/mixedreality/validate/spatial_anchors_account_id_test.go 
b/azurerm/internal/services/mixedreality/validate/spatial_anchors_account_id_test.go new file mode 100644 index 000000000000..bb73b27e919a --- /dev/null +++ b/azurerm/internal/services/mixedreality/validate/spatial_anchors_account_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestSpatialAnchorsAccountID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.MixedReality/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.MixedReality/spatialAnchorsAccounts/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.MixedReality/spatialAnchorsAccounts/Account1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.MIXEDREALITY/SPATIALANCHORSACCOUNTS/ACCOUNT1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := SpatialAnchorsAccountID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/monitor/action_rule.go b/azurerm/internal/services/monitor/action_rule.go index 3879bfef7fd2..775341236f84 100644 --- a/azurerm/internal/services/monitor/action_rule.go +++ b/azurerm/internal/services/monitor/action_rule.go @@ -3,7 +3,7 @@ package monitor import ( "fmt" - "github.com/Azure/azure-sdk-for-go/services/preview/alertsmanagement/mgmt/2019-05-05/alertsmanagement" + "github.com/Azure/azure-sdk-for-go/services/preview/alertsmanagement/mgmt/2019-06-01-preview/alertsmanagement" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" @@ -171,7 +171,7 @@ func schemaActionRuleCondition(operatorValidateItems, valuesValidateItems []stri } } -func expandArmActionRuleCondition(input []interface{}) *alertsmanagement.Condition { +func expandActionRuleCondition(input []interface{}) *alertsmanagement.Condition { if len(input) == 0 { return nil } @@ -183,7 +183,7 @@ func expandArmActionRuleCondition(input []interface{}) *alertsmanagement.Conditi } } -func expandArmActionRuleScope(input []interface{}) *alertsmanagement.Scope { +func expandActionRuleScope(input []interface{}) *alertsmanagement.Scope { if len(input) == 0 { return nil } @@ -195,24 +195,24 @@ func expandArmActionRuleScope(input []interface{}) *alertsmanagement.Scope { } } -func expandArmActionRuleConditions(input []interface{}) *alertsmanagement.Conditions { +func expandActionRuleConditions(input 
[]interface{}) *alertsmanagement.Conditions { if len(input) == 0 { return nil } v := input[0].(map[string]interface{}) return &alertsmanagement.Conditions{ - AlertContext: expandArmActionRuleCondition(v["alert_context"].([]interface{})), - AlertRuleID: expandArmActionRuleCondition(v["alert_rule_id"].([]interface{})), - Description: expandArmActionRuleCondition(v["description"].([]interface{})), - MonitorCondition: expandArmActionRuleCondition(v["monitor"].([]interface{})), - MonitorService: expandArmActionRuleCondition(v["monitor_service"].([]interface{})), - Severity: expandArmActionRuleCondition(v["severity"].([]interface{})), - TargetResourceType: expandArmActionRuleCondition(v["target_resource_type"].([]interface{})), + AlertContext: expandActionRuleCondition(v["alert_context"].([]interface{})), + AlertRuleID: expandActionRuleCondition(v["alert_rule_id"].([]interface{})), + Description: expandActionRuleCondition(v["description"].([]interface{})), + MonitorCondition: expandActionRuleCondition(v["monitor"].([]interface{})), + MonitorService: expandActionRuleCondition(v["monitor_service"].([]interface{})), + Severity: expandActionRuleCondition(v["severity"].([]interface{})), + TargetResourceType: expandActionRuleCondition(v["target_resource_type"].([]interface{})), } } -func flattenArmActionRuleCondition(input *alertsmanagement.Condition) []interface{} { +func flattenActionRuleCondition(input *alertsmanagement.Condition) []interface{} { if input == nil { return make([]interface{}, 0) } @@ -229,7 +229,7 @@ func flattenArmActionRuleCondition(input *alertsmanagement.Condition) []interfac } } -func flattenArmActionRuleScope(input *alertsmanagement.Scope) []interface{} { +func flattenActionRuleScope(input *alertsmanagement.Scope) []interface{} { if input == nil { return make([]interface{}, 0) } @@ -246,19 +246,19 @@ func flattenArmActionRuleScope(input *alertsmanagement.Scope) []interface{} { } } -func flattenArmActionRuleConditions(input *alertsmanagement.Conditions) []interface{} { +func flattenActionRuleConditions(input *alertsmanagement.Conditions) []interface{} { if input == nil { return make([]interface{}, 0) } return []interface{}{ map[string]interface{}{ - "alert_context": flattenArmActionRuleCondition(input.AlertContext), - "alert_rule_id": flattenArmActionRuleCondition(input.AlertRuleID), - "description": flattenArmActionRuleCondition(input.Description), - "monitor": flattenArmActionRuleCondition(input.MonitorCondition), - "monitor_service": flattenArmActionRuleCondition(input.MonitorService), - "severity": flattenArmActionRuleCondition(input.Severity), - "target_resource_type": flattenArmActionRuleCondition(input.TargetResourceType), + "alert_context": flattenActionRuleCondition(input.AlertContext), + "alert_rule_id": flattenActionRuleCondition(input.AlertRuleID), + "description": flattenActionRuleCondition(input.Description), + "monitor": flattenActionRuleCondition(input.MonitorCondition), + "monitor_service": flattenActionRuleCondition(input.MonitorService), + "severity": flattenActionRuleCondition(input.Severity), + "target_resource_type": flattenActionRuleCondition(input.TargetResourceType), }, } } diff --git a/azurerm/internal/services/monitor/client/client.go b/azurerm/internal/services/monitor/client/client.go index 6903f232bd96..e61a76e74a46 100644 --- a/azurerm/internal/services/monitor/client/client.go +++ b/azurerm/internal/services/monitor/client/client.go @@ -1,7 +1,7 @@ package client import ( - 
"github.com/Azure/azure-sdk-for-go/services/preview/alertsmanagement/mgmt/2019-05-05/alertsmanagement" + "github.com/Azure/azure-sdk-for-go/services/preview/alertsmanagement/mgmt/2019-06-01-preview/alertsmanagement" "github.com/Azure/azure-sdk-for-go/services/preview/monitor/mgmt/2019-06-01/insights" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/common" ) @@ -11,7 +11,8 @@ type Client struct { AutoscaleSettingsClient *insights.AutoscaleSettingsClient // alerts management - ActionRulesClient *alertsmanagement.ActionRulesClient + ActionRulesClient *alertsmanagement.ActionRulesClient + SmartDetectorAlertRulesClient *alertsmanagement.SmartDetectorAlertRulesClient // Monitor ActionGroupsClient *insights.ActionGroupsClient @@ -31,6 +32,9 @@ func NewClient(o *common.ClientOptions) *Client { ActionRulesClient := alertsmanagement.NewActionRulesClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) o.ConfigureClient(&ActionRulesClient.Client, o.ResourceManagerAuthorizer) + SmartDetectorAlertRulesClient := alertsmanagement.NewSmartDetectorAlertRulesClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + o.ConfigureClient(&SmartDetectorAlertRulesClient.Client, o.ResourceManagerAuthorizer) + ActionGroupsClient := insights.NewActionGroupsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) o.ConfigureClient(&ActionGroupsClient.Client, o.ResourceManagerAuthorizer) @@ -58,6 +62,7 @@ func NewClient(o *common.ClientOptions) *Client { return &Client{ AutoscaleSettingsClient: &AutoscaleSettingsClient, ActionRulesClient: &ActionRulesClient, + SmartDetectorAlertRulesClient: &SmartDetectorAlertRulesClient, ActionGroupsClient: &ActionGroupsClient, ActivityLogAlertsClient: &ActivityLogAlertsClient, AlertRulesClient: &AlertRulesClient, diff --git a/azurerm/internal/services/monitor/monitor_action_group_data_source.go b/azurerm/internal/services/monitor/monitor_action_group_data_source.go index 4f154511855b..a1ef74a48ea1 100644 --- a/azurerm/internal/services/monitor/monitor_action_group_data_source.go +++ b/azurerm/internal/services/monitor/monitor_action_group_data_source.go @@ -12,9 +12,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmMonitorActionGroup() *schema.Resource { +func dataSourceMonitorActionGroup() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmMonitorActionGroupRead, + Read: dataSourceMonitorActionGroupRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -284,7 +284,7 @@ func dataSourceArmMonitorActionGroup() *schema.Resource { } } -func dataSourceArmMonitorActionGroupRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceMonitorActionGroupRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Monitor.ActionGroupsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -340,7 +340,7 @@ func dataSourceArmMonitorActionGroupRead(d *schema.ResourceData, meta interface{ if err = d.Set("azure_function_receiver", flattenMonitorActionGroupAzureFunctionReceiver(group.AzureFunctionReceivers)); err != nil { return fmt.Errorf("Error setting `azure_function_receiver`: %+v", err) } - if err = d.Set("arm_role_receiver", flattenMonitorActionGroupArmRoleReceiver(group.ArmRoleReceivers)); err != nil { + if err = d.Set("arm_role_receiver", flattenMonitorActionGroupRoleReceiver(group.ArmRoleReceivers)); err != nil { return fmt.Errorf("Error setting 
`arm_role_receiver`: %+v", err) } } diff --git a/azurerm/internal/services/monitor/monitor_action_group_data_source_test.go b/azurerm/internal/services/monitor/monitor_action_group_data_source_test.go new file mode 100644 index 000000000000..f68d7462155e --- /dev/null +++ b/azurerm/internal/services/monitor/monitor_action_group_data_source_test.go @@ -0,0 +1,367 @@ +package monitor_test + +import ( + "fmt" + "os" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type MonitorActionGroupDataSource struct { +} + +func TestAccDataSourceMonitorActionGroup_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_monitor_action_group", "test") + r := MonitorActionGroupDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("id").Exists(), + check.That(data.ResourceName).Key("enabled").HasValue("true"), + check.That(data.ResourceName).Key("short_name").HasValue("acctestag"), + check.That(data.ResourceName).Key("email_receiver.#").HasValue("0"), + check.That(data.ResourceName).Key("itsm_receiver.#").HasValue("0"), + check.That(data.ResourceName).Key("azure_app_push_receiver.#").HasValue("0"), + check.That(data.ResourceName).Key("sms_receiver.#").HasValue("0"), + check.That(data.ResourceName).Key("webhook_receiver.#").HasValue("0"), + check.That(data.ResourceName).Key("automation_runbook_receiver.#").HasValue("0"), + check.That(data.ResourceName).Key("voice_receiver.#").HasValue("0"), + check.That(data.ResourceName).Key("logic_app_receiver.#").HasValue("0"), + check.That(data.ResourceName).Key("azure_function_receiver.#").HasValue("0"), + check.That(data.ResourceName).Key("arm_role_receiver.#").HasValue("0"), + ), + }, + }) +} + +func TestAccDataSourceMonitorActionGroup_disabledBasic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_monitor_action_group", "test") + r := MonitorActionGroupDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.disabledBasic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("id").Exists(), + check.That(data.ResourceName).Key("enabled").HasValue("false"), + check.That(data.ResourceName).Key("short_name").HasValue("acctestag"), + check.That(data.ResourceName).Key("email_receiver.#").HasValue("0"), + check.That(data.ResourceName).Key("itsm_receiver.#").HasValue("0"), + check.That(data.ResourceName).Key("azure_app_push_receiver.#").HasValue("0"), + check.That(data.ResourceName).Key("sms_receiver.#").HasValue("0"), + check.That(data.ResourceName).Key("webhook_receiver.#").HasValue("0"), + check.That(data.ResourceName).Key("automation_runbook_receiver.#").HasValue("0"), + check.That(data.ResourceName).Key("voice_receiver.#").HasValue("0"), + check.That(data.ResourceName).Key("logic_app_receiver.#").HasValue("0"), + check.That(data.ResourceName).Key("azure_function_receiver.#").HasValue("0"), + check.That(data.ResourceName).Key("arm_role_receiver.#").HasValue("0"), + ), + }, + }) +} + +func TestAccDataSourceMonitorActionGroup_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_monitor_action_group", "test") + r := MonitorActionGroupDataSource{} + + aaName := fmt.Sprintf("acctestAA-%d", data.RandomInteger) + faName := 
fmt.Sprintf("acctestFA-%d", data.RandomInteger) + laName := fmt.Sprintf("acctestLA-%d", data.RandomInteger) + webhookName := "webhook_alert" + resGroup := fmt.Sprintf("acctestRG-%d", data.RandomInteger) + aaResourceID := fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Automation/AutomationAccounts/%s", os.Getenv("ARM_SUBSCRIPTION_ID"), resGroup, aaName) + aaWebhookResourceID := fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Automation/AutomationAccounts/%s/webhooks/%s", os.Getenv("ARM_SUBSCRIPTION_ID"), resGroup, aaName, webhookName) + faResourceID := fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Web/sites/%s", os.Getenv("ARM_SUBSCRIPTION_ID"), resGroup, faName) + laResourceID := fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Logic/workflows/%s", os.Getenv("ARM_SUBSCRIPTION_ID"), resGroup, laName) + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("id").Exists(), + check.That(data.ResourceName).Key("enabled").HasValue("true"), + check.That(data.ResourceName).Key("email_receiver.#").HasValue("2"), + check.That(data.ResourceName).Key("email_receiver.0.email_address").HasValue("admin@contoso.com"), + check.That(data.ResourceName).Key("email_receiver.1.email_address").HasValue("devops@contoso.com"), + check.That(data.ResourceName).Key("email_receiver.1.use_common_alert_schema").HasValue("false"), + check.That(data.ResourceName).Key("itsm_receiver.#").HasValue("1"), + check.That(data.ResourceName).Key("itsm_receiver.0.workspace_id").HasValue("6eee3a18-aac3-40e4-b98e-1f309f329816"), + check.That(data.ResourceName).Key("itsm_receiver.0.connection_id").HasValue("53de6956-42b4-41ba-be3c-b154cdf17b13"), + check.That(data.ResourceName).Key("itsm_receiver.0.ticket_configuration").HasValue("{}"), + check.That(data.ResourceName).Key("itsm_receiver.0.region").HasValue("southcentralus"), + check.That(data.ResourceName).Key("azure_app_push_receiver.#").HasValue("1"), + check.That(data.ResourceName).Key("azure_app_push_receiver.0.email_address").HasValue("admin@contoso.com"), + check.That(data.ResourceName).Key("sms_receiver.#").HasValue("2"), + check.That(data.ResourceName).Key("sms_receiver.0.country_code").HasValue("1"), + check.That(data.ResourceName).Key("sms_receiver.0.phone_number").HasValue("1231231234"), + check.That(data.ResourceName).Key("sms_receiver.1.country_code").HasValue("86"), + check.That(data.ResourceName).Key("sms_receiver.1.phone_number").HasValue("13888888888"), + check.That(data.ResourceName).Key("webhook_receiver.#").HasValue("2"), + check.That(data.ResourceName).Key("webhook_receiver.0.service_uri").HasValue("http://example.com/alert"), + check.That(data.ResourceName).Key("webhook_receiver.1.service_uri").HasValue("https://backup.example.com/warning"), + check.That(data.ResourceName).Key("webhook_receiver.1.use_common_alert_schema").HasValue("false"), + check.That(data.ResourceName).Key("automation_runbook_receiver.#").HasValue("1"), + check.That(data.ResourceName).Key("automation_runbook_receiver.0.automation_account_id").HasValue(aaResourceID), + check.That(data.ResourceName).Key("automation_runbook_receiver.0.runbook_name").HasValue(webhookName), + check.That(data.ResourceName).Key("automation_runbook_receiver.0.webhook_resource_id").HasValue(aaWebhookResourceID), + 
check.That(data.ResourceName).Key("automation_runbook_receiver.0.service_uri").HasValue("https://s13events.azure-automation.net/webhooks?token=randomtoken"), + check.That(data.ResourceName).Key("automation_runbook_receiver.0.use_common_alert_schema").HasValue("false"), + check.That(data.ResourceName).Key("voice_receiver.#").HasValue("1"), + check.That(data.ResourceName).Key("voice_receiver.0.country_code").HasValue("1"), + check.That(data.ResourceName).Key("voice_receiver.0.phone_number").HasValue("1231231234"), + check.That(data.ResourceName).Key("logic_app_receiver.#").HasValue("1"), + check.That(data.ResourceName).Key("logic_app_receiver.0.resource_id").HasValue(laResourceID), + check.That(data.ResourceName).Key("logic_app_receiver.0.callback_url").HasValue("http://test-host:100/workflows/fb9c8d79b15f41ce9b12861862f43546/versions/08587100027316071865/triggers/manualTrigger/paths/invoke?api-version=2015-08-01-preview&sp=%2Fversions%2F08587100027316071865%2Ftriggers%2FmanualTrigger%2Frun&sv=1.0&sig=IxEQ_ygZf6WNEQCbjV0Vs6p6Y4DyNEJVAa86U5B4xhk"), + check.That(data.ResourceName).Key("logic_app_receiver.0.use_common_alert_schema").HasValue("false"), + check.That(data.ResourceName).Key("azure_function_receiver.#").HasValue("1"), + check.That(data.ResourceName).Key("azure_function_receiver.0.function_app_resource_id").HasValue(faResourceID), + check.That(data.ResourceName).Key("azure_function_receiver.0.function_name").HasValue("myfunc"), + check.That(data.ResourceName).Key("azure_function_receiver.0.http_trigger_url").HasValue("https://example.com/trigger"), + check.That(data.ResourceName).Key("azure_function_receiver.0.use_common_alert_schema").HasValue("false"), + check.That(data.ResourceName).Key("arm_role_receiver.#").HasValue("1"), + check.That(data.ResourceName).Key("arm_role_receiver.0.role_id").HasValue("43d0d8ad-25c7-4714-9337-8ba259a9fe05"), + check.That(data.ResourceName).Key("arm_role_receiver.0.use_common_alert_schema").HasValue("false"), + ), + }, + }) +} + +func (MonitorActionGroupDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_monitor_action_group" "test" { + name = "acctestActionGroup-%d" + resource_group_name = azurerm_resource_group.test.name + short_name = "acctestag" +} + +data "azurerm_monitor_action_group" "test" { + resource_group_name = azurerm_resource_group.test.name + name = azurerm_monitor_action_group.test.name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (MonitorActionGroupDataSource) disabledBasic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_monitor_action_group" "test" { + name = "acctestActionGroup-%d" + resource_group_name = azurerm_resource_group.test.name + short_name = "acctestag" + enabled = false +} + +data "azurerm_monitor_action_group" "test" { + resource_group_name = azurerm_resource_group.test.name + name = azurerm_monitor_action_group.test.name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (MonitorActionGroupDataSource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource 
"azurerm_monitor_action_group" "test" { + name = "acctestActionGroup-%d" + resource_group_name = "${azurerm_resource_group.test.name}" + short_name = "acctestag" + + email_receiver { + name = "sendtoadmin" + email_address = "admin@contoso.com" + use_common_alert_schema = false + } + + email_receiver { + name = "sendtodevops" + email_address = "devops@contoso.com" + } + + itsm_receiver { + name = "createorupdateticket" + workspace_id = "6eee3a18-aac3-40e4-b98e-1f309f329816" + connection_id = "53de6956-42b4-41ba-be3c-b154cdf17b13" + ticket_configuration = "{}" + region = "southcentralus" + } + + azure_app_push_receiver { + name = "pushtoadmin" + email_address = "admin@contoso.com" + } + + sms_receiver { + name = "oncallmsg" + country_code = "1" + phone_number = "1231231234" + } + + sms_receiver { + name = "remotesupport" + country_code = "86" + phone_number = "13888888888" + } + + webhook_receiver { + name = "callmyapiaswell" + service_uri = "http://example.com/alert" + } + + webhook_receiver { + name = "callmybackupapi" + service_uri = "https://backup.example.com/warning" + } + + automation_runbook_receiver { + name = "action_name_1" + automation_account_id = "${azurerm_automation_account.test.id}" + runbook_name = "my runbook" + webhook_resource_id = "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/rg-runbooks/providers/microsoft.automation/automationaccounts/aaa001/webhooks/webhook_alert" + is_global_runbook = true + service_uri = "https://s13events.azure-automation.net/webhooks?token=randomtoken" + use_common_alert_schema = false + } + + voice_receiver { + name = "oncallmsg" + country_code = "1" + phone_number = "1231231234" + } + + voice_receiver { + name = "remotesupport" + country_code = "86" + phone_number = "13888888888" + } + + logic_app_receiver { + name = "logicappaction" + resource_id = "${azurerm_logic_app_workflow.test.id}" + callback_url = "http://test-host:100/workflows/fb9c8d79b15f41ce9b12861862f43546/versions/08587100027316071865/triggers/manualTrigger/paths/invoke?api-version=2015-08-01-preview&sp=%%2Fversions%%2F08587100027316071865%%2Ftriggers%%2FmanualTrigger%%2Frun&sv=1.0&sig=IxEQ_ygZf6WNEQCbjV0Vs6p6Y4DyNEJVAa86U5B4xhk" + use_common_alert_schema = false + } + + azure_function_receiver { + name = "funcaction" + function_app_resource_id = "${azurerm_function_app.test.id}" + function_name = "myfunc" + http_trigger_url = "https://example.com/trigger" + use_common_alert_schema = false + } + + arm_role_receiver { + name = "Monitoring Reader" + role_id = "43d0d8ad-25c7-4714-9337-8ba259a9fe05" + use_common_alert_schema = false + } +} + +resource "azurerm_automation_account" "test" { + name = "acctestAA-%d" + location = "${azurerm_resource_group.test.location}" + resource_group_name = "${azurerm_resource_group.test.name}" + + sku_name = "Basic" +} + +resource "azurerm_automation_runbook" "test" { + name = "Get-AzureVMTutorial" + location = "${azurerm_resource_group.test.location}" + resource_group_name = "${azurerm_resource_group.test.name}" + automation_account_name = "${azurerm_automation_account.test.name}" + log_verbose = "true" + log_progress = "true" + description = "This is an test runbook" + runbook_type = "PowerShellWorkflow" + + publish_content_link { + uri = "https://raw.githubusercontent.com/Azure/azure-quickstart-templates/master/101-automation-runbook-getvms/Runbooks/Get-AzureVMTutorial.ps1" + } +} + +resource "azurerm_logic_app_workflow" "test" { + name = "acctestLA-%d" + location = "${azurerm_resource_group.test.location}" + 
resource_group_name = "${azurerm_resource_group.test.name}" +} + +resource "azurerm_logic_app_trigger_http_request" "test" { + name = "some-http-trigger" + logic_app_id = "${azurerm_logic_app_workflow.test.id}" + + schema = < + + + + +XML + lifecycle { + ignore_changes = [tags] + } +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (r MonitorMetricAlertResource) applicationInsightsWebTest(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_monitor_metric_alert" "test" { + name = "acctestMetricAlert-%d" + resource_group_name = azurerm_resource_group.test.name + scopes = [ + azurerm_application_insights.test.id, + azurerm_application_insights_web_test.test.id, + ] + application_insights_web_test_location_availability_criteria { + web_test_id = azurerm_application_insights_web_test.test.id + component_id = azurerm_application_insights.test.id + failed_location_count = 2 + } + window_size = "PT15M" + frequency = "PT1M" +} +`, r.applicationInsightsWebTestTemplate(data), data.RandomInteger) +} diff --git a/azurerm/internal/services/monitor/monitor_scheduled_query_rules_alert_data_source.go b/azurerm/internal/services/monitor/monitor_scheduled_query_rules_alert_data_source.go index 44e6634a01d9..2dc7601e226c 100644 --- a/azurerm/internal/services/monitor/monitor_scheduled_query_rules_alert_data_source.go +++ b/azurerm/internal/services/monitor/monitor_scheduled_query_rules_alert_data_source.go @@ -14,9 +14,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmMonitorScheduledQueryRulesAlert() *schema.Resource { +func dataSourceMonitorScheduledQueryRulesAlert() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmMonitorScheduledQueryRulesAlertRead, + Read: dataSourceMonitorScheduledQueryRulesAlertRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -144,7 +144,7 @@ func dataSourceArmMonitorScheduledQueryRulesAlert() *schema.Resource { } } -func dataSourceArmMonitorScheduledQueryRulesAlertRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceMonitorScheduledQueryRulesAlertRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Monitor.ScheduledQueryRulesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/monitor/monitor_scheduled_query_rules_alert_data_source_test.go b/azurerm/internal/services/monitor/monitor_scheduled_query_rules_alert_data_source_test.go new file mode 100644 index 000000000000..556282eec53a --- /dev/null +++ b/azurerm/internal/services/monitor/monitor_scheduled_query_rules_alert_data_source_test.go @@ -0,0 +1,66 @@ +package monitor_test + +import ( + "fmt" + "testing" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type MonitorScheduledQueryRulesDataSource struct { +} + +func TestAccDataSourceMonitorScheduledQueryRules_AlertingAction(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_monitor_scheduled_query_rules_alert", "test") + r := MonitorScheduledQueryRulesDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.AlertingActionConfig(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("id").Exists(), + ), + }, + }) +} + 
+func TestAccDataSourceMonitorScheduledQueryRules_AlertingActionCrossResource(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_monitor_scheduled_query_rules_alert", "test") + r := MonitorScheduledQueryRulesDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.AlertingActionCrossResourceConfig(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("id").Exists(), + ), + }, + }) +} + +func (MonitorScheduledQueryRulesDataSource) AlertingActionConfig(data acceptance.TestData) string { + ts := time.Now().Format(time.RFC3339) + + return fmt.Sprintf(` +%s + +data "azurerm_monitor_scheduled_query_rules_alert" "test" { + name = basename(azurerm_monitor_scheduled_query_rules_alert.test.id) + resource_group_name = "${azurerm_resource_group.test.name}" +} +`, MonitorScheduledQueryRulesResource{}.AlertingActionConfigBasic(data, ts)) +} + +func (MonitorScheduledQueryRulesDataSource) AlertingActionCrossResourceConfig(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_monitor_scheduled_query_rules_alert" "test" { + name = basename(azurerm_monitor_scheduled_query_rules_alert.test.id) + resource_group_name = "${azurerm_resource_group.test.name}" +} +`, MonitorScheduledQueryRulesResource{}.AlertingActionCrossResourceConfig(data)) +} diff --git a/azurerm/internal/services/monitor/monitor_scheduled_query_rules_alert_resource.go b/azurerm/internal/services/monitor/monitor_scheduled_query_rules_alert_resource.go index 6e36e02421c3..a25dada671fc 100644 --- a/azurerm/internal/services/monitor/monitor_scheduled_query_rules_alert_resource.go +++ b/azurerm/internal/services/monitor/monitor_scheduled_query_rules_alert_resource.go @@ -19,12 +19,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmMonitorScheduledQueryRulesAlert() *schema.Resource { +func resourceMonitorScheduledQueryRulesAlert() *schema.Resource { return &schema.Resource{ - Create: resourceArmMonitorScheduledQueryRulesAlertCreateUpdate, - Read: resourceArmMonitorScheduledQueryRulesAlertRead, - Update: resourceArmMonitorScheduledQueryRulesAlertCreateUpdate, - Delete: resourceArmMonitorScheduledQueryRulesAlertDelete, + Create: resourceMonitorScheduledQueryRulesAlertCreateUpdate, + Read: resourceMonitorScheduledQueryRulesAlertRead, + Update: resourceMonitorScheduledQueryRulesAlertCreateUpdate, + Delete: resourceMonitorScheduledQueryRulesAlertDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -200,7 +200,7 @@ func resourceArmMonitorScheduledQueryRulesAlert() *schema.Resource { } } -func resourceArmMonitorScheduledQueryRulesAlertCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceMonitorScheduledQueryRulesAlertCreateUpdate(d *schema.ResourceData, meta interface{}) error { action := expandMonitorScheduledQueryRulesAlertingAction(d) schedule := expandMonitorScheduledQueryRulesAlertSchedule(d) client := meta.(*clients.Client).Monitor.ScheduledQueryRulesClient @@ -279,10 +279,10 @@ func resourceArmMonitorScheduledQueryRulesAlertCreateUpdate(d *schema.ResourceDa } d.SetId(*read.ID) - return resourceArmMonitorScheduledQueryRulesAlertRead(d, meta) + return resourceMonitorScheduledQueryRulesAlertRead(d, meta) } -func resourceArmMonitorScheduledQueryRulesAlertRead(d *schema.ResourceData, meta interface{}) error { +func resourceMonitorScheduledQueryRulesAlertRead(d *schema.ResourceData, meta interface{}) error { client := 
meta.(*clients.Client).Monitor.ScheduledQueryRulesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -359,7 +359,7 @@ func resourceArmMonitorScheduledQueryRulesAlertRead(d *schema.ResourceData, meta return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmMonitorScheduledQueryRulesAlertDelete(d *schema.ResourceData, meta interface{}) error { +func resourceMonitorScheduledQueryRulesAlertDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Monitor.ScheduledQueryRulesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/monitor/monitor_scheduled_query_rules_alert_resource_test.go b/azurerm/internal/services/monitor/monitor_scheduled_query_rules_alert_resource_test.go new file mode 100644 index 000000000000..5a0b243cd496 --- /dev/null +++ b/azurerm/internal/services/monitor/monitor_scheduled_query_rules_alert_resource_test.go @@ -0,0 +1,326 @@ +package monitor_test + +import ( + "context" + "fmt" + "testing" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MonitorScheduledQueryRulesResource struct { +} + +func TestAccMonitorScheduledQueryRules_AlertingActionBasic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_monitor_scheduled_query_rules_alert", "test") + r := MonitorScheduledQueryRulesResource{} + ts := time.Now().Format(time.RFC3339) + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.AlertingActionConfigBasic(data, ts), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMonitorScheduledQueryRules_AlertingActionUpdate(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_monitor_scheduled_query_rules_alert", "test") + r := MonitorScheduledQueryRulesResource{} + ts := time.Now().Format(time.RFC3339) + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.AlertingActionConfigBasic(data, ts), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.AlertingActionConfigUpdate(data, ts), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMonitorScheduledQueryRules_AlertingActionComplete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_monitor_scheduled_query_rules_alert", "test") + r := MonitorScheduledQueryRulesResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.AlertingActionConfigComplete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMonitorScheduledQueryRules_AlertingActionCrossResource(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_monitor_scheduled_query_rules_alert", "test") + r := MonitorScheduledQueryRulesResource{} + + data.ResourceTest(t, r, 
[]resource.TestStep{ + { + Config: r.AlertingActionCrossResourceConfig(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (MonitorScheduledQueryRulesResource) AlertingActionConfigBasic(data acceptance.TestData, ts string) string { + return fmt.Sprintf(` +resource "azurerm_resource_group" "test" { + name = "acctestRG-monitor-%d" + location = "%s" +} + +resource "azurerm_application_insights" "test" { + name = "acctestAppInsights-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + application_type = "web" +} + +resource "azurerm_monitor_action_group" "test" { + name = "acctestActionGroup-%d" + resource_group_name = azurerm_resource_group.test.name + short_name = "acctestag" +} + +resource "azurerm_monitor_scheduled_query_rules_alert" "test" { + name = "acctestsqr-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + data_source_id = azurerm_application_insights.test.id + query = <<-QUERY + let d=datatable(TimeGenerated: datetime, usage_percent: double) [ '%s', 25.4, '%s', 75.4 ]; + d | summarize AggregatedValue=avg(usage_percent) by bin(TimeGenerated, 1h) +QUERY + + + frequency = 60 + time_window = 60 + + action { + action_group = [azurerm_monitor_action_group.test.id] + } + + trigger { + operator = "GreaterThan" + threshold = 5000 + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, ts, ts) +} + +func (MonitorScheduledQueryRulesResource) AlertingActionConfigUpdate(data acceptance.TestData, ts string) string { + return fmt.Sprintf(` +resource "azurerm_resource_group" "test" { + name = "acctestRG-monitor-%d" + location = "%s" +} + +resource "azurerm_application_insights" "test" { + name = "acctestAppInsights-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + application_type = "web" +} + +resource "azurerm_monitor_action_group" "test" { + name = "acctestActionGroup-%d" + resource_group_name = azurerm_resource_group.test.name + short_name = "acctestag" +} + +resource "azurerm_monitor_scheduled_query_rules_alert" "test" { + name = "acctestsqr-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + data_source_id = azurerm_application_insights.test.id + query = <<-QUERY + let d=datatable(TimeGenerated: datetime, usage_percent: double) [ '%s', 25.4, '%s', 75.4 ]; + d | summarize AggregatedValue=avg(usage_percent) by bin(TimeGenerated, 1h) +QUERY + + + enabled = false + description = "test description" + + frequency = 30 + time_window = 30 + + action { + action_group = [azurerm_monitor_action_group.test.id] + } + + trigger { + operator = "GreaterThan" + threshold = 1000 + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, ts, ts) +} + +func (MonitorScheduledQueryRulesResource) AlertingActionConfigComplete(data acceptance.TestData) string { + ts := time.Now().Format(time.RFC3339) + + return fmt.Sprintf(` +resource "azurerm_resource_group" "test" { + name = "acctestRG-monitor-%d" + location = "%s" +} + +resource "azurerm_application_insights" "test" { + name = "acctestAppInsights-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + application_type = "web" +} + +resource 
"azurerm_monitor_action_group" "test" { + name = "acctestActionGroup-%d" + resource_group_name = azurerm_resource_group.test.name + short_name = "acctestag" +} + +resource "azurerm_monitor_scheduled_query_rules_alert" "test" { + name = "acctestsqr-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + description = "test alerting action" + enabled = true + + data_source_id = azurerm_application_insights.test.id + query = "let d=datatable(TimeGenerated: datetime, usage_percent: double) [ '%s', 25.4, '%s', 75.4 ]; d | summarize AggregatedValue=avg(usage_percent) by bin(TimeGenerated, 1h)" + + frequency = 60 + time_window = 60 + + severity = 3 + throttling = 5 + action { + action_group = [azurerm_monitor_action_group.test.id] + email_subject = "Custom alert email subject" + custom_webhook_payload = "{}" + } + + trigger { + operator = "GreaterThan" + threshold = 5000 + metric_trigger { + operator = "GreaterThan" + threshold = 1 + metric_trigger_type = "Total" + metric_column = "TimeGenerated" + } + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, ts, ts) +} + +func (MonitorScheduledQueryRulesResource) AlertingActionCrossResourceConfig(data acceptance.TestData) string { + return fmt.Sprintf(` +resource "azurerm_resource_group" "test" { + name = "acctestRG-monitor-%d" + location = "%s" +} + +resource "azurerm_application_insights" "test" { + name = "acctestAppInsights-%d" + location = "${azurerm_resource_group.test.location}" + resource_group_name = "${azurerm_resource_group.test.name}" + application_type = "web" +} + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestWorkspace-%d" + location = "${azurerm_resource_group.test.location}" + resource_group_name = "${azurerm_resource_group.test.name}" + sku = "PerGB2018" + retention_in_days = 30 +} + +resource "azurerm_monitor_action_group" "test" { + name = "acctestActionGroup-%d" + resource_group_name = "${azurerm_resource_group.test.name}" + short_name = "acctestag" +} + +resource "azurerm_monitor_scheduled_query_rules_alert" "test" { + name = "acctestsqr-%d" + resource_group_name = "${azurerm_resource_group.test.name}" + location = "${azurerm_resource_group.test.location}" + description = "test alerting action cross-resource" + enabled = true + + authorized_resource_ids = ["${azurerm_application_insights.test.id}", "${azurerm_log_analytics_workspace.test.id}"] + data_source_id = "${azurerm_application_insights.test.id}" + query = format(<<-QUERY + let a=workspace('%%s').Perf + | where Computer='dependency' and TimeGenerated > ago(1h) + | where ObjectName == 'Processor' and CounterName == '%%%% Processor Time' + | summarize cpu=avg(CounterValue) by bin(TimeGenerated, 1m) + | extend ts=tostring(TimeGenerated); let b=requests + | where resultCode == '200' and timestamp > ago(1h) + | summarize reqs=count() by bin(timestamp, 1m) + | extend ts = tostring(timestamp); a + | join b on $left.ts == $right.ts + | where cpu > 50 and reqs > 5 +QUERY + , azurerm_log_analytics_workspace.test.id) + + frequency = 60 + time_window = 60 + + severity = 3 + action { + action_group = ["${azurerm_monitor_action_group.test.id}"] + email_subject = "Custom alert email subject" + } + + trigger { + operator = "GreaterThan" + threshold = 5000 + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (t MonitorScheduledQueryRulesResource) Exists(ctx 
context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + name := id.Path["scheduledqueryrules"] + + resp, err := clients.Monitor.ScheduledQueryRulesClient.Get(ctx, resourceGroup, name) + if err != nil { + return nil, fmt.Errorf("reading Scheduled Query Rules (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} diff --git a/azurerm/internal/services/monitor/monitor_scheduled_query_rules_log_data_source.go b/azurerm/internal/services/monitor/monitor_scheduled_query_rules_log_data_source.go index 3710667bcf8d..80db4c5ceeaf 100644 --- a/azurerm/internal/services/monitor/monitor_scheduled_query_rules_log_data_source.go +++ b/azurerm/internal/services/monitor/monitor_scheduled_query_rules_log_data_source.go @@ -13,9 +13,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmMonitorScheduledQueryRulesLog() *schema.Resource { +func dataSourceMonitorScheduledQueryRulesLog() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmMonitorScheduledQueryRulesLogRead, + Read: dataSourceMonitorScheduledQueryRulesLogRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -91,7 +91,7 @@ func dataSourceArmMonitorScheduledQueryRulesLog() *schema.Resource { } } -func dataSourceArmMonitorScheduledQueryRulesLogRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceMonitorScheduledQueryRulesLogRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Monitor.ScheduledQueryRulesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/monitor/monitor_scheduled_query_rules_log_data_source_test.go b/azurerm/internal/services/monitor/monitor_scheduled_query_rules_log_data_source_test.go new file mode 100644 index 000000000000..c36d575526f6 --- /dev/null +++ b/azurerm/internal/services/monitor/monitor_scheduled_query_rules_log_data_source_test.go @@ -0,0 +1,38 @@ +package monitor_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type MonitorScheduledQueryRulesLogDataSource struct { +} + +func TestAccDataSourceMonitorScheduledQueryRules_LogToMetricAction(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_monitor_scheduled_query_rules_log", "test") + r := MonitorScheduledQueryRulesLogDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.LogToMetricActionConfig(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("id").Exists(), + ), + }, + }) +} + +func (r MonitorScheduledQueryRulesLogDataSource) LogToMetricActionConfig(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_monitor_scheduled_query_rules_log" "test" { + name = basename(azurerm_monitor_scheduled_query_rules_log.test.id) + resource_group_name = "${azurerm_resource_group.test.name}" +} +`, MonitorScheduledQueryRulesLogResource{}.LogToMetricActionConfigBasic(data)) +} diff --git a/azurerm/internal/services/monitor/monitor_scheduled_query_rules_log_resource.go 
b/azurerm/internal/services/monitor/monitor_scheduled_query_rules_log_resource.go index 733233559cf4..eb61066104dc 100644 --- a/azurerm/internal/services/monitor/monitor_scheduled_query_rules_log_resource.go +++ b/azurerm/internal/services/monitor/monitor_scheduled_query_rules_log_resource.go @@ -17,12 +17,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmMonitorScheduledQueryRulesLog() *schema.Resource { +func resourceMonitorScheduledQueryRulesLog() *schema.Resource { return &schema.Resource{ - Create: resourceArmMonitorScheduledQueryRulesLogCreateUpdate, - Read: resourceArmMonitorScheduledQueryRulesLogRead, - Update: resourceArmMonitorScheduledQueryRulesLogCreateUpdate, - Delete: resourceArmMonitorScheduledQueryRulesLogDelete, + Create: resourceMonitorScheduledQueryRulesLogCreateUpdate, + Read: resourceMonitorScheduledQueryRulesLogRead, + Update: resourceMonitorScheduledQueryRulesLogCreateUpdate, + Delete: resourceMonitorScheduledQueryRulesLogDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -119,7 +119,7 @@ func resourceArmMonitorScheduledQueryRulesLog() *schema.Resource { } } -func resourceArmMonitorScheduledQueryRulesLogCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceMonitorScheduledQueryRulesLogCreateUpdate(d *schema.ResourceData, meta interface{}) error { action := expandMonitorScheduledQueryRulesLogToMetricAction(d) client := meta.(*clients.Client).Monitor.ScheduledQueryRulesClient @@ -181,10 +181,10 @@ func resourceArmMonitorScheduledQueryRulesLogCreateUpdate(d *schema.ResourceData } d.SetId(*read.ID) - return resourceArmMonitorScheduledQueryRulesLogRead(d, meta) + return resourceMonitorScheduledQueryRulesLogRead(d, meta) } -func resourceArmMonitorScheduledQueryRulesLogRead(d *schema.ResourceData, meta interface{}) error { +func resourceMonitorScheduledQueryRulesLogRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Monitor.ScheduledQueryRulesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -239,7 +239,7 @@ func resourceArmMonitorScheduledQueryRulesLogRead(d *schema.ResourceData, meta i return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmMonitorScheduledQueryRulesLogDelete(d *schema.ResourceData, meta interface{}) error { +func resourceMonitorScheduledQueryRulesLogDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Monitor.ScheduledQueryRulesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/monitor/monitor_scheduled_query_rules_log_resource_test.go b/azurerm/internal/services/monitor/monitor_scheduled_query_rules_log_resource_test.go new file mode 100644 index 000000000000..b811474a2280 --- /dev/null +++ b/azurerm/internal/services/monitor/monitor_scheduled_query_rules_log_resource_test.go @@ -0,0 +1,217 @@ +package monitor_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MonitorScheduledQueryRulesLogResource struct { +} + +func TestAccMonitorScheduledQueryRules_LogToMetricActionBasic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_monitor_scheduled_query_rules_log", "test") + r := MonitorScheduledQueryRulesLogResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.LogToMetricActionConfigBasic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMonitorScheduledQueryRules_LogToMetricActionUpdate(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_monitor_scheduled_query_rules_log", "test") + r := MonitorScheduledQueryRulesLogResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.LogToMetricActionConfigBasic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.LogToMetricActionConfigUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMonitorScheduledQueryRules_LogToMetricActionComplete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_monitor_scheduled_query_rules_log", "test") + r := MonitorScheduledQueryRulesLogResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.LogToMetricActionConfigComplete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (MonitorScheduledQueryRulesLogResource) LogToMetricActionConfigBasic(data acceptance.TestData) string { + return fmt.Sprintf(` +resource "azurerm_resource_group" "test" { + name = "acctestRG-monitor-%d" + location = "%s" +} + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestWorkspace-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "PerGB2018" + retention_in_days = 30 +} + +resource "azurerm_monitor_scheduled_query_rules_log" "test" { + name = "acctestsqr-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + data_source_id = azurerm_log_analytics_workspace.test.id + + criteria { + metric_name = "Average_%% Idle Time" + dimension { + name = "InstanceName" + operator = "Include" + values = ["1"] + } + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (MonitorScheduledQueryRulesLogResource) LogToMetricActionConfigUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +resource "azurerm_resource_group" "test" { + name = "acctestRG-monitor-%d" + location = "%s" +} + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestWorkspace-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "PerGB2018" + retention_in_days = 30 +} + +resource "azurerm_monitor_scheduled_query_rules_log" "test" { + name = "acctestsqr-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + description = "test log to metric action" + enabled = true + + data_source_id = azurerm_log_analytics_workspace.test.id + + criteria { + metric_name = "Average_%% Idle Time" + dimension { + name = "InstanceName" + operator = "Include" + values = ["2"] + 
} + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (MonitorScheduledQueryRulesLogResource) LogToMetricActionConfigComplete(data acceptance.TestData) string { + return fmt.Sprintf(` +resource "azurerm_resource_group" "test" { + name = "acctestRG-monitor-%d" + location = "%s" +} + +resource "azurerm_log_analytics_workspace" "test" { + name = "acctestWorkspace-%d" + location = "${azurerm_resource_group.test.location}" + resource_group_name = "${azurerm_resource_group.test.name}" + sku = "PerGB2018" + retention_in_days = 30 +} + +resource "azurerm_monitor_action_group" "test" { + name = "acctestActionGroup-%d" + resource_group_name = "${azurerm_resource_group.test.name}" + short_name = "acctestag" +} + +resource "azurerm_monitor_scheduled_query_rules_log" "test" { + name = "acctestsqr-%d" + resource_group_name = "${azurerm_resource_group.test.name}" + location = "${azurerm_resource_group.test.location}" + description = "test log to metric action" + enabled = true + + data_source_id = "${azurerm_log_analytics_workspace.test.id}" + + criteria { + metric_name = "Average_%% Idle Time" + dimension { + name = "Computer" + operator = "Include" + values = ["*"] + } + } +} + +resource "azurerm_monitor_metric_alert" "test" { + name = "acctestmal-%d" + resource_group_name = "${azurerm_resource_group.test.name}" + scopes = ["${azurerm_log_analytics_workspace.test.id}"] + description = "Action will be triggered when Average %% Idle Time is less than 10." + + criteria { + metric_namespace = "Microsoft.OperationalInsights/workspaces" + metric_name = "${azurerm_monitor_scheduled_query_rules_log.test.criteria[0].metric_name}" + aggregation = "Average" + operator = "LessThan" + threshold = 10 + } + + action { + action_group_id = "${azurerm_monitor_action_group.test.id}" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (t MonitorScheduledQueryRulesLogResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + name := id.Path["scheduledqueryrules"] + + resp, err := clients.Monitor.ScheduledQueryRulesClient.Get(ctx, resourceGroup, name) + if err != nil { + return nil, fmt.Errorf("reading scheduled query rules log (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} diff --git a/azurerm/internal/services/monitor/monitor_smart_detector_alert_rule_resource.go b/azurerm/internal/services/monitor/monitor_smart_detector_alert_rule_resource.go new file mode 100644 index 000000000000..d7fa775f81ad --- /dev/null +++ b/azurerm/internal/services/monitor/monitor_smart_detector_alert_rule_resource.go @@ -0,0 +1,303 @@ +package monitor + +import ( + "fmt" + "log" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/structure" + + "github.com/Azure/azure-sdk-for-go/services/preview/alertsmanagement/mgmt/2019-06-01-preview/alertsmanagement" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + commonValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/monitor/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/monitor/validate" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/set" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceMonitorSmartDetectorAlertRule() *schema.Resource { + return &schema.Resource{ + Create: resourceMonitorSmartDetectorAlertRuleCreateUpdate, + Read: resourceMonitorSmartDetectorAlertRuleRead, + Update: resourceMonitorSmartDetectorAlertRuleCreateUpdate, + Delete: resourceMonitorSmartDetectorAlertRuleDelete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.SmartDetectorAlertRuleID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "detector_type": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{ + "FailureAnomaliesDetector", + }, false), + }, + + "scope_resource_ids": { + Type: schema.TypeSet, + Required: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: azure.ValidateResourceID, + }, + Set: set.HashStringIgnoreCase, + }, + + "severity": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice( + []string{ + string(alertsmanagement.Sev0), + string(alertsmanagement.Sev1), + string(alertsmanagement.Sev2), + string(alertsmanagement.Sev3), + string(alertsmanagement.Sev4), + }, false), + }, + + "frequency": { + Type: schema.TypeString, + Required: true, + ValidateFunc: commonValidate.ISO8601Duration, + }, + + "action_group": { + Type: schema.TypeList, + Required: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "ids": { + Type: schema.TypeSet, + Required: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validate.ActionGroupID, + }, + Set: set.HashStringIgnoreCase, + }, + + "email_subject": { + Type: schema.TypeString, + Optional: true, + }, + + "webhook_payload": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringIsJSON, + DiffSuppressFunc: structure.SuppressJsonDiff, + }, + }, + }, + }, + + "description": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "enabled": { + Type: schema.TypeBool, + Optional: true, + Default: true, + }, + + "throttling_duration": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: commonValidate.ISO8601Duration, + }, + }, + } +} + +func resourceMonitorSmartDetectorAlertRuleCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := 
meta.(*clients.Client).Monitor.SmartDetectorAlertRulesClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + resourceGroup := d.Get("resource_group_name").(string) + name := d.Get("name").(string) + + if d.IsNewResource() { + existing, err := client.Get(ctx, resourceGroup, name, utils.Bool(true)) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for presence of existing Monitor Smart Detector Alert Rule %q (Resource Group %q): %+v", name, resourceGroup, err) + } + } + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_monitor_smart_detector_alert_rule", *existing.ID) + } + } + + state := alertsmanagement.AlertRuleStateDisabled + if d.Get("enabled").(bool) { + state = alertsmanagement.AlertRuleStateEnabled + } + + actionRule := alertsmanagement.AlertRule{ + // the location is always global from the portal + Location: utils.String(location.Normalize("Global")), + AlertRuleProperties: &alertsmanagement.AlertRuleProperties{ + Description: utils.String(d.Get("description").(string)), + State: state, + Severity: alertsmanagement.Severity(d.Get("severity").(string)), + Frequency: utils.String(d.Get("frequency").(string)), + Detector: &alertsmanagement.Detector{ + ID: utils.String(d.Get("detector_type").(string)), + }, + Scope: utils.ExpandStringSlice(d.Get("scope_resource_ids").(*schema.Set).List()), + ActionGroups: expandMonitorSmartDetectorAlertRuleActionGroup(d.Get("action_group").([]interface{})), + }, + } + + if v, ok := d.GetOk("throttling_duration"); ok { + actionRule.AlertRuleProperties.Throttling = &alertsmanagement.ThrottlingInformation{ + Duration: utils.String(v.(string)), + } + } + + if _, err := client.CreateOrUpdate(ctx, resourceGroup, name, actionRule); err != nil { + return fmt.Errorf("creating/updating Monitor Smart Detector Alert Rule %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + resp, err := client.Get(ctx, resourceGroup, name, nil) + if err != nil { + return fmt.Errorf("retrieving Monitor Smart Detector Alert Rule %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + if resp.ID == nil || *resp.ID == "" { + return fmt.Errorf("empty or nil ID returned for Monitor Smart Detector Alert Rule %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + d.SetId(*resp.ID) + return resourceMonitorSmartDetectorAlertRuleRead(d, meta) +} + +func resourceMonitorSmartDetectorAlertRuleRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Monitor.SmartDetectorAlertRulesClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.SmartDetectorAlertRuleID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.Name, utils.Bool(true)) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[INFO] Monitor Smart Detector Alert Rule %q does not exist - removing from state", d.Id()) + d.SetId("") + return nil + } + return fmt.Errorf("retrieving Monitor Smart Detector Alert Rule %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + d.Set("name", resp.Name) + d.Set("resource_group_name", id.ResourceGroup) + if props := resp.AlertRuleProperties; props != nil { + d.Set("description", props.Description) + d.Set("enabled", props.State == alertsmanagement.AlertRuleStateEnabled) + d.Set("frequency", props.Frequency) + d.Set("severity", string(props.Severity)) + 
d.Set("scope_resource_ids", utils.FlattenStringSlice(props.Scope)) + + if props.Detector != nil { + d.Set("detector_type", props.Detector.ID) + } + + throttlingDuration := "" + if props.Throttling != nil && props.Throttling.Duration != nil { + throttlingDuration = *props.Throttling.Duration + } + d.Set("throttling_duration", throttlingDuration) + + if err := d.Set("action_group", flattenMonitorSmartDetectorAlertRuleActionGroup(props.ActionGroups)); err != nil { + return fmt.Errorf("setting `action_group`: %+v", err) + } + } + + return nil +} + +func resourceMonitorSmartDetectorAlertRuleDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Monitor.SmartDetectorAlertRulesClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.SmartDetectorAlertRuleID(d.Id()) + if err != nil { + return err + } + + if _, err := client.Delete(ctx, id.ResourceGroup, id.Name); err != nil { + return fmt.Errorf("deleting Monitor Smart Detector Alert Rule %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + return nil +} + +func expandMonitorSmartDetectorAlertRuleActionGroup(input []interface{}) *alertsmanagement.ActionGroupsInformation { + if len(input) == 0 || input[0] == nil { + return nil + } + v := input[0].(map[string]interface{}) + return &alertsmanagement.ActionGroupsInformation{ + CustomEmailSubject: utils.String(v["email_subject"].(string)), + CustomWebhookPayload: utils.String(v["webhook_payload"].(string)), + GroupIds: utils.ExpandStringSlice(v["ids"].(*schema.Set).List()), + } +} + +func flattenMonitorSmartDetectorAlertRuleActionGroup(input *alertsmanagement.ActionGroupsInformation) []interface{} { + if input == nil { + return []interface{}{} + } + + var customEmailSubject, CustomWebhookPayload string + if input.CustomEmailSubject != nil { + customEmailSubject = *input.CustomEmailSubject + } + if input.CustomWebhookPayload != nil { + CustomWebhookPayload = *input.CustomWebhookPayload + } + + return []interface{}{ + map[string]interface{}{ + "ids": utils.FlattenStringSlice(input.GroupIds), + "email_subject": customEmailSubject, + "webhook_payload": CustomWebhookPayload, + }, + } +} diff --git a/azurerm/internal/services/monitor/monitor_smart_detector_alert_rule_resource_test.go b/azurerm/internal/services/monitor/monitor_smart_detector_alert_rule_resource_test.go new file mode 100644 index 000000000000..e8afbe7c4bf0 --- /dev/null +++ b/azurerm/internal/services/monitor/monitor_smart_detector_alert_rule_resource_test.go @@ -0,0 +1,199 @@ +package monitor_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/monitor/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MonitorSmartDetectorAlertRuleResource struct { +} + +func TestAccMonitorSmartDetectorAlertRule_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_monitor_smart_detector_alert_rule", "test") + r := MonitorSmartDetectorAlertRuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: 
r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMonitorSmartDetectorAlertRule_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_monitor_smart_detector_alert_rule", "test") + r := MonitorSmartDetectorAlertRuleResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccMonitorSmartDetectorAlertRule_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_monitor_smart_detector_alert_rule", "test") + r := MonitorSmartDetectorAlertRuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMonitorSmartDetectorAlertRule_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_monitor_smart_detector_alert_rule", "test") + r := MonitorSmartDetectorAlertRuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t MonitorSmartDetectorAlertRuleResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.SmartDetectorAlertRuleID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Monitor.SmartDetectorAlertRulesClient.Get(ctx, id.ResourceGroup, id.Name, utils.Bool(true)) + if err != nil { + return nil, fmt.Errorf("reading action rule (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (r MonitorSmartDetectorAlertRuleResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_monitor_smart_detector_alert_rule" "test" { + name = "acctestSDAR-%d" + resource_group_name = azurerm_resource_group.test.name + severity = "Sev0" + scope_resource_ids = [azurerm_application_insights.test.id] + frequency = "PT1M" + detector_type = "FailureAnomaliesDetector" + + action_group { + ids = [azurerm_monitor_action_group.test.id] + } +} +`, r.template(data), data.RandomInteger) +} + +func (r MonitorSmartDetectorAlertRuleResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_monitor_smart_detector_alert_rule" "import" { + name = azurerm_monitor_smart_detector_alert_rule.test.name + resource_group_name = azurerm_monitor_smart_detector_alert_rule.test.resource_group_name + severity = azurerm_monitor_smart_detector_alert_rule.test.severity + scope_resource_ids = azurerm_monitor_smart_detector_alert_rule.test.scope_resource_ids + frequency = azurerm_monitor_smart_detector_alert_rule.test.frequency + detector_type = azurerm_monitor_smart_detector_alert_rule.test.detector_type + + action_group { + ids = [azurerm_monitor_action_group.test.id] + } +} +`, r.basic(data)) +} + +func (r MonitorSmartDetectorAlertRuleResource) 
complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_monitor_smart_detector_alert_rule" "test" { + name = "acctestSDAR-%d" + resource_group_name = azurerm_resource_group.test.name + severity = "Sev0" + scope_resource_ids = [azurerm_application_insights.test.id] + frequency = "PT1M" + detector_type = "FailureAnomaliesDetector" + + description = "acctest" + enabled = false + + action_group { + ids = [azurerm_monitor_action_group.test.id] + email_subject = "acctest email subject" + webhook_payload = < - - - - -XML - lifecycle { - ignore_changes = [tags] - } -} -`, data.RandomInteger, data.Locations.Primary) -} - -func testAccAzureRMMonitorMetricAlert_applicationInsightsWebTest(data acceptance.TestData) string { - template := testAccAzureRMMonitorMetricAlert_applicationInsightsWebTestTemplate(data) - return fmt.Sprintf(` -%s - -resource "azurerm_monitor_metric_alert" "test" { - name = "acctestMetricAlert-%d" - resource_group_name = azurerm_resource_group.test.name - scopes = [ - azurerm_application_insights.test.id, - azurerm_application_insights_web_test.test.id, - ] - application_insights_web_test_location_availability_criteria { - web_test_id = azurerm_application_insights_web_test.test.id - component_id = azurerm_application_insights.test.id - failed_location_count = 2 - } - window_size = "PT15M" - frequency = "PT1M" -} -`, template, data.RandomInteger) -} diff --git a/azurerm/internal/services/monitor/tests/monitor_scheduled_query_rules_alert_data_source_test.go b/azurerm/internal/services/monitor/tests/monitor_scheduled_query_rules_alert_data_source_test.go deleted file mode 100644 index 382002fc4d03..000000000000 --- a/azurerm/internal/services/monitor/tests/monitor_scheduled_query_rules_alert_data_source_test.go +++ /dev/null @@ -1,70 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - "time" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMMonitorScheduledQueryRules_AlertingAction(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_monitor_scheduled_query_rules_alert", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMMonitorScheduledQueryRules_AlertingActionConfig(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "id"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMMonitorScheduledQueryRules_AlertingActionCrossResource(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_monitor_scheduled_query_rules_alert", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMMonitorScheduledQueryRules_AlertingActionCrossResourceConfig(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "id"), - ), - }, - }, - }) -} - -func testAccDataSourceAzureRMMonitorScheduledQueryRules_AlertingActionConfig(data acceptance.TestData) string { - ts := time.Now().Format(time.RFC3339) - template := testAccAzureRMMonitorScheduledQueryRules_AlertingActionConfigBasic(data, ts) - - return fmt.Sprintf(` -%s - -data "azurerm_monitor_scheduled_query_rules_alert" "test" { - name = 
basename(azurerm_monitor_scheduled_query_rules_alert.test.id) - resource_group_name = "${azurerm_resource_group.test.name}" -} -`, template) -} - -func testAccDataSourceAzureRMMonitorScheduledQueryRules_AlertingActionCrossResourceConfig(data acceptance.TestData) string { - template := testAccAzureRMMonitorScheduledQueryRules_AlertingActionCrossResourceConfig(data) - return fmt.Sprintf(` -%s - -data "azurerm_monitor_scheduled_query_rules_alert" "test" { - name = basename(azurerm_monitor_scheduled_query_rules_alert.test.id) - resource_group_name = "${azurerm_resource_group.test.name}" -} -`, template) -} diff --git a/azurerm/internal/services/monitor/tests/monitor_scheduled_query_rules_alert_resource_test.go b/azurerm/internal/services/monitor/tests/monitor_scheduled_query_rules_alert_resource_test.go deleted file mode 100644 index f166d9cd4958..000000000000 --- a/azurerm/internal/services/monitor/tests/monitor_scheduled_query_rules_alert_resource_test.go +++ /dev/null @@ -1,376 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - "time" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMMonitorScheduledQueryRules_AlertingActionBasic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_monitor_scheduled_query_rules_alert", "test") - ts := time.Now().Format(time.RFC3339) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMonitorScheduledQueryRules_AlertDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMonitorScheduledQueryRules_AlertingActionConfigBasic(data, ts), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMonitorScheduledQueryRules_AlertExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMonitorScheduledQueryRules_AlertingActionUpdate(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_monitor_scheduled_query_rules_alert", "test") - ts := time.Now().Format(time.RFC3339) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMonitorScheduledQueryRules_AlertDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMonitorScheduledQueryRules_AlertingActionConfigBasic(data, ts), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMonitorScheduledQueryRules_AlertExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMMonitorScheduledQueryRules_AlertingActionConfigUpdate(data, ts), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMonitorScheduledQueryRules_AlertExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMonitorScheduledQueryRules_AlertingActionComplete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_monitor_scheduled_query_rules_alert", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMonitorScheduledQueryRules_AlertDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMonitorScheduledQueryRules_AlertingActionConfigComplete(data), - 
Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMonitorScheduledQueryRules_AlertExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMonitorScheduledQueryRules_AlertingActionCrossResource(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_monitor_scheduled_query_rules_alert", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMonitorScheduledQueryRules_AlertDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMonitorScheduledQueryRules_AlertingActionCrossResourceConfig(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMonitorScheduledQueryRules_AlertExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testAccAzureRMMonitorScheduledQueryRules_AlertingActionConfigBasic(data acceptance.TestData, ts string) string { - return fmt.Sprintf(` -resource "azurerm_resource_group" "test" { - name = "acctestRG-monitor-%d" - location = "%s" -} - -resource "azurerm_application_insights" "test" { - name = "acctestAppInsights-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - application_type = "web" -} - -resource "azurerm_monitor_action_group" "test" { - name = "acctestActionGroup-%d" - resource_group_name = azurerm_resource_group.test.name - short_name = "acctestag" -} - -resource "azurerm_monitor_scheduled_query_rules_alert" "test" { - name = "acctestsqr-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - data_source_id = azurerm_application_insights.test.id - query = <<-QUERY - let d=datatable(TimeGenerated: datetime, usage_percent: double) [ '%s', 25.4, '%s', 75.4 ]; - d | summarize AggregatedValue=avg(usage_percent) by bin(TimeGenerated, 1h) -QUERY - - - frequency = 60 - time_window = 60 - - action { - action_group = [azurerm_monitor_action_group.test.id] - } - - trigger { - operator = "GreaterThan" - threshold = 5000 - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, ts, ts) -} - -func testAccAzureRMMonitorScheduledQueryRules_AlertingActionConfigUpdate(data acceptance.TestData, ts string) string { - return fmt.Sprintf(` -resource "azurerm_resource_group" "test" { - name = "acctestRG-monitor-%d" - location = "%s" -} - -resource "azurerm_application_insights" "test" { - name = "acctestAppInsights-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - application_type = "web" -} - -resource "azurerm_monitor_action_group" "test" { - name = "acctestActionGroup-%d" - resource_group_name = azurerm_resource_group.test.name - short_name = "acctestag" -} - -resource "azurerm_monitor_scheduled_query_rules_alert" "test" { - name = "acctestsqr-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - data_source_id = azurerm_application_insights.test.id - query = <<-QUERY - let d=datatable(TimeGenerated: datetime, usage_percent: double) [ '%s', 25.4, '%s', 75.4 ]; - d | summarize AggregatedValue=avg(usage_percent) by bin(TimeGenerated, 1h) -QUERY - - - enabled = false - description = "test description" - - frequency = 30 - time_window = 30 - - action { - action_group = [azurerm_monitor_action_group.test.id] - } - - trigger { - operator = "GreaterThan" - threshold = 1000 - } 
-} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, ts, ts) -} - -func testAccAzureRMMonitorScheduledQueryRules_AlertingActionConfigComplete(data acceptance.TestData) string { - ts := time.Now().Format(time.RFC3339) - - return fmt.Sprintf(` -resource "azurerm_resource_group" "test" { - name = "acctestRG-monitor-%d" - location = "%s" -} - -resource "azurerm_application_insights" "test" { - name = "acctestAppInsights-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - application_type = "web" -} - -resource "azurerm_monitor_action_group" "test" { - name = "acctestActionGroup-%d" - resource_group_name = azurerm_resource_group.test.name - short_name = "acctestag" -} - -resource "azurerm_monitor_scheduled_query_rules_alert" "test" { - name = "acctestsqr-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - description = "test alerting action" - enabled = true - - data_source_id = azurerm_application_insights.test.id - query = "let d=datatable(TimeGenerated: datetime, usage_percent: double) [ '%s', 25.4, '%s', 75.4 ]; d | summarize AggregatedValue=avg(usage_percent) by bin(TimeGenerated, 1h)" - - frequency = 60 - time_window = 60 - - severity = 3 - throttling = 5 - action { - action_group = [azurerm_monitor_action_group.test.id] - email_subject = "Custom alert email subject" - custom_webhook_payload = "{}" - } - - trigger { - operator = "GreaterThan" - threshold = 5000 - metric_trigger { - operator = "GreaterThan" - threshold = 1 - metric_trigger_type = "Total" - metric_column = "TimeGenerated" - } - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, ts, ts) -} - -func testAccAzureRMMonitorScheduledQueryRules_AlertingActionCrossResourceConfig(data acceptance.TestData) string { - return fmt.Sprintf(` -resource "azurerm_resource_group" "test" { - name = "acctestRG-monitor-%d" - location = "%s" -} - -resource "azurerm_application_insights" "test" { - name = "acctestAppInsights-%d" - location = "${azurerm_resource_group.test.location}" - resource_group_name = "${azurerm_resource_group.test.name}" - application_type = "web" -} - -resource "azurerm_log_analytics_workspace" "test" { - name = "acctestWorkspace-%d" - location = "${azurerm_resource_group.test.location}" - resource_group_name = "${azurerm_resource_group.test.name}" - sku = "PerGB2018" - retention_in_days = 30 -} - -resource "azurerm_monitor_action_group" "test" { - name = "acctestActionGroup-%d" - resource_group_name = "${azurerm_resource_group.test.name}" - short_name = "acctestag" -} - -resource "azurerm_monitor_scheduled_query_rules_alert" "test" { - name = "acctestsqr-%d" - resource_group_name = "${azurerm_resource_group.test.name}" - location = "${azurerm_resource_group.test.location}" - description = "test alerting action cross-resource" - enabled = true - - authorized_resource_ids = ["${azurerm_application_insights.test.id}", "${azurerm_log_analytics_workspace.test.id}"] - data_source_id = "${azurerm_application_insights.test.id}" - query = format(<<-QUERY - let a=workspace('%%s').Perf - | where Computer='dependency' and TimeGenerated > ago(1h) - | where ObjectName == 'Processor' and CounterName == '%%%% Processor Time' - | summarize cpu=avg(CounterValue) by bin(TimeGenerated, 1m) - | extend ts=tostring(TimeGenerated); let b=requests - | where resultCode == '200' and timestamp > ago(1h) - | 
summarize reqs=count() by bin(timestamp, 1m) - | extend ts = tostring(timestamp); a - | join b on $left.ts == $right.ts - | where cpu > 50 and reqs > 5 -QUERY - , azurerm_log_analytics_workspace.test.id) - - frequency = 60 - time_window = 60 - - severity = 3 - action { - action_group = ["${azurerm_monitor_action_group.test.id}"] - email_subject = "Custom alert email subject" - } - - trigger { - operator = "GreaterThan" - threshold = 5000 - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testCheckAzureRMMonitorScheduledQueryRules_AlertDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Monitor.ScheduledQueryRulesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_monitor_scheduled_query_rules_alert" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, name) - - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Scheduled Query Rule still exists:\n%#v", resp) - } - } - - return nil -} - -func testCheckAzureRMMonitorScheduledQueryRules_AlertExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Scheduled Query Rule Instance: %s", name) - } - - client := acceptance.AzureProvider.Meta().(*clients.Client).Monitor.ScheduledQueryRulesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - return fmt.Errorf("Bad: Get on monitorScheduledQueryRulesClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Scheduled Query Rule Instance %q (resource group: %q) does not exist", name, resourceGroup) - } - - return nil - } -} diff --git a/azurerm/internal/services/monitor/tests/monitor_scheduled_query_rules_log_data_source_test.go b/azurerm/internal/services/monitor/tests/monitor_scheduled_query_rules_log_data_source_test.go deleted file mode 100644 index 6c23137eb96f..000000000000 --- a/azurerm/internal/services/monitor/tests/monitor_scheduled_query_rules_log_data_source_test.go +++ /dev/null @@ -1,38 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMMonitorScheduledQueryRules_LogToMetricAction(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_monitor_scheduled_query_rules_log", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMMonitorScheduledQueryRules_LogToMetricActionConfig(data), - Check: resource.ComposeTestCheckFunc( - 
resource.TestCheckResourceAttrSet(data.ResourceName, "id"), - ), - }, - }, - }) -} - -func testAccDataSourceAzureRMMonitorScheduledQueryRules_LogToMetricActionConfig(data acceptance.TestData) string { - template := testAccAzureRMMonitorScheduledQueryRules_LogToMetricActionConfigBasic(data) - return fmt.Sprintf(` -%s - -data "azurerm_monitor_scheduled_query_rules_log" "test" { - name = basename(azurerm_monitor_scheduled_query_rules_log.test.id) - resource_group_name = "${azurerm_resource_group.test.name}" -} -`, template) -} diff --git a/azurerm/internal/services/monitor/tests/monitor_scheduled_query_rules_log_resource_test.go b/azurerm/internal/services/monitor/tests/monitor_scheduled_query_rules_log_resource_test.go deleted file mode 100644 index 2fdb78e9124e..000000000000 --- a/azurerm/internal/services/monitor/tests/monitor_scheduled_query_rules_log_resource_test.go +++ /dev/null @@ -1,263 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMMonitorScheduledQueryRules_LogToMetricActionBasic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_monitor_scheduled_query_rules_log", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMonitorScheduledQueryRules_LogDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMonitorScheduledQueryRules_LogToMetricActionConfigBasic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMonitorScheduledQueryRules_LogExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMonitorScheduledQueryRules_LogToMetricActionUpdate(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_monitor_scheduled_query_rules_log", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMonitorScheduledQueryRules_LogDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMonitorScheduledQueryRules_LogToMetricActionConfigBasic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMonitorScheduledQueryRules_LogExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMMonitorScheduledQueryRules_LogToMetricActionConfigUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMonitorScheduledQueryRules_LogExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMonitorScheduledQueryRules_LogToMetricActionComplete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_monitor_scheduled_query_rules_log", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMonitorScheduledQueryRules_LogDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMonitorScheduledQueryRules_LogToMetricActionConfigComplete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMonitorScheduledQueryRules_LogExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func 
testAccAzureRMMonitorScheduledQueryRules_LogToMetricActionConfigBasic(data acceptance.TestData) string { - return fmt.Sprintf(` -resource "azurerm_resource_group" "test" { - name = "acctestRG-monitor-%d" - location = "%s" -} - -resource "azurerm_log_analytics_workspace" "test" { - name = "acctestWorkspace-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "PerGB2018" - retention_in_days = 30 -} - -resource "azurerm_monitor_scheduled_query_rules_log" "test" { - name = "acctestsqr-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - data_source_id = azurerm_log_analytics_workspace.test.id - - criteria { - metric_name = "Average_%% Idle Time" - dimension { - name = "InstanceName" - operator = "Include" - values = ["1"] - } - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMMonitorScheduledQueryRules_LogToMetricActionConfigUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -resource "azurerm_resource_group" "test" { - name = "acctestRG-monitor-%d" - location = "%s" -} - -resource "azurerm_log_analytics_workspace" "test" { - name = "acctestWorkspace-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "PerGB2018" - retention_in_days = 30 -} - -resource "azurerm_monitor_scheduled_query_rules_log" "test" { - name = "acctestsqr-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - description = "test log to metric action" - enabled = true - - data_source_id = azurerm_log_analytics_workspace.test.id - - criteria { - metric_name = "Average_%% Idle Time" - dimension { - name = "InstanceName" - operator = "Include" - values = ["2"] - } - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMMonitorScheduledQueryRules_LogToMetricActionConfigComplete(data acceptance.TestData) string { - return fmt.Sprintf(` -resource "azurerm_resource_group" "test" { - name = "acctestRG-monitor-%d" - location = "%s" -} - -resource "azurerm_log_analytics_workspace" "test" { - name = "acctestWorkspace-%d" - location = "${azurerm_resource_group.test.location}" - resource_group_name = "${azurerm_resource_group.test.name}" - sku = "PerGB2018" - retention_in_days = 30 -} - -resource "azurerm_monitor_action_group" "test" { - name = "acctestActionGroup-%d" - resource_group_name = "${azurerm_resource_group.test.name}" - short_name = "acctestag" -} - -resource "azurerm_monitor_scheduled_query_rules_log" "test" { - name = "acctestsqr-%d" - resource_group_name = "${azurerm_resource_group.test.name}" - location = "${azurerm_resource_group.test.location}" - description = "test log to metric action" - enabled = true - - data_source_id = "${azurerm_log_analytics_workspace.test.id}" - - criteria { - metric_name = "Average_%% Idle Time" - dimension { - name = "Computer" - operator = "Include" - values = ["*"] - } - } -} - -resource "azurerm_monitor_metric_alert" "test" { - name = "acctestmal-%d" - resource_group_name = "${azurerm_resource_group.test.name}" - scopes = ["${azurerm_log_analytics_workspace.test.id}"] - description = "Action will be triggered when Average %% Idle Time is less than 10." 
- - criteria { - metric_namespace = "Microsoft.OperationalInsights/workspaces" - metric_name = "${azurerm_monitor_scheduled_query_rules_log.test.criteria[0].metric_name}" - aggregation = "Average" - operator = "LessThan" - threshold = 10 - } - - action { - action_group_id = "${azurerm_monitor_action_group.test.id}" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testCheckAzureRMMonitorScheduledQueryRules_LogDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Monitor.ScheduledQueryRulesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_monitor_scheduled_query_rules_log" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, name) - - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Scheduled Query Rule still exists:\n%#v", resp) - } - } - - return nil -} - -func testCheckAzureRMMonitorScheduledQueryRules_LogExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Scheduled Query Rule Instance: %s", name) - } - - client := acceptance.AzureProvider.Meta().(*clients.Client).Monitor.ScheduledQueryRulesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - return fmt.Errorf("Bad: Get on monitorScheduledQueryRulesClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Scheduled Query Rule Instance %q (resource group: %q) does not exist", name, resourceGroup) - } - - return nil - } -} diff --git a/azurerm/internal/services/monitor/validate/action_group_id.go b/azurerm/internal/services/monitor/validate/action_group_id.go new file mode 100644 index 000000000000..5a99218c9e64 --- /dev/null +++ b/azurerm/internal/services/monitor/validate/action_group_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/monitor/parse" +) + +func ActionGroupID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ActionGroupID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/monitor/validate/action_group_id_test.go b/azurerm/internal/services/monitor/validate/action_group_id_test.go new file mode 100644 index 000000000000..dc3ae9fca6c0 --- /dev/null +++ b/azurerm/internal/services/monitor/validate/action_group_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import 
"testing" + +func TestActionGroupID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/microsoft.insights/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/microsoft.insights/actionGroups/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/microsoft.insights/actionGroups/actionGroup1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.INSIGHTS/ACTIONGROUPS/ACTIONGROUP1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ActionGroupID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/monitor/validate/action_rule_id.go b/azurerm/internal/services/monitor/validate/action_rule_id.go new file mode 100644 index 000000000000..4c61fd2e0c6f --- /dev/null +++ b/azurerm/internal/services/monitor/validate/action_rule_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/monitor/parse" +) + +func ActionRuleID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ActionRuleID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/monitor/validate/action_rule_id_test.go b/azurerm/internal/services/monitor/validate/action_rule_id_test.go new file mode 100644 index 000000000000..783cec24f92b --- /dev/null +++ b/azurerm/internal/services/monitor/validate/action_rule_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestActionRuleID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.AlertsManagement/", + Valid: false, + }, + + 
{ + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.AlertsManagement/actionRules/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.AlertsManagement/actionRules/actionRule1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.ALERTSMANAGEMENT/ACTIONRULES/ACTIONRULE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ActionRuleID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/monitor/validate/monitor_action_rule.go b/azurerm/internal/services/monitor/validate/action_rule_name.go similarity index 100% rename from azurerm/internal/services/monitor/validate/monitor_action_rule.go rename to azurerm/internal/services/monitor/validate/action_rule_name.go diff --git a/azurerm/internal/services/monitor/validate/monitor_action_rule_test.go b/azurerm/internal/services/monitor/validate/action_rule_name_test.go similarity index 100% rename from azurerm/internal/services/monitor/validate/monitor_action_rule_test.go rename to azurerm/internal/services/monitor/validate/action_rule_name_test.go diff --git a/azurerm/internal/services/monitor/validate/monitor_action_group.go b/azurerm/internal/services/monitor/validate/monitor_action_group.go deleted file mode 100644 index d5b2d956c567..000000000000 --- a/azurerm/internal/services/monitor/validate/monitor_action_group.go +++ /dev/null @@ -1,22 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/monitor/parse" -) - -func ActionGroupID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.ActionGroupID(v); err != nil { - errors = append(errors, fmt.Errorf("validating %q as a resource id: %v", k, err)) - return - } - - return -} diff --git a/azurerm/internal/services/monitor/validate/smart_detector_alert_rule_id.go b/azurerm/internal/services/monitor/validate/smart_detector_alert_rule_id.go new file mode 100644 index 000000000000..bfb8f5ecb6b7 --- /dev/null +++ b/azurerm/internal/services/monitor/validate/smart_detector_alert_rule_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/monitor/parse" +) + +func SmartDetectorAlertRuleID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.SmartDetectorAlertRuleID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/monitor/validate/smart_detector_alert_rule_id_test.go b/azurerm/internal/services/monitor/validate/smart_detector_alert_rule_id_test.go new file mode 100644 index 000000000000..481a7feb3ac7 --- /dev/null +++ b/azurerm/internal/services/monitor/validate/smart_detector_alert_rule_id_test.go @@ -0,0 +1,76 @@ +package 
validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestSmartDetectorAlertRuleID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.AlertsManagement/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.AlertsManagement/smartdetectoralertrules/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.AlertsManagement/smartdetectoralertrules/rule1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.ALERTSMANAGEMENT/SMARTDETECTORALERTRULES/RULE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := SmartDetectorAlertRuleID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/msi/parse/identity.go b/azurerm/internal/services/msi/parse/identity.go deleted file mode 100644 index b31ac41bdaa7..000000000000 --- a/azurerm/internal/services/msi/parse/identity.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type UserAssignedIdentityId struct { - // "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/contoso-resource-group/providers/Microsoft.ManagedIdentity/userAssignedIdentities/contoso-identity" - Subscription string - ResourceGroup string - Name string -} - -func UserAssignedIdentityID(input string) (*UserAssignedIdentityId, error) { - if len(input) == 0 { - return nil, fmt.Errorf("Bad: UserAssignedIdentityId cannot be an empty string") - } - - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, err - } - - userAssignedIdentityId := UserAssignedIdentityId{ - Subscription: id.SubscriptionID, - ResourceGroup: id.ResourceGroup, - } - - if name, err := id.PopSegment("userAssignedIdentities"); err != nil { - return nil, fmt.Errorf("Bad: missing userAssignedIdentities segment in ID (%q)", input) - } else { - userAssignedIdentityId.Name = name - } - - return &userAssignedIdentityId, nil -} diff --git a/azurerm/internal/services/msi/parse/user_assigned_identity.go b/azurerm/internal/services/msi/parse/user_assigned_identity.go new file mode 100644 index 000000000000..c6c34ed1ab13 --- /dev/null +++ b/azurerm/internal/services/msi/parse/user_assigned_identity.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" 
+) + +type UserAssignedIdentityId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewUserAssignedIdentityID(subscriptionId, resourceGroup, name string) UserAssignedIdentityId { + return UserAssignedIdentityId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id UserAssignedIdentityId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "User Assigned Identity", segmentsStr) +} + +func (id UserAssignedIdentityId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.ManagedIdentity/userAssignedIdentities/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// UserAssignedIdentityID parses a UserAssignedIdentity ID into an UserAssignedIdentityId struct +func UserAssignedIdentityID(input string) (*UserAssignedIdentityId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := UserAssignedIdentityId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("userAssignedIdentities"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/msi/parse/user_assigned_identity_test.go b/azurerm/internal/services/msi/parse/user_assigned_identity_test.go new file mode 100644 index 000000000000..fdd9fc956019 --- /dev/null +++ b/azurerm/internal/services/msi/parse/user_assigned_identity_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = UserAssignedIdentityId{} + +func TestUserAssignedIdentityIDFormatter(t *testing.T) { + actual := NewUserAssignedIdentityID("12345678-1234-9876-4563-123456789012", "group1", "identity1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestUserAssignedIdentityID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *UserAssignedIdentityId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.ManagedIdentity/", + Error: true, + }, + + { + // missing value for Name + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.ManagedIdentity/userAssignedIdentities/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity1", + Expected: &UserAssignedIdentityId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "group1", + Name: "identity1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.MANAGEDIDENTITY/USERASSIGNEDIDENTITIES/IDENTITY1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := UserAssignedIdentityID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/msi/resourceids.go b/azurerm/internal/services/msi/resourceids.go new file mode 100644 index 000000000000..d5a4a3962f77 --- /dev/null +++ b/azurerm/internal/services/msi/resourceids.go @@ -0,0 +1,3 @@ +package msi + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=UserAssignedIdentity -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity1 diff --git a/azurerm/internal/services/msi/tests/user_assigned_identity_data_source_test.go b/azurerm/internal/services/msi/tests/user_assigned_identity_data_source_test.go deleted file mode 100644 index 8d6c80e86db0..000000000000 --- a/azurerm/internal/services/msi/tests/user_assigned_identity_data_source_test.go +++ /dev/null @@ -1,89 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" -) - -func TestAccDataSourceAzureRMUserAssignedIdentity_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_user_assigned_identity", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMUserAssignedIdentity_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "name", fmt.Sprintf("acctest%s-uai", data.RandomString)), - resource.TestCheckResourceAttr(data.ResourceName, "resource_group_name", fmt.Sprintf("acctestRG-%d", data.RandomInteger)), - resource.TestCheckResourceAttr(data.ResourceName, "location", azure.NormalizeLocation(data.Locations.Primary)), - 
resource.TestMatchResourceAttr(data.ResourceName, "principal_id", validate.UUIDRegExp), - resource.TestMatchResourceAttr(data.ResourceName, "client_id", validate.UUIDRegExp), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - testEqualResourceAttr(data.ResourceName, "azurerm_user_assigned_identity.test", "principal_id"), - testEqualResourceAttr(data.ResourceName, "azurerm_user_assigned_identity.test", "client_id"), - ), - }, - }, - }) -} - -func testEqualResourceAttr(dataSourceName string, resourceName string, attrName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - // Ensure we have enough information in state to look up in API - ds, ok := s.RootModule().Resources[dataSourceName] - if !ok { - return fmt.Errorf("Not found: %s", dataSourceName) - } - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - dsAttr := ds.Primary.Attributes[attrName] - rsAttr := rs.Primary.Attributes[attrName] - - if dsAttr != rsAttr { - return fmt.Errorf("Attributes not equal: %s, %s", dsAttr, rsAttr) - } - - return nil - } -} - -func testAccDataSourceAzureRMUserAssignedIdentity_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_user_assigned_identity" "test" { - name = "acctest%s-uai" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - tags = { - "foo" = "bar" - } -} - -data "azurerm_user_assigned_identity" "test" { - name = azurerm_user_assigned_identity.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} diff --git a/azurerm/internal/services/msi/tests/user_assigned_identity_resource_test.go b/azurerm/internal/services/msi/tests/user_assigned_identity_resource_test.go deleted file mode 100644 index 6fd5ec555770..000000000000 --- a/azurerm/internal/services/msi/tests/user_assigned_identity_resource_test.go +++ /dev/null @@ -1,147 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "net/http" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMUserAssignedIdentity_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_user_assigned_identity", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMUserAssignedIdentityDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMUserAssignedIdentity_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMUserAssignedIdentityExists(data.ResourceName), - resource.TestMatchResourceAttr(data.ResourceName, "principal_id", validate.UUIDRegExp), - resource.TestMatchResourceAttr(data.ResourceName, "client_id", validate.UUIDRegExp), - ), - }, - data.ImportStep(), - }, - }) -} -func TestAccAzureRMUserAssignedIdentity_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_user_assigned_identity", "test") - - 
resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMUserAssignedIdentityDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMUserAssignedIdentity_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMUserAssignedIdentityExists(data.ResourceName), - resource.TestMatchResourceAttr(data.ResourceName, "principal_id", validate.UUIDRegExp), - resource.TestMatchResourceAttr(data.ResourceName, "client_id", validate.UUIDRegExp), - ), - }, - { - Config: testAccAzureRMUserAssignedIdentity_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_user_assigned_identity"), - }, - }, - }) -} - -func testCheckAzureRMUserAssignedIdentityExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MSI.UserAssignedIdentitiesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for virtual machine: %s", name) - } - - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - return fmt.Errorf("Bad: Get on userAssignedIdentitiesClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: User Assigned Identity %q (resource group: %q) does not exist", name, resourceGroup) - } - - return nil - } -} - -func testCheckAzureRMUserAssignedIdentityDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MSI.UserAssignedIdentitiesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_virtual_machine" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, name) - - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return nil - } - - return err - } - - return fmt.Errorf("User Assigned Identity still exists:\n%#v", resp) - } - - return nil -} - -func testAccAzureRMUserAssignedIdentity_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_user_assigned_identity" "test" { - name = "acctest%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func testAccAzureRMUserAssignedIdentity_requiresImport(data acceptance.TestData) string { - return fmt.Sprintf(` -%s - -resource "azurerm_user_assigned_identity" "import" { - name = azurerm_user_assigned_identity.test.name - resource_group_name = azurerm_user_assigned_identity.test.resource_group_name - location = azurerm_user_assigned_identity.test.location -} -`, testAccAzureRMUserAssignedIdentity_basic(data)) -} diff --git a/azurerm/internal/services/msi/user_assigned_identity_data_source.go 
b/azurerm/internal/services/msi/user_assigned_identity_data_source.go index 4a3a2ea270a3..9e812eeb51e6 100644 --- a/azurerm/internal/services/msi/user_assigned_identity_data_source.go +++ b/azurerm/internal/services/msi/user_assigned_identity_data_source.go @@ -7,6 +7,8 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/msi/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" @@ -50,25 +52,23 @@ func dataSourceArmUserAssignedIdentity() *schema.Resource { func dataSourceArmUserAssignedIdentityRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSI.UserAssignedIdentitiesClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - name := d.Get("name").(string) - resourceGroup := d.Get("resource_group_name").(string) + id := parse.NewUserAssignedIdentityID(subscriptionId, d.Get("resource_group_name").(string), d.Get("name").(string)) - resp, err := client.Get(ctx, resourceGroup, name) + resp, err := client.Get(ctx, id.ResourceGroup, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("User Assigned Identity %q was not found in Resource Group %q", name, resourceGroup) + return fmt.Errorf("User Assigned Identity %q was not found in Resource Group %q", id.Name, id.ResourceGroup) } - return fmt.Errorf("Error making Read request on User Assigned Identity %q (Resource Group %q): %+v", name, resourceGroup, err) + return fmt.Errorf("retrieving User Assigned Identity %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } - d.SetId(*resp.ID) + d.SetId(id.ID()) - if location := resp.Location; location != nil { - d.Set("location", azure.NormalizeLocation(*location)) - } + d.Set("location", location.NormalizeNilable(resp.Location)) if props := resp.IdentityProperties; props != nil { if principalId := props.PrincipalID; principalId != nil { diff --git a/azurerm/internal/services/msi/user_assigned_identity_data_source_test.go b/azurerm/internal/services/msi/user_assigned_identity_data_source_test.go new file mode 100644 index 000000000000..2adedf2e5914 --- /dev/null +++ b/azurerm/internal/services/msi/user_assigned_identity_data_source_test.go @@ -0,0 +1,67 @@ +package msi_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type UserAssignedIdentityDataSource struct{} + +func TestAccDataSourceAzureRMUserAssignedIdentity_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_user_assigned_identity", "test") + d := UserAssignedIdentityDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: d.basic(data), + Check: 
resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("name").HasValue(fmt.Sprintf("acctest%s-uai", data.RandomString)), + check.That(data.ResourceName).Key("resource_group_name").HasValue(fmt.Sprintf("acctestRG-%d", data.RandomInteger)), + check.That(data.ResourceName).Key("location").HasValue(azure.NormalizeLocation(data.Locations.Primary)), + check.That(data.ResourceName).Key("principal_id").MatchesRegex(validate.UUIDRegExp), + check.That(data.ResourceName).Key("client_id").MatchesRegex(validate.UUIDRegExp), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("principal_id").MatchesOtherKey( + check.That("azurerm_user_assigned_identity.test").Key("principal_id"), + ), + check.That(data.ResourceName).Key("client_id").MatchesOtherKey( + check.That("azurerm_user_assigned_identity.test").Key("client_id"), + ), + ), + }, + }) +} + +func (d UserAssignedIdentityDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_user_assigned_identity" "test" { + name = "acctest%s-uai" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + tags = { + "foo" = "bar" + } +} + +data "azurerm_user_assigned_identity" "test" { + name = azurerm_user_assigned_identity.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} diff --git a/azurerm/internal/services/msi/user_assigned_identity_resource.go b/azurerm/internal/services/msi/user_assigned_identity_resource.go index 6527a759f060..b284d41774a3 100644 --- a/azurerm/internal/services/msi/user_assigned_identity_resource.go +++ b/azurerm/internal/services/msi/user_assigned_identity_resource.go @@ -11,7 +11,10 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/msi/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) @@ -22,9 +25,10 @@ func resourceArmUserAssignedIdentity() *schema.Resource { Read: resourceArmUserAssignedIdentityRead, Update: resourceArmUserAssignedIdentityCreateUpdate, Delete: resourceArmUserAssignedIdentityDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.UserAssignedIdentityID(id) + return err + }), Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -62,50 +66,40 @@ func resourceArmUserAssignedIdentity() *schema.Resource { func resourceArmUserAssignedIdentityCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSI.UserAssignedIdentitiesClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId 
ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - log.Printf("[INFO] preparing arguments for Azure ARM user identity creation.") + log.Printf("[INFO] preparing arguments for User Assigned Identity create/update.") - name := d.Get("name").(string) location := d.Get("location").(string) - resGroup := d.Get("resource_group_name").(string) t := d.Get("tags").(map[string]interface{}) + resourceId := parse.NewUserAssignedIdentityID(subscriptionId, d.Get("resource_group_name").(string), d.Get("name").(string)) if d.IsNewResource() { - existing, err := client.Get(ctx, resGroup, name) + existing, err := client.Get(ctx, resourceId.ResourceGroup, resourceId.Name) if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing User Assigned Identity %q (Resource Group %q): %+v", name, resGroup, err) + return fmt.Errorf("checking for presence of existing User Assigned Identity %q (Resource Group %q): %+v", resourceId.Name, resourceId.ResourceGroup, err) } } if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_user_assigned_identity", *existing.ID) + return tf.ImportAsExistsError("azurerm_user_assigned_identity", resourceId.ID()) } } identity := msi.Identity{ - Name: &name, - Location: &location, + Name: utils.String(resourceId.Name), + Location: utils.String(location), Tags: tags.Expand(t), } - if _, err := client.CreateOrUpdate(ctx, resGroup, name, identity); err != nil { - return fmt.Errorf("Error Creating/Updating User Assigned Identity %q (Resource Group %q): %+v", name, resGroup, err) - } - - read, err := client.Get(ctx, resGroup, name) - if err != nil { - return err - } - - if read.ID == nil { - return fmt.Errorf("Cannot read User Assigned Identity %q ID (resource group %q) ID", name, resGroup) + if _, err := client.CreateOrUpdate(ctx, resourceId.ResourceGroup, resourceId.Name, identity); err != nil { + return fmt.Errorf("creating/updating User Assigned Identity %q (Resource Group %q): %+v", resourceId.Name, resourceId.ResourceGroup, err) } - d.SetId(*read.ID) - + d.SetId(resourceId.ID()) return resourceArmUserAssignedIdentityRead(d, meta) } @@ -114,25 +108,23 @@ func resourceArmUserAssignedIdentityRead(d *schema.ResourceData, meta interface{ ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.UserAssignedIdentityID(d.Id()) if err != nil { return err } - resGroup := id.ResourceGroup - name := id.Path["userAssignedIdentities"] - resp, err := client.Get(ctx, resGroup, name) + resp, err := client.Get(ctx, id.ResourceGroup, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("Error making Read request on User Assigned Identity %q (Resource Group %q): %+v", name, resGroup, err) + return fmt.Errorf("retrieving User Assigned Identity %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } - d.Set("name", resp.Name) - d.Set("resource_group_name", resGroup) - d.Set("location", resp.Location) + d.Set("name", id.Name) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("location", location.NormalizeNilable(resp.Location)) if props := resp.IdentityProperties; props != nil { if principalId := props.PrincipalID; principalId != nil { @@ -152,17 +144,13 @@ func resourceArmUserAssignedIdentityDelete(d *schema.ResourceData, meta interfac ctx, cancel := 
timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.UserAssignedIdentityID(d.Id()) if err != nil { return err } - resGroup := id.ResourceGroup - name := id.Path["userAssignedIdentities"] - - _, err = client.Delete(ctx, resGroup, name) - if err != nil { - return fmt.Errorf("Error deleting User Assigned Identity %q (Resource Group %q): %+v", name, resGroup, err) + if _, err = client.Delete(ctx, id.ResourceGroup, id.Name); err != nil { + return fmt.Errorf("deleting User Assigned Identity %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } return nil diff --git a/azurerm/internal/services/msi/user_assigned_identity_resource_test.go b/azurerm/internal/services/msi/user_assigned_identity_resource_test.go new file mode 100644 index 000000000000..29971b038abe --- /dev/null +++ b/azurerm/internal/services/msi/user_assigned_identity_resource_test.go @@ -0,0 +1,98 @@ +package msi_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/msi/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type UserAssignedIdentityResource struct{} + +func TestAccAzureRMUserAssignedIdentity_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_user_assigned_identity", "test") + r := UserAssignedIdentityResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("principal_id").MatchesRegex(validate.UUIDRegExp), + check.That(data.ResourceName).Key("client_id").MatchesRegex(validate.UUIDRegExp), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMUserAssignedIdentity_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_user_assigned_identity", "test") + r := UserAssignedIdentityResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("principal_id").MatchesRegex(validate.UUIDRegExp), + check.That(data.ResourceName).Key("client_id").MatchesRegex(validate.UUIDRegExp), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (r UserAssignedIdentityResource) Exists(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.UserAssignedIdentityID(state.ID) + if err != nil { + return nil, err + } + + resp, err := client.MSI.UserAssignedIdentitiesClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving User Assigned Identity %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + return utils.Bool(resp.IdentityProperties != nil), nil +} + +func (r UserAssignedIdentityResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { 
+ features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_user_assigned_identity" "test" { + name = "acctest%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (r UserAssignedIdentityResource) requiresImport(data acceptance.TestData) string { + template := r.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_user_assigned_identity" "import" { + name = azurerm_user_assigned_identity.test.name + resource_group_name = azurerm_user_assigned_identity.test.resource_group_name + location = azurerm_user_assigned_identity.test.location +} +`, template) +} diff --git a/azurerm/internal/services/msi/validate/identity.go b/azurerm/internal/services/msi/validate/identity.go deleted file mode 100644 index 8990f57a1891..000000000000 --- a/azurerm/internal/services/msi/validate/identity.go +++ /dev/null @@ -1,22 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/msi/parse" -) - -func UserAssignedIdentityId(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.UserAssignedIdentityID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} diff --git a/azurerm/internal/services/msi/validate/user_assigned_identity_id.go b/azurerm/internal/services/msi/validate/user_assigned_identity_id.go new file mode 100644 index 000000000000..27d09ebd4e5a --- /dev/null +++ b/azurerm/internal/services/msi/validate/user_assigned_identity_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/msi/parse" +) + +func UserAssignedIdentityID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.UserAssignedIdentityID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/msi/validate/user_assigned_identity_id_test.go b/azurerm/internal/services/msi/validate/user_assigned_identity_id_test.go new file mode 100644 index 000000000000..4aa8b4c915d2 --- /dev/null +++ b/azurerm/internal/services/msi/validate/user_assigned_identity_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestUserAssignedIdentityID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.ManagedIdentity/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.ManagedIdentity/userAssignedIdentities/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.MANAGEDIDENTITY/USERASSIGNEDIDENTITIES/IDENTITY1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := UserAssignedIdentityID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/mssql/helper/sql_extended_auditing.go b/azurerm/internal/services/mssql/helper/sql_extended_auditing.go index dd111abbfd8c..84efeb949007 100644 --- a/azurerm/internal/services/mssql/helper/sql_extended_auditing.go +++ b/azurerm/internal/services/mssql/helper/sql_extended_auditing.go @@ -45,7 +45,7 @@ func ExtendedAuditingSchema() *schema.Schema { } } -func ExpandAzureRmSqlServerBlobAuditingPolicies(input []interface{}) *sql.ExtendedServerBlobAuditingPolicyProperties { +func ExpandSqlServerBlobAuditingPolicies(input []interface{}) *sql.ExtendedServerBlobAuditingPolicyProperties { if len(input) == 0 || input[0] == nil { return &sql.ExtendedServerBlobAuditingPolicyProperties{ State: sql.BlobAuditingPolicyStateDisabled, @@ -72,7 +72,7 @@ func ExpandAzureRmSqlServerBlobAuditingPolicies(input []interface{}) *sql.Extend return &ExtendedServerBlobAuditingPolicyProperties } -func FlattenAzureRmSqlServerBlobAuditingPolicies(extendedServerBlobAuditingPolicy *sql.ExtendedServerBlobAuditingPolicy, d *schema.ResourceData) []interface{} { +func FlattenSqlServerBlobAuditingPolicies(extendedServerBlobAuditingPolicy *sql.ExtendedServerBlobAuditingPolicy, d *schema.ResourceData) []interface{} { if extendedServerBlobAuditingPolicy == nil || extendedServerBlobAuditingPolicy.State == sql.BlobAuditingPolicyStateDisabled { return []interface{}{} } @@ -104,7 +104,7 @@ func FlattenAzureRmSqlServerBlobAuditingPolicies(extendedServerBlobAuditingPolic } } -func ExpandAzureRmMsSqlDBBlobAuditingPolicies(input []interface{}) *sql.ExtendedDatabaseBlobAuditingPolicyProperties { +func ExpandMsSqlDBBlobAuditingPolicies(input []interface{}) *sql.ExtendedDatabaseBlobAuditingPolicyProperties { if len(input) == 0 || input[0] == nil { return &sql.ExtendedDatabaseBlobAuditingPolicyProperties{ State: sql.BlobAuditingPolicyStateDisabled, @@ -131,7 +131,7 @@ func ExpandAzureRmMsSqlDBBlobAuditingPolicies(input []interface{}) *sql.Extended return &ExtendedDatabaseBlobAuditingPolicyProperties } -func FlattenAzureRmMsSqlDBBlobAuditingPolicies(extendedDatabaseBlobAuditingPolicy *sql.ExtendedDatabaseBlobAuditingPolicy, d *schema.ResourceData) []interface{} { +func FlattenMsSqlDBBlobAuditingPolicies(extendedDatabaseBlobAuditingPolicy *sql.ExtendedDatabaseBlobAuditingPolicy, d *schema.ResourceData) []interface{} { if extendedDatabaseBlobAuditingPolicy == nil || extendedDatabaseBlobAuditingPolicy.State == sql.BlobAuditingPolicyStateDisabled { return []interface{}{} } diff --git a/azurerm/internal/services/mssql/mssql_database_data_source.go 
b/azurerm/internal/services/mssql/mssql_database_data_source.go index a658dc6fafce..e5cddf3d6bb3 100644 --- a/azurerm/internal/services/mssql/mssql_database_data_source.go +++ b/azurerm/internal/services/mssql/mssql_database_data_source.go @@ -6,6 +6,7 @@ import ( "github.com/Azure/azure-sdk-for-go/services/preview/sql/mgmt/v3.0/sql" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" @@ -15,9 +16,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmMsSqlDatabase() *schema.Resource { +func dataSourceMsSqlDatabase() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmMsSqlDatabaseRead, + Read: dataSourceMsSqlDatabaseRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -33,7 +34,7 @@ func dataSourceArmMsSqlDatabase() *schema.Resource { "server_id": { Type: schema.TypeString, Required: true, - ValidateFunc: validate.MsSqlServerID, + ValidateFunc: validate.ServerID, }, "collation": { @@ -81,14 +82,14 @@ func dataSourceArmMsSqlDatabase() *schema.Resource { } } -func dataSourceArmMsSqlDatabaseRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceMsSqlDatabaseRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSSQL.DatabasesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() name := d.Get("name").(string) mssqlServerId := d.Get("server_id").(string) - serverId, err := parse.MsSqlServerID(mssqlServerId) + serverId, err := parse.ServerID(mssqlServerId) if err != nil { return err } diff --git a/azurerm/internal/services/mssql/mssql_database_data_source_test.go b/azurerm/internal/services/mssql/mssql_database_data_source_test.go new file mode 100644 index 000000000000..8442a1621793 --- /dev/null +++ b/azurerm/internal/services/mssql/mssql_database_data_source_test.go @@ -0,0 +1,71 @@ +package mssql_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type MsSqlDatabaseDataSource struct{} + +func TestAccDataSourceMsSqlDatabase_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_mssql_database", "test") + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: MsSqlDatabaseDataSource{}.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("name").HasValue(fmt.Sprintf("acctest-db-%d", data.RandomInteger)), + check.That(data.ResourceName).Key("server_id").Exists(), + ), + }, + }) +} + +func TestAccDataSourceMsSqlDatabase_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_mssql_database", "test") + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: MsSqlDatabaseDataSource{}.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("name").HasValue(fmt.Sprintf("acctest-db-%d", data.RandomInteger)), + check.That(data.ResourceName).Key("server_id").Exists(), + check.That(data.ResourceName).Key("collation").HasValue("SQL_AltDiction_CP850_CI_AI"), + 
check.That(data.ResourceName).Key("license_type").HasValue("BasePrice"), + check.That(data.ResourceName).Key("max_size_gb").HasValue("1"), + check.That(data.ResourceName).Key("sku_name").HasValue("GP_Gen5_2"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.ENV").HasValue("Test"), + ), + }, + }) +} + +func (MsSqlDatabaseDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +data "azurerm_mssql_database" "test" { + name = azurerm_mssql_database.test.name + server_id = azurerm_sql_server.test.id +} + +`, MsSqlDatabaseResource{}.basic(data)) +} + +func (MsSqlDatabaseDataSource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +data "azurerm_mssql_database" "test" { + name = azurerm_mssql_database.test.name + server_id = azurerm_sql_server.test.id +} + +`, MsSqlDatabaseResource{}.complete(data)) +} diff --git a/azurerm/internal/services/mssql/mssql_database_extended_auditing_policy_resource.go b/azurerm/internal/services/mssql/mssql_database_extended_auditing_policy_resource.go index 5c28eb1ad8f9..c74be00c1c17 100644 --- a/azurerm/internal/services/mssql/mssql_database_extended_auditing_policy_resource.go +++ b/azurerm/internal/services/mssql/mssql_database_extended_auditing_policy_resource.go @@ -8,6 +8,7 @@ import ( "github.com/Azure/azure-sdk-for-go/services/preview/sql/mgmt/v3.0/sql" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" @@ -17,15 +18,15 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmMsSqlDatabaseExtendedAuditingPolicy() *schema.Resource { +func resourceMsSqlDatabaseExtendedAuditingPolicy() *schema.Resource { return &schema.Resource{ - Create: resourceArmMsSqlDatabaseExtendedAuditingPolicyCreateUpdate, - Read: resourceArmMsSqlDatabaseExtendedAuditingPolicyRead, - Update: resourceArmMsSqlDatabaseExtendedAuditingPolicyCreateUpdate, - Delete: resourceArmMsSqlDatabaseExtendedAuditingPolicyDelete, + Create: resourceMsSqlDatabaseExtendedAuditingPolicyCreateUpdate, + Read: resourceMsSqlDatabaseExtendedAuditingPolicyRead, + Update: resourceMsSqlDatabaseExtendedAuditingPolicyCreateUpdate, + Delete: resourceMsSqlDatabaseExtendedAuditingPolicyDelete, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.MssqlDatabaseExtendedAuditingPolicyID(id) + _, err := parse.DatabaseExtendedAuditingPolicyID(id) return err }), @@ -41,7 +42,7 @@ func resourceArmMsSqlDatabaseExtendedAuditingPolicy() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.MsSqlDatabaseID, + ValidateFunc: validate.DatabaseID, }, "storage_endpoint": { @@ -73,23 +74,23 @@ func resourceArmMsSqlDatabaseExtendedAuditingPolicy() *schema.Resource { } } -func resourceArmMsSqlDatabaseExtendedAuditingPolicyCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceMsSqlDatabaseExtendedAuditingPolicyCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSSQL.DatabaseExtendedBlobAuditingPoliciesClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() 
log.Printf("[INFO] preparing arguments for MsSql Database Extended Auditing Policy creation.") - dbId, err := parse.MsSqlDatabaseID(d.Get("database_id").(string)) + dbId, err := parse.DatabaseID(d.Get("database_id").(string)) if err != nil { return err } if d.IsNewResource() { - existing, err := client.Get(ctx, dbId.ResourceGroup, dbId.MsSqlServer, dbId.Name) + existing, err := client.Get(ctx, dbId.ResourceGroup, dbId.ServerName, dbId.Name) if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Failed to check for presence of existing Database %q Sql Auditing (MsSql Server %q / Resource Group %q): %s", dbId.Name, dbId.MsSqlServer, dbId.ResourceGroup, err) + return fmt.Errorf("Failed to check for presence of existing Database %q Sql Auditing (MsSql Server %q / Resource Group %q): %s", dbId.Name, dbId.ServerName, dbId.ResourceGroup, err) } } @@ -116,47 +117,47 @@ func resourceArmMsSqlDatabaseExtendedAuditingPolicyCreateUpdate(d *schema.Resour params.ExtendedDatabaseBlobAuditingPolicyProperties.StorageAccountAccessKey = utils.String(v.(string)) } - if _, err = client.CreateOrUpdate(ctx, dbId.ResourceGroup, dbId.MsSqlServer, dbId.Name, params); err != nil { - return fmt.Errorf("creating MsSql Database %q Extended Auditing Policy (Sql Server %q / Resource Group %q): %+v", dbId.Name, dbId.MsSqlServer, dbId.ResourceGroup, err) + if _, err = client.CreateOrUpdate(ctx, dbId.ResourceGroup, dbId.ServerName, dbId.Name, params); err != nil { + return fmt.Errorf("creating MsSql Database %q Extended Auditing Policy (Sql Server %q / Resource Group %q): %+v", dbId.Name, dbId.ServerName, dbId.ResourceGroup, err) } - read, err := client.Get(ctx, dbId.ResourceGroup, dbId.MsSqlServer, dbId.Name) + read, err := client.Get(ctx, dbId.ResourceGroup, dbId.ServerName, dbId.Name) if err != nil { - return fmt.Errorf("retrieving MsSql Database %q Extended Auditing Policy (MsSql Server Name %q / Resource Group %q): %+v", dbId.Name, dbId.MsSqlServer, dbId.ResourceGroup, err) + return fmt.Errorf("retrieving MsSql Database %q Extended Auditing Policy (MsSql Server Name %q / Resource Group %q): %+v", dbId.Name, dbId.ServerName, dbId.ResourceGroup, err) } if read.ID == nil || *read.ID == "" { - return fmt.Errorf("reading MsSql Database %q Extended Auditing Policy (MsSql Server Name %q / Resource Group %q) ID is empty or nil", dbId.Name, dbId.MsSqlServer, dbId.ResourceGroup) + return fmt.Errorf("reading MsSql Database %q Extended Auditing Policy (MsSql Server Name %q / Resource Group %q) ID is empty or nil", dbId.Name, dbId.ServerName, dbId.ResourceGroup) } d.SetId(*read.ID) - return resourceArmMsSqlDatabaseExtendedAuditingPolicyRead(d, meta) + return resourceMsSqlDatabaseExtendedAuditingPolicyRead(d, meta) } -func resourceArmMsSqlDatabaseExtendedAuditingPolicyRead(d *schema.ResourceData, meta interface{}) error { +func resourceMsSqlDatabaseExtendedAuditingPolicyRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSSQL.DatabaseExtendedBlobAuditingPoliciesClient dbClient := meta.(*clients.Client).MSSQL.DatabasesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.MssqlDatabaseExtendedAuditingPolicyID(d.Id()) + id, err := parse.DatabaseExtendedAuditingPolicyID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.MsSqlServer, id.MsDBName) + resp, err := client.Get(ctx, id.ResourceGroup, id.ServerName, id.DatabaseName) if err != nil { if 
utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("reading MsSql Database %s Extended Auditing Policy (MsSql Server Name %q / Resource Group %q): %s", id.MsDBName, id.MsSqlServer, id.ResourceGroup, err) + return fmt.Errorf("reading MsSql Database %s Extended Auditing Policy (MsSql Server Name %q / Resource Group %q): %s", id.DatabaseName, id.ServerName, id.ResourceGroup, err) } - dbResp, err := dbClient.Get(ctx, id.ResourceGroup, id.MsSqlServer, id.MsDBName) + dbResp, err := dbClient.Get(ctx, id.ResourceGroup, id.ServerName, id.DatabaseName) if err != nil || *dbResp.ID == "" { - return fmt.Errorf("reading MsSql Database %q ID is empty or nil(Resource Group %q): %s", id.MsSqlServer, id.ResourceGroup, err) + return fmt.Errorf("reading MsSql Database %q ID is empty or nil(Resource Group %q): %s", id.ServerName, id.ResourceGroup, err) } d.Set("database_id", dbResp.ID) @@ -170,12 +171,12 @@ func resourceArmMsSqlDatabaseExtendedAuditingPolicyRead(d *schema.ResourceData, return nil } -func resourceArmMsSqlDatabaseExtendedAuditingPolicyDelete(d *schema.ResourceData, meta interface{}) error { +func resourceMsSqlDatabaseExtendedAuditingPolicyDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSSQL.DatabaseExtendedBlobAuditingPoliciesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.MssqlDatabaseExtendedAuditingPolicyID(d.Id()) + id, err := parse.DatabaseExtendedAuditingPolicyID(d.Id()) if err != nil { return err } @@ -190,8 +191,8 @@ func resourceArmMsSqlDatabaseExtendedAuditingPolicyDelete(d *schema.ResourceData }, } - if _, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.MsSqlServer, id.MsDBName, params); err != nil { - return fmt.Errorf("deleting MsSql Database %q Extended Auditing Policy( MsSql Server %q / Resource Group %q): %+v", id.MsDBName, id.MsSqlServer, id.ResourceGroup, err) + if _, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.ServerName, id.DatabaseName, params); err != nil { + return fmt.Errorf("deleting MsSql Database %q Extended Auditing Policy( MsSql Server %q / Resource Group %q): %+v", id.DatabaseName, id.ServerName, id.ResourceGroup, err) } return nil diff --git a/azurerm/internal/services/mssql/mssql_database_extended_auditing_policy_resource_test.go b/azurerm/internal/services/mssql/mssql_database_extended_auditing_policy_resource_test.go new file mode 100644 index 000000000000..53cf875a715c --- /dev/null +++ b/azurerm/internal/services/mssql/mssql_database_extended_auditing_policy_resource_test.go @@ -0,0 +1,296 @@ +package mssql_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MsSqlDatabaseExtendedAuditingPolicyResource struct{} + +func TestAccMsSqlDatabaseExtendedAuditingPolicy_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_database_extended_auditing_policy", "test") + r := MsSqlDatabaseExtendedAuditingPolicyResource{} + 
+	data.ResourceTest(t, r, []resource.TestStep{
+		{
+			Config: r.basic(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		data.ImportStep("storage_account_access_key"),
+	})
+}
+
+func TestAccMsSqlDatabaseExtendedAuditingPolicy_requiresImport(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_mssql_database_extended_auditing_policy", "test")
+	r := MsSqlDatabaseExtendedAuditingPolicyResource{}
+
+	data.ResourceTest(t, r, []resource.TestStep{
+		{
+			Config: r.basic(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		data.RequiresImportErrorStep(r.requiresImport),
+	})
+}
+
+func TestAccMsSqlDatabaseExtendedAuditingPolicy_complete(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_mssql_database_extended_auditing_policy", "test")
+	r := MsSqlDatabaseExtendedAuditingPolicyResource{}
+
+	data.ResourceTest(t, r, []resource.TestStep{
+		{
+			Config: r.complete(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		data.ImportStep("storage_account_access_key"),
+	})
+}
+
+func TestAccMsSqlDatabaseExtendedAuditingPolicy_update(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_mssql_database_extended_auditing_policy", "test")
+	r := MsSqlDatabaseExtendedAuditingPolicyResource{}
+
+	data.ResourceTest(t, r, []resource.TestStep{
+		{
+			Config: r.basic(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		data.ImportStep("storage_account_access_key"),
+		{
+
+			Config: r.complete(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		data.ImportStep("storage_account_access_key"),
+		{
+
+			Config: r.update(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		data.ImportStep("storage_account_access_key"),
+	})
+}
+
+func TestAccMsSqlDatabaseExtendedAuditingPolicy_storageAccBehindFireWall(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_mssql_database_extended_auditing_policy", "test")
+	r := MsSqlDatabaseExtendedAuditingPolicyResource{}
+
+	data.ResourceTest(t, r, []resource.TestStep{
+		{
+			Config: r.storageAccountBehindFireWall(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		data.ImportStep("storage_account_access_key"),
+	})
+}
+
+func (MsSqlDatabaseExtendedAuditingPolicyResource) Exists(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error) {
+	id, err := parse.DatabaseExtendedAuditingPolicyID(state.ID)
+	if err != nil {
+		return nil, err
+	}
+
+	resp, err := client.MSSQL.DatabaseExtendedBlobAuditingPoliciesClient.Get(ctx, id.ResourceGroup, id.ServerName, id.DatabaseName)
+	if err != nil {
+		if utils.ResponseWasNotFound(resp.Response) {
+			return nil, fmt.Errorf("Extended Auditing Policy for SQL Database %q (Server %q, Resource Group %q) does not exist", id.DatabaseName, id.ServerName, id.ResourceGroup)
+		}
+
+		return nil, fmt.Errorf("reading SQL Database Extended Auditing Policy %q (Server %q, Resource Group %q): %v", id.DatabaseName, id.ServerName, id.ResourceGroup, err)
+	}
+
+	return utils.Bool(resp.ID != nil), nil
+}
+
+func (MsSqlDatabaseExtendedAuditingPolicyResource) template(data acceptance.TestData) string {
+	return fmt.Sprintf(`
+provider "azurerm" {
+  features {}
+}
+
+resource "azurerm_resource_group" "test" {
+  name     = "acctestRG-mssql-%[1]d"
+
location = "%[2]s" +} + +resource "azurerm_mssql_server" "test" { + name = "acctest-sqlserver-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + version = "12.0" + administrator_login = "missadministrator" + administrator_login_password = "AdminPassword123!" +} + +resource "azurerm_mssql_database" "test" { + name = "acctest-db-%[1]d" + server_id = azurerm_mssql_server.test.id +} + +resource "azurerm_storage_account" "test" { + name = "unlikely23exst2acct%[3]s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (r MsSqlDatabaseExtendedAuditingPolicyResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_database_extended_auditing_policy" "test" { + database_id = azurerm_mssql_database.test.id + storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint + storage_account_access_key = azurerm_storage_account.test.primary_access_key +} +`, r.template(data)) +} + +func (r MsSqlDatabaseExtendedAuditingPolicyResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_database_extended_auditing_policy" "import" { + database_id = azurerm_mssql_database.test.id + storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint + storage_account_access_key = azurerm_storage_account.test.primary_access_key +} +`, r.template(data)) +} + +func (r MsSqlDatabaseExtendedAuditingPolicyResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_database_extended_auditing_policy" "test" { + database_id = azurerm_mssql_database.test.id + storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint + storage_account_access_key = azurerm_storage_account.test.primary_access_key + storage_account_access_key_is_secondary = false + retention_in_days = 6 +} +`, r.template(data)) +} + +func (r MsSqlDatabaseExtendedAuditingPolicyResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_storage_account" "test2" { + name = "unlikely23exst2acc2%[2]s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_mssql_database_extended_auditing_policy" "test" { + database_id = azurerm_mssql_database.test.id + storage_endpoint = azurerm_storage_account.test2.primary_blob_endpoint + storage_account_access_key = azurerm_storage_account.test2.primary_access_key + storage_account_access_key_is_secondary = true + retention_in_days = 3 +} +`, r.template(data), data.RandomString) +} + +func (MsSqlDatabaseExtendedAuditingPolicyResource) storageAccountBehindFireWall(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-mssql-%[1]d" + location = "%[2]s" +} + +resource "azurerm_mssql_server" "test" { + name = "acctest-sqlserver-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + version = "12.0" + administrator_login = "missadministrator" + administrator_login_password = "AdminPassword123!" 
+ identity { + type = "SystemAssigned" + } +} + +resource "azurerm_mssql_database" "test" { + name = "acctest-db-%[1]d" + server_id = azurerm_mssql_server.test.id +} + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%[1]d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctestsubnet%[1]d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" + service_endpoints = ["Microsoft.Storage"] +} + +resource "azurerm_storage_account" "test" { + name = "unlikely23exst2acct%[3]s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" + + network_rules { + default_action = "Deny" + ip_rules = ["127.0.0.1"] + virtual_network_subnet_ids = [azurerm_subnet.test.id] + } +} + +resource "azurerm_role_assignment" "test" { + scope = azurerm_storage_account.test.id + role_definition_name = "Storage Blob Data Contributor" + principal_id = azurerm_mssql_server.test.identity.0.principal_id +} + +resource "azurerm_mssql_database_extended_auditing_policy" "test" { + database_id = azurerm_mssql_database.test.id + storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint + + depends_on = [ + azurerm_role_assignment.test, + ] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} diff --git a/azurerm/internal/services/mssql/mssql_database_resource.go b/azurerm/internal/services/mssql/mssql_database_resource.go index ec33e990f805..ee907f0ad16c 100644 --- a/azurerm/internal/services/mssql/mssql_database_resource.go +++ b/azurerm/internal/services/mssql/mssql_database_resource.go @@ -12,6 +12,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/helper/customdiff" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" azValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" @@ -26,14 +27,15 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmMsSqlDatabase() *schema.Resource { +func resourceMsSqlDatabase() *schema.Resource { return &schema.Resource{ - Create: resourceArmMsSqlDatabaseCreateUpdate, - Read: resourceArmMsSqlDatabaseRead, - Update: resourceArmMsSqlDatabaseCreateUpdate, - Delete: resourceArmMsSqlDatabaseDelete, + Create: resourceMsSqlDatabaseCreateUpdate, + Read: resourceMsSqlDatabaseRead, + Update: resourceMsSqlDatabaseCreateUpdate, + Delete: resourceMsSqlDatabaseDelete, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.MsSqlDatabaseID(id) + _, err := parse.DatabaseID(id) return err }), @@ -56,14 +58,14 @@ func resourceArmMsSqlDatabase() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.MsSqlServerID, + ValidateFunc: validate.ServerID, }, "auto_pause_delay_in_minutes": { Type: schema.TypeInt, Optional: true, Computed: true, - ValidateFunc: validate.MsSqlDatabaseAutoPauseDelay, + ValidateFunc: validate.DatabaseAutoPauseDelay, }, "create_mode": { @@ -90,13 +92,13 @@ func resourceArmMsSqlDatabase() 
*schema.Resource { Optional: true, Computed: true, ForceNew: true, - ValidateFunc: validate.MsSqlDBCollation(), + ValidateFunc: validate.DatabaseCollation(), }, "elastic_pool_id": { Type: schema.TypeString, Optional: true, - ValidateFunc: validate.MsSqlElasticPoolID, + ValidateFunc: validate.ElasticPoolID, }, "extended_auditing_policy": helper.ExtendedAuditingSchema(), @@ -140,13 +142,13 @@ func resourceArmMsSqlDatabase() *schema.Resource { "recover_database_id": { Type: schema.TypeString, Optional: true, - ValidateFunc: validate.MsSqlRecoverableDatabaseID, + ValidateFunc: validate.RecoverableDatabaseID, }, "restore_dropped_database_id": { Type: schema.TypeString, Optional: true, - ValidateFunc: validate.MsSqlRestorableDatabaseID, + ValidateFunc: validate.RestorableDatabaseID, }, "read_replica_count": { @@ -175,7 +177,7 @@ func resourceArmMsSqlDatabase() *schema.Resource { Type: schema.TypeString, Optional: true, Computed: true, - ValidateFunc: validate.MsSqlDBSkuName(), + ValidateFunc: validate.DatabaseSkuName(), DiffSuppressFunc: suppress.CaseDifference, }, @@ -184,7 +186,7 @@ func resourceArmMsSqlDatabase() *schema.Resource { Optional: true, ForceNew: true, Computed: true, - ValidateFunc: validate.MsSqlDatabaseID, + ValidateFunc: validate.DatabaseID, }, "zone_redundant": { @@ -291,7 +293,7 @@ func resourceArmMsSqlDatabase() *schema.Resource { } } -func resourceArmMsSqlDatabaseCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceMsSqlDatabaseCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSSQL.DatabasesClient auditingClient := meta.(*clients.Client).MSSQL.DatabaseExtendedBlobAuditingPoliciesClient serverClient := meta.(*clients.Client).MSSQL.ServersClient @@ -305,7 +307,7 @@ func resourceArmMsSqlDatabaseCreateUpdate(d *schema.ResourceData, meta interface name := d.Get("name").(string) sqlServerId := d.Get("server_id").(string) - serverId, _ := parse.MsSqlServerID(sqlServerId) + serverId, _ := parse.ServerID(sqlServerId) if d.IsNewResource() { existing, err := client.Get(ctx, serverId.ResourceGroup, serverId.Name, name) @@ -430,13 +432,13 @@ func resourceArmMsSqlDatabaseCreateUpdate(d *schema.ResourceData, meta interface d.SetId(*read.ID) - if _, err = threatClient.CreateOrUpdate(ctx, serverId.ResourceGroup, serverId.Name, name, *expandArmMsSqlServerThreatDetectionPolicy(d, location)); err != nil { + if _, err = threatClient.CreateOrUpdate(ctx, serverId.ResourceGroup, serverId.Name, name, *expandMsSqlServerThreatDetectionPolicy(d, location)); err != nil { return fmt.Errorf("setting database threat detection policy: %+v", err) } if createMode != string(sql.CreateModeOnlineSecondary) && createMode != string(sql.CreateModeSecondary) { auditingProps := sql.ExtendedDatabaseBlobAuditingPolicy{ - ExtendedDatabaseBlobAuditingPolicyProperties: helper.ExpandAzureRmMsSqlDBBlobAuditingPolicies(auditingPolicies), + ExtendedDatabaseBlobAuditingPolicyProperties: helper.ExpandMsSqlDBBlobAuditingPolicies(auditingPolicies), } if _, err = auditingClient.CreateOrUpdate(ctx, serverId.ResourceGroup, serverId.Name, name, auditingProps); err != nil { return fmt.Errorf("failure in issuing create/update request for SQL Database %q Blob Auditing Policies(SQL Server %q/ Resource Group %q): %+v", name, serverId.Name, serverId.ResourceGroup, err) @@ -486,10 +488,10 @@ func resourceArmMsSqlDatabaseCreateUpdate(d *schema.ResourceData, meta interface } } - return resourceArmMsSqlDatabaseRead(d, meta) + return resourceMsSqlDatabaseRead(d, meta) } 
-func resourceArmMsSqlDatabaseRead(d *schema.ResourceData, meta interface{}) error { +func resourceMsSqlDatabaseRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSSQL.DatabasesClient threatClient := meta.(*clients.Client).MSSQL.DatabaseThreatDetectionPoliciesClient auditingClient := meta.(*clients.Client).MSSQL.DatabaseExtendedBlobAuditingPoliciesClient @@ -498,27 +500,27 @@ func resourceArmMsSqlDatabaseRead(d *schema.ResourceData, meta interface{}) erro ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.MsSqlDatabaseID(d.Id()) + id, err := parse.DatabaseID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.MsSqlServer, id.Name) + resp, err := client.Get(ctx, id.ResourceGroup, id.ServerName, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("reading MsSql Database %s (MsSql Server Name %q / Resource Group %q): %s", id.Name, id.MsSqlServer, id.ResourceGroup, err) + return fmt.Errorf("reading MsSql Database %s (MsSql Server Name %q / Resource Group %q): %s", id.Name, id.ServerName, id.ResourceGroup, err) } d.Set("name", resp.Name) serverClient := meta.(*clients.Client).MSSQL.ServersClient - serverResp, err := serverClient.Get(ctx, id.ResourceGroup, id.MsSqlServer) + serverResp, err := serverClient.Get(ctx, id.ResourceGroup, id.ServerName) if err != nil || *serverResp.ID == "" { - return fmt.Errorf("making Read request on MsSql Server %q (Resource Group %q): %s", id.MsSqlServer, id.ResourceGroup, err) + return fmt.Errorf("making Read request on MsSql Server %q (Resource Group %q): %s", id.ServerName, id.ResourceGroup, err) } d.Set("server_id", serverResp.ID) @@ -545,37 +547,37 @@ func resourceArmMsSqlDatabaseRead(d *schema.ResourceData, meta interface{}) erro d.Set("zone_redundant", props.ZoneRedundant) } - threat, err := threatClient.Get(ctx, id.ResourceGroup, id.MsSqlServer, id.Name) + threat, err := threatClient.Get(ctx, id.ResourceGroup, id.ServerName, id.Name) if err == nil { - if err := d.Set("threat_detection_policy", flattenArmMsSqlServerThreatDetectionPolicy(d, threat)); err != nil { + if err := d.Set("threat_detection_policy", flattenMsSqlServerThreatDetectionPolicy(d, threat)); err != nil { return fmt.Errorf("setting `threat_detection_policy`: %+v", err) } } - auditingResp, err := auditingClient.Get(ctx, id.ResourceGroup, id.MsSqlServer, id.Name) + auditingResp, err := auditingClient.Get(ctx, id.ResourceGroup, id.ServerName, id.Name) if err != nil { return fmt.Errorf("failure in reading SQL Database %q: %v Blob Auditing Policies", id.Name, err) } - flattenBlobAuditing := helper.FlattenAzureRmMsSqlDBBlobAuditingPolicies(&auditingResp, d) + flattenBlobAuditing := helper.FlattenMsSqlDBBlobAuditingPolicies(&auditingResp, d) if err := d.Set("extended_auditing_policy", flattenBlobAuditing); err != nil { return fmt.Errorf("failure in setting `extended_auditing_policy`: %+v", err) } // Hyper Scale SKU's do not currently support LRP and do not honour normal SRP operations if !strings.HasPrefix(skuName, "HS") && !strings.HasPrefix(skuName, "DW") { - longTermPolicy, err := longTermRetentionClient.Get(ctx, id.ResourceGroup, id.MsSqlServer, id.Name) + longTermPolicy, err := longTermRetentionClient.Get(ctx, id.ResourceGroup, id.ServerName, id.Name) if err != nil { - return fmt.Errorf("Error retrieving Long Term Policies for Database %q (Sql Server %q ;Resource Group %q): %+v", id.Name, id.MsSqlServer, 
id.ResourceGroup, err) + return fmt.Errorf("Error retrieving Long Term Policies for Database %q (Sql Server %q ;Resource Group %q): %+v", id.Name, id.ServerName, id.ResourceGroup, err) } flattenlongTermPolicy := helper.FlattenLongTermRetentionPolicy(&longTermPolicy, d) if err := d.Set("long_term_retention_policy", flattenlongTermPolicy); err != nil { return fmt.Errorf("failure in setting `long_term_retention_policy`: %+v", err) } - shortTermPolicy, err := shortTermRetentionClient.Get(ctx, id.ResourceGroup, id.MsSqlServer, id.Name) + shortTermPolicy, err := shortTermRetentionClient.Get(ctx, id.ResourceGroup, id.ServerName, id.Name) if err != nil { - return fmt.Errorf("Error retrieving Short Term Policies for Database %q (Sql Server %q ;Resource Group %q): %+v", id.Name, id.MsSqlServer, id.ResourceGroup, err) + return fmt.Errorf("Error retrieving Short Term Policies for Database %q (Sql Server %q ;Resource Group %q): %+v", id.Name, id.ServerName, id.ResourceGroup, err) } flattenShortTermPolicy := helper.FlattenShortTermRetentionPolicy(&shortTermPolicy, d) @@ -592,32 +594,32 @@ func resourceArmMsSqlDatabaseRead(d *schema.ResourceData, meta interface{}) erro return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmMsSqlDatabaseDelete(d *schema.ResourceData, meta interface{}) error { +func resourceMsSqlDatabaseDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSSQL.DatabasesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.MsSqlDatabaseID(d.Id()) + id, err := parse.DatabaseID(d.Id()) if err != nil { return err } - future, err := client.Delete(ctx, id.ResourceGroup, id.MsSqlServer, id.Name) + future, err := client.Delete(ctx, id.ResourceGroup, id.ServerName, id.Name) if err != nil { - return fmt.Errorf("deleting MsSql Database %q ( MsSql Server %q / Resource Group %q): %+v", id.Name, id.MsSqlServer, id.ResourceGroup, err) + return fmt.Errorf("deleting MsSql Database %q ( MsSql Server %q / Resource Group %q): %+v", id.Name, id.ServerName, id.ResourceGroup, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { if response.WasNotFound(future.Response()) { return nil } - return fmt.Errorf("waiting for MsSql Database %q ( MsSql Server %q / Resource Group %q) to be deleted: %+v", id.Name, id.MsSqlServer, id.ResourceGroup, err) + return fmt.Errorf("waiting for MsSql Database %q ( MsSql Server %q / Resource Group %q) to be deleted: %+v", id.Name, id.ServerName, id.ResourceGroup, err) } return nil } -func flattenArmMsSqlServerThreatDetectionPolicy(d *schema.ResourceData, policy sql.DatabaseSecurityAlertPolicy) []interface{} { +func flattenMsSqlServerThreatDetectionPolicy(d *schema.ResourceData, policy sql.DatabaseSecurityAlertPolicy) []interface{} { // The SQL database threat detection API always returns the default value even if never set. // If the values are on their default one, threat it as not set. 
properties := policy.DatabaseSecurityAlertPolicyProperties @@ -666,7 +668,7 @@ func flattenArmMsSqlServerThreatDetectionPolicy(d *schema.ResourceData, policy s return []interface{}{threatDetectionPolicy} } -func expandArmMsSqlServerThreatDetectionPolicy(d *schema.ResourceData, location string) *sql.DatabaseSecurityAlertPolicy { +func expandMsSqlServerThreatDetectionPolicy(d *schema.ResourceData, location string) *sql.DatabaseSecurityAlertPolicy { policy := sql.DatabaseSecurityAlertPolicy{ Location: utils.String(location), DatabaseSecurityAlertPolicyProperties: &sql.DatabaseSecurityAlertPolicyProperties{ diff --git a/azurerm/internal/services/mssql/mssql_database_resource_test.go b/azurerm/internal/services/mssql/mssql_database_resource_test.go new file mode 100644 index 000000000000..349771e1baca --- /dev/null +++ b/azurerm/internal/services/mssql/mssql_database_resource_test.go @@ -0,0 +1,1184 @@ +package mssql_test + +import ( + "context" + "fmt" + "testing" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MsSqlDatabaseResource struct{} + +func TestAccMsSqlDatabase_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") + r := MsSqlDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMsSqlDatabase_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") + r := MsSqlDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccMsSqlDatabase_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") + r := MsSqlDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("collation").HasValue("SQL_AltDiction_CP850_CI_AI"), + check.That(data.ResourceName).Key("collation").HasValue("SQL_AltDiction_CP850_CI_AI"), + check.That(data.ResourceName).Key("license_type").HasValue("BasePrice"), + check.That(data.ResourceName).Key("max_size_gb").HasValue("1"), + check.That(data.ResourceName).Key("sku_name").HasValue("GP_Gen5_2"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.ENV").HasValue("Test"), + ), + }, + data.ImportStep("sample_name"), + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("license_type").HasValue("LicenseIncluded"), + check.That(data.ResourceName).Key("max_size_gb").HasValue("2"), + 
check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.ENV").HasValue("Staging"), + ), + }, + data.ImportStep("sample_name"), + }) +} + +func TestAccMsSqlDatabase_elasticPool(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") + r := MsSqlDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.elasticPool(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("elastic_pool_id").Exists(), + check.That(data.ResourceName).Key("sku_name").HasValue("ElasticPool"), + ), + }, + data.ImportStep(), + { + Config: r.elasticPoolDisassociation(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMsSqlDatabase_GP(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") + r := MsSqlDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.gp(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("sku_name").HasValue("GP_Gen5_2"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMsSqlDatabase_GP_Serverless(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") + r := MsSqlDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.gpServerless(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("auto_pause_delay_in_minutes").HasValue("70"), + check.That(data.ResourceName).Key("min_capacity").HasValue("0.75"), + check.That(data.ResourceName).Key("sku_name").HasValue("GP_S_Gen5_2"), + ), + }, + data.ImportStep(), + { + Config: r.gpServerlessUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("auto_pause_delay_in_minutes").HasValue("90"), + check.That(data.ResourceName).Key("min_capacity").HasValue("1.25"), + check.That(data.ResourceName).Key("sku_name").HasValue("GP_S_Gen5_2"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMsSqlDatabase_BC(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") + r := MsSqlDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.bc(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("read_scale").HasValue("true"), + check.That(data.ResourceName).Key("sku_name").HasValue("BC_Gen5_2"), + check.That(data.ResourceName).Key("zone_redundant").HasValue("true"), + ), + }, + data.ImportStep(), + { + Config: r.bcUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("read_scale").HasValue("false"), + check.That(data.ResourceName).Key("sku_name").HasValue("BC_Gen5_2"), + check.That(data.ResourceName).Key("zone_redundant").HasValue("false"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMsSqlDatabase_HS(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") + r := MsSqlDatabaseResource{} + + data.ResourceTest(t, r, 
[]resource.TestStep{ + { + Config: r.hs(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("read_replica_count").HasValue("2"), + check.That(data.ResourceName).Key("sku_name").HasValue("HS_Gen5_2"), + ), + }, + data.ImportStep(), + { + Config: r.hsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("read_replica_count").HasValue("4"), + check.That(data.ResourceName).Key("sku_name").HasValue("HS_Gen5_2"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMsSqlDatabase_createCopyMode(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_database", "copy") + r := MsSqlDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.createCopyMode(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("collation").HasValue("SQL_AltDiction_CP850_CI_AI"), + check.That(data.ResourceName).Key("license_type").HasValue("BasePrice"), + check.That(data.ResourceName).Key("sku_name").HasValue("GP_Gen5_2"), + ), + }, + data.ImportStep("create_mode", "creation_source_database_id"), + }) +} + +func TestAccMsSqlDatabase_createPITRMode(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") + r := MsSqlDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + + { + PreConfig: func() { time.Sleep(7 * time.Minute) }, + Config: r.createPITRMode(data), + Check: resource.ComposeTestCheckFunc( + check.That("azurerm_mssql_database.pitr").ExistsInAzure(r), + ), + }, + + data.ImportStep("create_mode", "creation_source_database_id", "restore_point_in_time"), + }) +} + +func TestAccMsSqlDatabase_createSecondaryMode(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_database", "secondary") + r := MsSqlDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.createSecondaryMode(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("collation").HasValue("SQL_AltDiction_CP850_CI_AI"), + check.That(data.ResourceName).Key("license_type").HasValue("BasePrice"), + check.That(data.ResourceName).Key("sku_name").HasValue("GP_Gen5_2"), + ), + }, + data.ImportStep("create_mode", "creation_source_database_id", "sample_name"), + }) +} + +func TestAccMsSqlDatabase_createRestoreMode(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") + r := MsSqlDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.createRestoreMode(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("create_mode", "creation_source_database_id"), + + { + PreConfig: func() { time.Sleep(8 * time.Minute) }, + Config: r.createRestoreModeDBDeleted(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + + data.ImportStep(), + + { + PreConfig: func() { time.Sleep(8 * time.Minute) }, + Config: r.createRestoreModeDBRestored(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That("azurerm_mssql_database.restore").ExistsInAzure(r), + ), + }, + + 
data.ImportStep("create_mode", "restore_dropped_database_id"), + }) +} + +func TestAccMsSqlDatabase_threatDetectionPolicy(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") + r := MsSqlDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.threatDetectionPolicy(data, "Enabled"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("threat_detection_policy.#").HasValue("1"), + check.That(data.ResourceName).Key("threat_detection_policy.0.state").HasValue("Enabled"), + check.That(data.ResourceName).Key("threat_detection_policy.0.retention_days").HasValue("15"), + check.That(data.ResourceName).Key("threat_detection_policy.0.disabled_alerts.#").HasValue("1"), + check.That(data.ResourceName).Key("threat_detection_policy.0.email_account_admins").HasValue("Enabled"), + ), + }, + data.ImportStep("sample_name", "threat_detection_policy.0.storage_account_access_key"), + { + Config: r.threatDetectionPolicy(data, "Disabled"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("threat_detection_policy.#").HasValue("1"), + check.That(data.ResourceName).Key("threat_detection_policy.0.state").HasValue("Disabled"), + ), + }, + data.ImportStep("sample_name", "threat_detection_policy.0.storage_account_access_key"), + }) +} + +func TestAccMsSqlDatabase_withBlobAuditingPolices(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") + r := MsSqlDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.withBlobAuditingPolices(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("extended_auditing_policy.0.storage_account_access_key"), + { + Config: r.withBlobAuditingPolicesUpdated(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("extended_auditing_policy.0.storage_account_access_key"), + { + Config: r.withBlobAuditingPolicesDisabled(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMsSqlDatabase_updateSku(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") + r := MsSqlDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.updateSku(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.updateSku2(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMsSqlDatabase_minCapacity0(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") + r := MsSqlDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.minCapacity0(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMsSqlDatabase_withLongTermRetentionPolicy(t 
*testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") + r := MsSqlDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.withLongTermRetentionPolicy(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.withLongTermRetentionPolicyUpdated(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMsSqlDatabase_withShortTermRetentionPolicy(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") + r := MsSqlDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.withShortTermRetentionPolicy(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.withShortTermRetentionPolicyUpdated(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + }) +} + +func (MsSqlDatabaseResource) Exists(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.DatabaseID(state.ID) + if err != nil { + return nil, err + } + + resp, err := client.MSSQL.DatabasesClient.Get(ctx, id.ResourceGroup, id.ServerName, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return nil, fmt.Errorf("SQL Database %q (Server %q, Resource Group %q) does not exist", id.Name, id.ServerName, id.ResourceGroup) + } + + return nil, fmt.Errorf("reading SQL Database %q (Server %q, Resource Group %q): %v", id.Name, id.ServerName, id.ResourceGroup, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (MsSqlDatabaseResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-mssql-%[1]d" + location = "%[2]s" +} + +resource "azurerm_sql_server" "test" { + name = "acctest-sqlserver-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + version = "12.0" + administrator_login = "mradministrator" + administrator_login_password = "thisIsDog11" +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (r MsSqlDatabaseResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_database" "test" { + name = "acctest-db-%[2]d" + server_id = azurerm_sql_server.test.id +} +`, r.template(data), data.RandomInteger) +} + +func (r MsSqlDatabaseResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_database" "import" { + name = azurerm_mssql_database.test.name + server_id = azurerm_sql_server.test.id +} +`, r.basic(data)) +} + +func (r MsSqlDatabaseResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_database" "test" { + name = "acctest-db-%[2]d" + server_id = azurerm_sql_server.test.id + collation = "SQL_AltDiction_CP850_CI_AI" + license_type = "BasePrice" + max_size_gb = 1 + sample_name = 
"AdventureWorksLT" + sku_name = "GP_Gen5_2" + + tags = { + ENV = "Test" + } +} +`, r.template(data), data.RandomInteger) +} + +func (r MsSqlDatabaseResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_database" "test" { + name = "acctest-db-%[2]d" + server_id = azurerm_sql_server.test.id + collation = "SQL_AltDiction_CP850_CI_AI" + license_type = "LicenseIncluded" + max_size_gb = 2 + sku_name = "GP_Gen5_2" + + tags = { + ENV = "Staging" + } +} +`, r.template(data), data.RandomInteger) +} + +func (r MsSqlDatabaseResource) elasticPool(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_elasticpool" "test" { + name = "acctest-pool-%[2]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + max_size_gb = 5 + + sku { + name = "GP_Gen5" + tier = "GeneralPurpose" + capacity = 4 + family = "Gen5" + } + + per_database_settings { + min_capacity = 0.25 + max_capacity = 4 + } +} + +resource "azurerm_mssql_database" "test" { + name = "acctest-db-%[2]d" + server_id = azurerm_sql_server.test.id + elastic_pool_id = azurerm_mssql_elasticpool.test.id + sku_name = "ElasticPool" +} +`, r.template(data), data.RandomInteger) +} + +func (r MsSqlDatabaseResource) elasticPoolDisassociation(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_elasticpool" "test" { + name = "acctest-pool-%[2]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + max_size_gb = 5 + + sku { + name = "GP_Gen5" + tier = "GeneralPurpose" + capacity = 4 + family = "Gen5" + } + + per_database_settings { + min_capacity = 0.25 + max_capacity = 4 + } +} + +resource "azurerm_mssql_database" "test" { + name = "acctest-db-%[2]d" + server_id = azurerm_sql_server.test.id + sku_name = "GP_Gen5_2" +} +`, r.template(data), data.RandomInteger) +} + +func (r MsSqlDatabaseResource) gp(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_database" "test" { + name = "acctest-db-%[2]d" + server_id = azurerm_sql_server.test.id + sku_name = "GP_Gen5_2" +} +`, r.template(data), data.RandomInteger) +} + +func (r MsSqlDatabaseResource) gpServerless(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_database" "test" { + name = "acctest-db-%[2]d" + server_id = azurerm_sql_server.test.id + auto_pause_delay_in_minutes = 70 + min_capacity = 0.75 + sku_name = "GP_S_Gen5_2" +} +`, r.template(data), data.RandomInteger) +} + +func (r MsSqlDatabaseResource) gpServerlessUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_database" "test" { + name = "acctest-db-%[2]d" + server_id = azurerm_sql_server.test.id + auto_pause_delay_in_minutes = 90 + min_capacity = 1.25 + sku_name = "GP_S_Gen5_2" +} +`, r.template(data), data.RandomInteger) +} + +func (r MsSqlDatabaseResource) hs(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_database" "test" { + name = "acctest-db-%[2]d" + server_id = azurerm_sql_server.test.id + read_replica_count = 2 + sku_name = "HS_Gen5_2" +} +`, r.template(data), data.RandomInteger) +} + +func (r MsSqlDatabaseResource) hsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_database" "test" { + name = 
"acctest-db-%[2]d" + server_id = azurerm_sql_server.test.id + read_replica_count = 4 + sku_name = "HS_Gen5_2" +} +`, r.template(data), data.RandomInteger) +} + +func (r MsSqlDatabaseResource) bc(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_database" "test" { + name = "acctest-db-%[2]d" + server_id = azurerm_sql_server.test.id + read_scale = true + sku_name = "BC_Gen5_2" + zone_redundant = true +} +`, r.template(data), data.RandomInteger) +} + +func (r MsSqlDatabaseResource) bcUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_database" "test" { + name = "acctest-db-%[2]d" + server_id = azurerm_sql_server.test.id + read_scale = false + sku_name = "BC_Gen5_2" + zone_redundant = false +} +`, r.template(data), data.RandomInteger) +} + +func (r MsSqlDatabaseResource) createCopyMode(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_database" "copy" { + name = "acctest-dbc-%[2]d" + server_id = azurerm_sql_server.test.id + create_mode = "Copy" + creation_source_database_id = azurerm_mssql_database.test.id +} +`, r.complete(data), data.RandomInteger) +} + +func (r MsSqlDatabaseResource) createPITRMode(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_database" "pitr" { + name = "acctest-dbp-%[2]d" + server_id = azurerm_sql_server.test.id + create_mode = "PointInTimeRestore" + restore_point_in_time = "%[3]s" + creation_source_database_id = azurerm_mssql_database.test.id + +} +`, r.basic(data), data.RandomInteger, time.Now().Add(time.Duration(7)*time.Minute).UTC().Format(time.RFC3339)) +} + +func (r MsSqlDatabaseResource) createSecondaryMode(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_resource_group" "second" { + name = "acctestRG-mssql2-%[2]d" + location = "%[3]s" +} + +resource "azurerm_sql_server" "second" { + name = "acctest-sqlserver2-%[2]d" + resource_group_name = azurerm_resource_group.second.name + location = azurerm_resource_group.second.location + version = "12.0" + administrator_login = "mradministrator" + administrator_login_password = "thisIsDog11" +} + +resource "azurerm_mssql_database" "secondary" { + name = "acctest-dbs-%[2]d" + server_id = azurerm_sql_server.second.id + create_mode = "Secondary" + creation_source_database_id = azurerm_mssql_database.test.id + +} +`, r.complete(data), data.RandomInteger, data.Locations.Secondary) +} + +func (MsSqlDatabaseResource) createRestoreMode(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-mssql-%[1]d" + location = "%[2]s" +} + +resource "azurerm_mssql_server" "test" { + name = "acctest-sqlserver-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + version = "12.0" + administrator_login = "mradministrator" + administrator_login_password = "thisIsDog11" +} + + +resource "azurerm_mssql_database" "test" { + name = "acctest-db-%[1]d" + server_id = azurerm_mssql_server.test.id +} + +resource "azurerm_mssql_database" "copy" { + name = "acctest-dbc-%[1]d" + server_id = azurerm_mssql_server.test.id + create_mode = "Copy" + creation_source_database_id = azurerm_mssql_database.test.id +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (MsSqlDatabaseResource) createRestoreModeDBDeleted(data acceptance.TestData) string { + return fmt.Sprintf(` +provider 
"azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-mssql-%[1]d" + location = "%[2]s" +} + +resource "azurerm_mssql_server" "test" { + name = "acctest-sqlserver-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + version = "12.0" + administrator_login = "mradministrator" + administrator_login_password = "thisIsDog11" +} + + +resource "azurerm_mssql_database" "test" { + name = "acctest-db-%[1]d" + server_id = azurerm_mssql_server.test.id +} + +`, data.RandomInteger, data.Locations.Primary) +} + +func (MsSqlDatabaseResource) createRestoreModeDBRestored(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-mssql-%[1]d" + location = "%[2]s" +} + +resource "azurerm_mssql_server" "test" { + name = "acctest-sqlserver-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + version = "12.0" + administrator_login = "mradministrator" + administrator_login_password = "thisIsDog11" +} + + +resource "azurerm_mssql_database" "test" { + name = "acctest-db-%[1]d" + server_id = azurerm_mssql_server.test.id +} + +resource "azurerm_mssql_database" "restore" { + name = "acctest-dbr-%[1]d" + server_id = azurerm_mssql_server.test.id + create_mode = "Restore" + restore_dropped_database_id = azurerm_mssql_server.test.restorable_dropped_database_ids[0] +} + +`, data.RandomInteger, data.Locations.Primary) +} + +func (r MsSqlDatabaseResource) threatDetectionPolicy(data acceptance.TestData, state string) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_storage_account" "test" { + name = "test%[2]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "GRS" +} + +resource "azurerm_mssql_database" "test" { + name = "acctest-db-%[2]d" + server_id = azurerm_sql_server.test.id + collation = "SQL_AltDiction_CP850_CI_AI" + license_type = "BasePrice" + max_size_gb = 1 + sample_name = "AdventureWorksLT" + sku_name = "GP_Gen5_2" + + threat_detection_policy { + retention_days = 15 + state = "%[3]s" + disabled_alerts = ["Sql_Injection"] + email_account_admins = "Enabled" + storage_account_access_key = azurerm_storage_account.test.primary_access_key + storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint + use_server_default = "Disabled" + } + + tags = { + ENV = "Test" + } +} +`, r.template(data), data.RandomInteger, state) +} + +func (r MsSqlDatabaseResource) withBlobAuditingPolices(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_storage_account" "test" { + name = "acctest%[2]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_account" "test2" { + name = "acctest2%[2]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_mssql_database" "test" { + name = "acctest-db-%[3]d" + server_id = azurerm_sql_server.test.id + extended_auditing_policy { + storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint + storage_account_access_key = azurerm_storage_account.test.primary_access_key + 
storage_account_access_key_is_secondary = true + retention_in_days = 6 + } +} +`, r.template(data), data.RandomIntOfLength(15), data.RandomInteger) +} + +func (r MsSqlDatabaseResource) withBlobAuditingPolicesUpdated(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_storage_account" "test" { + name = "acctest%[2]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_account" "test2" { + name = "acctest2%[2]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_mssql_database" "test" { + name = "acctest-db-%[3]d" + server_id = azurerm_sql_server.test.id + extended_auditing_policy { + storage_endpoint = azurerm_storage_account.test2.primary_blob_endpoint + storage_account_access_key = azurerm_storage_account.test2.primary_access_key + storage_account_access_key_is_secondary = false + retention_in_days = 3 + } +} +`, r.template(data), data.RandomIntOfLength(15), data.RandomInteger) +} + +func (r MsSqlDatabaseResource) withBlobAuditingPolicesDisabled(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_storage_account" "test" { + name = "acctest%[2]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_account" "test2" { + name = "acctest2%[2]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_mssql_database" "test" { + name = "acctest-db-%[3]d" + server_id = azurerm_sql_server.test.id + extended_auditing_policy = [] +} +`, r.template(data), data.RandomIntOfLength(15), data.RandomInteger) +} + +func (r MsSqlDatabaseResource) updateSku(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_database" "test" { + name = "acctest-db-%[2]d" + server_id = azurerm_sql_server.test.id + sku_name = "HS_Gen5_2" +} +`, r.template(data), data.RandomInteger) +} + +func (r MsSqlDatabaseResource) updateSku2(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_database" "test" { + name = "acctest-db-%[2]d" + server_id = azurerm_sql_server.test.id + sku_name = "HS_Gen5_4" +} +`, r.template(data), data.RandomInteger) +} + +func (r MsSqlDatabaseResource) minCapacity0(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_database" "test" { + name = "acctest-db-%[2]d" + server_id = azurerm_sql_server.test.id + + min_capacity = 0 +} +`, r.template(data), data.RandomInteger) +} + +func (r MsSqlDatabaseResource) withLongTermRetentionPolicy(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_storage_account" "test" { + name = "acctest%[2]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_account" "test2" { + name = "acctest2%[2]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + 
account_replication_type = "LRS" +} + +resource "azurerm_mssql_database" "test" { + name = "acctest-db-%[3]d" + server_id = azurerm_sql_server.test.id + long_term_retention_policy { + weekly_retention = "P1W" + monthly_retention = "P1M" + yearly_retention = "P1Y" + week_of_year = 1 + } +} +`, r.template(data), data.RandomIntOfLength(15), data.RandomInteger) +} + +func (r MsSqlDatabaseResource) withLongTermRetentionPolicyUpdated(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_storage_account" "test" { + name = "acctest%[2]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_account" "test2" { + name = "acctest2%[2]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_mssql_database" "test" { + name = "acctest-db-%[3]d" + server_id = azurerm_sql_server.test.id + long_term_retention_policy { + weekly_retention = "P1W" + yearly_retention = "P1Y" + week_of_year = 2 + } +} +`, r.template(data), data.RandomIntOfLength(15), data.RandomInteger) +} + +func (r MsSqlDatabaseResource) withShortTermRetentionPolicy(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_storage_account" "test" { + name = "acctest%[2]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_account" "test2" { + name = "acctest2%[2]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_mssql_database" "test" { + name = "acctest-db-%[3]d" + server_id = azurerm_sql_server.test.id + short_term_retention_policy { + retention_days = 8 + } +} +`, r.template(data), data.RandomIntOfLength(15), data.RandomInteger) +} + +func (r MsSqlDatabaseResource) withShortTermRetentionPolicyUpdated(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_storage_account" "test" { + name = "acctest%[2]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_account" "test2" { + name = "acctest2%[2]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_mssql_database" "test" { + name = "acctest-db-%[3]d" + server_id = azurerm_sql_server.test.id + short_term_retention_policy { + retention_days = 10 + } +} +`, r.template(data), data.RandomIntOfLength(15), data.RandomInteger) +} diff --git a/azurerm/internal/services/mssql/mssql_database_vulnerability_assessment_rule_baseline_resource.go b/azurerm/internal/services/mssql/mssql_database_vulnerability_assessment_rule_baseline_resource.go index 8e09873a1179..9e47426d01b8 100644 --- a/azurerm/internal/services/mssql/mssql_database_vulnerability_assessment_rule_baseline_resource.go +++ b/azurerm/internal/services/mssql/mssql_database_vulnerability_assessment_rule_baseline_resource.go @@ -8,21 +8,26 @@ import ( 
"github.com/Azure/azure-sdk-for-go/services/preview/sql/mgmt/v3.0/sql" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmMssqlDatabaseVulnerabilityAssessmentRuleBaseline() *schema.Resource { +func resourceMsSqlDatabaseVulnerabilityAssessmentRuleBaseline() *schema.Resource { return &schema.Resource{ - Create: resourceArmMssqlDatabaseVulnerabilityAssessmentRuleBaselineCreateUpdate, - Read: resourceArmMssqlDatabaseVulnerabilityAssessmentRuleBaselineRead, - Update: resourceArmMssqlDatabaseVulnerabilityAssessmentRuleBaselineCreateUpdate, - Delete: resourceArmMssqlDatabaseVulnerabilityAssessmentRuleBaselineDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, + Create: resourceMsSqlDatabaseVulnerabilityAssessmentRuleBaselineCreateUpdate, + Read: resourceMsSqlDatabaseVulnerabilityAssessmentRuleBaselineRead, + Update: resourceMsSqlDatabaseVulnerabilityAssessmentRuleBaselineCreateUpdate, + Delete: resourceMsSqlDatabaseVulnerabilityAssessmentRuleBaselineDelete, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.DatabaseVulnerabilityAssessmentRuleBaselineID(id) + return err + }), Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -83,7 +88,7 @@ func resourceArmMssqlDatabaseVulnerabilityAssessmentRuleBaseline() *schema.Resou } } -func resourceArmMssqlDatabaseVulnerabilityAssessmentRuleBaselineCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceMsSqlDatabaseVulnerabilityAssessmentRuleBaselineCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSSQL.DatabaseVulnerabilityAssessmentRuleBaselinesClient vulnerabilityClient := meta.(*clients.Client).MSSQL.ServerVulnerabilityAssessmentsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) @@ -119,28 +124,24 @@ func resourceArmMssqlDatabaseVulnerabilityAssessmentRuleBaselineCreateUpdate(d * d.SetId(*result.ID) - return resourceArmMssqlDatabaseVulnerabilityAssessmentRuleBaselineRead(d, meta) + return resourceMsSqlDatabaseVulnerabilityAssessmentRuleBaselineRead(d, meta) } -func resourceArmMssqlDatabaseVulnerabilityAssessmentRuleBaselineRead(d *schema.ResourceData, meta interface{}) error { +func resourceMsSqlDatabaseVulnerabilityAssessmentRuleBaselineRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSSQL.DatabaseVulnerabilityAssessmentRuleBaselinesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() log.Printf("[INFO] Reading Azure ARM Vulnerability Assessment Rule Baselines.") - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.DatabaseVulnerabilityAssessmentRuleBaselineID(d.Id()) if err != nil { return err } - resourceGroupName := id.ResourceGroup - serverName := id.Path["servers"] - databaseName := id.Path["databases"] - ruleId := id.Path["rules"] 
- baselineName := sql.VulnerabilityAssessmentPolicyBaselineName(id.Path["baselines"]) + baselineName := sql.VulnerabilityAssessmentPolicyBaselineName(id.BaselineName) - result, err := client.Get(ctx, resourceGroupName, serverName, databaseName, ruleId, baselineName) + result, err := client.Get(ctx, id.ResourceGroup, id.ServerName, id.DatabaseName, id.RuleName, baselineName) if err != nil { if utils.ResponseWasNotFound(result.Response) { log.Printf("[WARN] Vulnerability Assessment Rule Baseline %s not found", id) @@ -152,14 +153,14 @@ func resourceArmMssqlDatabaseVulnerabilityAssessmentRuleBaselineRead(d *schema.R } vulnerabilityAssessmentClient := meta.(*clients.Client).MSSQL.ServerVulnerabilityAssessmentsClient - vulnerabilityAssessment, err := vulnerabilityAssessmentClient.Get(ctx, resourceGroupName, serverName) + vulnerabilityAssessment, err := vulnerabilityAssessmentClient.Get(ctx, id.ResourceGroup, id.ServerName) if err != nil { return fmt.Errorf("Error retrieving Server Vulnerability Assessment Settings: %+v", err) } d.Set("server_vulnerability_assessment_id", vulnerabilityAssessment.ID) - d.Set("database_name", databaseName) - d.Set("rule_id", ruleId) + d.Set("database_name", id.DatabaseName) + d.Set("rule_id", id.RuleName) d.Set("baseline_name", baselineName) if baselineResults := result.BaselineResults; baselineResults != nil { @@ -169,25 +170,21 @@ func resourceArmMssqlDatabaseVulnerabilityAssessmentRuleBaselineRead(d *schema.R return nil } -func resourceArmMssqlDatabaseVulnerabilityAssessmentRuleBaselineDelete(d *schema.ResourceData, meta interface{}) error { +func resourceMsSqlDatabaseVulnerabilityAssessmentRuleBaselineDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSSQL.DatabaseVulnerabilityAssessmentRuleBaselinesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() log.Printf("[INFO] Deleting Azure ARM Vulnerability Assessment Rule Baselines.") - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.DatabaseVulnerabilityAssessmentRuleBaselineID(d.Id()) if err != nil { return err } - resourceGroupName := id.ResourceGroup - serverName := id.Path["servers"] - databaseName := id.Path["databases"] - ruleId := id.Path["rules"] - baselineName := sql.VulnerabilityAssessmentPolicyBaselineName(id.Path["baselines"]) + baselineName := sql.VulnerabilityAssessmentPolicyBaselineName(id.BaselineName) - result, err := client.Delete(ctx, resourceGroupName, serverName, databaseName, ruleId, baselineName) + result, err := client.Delete(ctx, id.ResourceGroup, id.ServerName, id.DatabaseName, id.RuleName, baselineName) if err != nil { if utils.ResponseWasNotFound(result) { log.Printf("[DEBUG] Vulnerability Assessment Rule Baseline %s not found", id) diff --git a/azurerm/internal/services/mssql/mssql_database_vulnerability_assessment_rule_baseline_resource_test.go b/azurerm/internal/services/mssql/mssql_database_vulnerability_assessment_rule_baseline_resource_test.go new file mode 100644 index 000000000000..463d095fe3e3 --- /dev/null +++ b/azurerm/internal/services/mssql/mssql_database_vulnerability_assessment_rule_baseline_resource_test.go @@ -0,0 +1,218 @@ +package mssql_test + +import ( + "context" + "fmt" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/preview/sql/mgmt/v3.0/sql" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MsSqlDatabaseVulnerabilityAssessmentRuleBaselineResource struct{} + +func TestAccMsSqlDatabaseVulnerabilityAssessmentRuleBaseline_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_database_vulnerability_assessment_rule_baseline", "test") + r := MsSqlDatabaseVulnerabilityAssessmentRuleBaselineResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMsSqlDatabaseVulnerabilityAssessmentRuleBaseline_primary(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_database_vulnerability_assessment_rule_baseline", "test") + r := MsSqlDatabaseVulnerabilityAssessmentRuleBaselineResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.primary(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (MsSqlDatabaseVulnerabilityAssessmentRuleBaselineResource) Exists(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.DatabaseVulnerabilityAssessmentRuleBaselineID(state.ID) + if err != nil { + return nil, err + } + + baselineName := sql.VulnerabilityAssessmentPolicyBaselineName(id.BaselineName) + resp, err := client.MSSQL.DatabaseVulnerabilityAssessmentRuleBaselinesClient.Get(ctx, id.ResourceGroup, id.ServerName, id.DatabaseName, id.RuleName, baselineName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return nil, fmt.Errorf("Baseline %q not found (Resource Group %q, Server %q, Database %q, Rule %q)", + id.BaselineName, id.ResourceGroup, id.ServerName, id.DatabaseName, id.RuleName) + } + return nil, fmt.Errorf("reading Baseline %q (Resource Group %q, Server %q, Database %q, Rule %q): %v", + id.BaselineName, id.ResourceGroup, id.ServerName, id.DatabaseName, id.RuleName, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (r MsSqlDatabaseVulnerabilityAssessmentRuleBaselineResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_database_vulnerability_assessment_rule_baseline" "test" { + server_vulnerability_assessment_id = azurerm_mssql_server_vulnerability_assessment.test.id + + database_name = azurerm_sql_database.test.name + rule_id = "VA2111" + baseline_name = "default" + + baseline_result { + result = [ + "SCHEMA", + "dbo", + "CONTROL", + "SQL_USER", + "adminuser1" + ] + } + + baseline_result { + result = [ + "SCHEMA", + "dbo", + "CONTROL", + "SQL_USER", + "adminuser2" + ] + } +} +`, r.database(data)) +} + +func (r MsSqlDatabaseVulnerabilityAssessmentRuleBaselineResource) primary(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_database_vulnerability_assessment_rule_baseline" "test" { + server_vulnerability_assessment_id = azurerm_mssql_server_vulnerability_assessment.test.id + + database_name = 
azurerm_sql_database.test.name + rule_id = "VA2065" + baseline_name = "master" + + baseline_result { + result = [ + "clientip", + "255.255.255.255", + "255.255.255.255" + ] + } +} +`, r.database(data)) +} + +func (r MsSqlDatabaseVulnerabilityAssessmentRuleBaselineResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_database_vulnerability_assessment_rule_baseline" "test" { + server_vulnerability_assessment_id = azurerm_mssql_server_vulnerability_assessment.test.id + + database_name = azurerm_sql_database.test.name + rule_id = "VA2065" + baseline_name = "master" + + baseline_result { + result = [ + "clientips", + "255.255.255.255", + "255.255.255.255" + ] + } + + baseline_result { + result = [ + "myip", + "255.255.255.0", + "255.255.255.0" + ] + } +} +`, r.database(data)) +} + +func (MsSqlDatabaseVulnerabilityAssessmentRuleBaselineResource) database(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-mssql-%[1]d" + location = "%[2]s" +} + +resource "azurerm_sql_server" "test" { + name = "acctestsqlserver%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + version = "12.0" + administrator_login = "mradministrator" + administrator_login_password = "thisIsDog11" +} + +resource "azurerm_storage_account" "test" { + name = "accsa%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = "%[2]s" + account_tier = "Standard" + account_replication_type = "GRS" +} + +resource "azurerm_storage_container" "test" { + name = "acctestsc%[1]d" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource "azurerm_mssql_server_security_alert_policy" "test" { + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_sql_server.test.name + state = "Enabled" +} + +resource "azurerm_mssql_server_vulnerability_assessment" "test" { + server_security_alert_policy_id = azurerm_mssql_server_security_alert_policy.test.id + storage_container_path = "${azurerm_storage_account.test.primary_blob_endpoint}${azurerm_storage_container.test.name}/" + storage_account_access_key = azurerm_storage_account.test.primary_access_key +} + +resource "azurerm_sql_database" "test" { + name = "acctestdb%[1]d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_sql_server.test.name + location = azurerm_resource_group.test.location + edition = "Standard" +} +`, data.RandomInteger, data.Locations.Primary) +} diff --git a/azurerm/internal/services/mssql/mssql_elasticpool_data_source.go b/azurerm/internal/services/mssql/mssql_elasticpool_data_source.go index 8fdfb3eeeed9..6f9a62765aa6 100644 --- a/azurerm/internal/services/mssql/mssql_elasticpool_data_source.go +++ b/azurerm/internal/services/mssql/mssql_elasticpool_data_source.go @@ -5,6 +5,7 @@ import ( "time" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" @@ -12,9 +13,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmMsSqlElasticpool() *schema.Resource { +func dataSourceMsSqlElasticpool() *schema.Resource { return &schema.Resource{ - 
Read: dataSourceArmMsSqlElasticpoolRead, + Read: dataSourceMsSqlElasticpoolRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -70,7 +71,7 @@ func dataSourceArmMsSqlElasticpool() *schema.Resource { } } -func dataSourceArmMsSqlElasticpoolRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceMsSqlElasticpoolRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSSQL.ElasticPoolsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/mssql/mssql_elasticpool_data_source_test.go b/azurerm/internal/services/mssql/mssql_elasticpool_data_source_test.go new file mode 100644 index 000000000000..9965adbca5c9 --- /dev/null +++ b/azurerm/internal/services/mssql/mssql_elasticpool_data_source_test.go @@ -0,0 +1,84 @@ +package mssql_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type MsSqlElasticPoolDataSource struct{} + +func TestAccDataSourceMsSqlElasticPool_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_mssql_elasticpool", "test") + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: MsSqlElasticPoolDataSource{}.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("name").Exists(), + check.That(data.ResourceName).Key("resource_group_name").Exists(), + check.That(data.ResourceName).Key("server_name").Exists(), + check.That(data.ResourceName).Key("location").HasValue(data.Locations.Primary), + check.That(data.ResourceName).Key("license_type").HasValue("LicenseIncluded"), + check.That(data.ResourceName).Key("max_size_gb").HasValue("50"), + check.That(data.ResourceName).Key("per_db_min_capacity").HasValue("0"), + check.That(data.ResourceName).Key("per_db_max_capacity").HasValue("4"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + check.That(data.ResourceName).Key("zone_redundant").HasValue("false"), + ), + }, + }) +} + +func (MsSqlElasticPoolDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%[1]d" + location = "%[2]s" +} + +resource "azurerm_sql_server" "test" { + name = "acctest%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + version = "12.0" + administrator_login = "4dm1n157r470r" + administrator_login_password = "4-v3ry-53cr37-p455w0rd" +} + +resource "azurerm_mssql_elasticpool" "test" { + name = "acctest-pool-dtu-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + max_size_gb = 50 + zone_redundant = false + + sku { + name = "GP_Gen5" + tier = "GeneralPurpose" + capacity = 4 + family = "Gen5" + } + + per_database_settings { + min_capacity = 0 + max_capacity = 4 + } +} + +data "azurerm_mssql_elasticpool" "test" { + name = azurerm_mssql_elasticpool.test.name + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_sql_server.test.name +} +`, data.RandomInteger, data.Locations.Primary) +} diff --git a/azurerm/internal/services/mssql/mssql_elasticpool_resource.go 
b/azurerm/internal/services/mssql/mssql_elasticpool_resource.go index 024d4599540d..6b23ea24ef6c 100644 --- a/azurerm/internal/services/mssql/mssql_elasticpool_resource.go +++ b/azurerm/internal/services/mssql/mssql_elasticpool_resource.go @@ -9,25 +9,29 @@ import ( "github.com/Azure/azure-sdk-for-go/services/preview/sql/mgmt/v3.0/sql" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmMsSqlElasticPool() *schema.Resource { +func resourceMsSqlElasticPool() *schema.Resource { return &schema.Resource{ - Create: resourceArmMsSqlElasticPoolCreateUpdate, - Read: resourceArmMsSqlElasticPoolRead, - Update: resourceArmMsSqlElasticPoolCreateUpdate, - Delete: resourceArmMsSqlElasticPoolDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, + Create: resourceMsSqlElasticPoolCreateUpdate, + Read: resourceMsSqlElasticPoolRead, + Update: resourceMsSqlElasticPoolCreateUpdate, + Delete: resourceMsSqlElasticPoolDelete, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.ElasticPoolID(id) + return err + }), Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -173,7 +177,7 @@ func resourceArmMsSqlElasticPool() *schema.Resource { } } -func resourceArmMsSqlElasticPoolCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceMsSqlElasticPoolCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSSQL.ElasticPoolsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -198,7 +202,7 @@ func resourceArmMsSqlElasticPoolCreateUpdate(d *schema.ResourceData, meta interf } location := azure.NormalizeLocation(d.Get("location").(string)) - sku := expandAzureRmMsSqlElasticPoolSku(d) + sku := expandMsSqlElasticPoolSku(d) t := d.Get("tags").(map[string]interface{}) elasticPool := sql.ElasticPool{ @@ -208,7 +212,7 @@ func resourceArmMsSqlElasticPoolCreateUpdate(d *schema.ResourceData, meta interf Tags: tags.Expand(t), ElasticPoolProperties: &sql.ElasticPoolProperties{ LicenseType: sql.ElasticPoolLicenseType(d.Get("license_type").(string)), - PerDatabaseSettings: expandAzureRmMsSqlElasticPoolPerDatabaseSettings(d), + PerDatabaseSettings: expandMsSqlElasticPoolPerDatabaseSettings(d), ZoneRedundant: utils.Bool(d.Get("zone_redundant").(bool)), }, } @@ -241,20 +245,20 @@ func resourceArmMsSqlElasticPoolCreateUpdate(d *schema.ResourceData, meta interf d.SetId(*read.ID) - return resourceArmMsSqlElasticPoolRead(d, meta) + return resourceMsSqlElasticPoolRead(d, meta) } -func resourceArmMsSqlElasticPoolRead(d *schema.ResourceData, meta interface{}) error { +func 
resourceMsSqlElasticPoolRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSSQL.ElasticPoolsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - elasticPool, err := parse.MSSqlElasticPoolID(d.Id()) + elasticPool, err := parse.ElasticPoolID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, elasticPool.ResourceGroup, elasticPool.MsSqlServer, elasticPool.Name) + resp, err := client.Get(ctx, elasticPool.ResourceGroup, elasticPool.ServerName, elasticPool.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") @@ -270,9 +274,9 @@ func resourceArmMsSqlElasticPoolRead(d *schema.ResourceData, meta interface{}) e d.Set("location", azure.NormalizeLocation(*location)) } - d.Set("server_name", elasticPool.MsSqlServer) + d.Set("server_name", elasticPool.ServerName) - if err := d.Set("sku", flattenAzureRmMsSqlElasticPoolSku(resp.Sku)); err != nil { + if err := d.Set("sku", flattenMsSqlElasticPoolSku(resp.Sku)); err != nil { return fmt.Errorf("Error setting `sku`: %+v", err) } @@ -288,7 +292,7 @@ func resourceArmMsSqlElasticPoolRead(d *schema.ResourceData, meta interface{}) e d.Set("zone_redundant", properties.ZoneRedundant) d.Set("license_type", string(properties.LicenseType)) - if err := d.Set("per_database_settings", flattenAzureRmMsSqlElasticPoolPerDatabaseSettings(properties.PerDatabaseSettings)); err != nil { + if err := d.Set("per_database_settings", flattenMsSqlElasticPoolPerDatabaseSettings(properties.PerDatabaseSettings)); err != nil { return fmt.Errorf("Error setting `per_database_settings`: %+v", err) } } @@ -296,28 +300,28 @@ func resourceArmMsSqlElasticPoolRead(d *schema.ResourceData, meta interface{}) e return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmMsSqlElasticPoolDelete(d *schema.ResourceData, meta interface{}) error { +func resourceMsSqlElasticPoolDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSSQL.ElasticPoolsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - elasticPool, err := parse.MSSqlElasticPoolID(d.Id()) + elasticPool, err := parse.ElasticPoolID(d.Id()) if err != nil { return err } - future, err := client.Delete(ctx, elasticPool.ResourceGroup, elasticPool.MsSqlServer, elasticPool.Name) + future, err := client.Delete(ctx, elasticPool.ResourceGroup, elasticPool.ServerName, elasticPool.Name) if err != nil { - return fmt.Errorf("deleting ElasticPool %q (Server %q / Resource Group %q): %+v", elasticPool.Name, elasticPool.MsSqlServer, elasticPool.ResourceGroup, err) + return fmt.Errorf("deleting ElasticPool %q (Server %q / Resource Group %q): %+v", elasticPool.Name, elasticPool.ServerName, elasticPool.ResourceGroup, err) } if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("waiting for deletion of ElasticPool %q (Server %q / Resource Group %q): %+v", elasticPool.Name, elasticPool.MsSqlServer, elasticPool.ResourceGroup, err) + return fmt.Errorf("waiting for deletion of ElasticPool %q (Server %q / Resource Group %q): %+v", elasticPool.Name, elasticPool.ServerName, elasticPool.ResourceGroup, err) } return nil } -func expandAzureRmMsSqlElasticPoolPerDatabaseSettings(d *schema.ResourceData) *sql.ElasticPoolPerDatabaseSettings { +func expandMsSqlElasticPoolPerDatabaseSettings(d *schema.ResourceData) *sql.ElasticPoolPerDatabaseSettings { perDatabaseSettings := d.Get("per_database_settings").([]interface{}) perDatabaseSetting := 
perDatabaseSettings[0].(map[string]interface{}) @@ -330,7 +334,7 @@ func expandAzureRmMsSqlElasticPoolPerDatabaseSettings(d *schema.ResourceData) *s } } -func expandAzureRmMsSqlElasticPoolSku(d *schema.ResourceData) *sql.Sku { +func expandMsSqlElasticPoolSku(d *schema.ResourceData) *sql.Sku { skus := d.Get("sku").([]interface{}) sku := skus[0].(map[string]interface{}) @@ -347,7 +351,7 @@ func expandAzureRmMsSqlElasticPoolSku(d *schema.ResourceData) *sql.Sku { } } -func flattenAzureRmMsSqlElasticPoolSku(input *sql.Sku) []interface{} { +func flattenMsSqlElasticPoolSku(input *sql.Sku) []interface{} { if input == nil { return []interface{}{} } @@ -373,7 +377,7 @@ func flattenAzureRmMsSqlElasticPoolSku(input *sql.Sku) []interface{} { return []interface{}{values} } -func flattenAzureRmMsSqlElasticPoolPerDatabaseSettings(resp *sql.ElasticPoolPerDatabaseSettings) []interface{} { +func flattenMsSqlElasticPoolPerDatabaseSettings(resp *sql.ElasticPoolPerDatabaseSettings) []interface{} { perDatabaseSettings := map[string]interface{}{} if minCapacity := resp.MinCapacity; minCapacity != nil { diff --git a/azurerm/internal/services/mssql/mssql_elasticpool_resource_test.go b/azurerm/internal/services/mssql/mssql_elasticpool_resource_test.go new file mode 100644 index 000000000000..8f26d624b9fd --- /dev/null +++ b/azurerm/internal/services/mssql/mssql_elasticpool_resource_test.go @@ -0,0 +1,419 @@ +package mssql_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/sql/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MsSqlElasticPoolResource struct{} + +func TestAccMsSqlElasticPool_basicDTU(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_elasticpool", "test") + r := MsSqlElasticPoolResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicDTU(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("max_size_gb"), + }) +} + +func TestAccMsSqlElasticPool_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_elasticpool", "test") + r := MsSqlElasticPoolResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicDTU(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccMsSqlElasticPool_standardDTU(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_elasticpool", "test") + r := MsSqlElasticPoolResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.standardDTU(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("max_size_gb"), + }) +} + +func TestAccMsSqlElasticPool_premiumDTUZoneRedundant(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_elasticpool", "test") + r := MsSqlElasticPoolResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.premiumDTUZoneRedundant(data), 
+ Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + resource.TestCheckResourceAttr(data.ResourceName, "sku.0.name", "PremiumPool"), + resource.TestCheckResourceAttr(data.ResourceName, "zone_redundant", "true"), + ), + }, + data.ImportStep("max_size_gb"), + }) +} + +func TestAccMsSqlElasticPool_basicVCore(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_elasticpool", "test") + r := MsSqlElasticPoolResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicVCore(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("max_size_gb"), + }) +} + +func TestAccMsSqlElasticPool_basicVCoreMaxSizeBytes(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_elasticpool", "test") + r := MsSqlElasticPoolResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicVCoreMaxSizeBytes(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("max_size_gb"), + }) +} + +func TestAccMsSqlElasticPool_resizeDTU(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_elasticpool", "test") + r := MsSqlElasticPoolResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.standardDTU(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("max_size_gb"), + { + Config: r.resizeDTU(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("max_size_gb"), + }) +} + +func TestAccMsSqlElasticPool_resizeVCore(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_elasticpool", "test") + r := MsSqlElasticPoolResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicVCore(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("max_size_gb"), + { + Config: r.resizeVCore(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("max_size_gb"), + }) +} + +func TestAccMsSqlElasticPool_licenseType(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_elasticpool", "test") + r := MsSqlElasticPoolResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.licenseType(data, "LicenseIncluded"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + resource.TestCheckResourceAttr(data.ResourceName, "license_type", "LicenseIncluded"), + ), + }, + data.ImportStep(), + { + Config: r.licenseType(data, "BasePrice"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + resource.TestCheckResourceAttr(data.ResourceName, "license_type", "BasePrice"), + ), + }, + data.ImportStep(), + }) +} + +func (MsSqlElasticPoolResource) Exists(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ElasticPoolID(state.ID) + if err != nil { + return nil, err + } + + resp, err := client.MSSQL.ElasticPoolsClient.Get(ctx, id.ResourceGroup, id.ServerName, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return nil, fmt.Errorf("SQL Elastic Pool %q (Server %q, Resource Group %q) does not exist", id.Name, id.ServerName, id.ResourceGroup) + } + return nil, fmt.Errorf("reading SQL Elastic Pool %q 
(Server %q, Resource Group %q): %v", id.Name, id.ServerName, id.ResourceGroup, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (r MsSqlElasticPoolResource) basicDTU(data acceptance.TestData) string { + return r.templateDTU(data, "BasicPool", "Basic", 50, 4.8828125, 0, 5, false) +} + +func (r MsSqlElasticPoolResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_mssql_elasticpool" "import" { + name = azurerm_mssql_elasticpool.test.name + resource_group_name = azurerm_mssql_elasticpool.test.resource_group_name + location = azurerm_mssql_elasticpool.test.location + server_name = azurerm_mssql_elasticpool.test.server_name + max_size_gb = 4.8828125 + + sku { + name = "BasicPool" + tier = "Basic" + capacity = 50 + } + + per_database_settings { + min_capacity = 0 + max_capacity = 5 + } +} +`, r.templateDTU(data, "BasicPool", "Basic", 50, 4.8828125, 0, 5, false)) +} + +func (r MsSqlElasticPoolResource) premiumDTUZoneRedundant(data acceptance.TestData) string { + return r.templateDTU(data, "PremiumPool", "Premium", 125, 50, 0, 50, true) +} + +func (r MsSqlElasticPoolResource) standardDTU(data acceptance.TestData) string { + return r.templateDTU(data, "StandardPool", "Standard", 50, 50, 0, 50, false) +} + +func (r MsSqlElasticPoolResource) resizeDTU(data acceptance.TestData) string { + return r.templateDTU(data, "StandardPool", "Standard", 100, 100, 50, 100, false) +} + +func (r MsSqlElasticPoolResource) basicVCore(data acceptance.TestData) string { + return r.templateVCore(data, "GP_Gen5", "GeneralPurpose", 4, "Gen5", 0.25, 4) +} + +func (r MsSqlElasticPoolResource) basicVCoreMaxSizeBytes(data acceptance.TestData) string { + return r.templateVCoreMaxSizeBytes(data, "GP_Gen5", "GeneralPurpose", 4, "Gen5", 0.25, 4) +} + +func (r MsSqlElasticPoolResource) resizeVCore(data acceptance.TestData) string { + return r.templateVCore(data, "GP_Gen5", "GeneralPurpose", 8, "Gen5", 0, 8) +} + +func (MsSqlElasticPoolResource) templateDTU(data acceptance.TestData, skuName string, skuTier string, skuCapacity int, maxSizeGB float64, databaseSettingsMin int, databaseSettingsMax int, zoneRedundant bool) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%[1]d" + location = "%[2]s" +} + +resource "azurerm_sql_server" "test" { + name = "acctest%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + version = "12.0" + administrator_login = "4dm1n157r470r" + administrator_login_password = "4-v3ry-53cr37-p455w0rd" +} + +resource "azurerm_mssql_elasticpool" "test" { + name = "acctest-pool-dtu-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + max_size_gb = %.7[6]f + zone_redundant = %[9]t + + sku { + name = "%[3]s" + tier = "%[4]s" + capacity = %[5]d + } + + per_database_settings { + min_capacity = %[7]d + max_capacity = %[8]d + } +} +`, data.RandomInteger, data.Locations.Primary, skuName, skuTier, skuCapacity, maxSizeGB, databaseSettingsMin, databaseSettingsMax, zoneRedundant) +} + +func (MsSqlElasticPoolResource) templateVCore(data acceptance.TestData, skuName string, skuTier string, skuCapacity int, skuFamily string, databaseSettingsMin float64, databaseSettingsMax float64) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = 
"acctestRG-%[1]d" + location = "%[2]s" +} + +resource "azurerm_sql_server" "test" { + name = "acctest%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + version = "12.0" + administrator_login = "4dm1n157r470r" + administrator_login_password = "4-v3ry-53cr37-p455w0rd" +} + +resource "azurerm_mssql_elasticpool" "test" { + name = "acctest-pool-vcore-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + max_size_gb = 5 + + sku { + name = "%[3]s" + tier = "%[4]s" + capacity = %[5]d + family = "%[6]s" + } + + per_database_settings { + min_capacity = %.2[7]f + max_capacity = %.2[8]f + } +} +`, data.RandomInteger, data.Locations.Primary, skuName, skuTier, skuCapacity, skuFamily, databaseSettingsMin, databaseSettingsMax) +} + +func (MsSqlElasticPoolResource) templateVCoreMaxSizeBytes(data acceptance.TestData, skuName string, skuTier string, skuCapacity int, skuFamily string, databaseSettingsMin float64, databaseSettingsMax float64) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%[1]d" + location = "%[2]s" +} + +resource "azurerm_sql_server" "test" { + name = "acctest%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + version = "12.0" + administrator_login = "4dm1n157r470r" + administrator_login_password = "4-v3ry-53cr37-p455w0rd" +} + +resource "azurerm_mssql_elasticpool" "test" { + name = "acctest-pool-vcore-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + max_size_bytes = 214748364800 + + sku { + name = "%[3]s" + tier = "%[4]s" + capacity = %[5]d + family = "%[6]s" + } + + per_database_settings { + min_capacity = %.2[7]f + max_capacity = %.2[8]f + } +} +`, data.RandomInteger, data.Locations.Primary, skuName, skuTier, skuCapacity, skuFamily, databaseSettingsMin, databaseSettingsMax) +} + +func (MsSqlElasticPoolResource) licenseType(data acceptance.TestData, licenseType string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%[1]d" + location = "%[2]s" +} + +resource "azurerm_sql_server" "test" { + name = "acctest%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + version = "12.0" + administrator_login = "4dm1n157r470r" + administrator_login_password = "4-v3ry-53cr37-p455w0rd" +} + +resource "azurerm_mssql_elasticpool" "test" { + name = "acctest-pool-dtu-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + server_name = azurerm_sql_server.test.name + max_size_gb = 50 + zone_redundant = false + license_type = "%[3]s" + + sku { + name = "GP_Gen5" + tier = "GeneralPurpose" + capacity = 4 + family = "Gen5" + } + + per_database_settings { + min_capacity = 0 + max_capacity = 4 + } + +} +`, data.RandomInteger, data.Locations.Primary, licenseType) +} diff --git a/azurerm/internal/services/mssql/mssql_server_data_source.go b/azurerm/internal/services/mssql/mssql_server_data_source.go index 46678e0273ab..23b90d8895ae 100644 --- a/azurerm/internal/services/mssql/mssql_server_data_source.go +++ b/azurerm/internal/services/mssql/mssql_server_data_source.go @@ -5,6 +5,7 
@@ import ( "time" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" @@ -15,7 +16,7 @@ import ( func dataSourceMsSqlServer() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmMsSqlServerRead, + Read: dataSourceMsSqlServerRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -80,7 +81,7 @@ func dataSourceMsSqlServer() *schema.Resource { } } -func dataSourceArmMsSqlServerRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceMsSqlServerRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSSQL.ServersClient restorableDroppedDatabasesClient := meta.(*clients.Client).MSSQL.RestorableDroppedDatabasesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) @@ -110,7 +111,7 @@ func dataSourceArmMsSqlServerRead(d *schema.ResourceData, meta interface{}) erro d.Set("fully_qualified_domain_name", props.FullyQualifiedDomainName) } - if err := d.Set("identity", flattenAzureRmSqlServerIdentity(resp.Identity)); err != nil { + if err := d.Set("identity", flattenSqlServerIdentity(resp.Identity)); err != nil { return fmt.Errorf("setting `identity`: %+v", err) } @@ -118,7 +119,7 @@ func dataSourceArmMsSqlServerRead(d *schema.ResourceData, meta interface{}) erro if err != nil { return fmt.Errorf("listing SQL Server %s Restorable Dropped Databases: %v", name, err) } - if err := d.Set("restorable_dropped_database_ids", flattenAzureRmSqlServerRestorableDatabases(restorableResp)); err != nil { + if err := d.Set("restorable_dropped_database_ids", flattenSqlServerRestorableDatabases(restorableResp)); err != nil { return fmt.Errorf("setting `restorable_dropped_database_ids`: %+v", err) } diff --git a/azurerm/internal/services/mssql/mssql_server_data_source_test.go b/azurerm/internal/services/mssql/mssql_server_data_source_test.go new file mode 100644 index 000000000000..ddc7c7016455 --- /dev/null +++ b/azurerm/internal/services/mssql/mssql_server_data_source_test.go @@ -0,0 +1,69 @@ +package mssql_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type MsSqlServerDataSource struct{} + +func TestAccDataSourceMsSqlServer_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_mssql_server", "test") + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: MsSqlServerDataSource{}.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("name").HasValue(fmt.Sprintf("acctestsqlserver%d", data.RandomInteger)), + check.That(data.ResourceName).Key("location").HasValue(data.Locations.Primary), + ), + }, + }) +} + +func TestAccDataSourceMsSqlServer_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_mssql_server", "test") + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: MsSqlServerDataSource{}.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("name").HasValue(fmt.Sprintf("acctestsqlserver%d", data.RandomInteger)), + 
check.That(data.ResourceName).Key("location").HasValue(data.Locations.Primary), + check.That(data.ResourceName).Key("version").Exists(), + check.That(data.ResourceName).Key("administrator_login").Exists(), + check.That(data.ResourceName).Key("fully_qualified_domain_name").Exists(), + check.That(data.ResourceName).Key("tags.%").Exists(), + ), + }, + }) +} + +func (MsSqlServerDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +data "azurerm_mssql_server" "test" { + name = azurerm_mssql_server.test.name + resource_group_name = azurerm_resource_group.test.name +} + +`, MsSqlServerResource{}.basic(data)) +} + +func (MsSqlServerDataSource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +data "azurerm_mssql_server" "test" { + name = azurerm_mssql_server.test.name + resource_group_name = azurerm_resource_group.test.name +} + +`, MsSqlServerResource{}.complete(data)) +} diff --git a/azurerm/internal/services/mssql/mssql_server_extended_auditing_policy_resource.go b/azurerm/internal/services/mssql/mssql_server_extended_auditing_policy_resource.go index a44eb71b8cbc..66af30b192e7 100644 --- a/azurerm/internal/services/mssql/mssql_server_extended_auditing_policy_resource.go +++ b/azurerm/internal/services/mssql/mssql_server_extended_auditing_policy_resource.go @@ -8,6 +8,7 @@ import ( "github.com/Azure/azure-sdk-for-go/services/preview/sql/mgmt/v3.0/sql" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" @@ -17,15 +18,15 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmMsSqlServerExtendedAuditingPolicy() *schema.Resource { +func resourceMsSqlServerExtendedAuditingPolicy() *schema.Resource { return &schema.Resource{ - Create: resourceArmMsSqlServerExtendedAuditingPolicyCreateUpdate, - Read: resourceArmMsSqlServerExtendedAuditingPolicyRead, - Update: resourceArmMsSqlServerExtendedAuditingPolicyCreateUpdate, - Delete: resourceArmMsSqlServerExtendedAuditingPolicyDelete, + Create: resourceMsSqlServerExtendedAuditingPolicyCreateUpdate, + Read: resourceMsSqlServerExtendedAuditingPolicyRead, + Update: resourceMsSqlServerExtendedAuditingPolicyCreateUpdate, + Delete: resourceMsSqlServerExtendedAuditingPolicyDelete, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.MssqlServerExtendedAuditingPolicyID(id) + _, err := parse.ServerExtendedAuditingPolicyID(id) return err }), @@ -41,7 +42,7 @@ func resourceArmMsSqlServerExtendedAuditingPolicy() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.MsSqlServerID, + ValidateFunc: validate.ServerID, }, "storage_endpoint": { @@ -73,14 +74,14 @@ func resourceArmMsSqlServerExtendedAuditingPolicy() *schema.Resource { } } -func resourceArmMsSqlServerExtendedAuditingPolicyCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceMsSqlServerExtendedAuditingPolicyCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSSQL.ServerExtendedBlobAuditingPoliciesClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() log.Printf("[INFO] preparing 
arguments for MsSql Server Extended Auditing Policy creation.") - serverId, err := parse.MsSqlServerID(d.Get("server_id").(string)) + serverId, err := parse.ServerID(d.Get("server_id").(string)) if err != nil { return err } @@ -136,32 +137,32 @@ func resourceArmMsSqlServerExtendedAuditingPolicyCreateUpdate(d *schema.Resource d.SetId(*read.ID) - return resourceArmMsSqlServerExtendedAuditingPolicyRead(d, meta) + return resourceMsSqlServerExtendedAuditingPolicyRead(d, meta) } -func resourceArmMsSqlServerExtendedAuditingPolicyRead(d *schema.ResourceData, meta interface{}) error { +func resourceMsSqlServerExtendedAuditingPolicyRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSSQL.ServerExtendedBlobAuditingPoliciesClient serverClient := meta.(*clients.Client).MSSQL.ServersClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.MssqlServerExtendedAuditingPolicyID(d.Id()) + id, err := parse.ServerExtendedAuditingPolicyID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.MsSqlServer) + resp, err := client.Get(ctx, id.ResourceGroup, id.ServerName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("reading MsSql Server %s Extended Auditing Policy (Resource Group %q): %s", id.MsSqlServer, id.ResourceGroup, err) + return fmt.Errorf("reading MsSql Server %s Extended Auditing Policy (Resource Group %q): %s", id.ServerName, id.ResourceGroup, err) } - serverResp, err := serverClient.Get(ctx, id.ResourceGroup, id.MsSqlServer) + serverResp, err := serverClient.Get(ctx, id.ResourceGroup, id.MsSqlServer) if err != nil || serverResp.ID == nil || *serverResp.ID == "" { - return fmt.Errorf("reading MsSql Server %q ID is empty or nil(Resource Group %q): %s", id.MsSqlServer, id.ResourceGroup, err) + return fmt.Errorf("reading MsSql Server %q ID is empty or nil (Resource Group %q): %s", id.ServerName, id.ResourceGroup, err) } d.Set("server_id", serverResp.ID) @@ -175,12 +176,12 @@ func resourceArmMsSqlServerExtendedAuditingPolicyRead(d *schema.ResourceData, me return nil } -func resourceArmMsSqlServerExtendedAuditingPolicyDelete(d *schema.ResourceData, meta interface{}) error { +func resourceMsSqlServerExtendedAuditingPolicyDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSSQL.ServerExtendedBlobAuditingPoliciesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.MssqlServerExtendedAuditingPolicyID(d.Id()) + id, err := parse.ServerExtendedAuditingPolicyID(d.Id()) if err != nil { return err } @@ -195,13 +196,13 @@ func resourceArmMsSqlServerExtendedAuditingPolicyDelete(d *schema.ResourceData, }, } - future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.MsSqlServer, params) + future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.ServerName, params) if err != nil { - return fmt.Errorf("deleting MsSql Server %q Extended Auditing Policy(Resource Group %q): %+v", id.MsSqlServer, id.ResourceGroup, err) + return fmt.Errorf("deleting MsSql Server %q Extended Auditing Policy (Resource Group %q): %+v", id.ServerName, id.ResourceGroup, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("waiting for deletion of MsSql Server %q 
Extended Auditing Policy (Resource Group %q): %+v", id.ServerName, id.ResourceGroup, err) } return nil diff --git a/azurerm/internal/services/mssql/mssql_server_extended_auditing_policy_resource_test.go b/azurerm/internal/services/mssql/mssql_server_extended_auditing_policy_resource_test.go new file mode 100644 index 000000000000..2390841f6018 --- /dev/null +++ b/azurerm/internal/services/mssql/mssql_server_extended_auditing_policy_resource_test.go @@ -0,0 +1,285 @@ +package mssql_test + +import ( + "context" + "fmt" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MsSqlServerExtendedAuditingPolicyResource struct{} + +func TestAccMsSqlServerExtendedAuditingPolicy_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_server_extended_auditing_policy", "test") + r := MsSqlServerExtendedAuditingPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("storage_account_access_key"), + }) +} + +func TestAccMsSqlServerExtendedAuditingPolicy_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_server_extended_auditing_policy", "test") + r := MsSqlServerExtendedAuditingPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccMsSqlServerExtendedAuditingPolicy_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_server_extended_auditing_policy", "test") + r := MsSqlServerExtendedAuditingPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("storage_account_access_key"), + }) +} + +func TestAccMsSqlServerExtendedAuditingPolicy_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_server_extended_auditing_policy", "test") + r := MsSqlServerExtendedAuditingPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("storage_account_access_key"), + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("storage_account_access_key"), + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("storage_account_access_key"), + }) +} + +func TestAccMsSqlServerExtendedAuditingPolicy_storageAccBehindFireWall(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_server_extended_auditing_policy", "test") + r := 
MsSqlServerExtendedAuditingPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.storageAccountBehindFireWall(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("storage_account_access_key"), + }) +} + +func (MsSqlServerExtendedAuditingPolicyResource) Exists(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ServerExtendedAuditingPolicyID(state.ID) + if err != nil { + return nil, err + } + + resp, err := client.MSSQL.ServerExtendedBlobAuditingPoliciesClient.Get(ctx, id.ResourceGroup, id.ServerName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return nil, fmt.Errorf("SQL Server Extended Auditing Policy for server %q (Resource Group %q) does not exist", id.ServerName, id.ResourceGroup) + } + + return nil, fmt.Errorf("reading SQL Server Extended Auditing Policy for server %q (Resource Group %q): %v", id.ServerName, id.ResourceGroup, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (MsSqlServerExtendedAuditingPolicyResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-mssql-%[1]d" + location = "%[2]s" +} + +resource "azurerm_mssql_server" "test" { + name = "acctest-sqlserver-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + version = "12.0" + administrator_login = "missadministrator" + administrator_login_password = "AdminPassword123!" +} + +resource "azurerm_storage_account" "test" { + name = "unlikely23exst2acct%[3]s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (r MsSqlServerExtendedAuditingPolicyResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_server_extended_auditing_policy" "test" { + server_id = azurerm_mssql_server.test.id + storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint + storage_account_access_key = azurerm_storage_account.test.primary_access_key +} +`, r.template(data)) +} + +func (r MsSqlServerExtendedAuditingPolicyResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_server_extended_auditing_policy" "import" { + server_id = azurerm_mssql_server.test.id + storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint + storage_account_access_key = azurerm_storage_account.test.primary_access_key +} +`, r.template(data)) +} + +func (r MsSqlServerExtendedAuditingPolicyResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_server_extended_auditing_policy" "test" { + server_id = azurerm_mssql_server.test.id + storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint + storage_account_access_key = azurerm_storage_account.test.primary_access_key + storage_account_access_key_is_secondary = false + retention_in_days = 6 +} +`, r.template(data)) +} + +func (r MsSqlServerExtendedAuditingPolicyResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_storage_account" "test2" { + name = "unlikely23exst2acc2%[2]s" + resource_group_name = 
azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_mssql_server_extended_auditing_policy" "test" { + server_id = azurerm_mssql_server.test.id + storage_endpoint = azurerm_storage_account.test2.primary_blob_endpoint + storage_account_access_key = azurerm_storage_account.test2.primary_access_key + storage_account_access_key_is_secondary = true + retention_in_days = 3 +} +`, r.template(data), data.RandomString) +} + +func (MsSqlServerExtendedAuditingPolicyResource) storageAccountBehindFireWall(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-mssql-%[1]d" + location = "%[2]s" +} + +resource "azurerm_mssql_server" "test" { + name = "acctest-sqlserver-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + version = "12.0" + administrator_login = "missadministrator" + administrator_login_password = "AdminPassword123!" + identity { + type = "SystemAssigned" + } +} + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%[1]d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctestsubnet%[1]d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" + service_endpoints = ["Microsoft.Storage"] +} + +resource "azurerm_storage_account" "test" { + name = "unlikely23exst2acct%[3]s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" + + network_rules { + default_action = "Deny" + ip_rules = ["127.0.0.1"] + virtual_network_subnet_ids = [azurerm_subnet.test.id] + } +} + +resource "azurerm_role_assignment" "test" { + scope = azurerm_storage_account.test.id + role_definition_name = "Storage Blob Data Contributor" + principal_id = azurerm_mssql_server.test.identity.0.principal_id +} + +resource "azurerm_mssql_server_extended_auditing_policy" "test" { + server_id = azurerm_mssql_server.test.id + storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint + + depends_on = [ + azurerm_role_assignment.test, + ] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} diff --git a/azurerm/internal/services/mssql/mssql_server_resource.go b/azurerm/internal/services/mssql/mssql_server_resource.go index e928ee9f0a94..d314f758d267 100644 --- a/azurerm/internal/services/mssql/mssql_server_resource.go +++ b/azurerm/internal/services/mssql/mssql_server_resource.go @@ -10,25 +10,29 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" uuid "github.com/satori/go.uuid" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/helper" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmMsSqlServer() *schema.Resource { +func resourceMsSqlServer() *schema.Resource { return &schema.Resource{ - Create: resourceArmMsSqlServerCreateUpdate, - Read: resourceArmMsSqlServerRead, - Update: resourceArmMsSqlServerCreateUpdate, - Delete: resourceArmMsSqlServerDelete, + Create: resourceMsSqlServerCreateUpdate, + Read: resourceMsSqlServerRead, + Update: resourceMsSqlServerCreateUpdate, + Delete: resourceMsSqlServerDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.ServerID(id) + return err + }), Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(60 * time.Minute), @@ -169,11 +173,12 @@ func resourceArmMsSqlServer() *schema.Resource { "tags": tags.Schema(), }, - CustomizeDiff: mssqlMinimumTLSVersionDiff, + + CustomizeDiff: msSqlMinimumTLSVersionDiff, } } -func resourceArmMsSqlServerCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceMsSqlServerCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSSQL.ServersClient auditingClient := meta.(*clients.Client).MSSQL.ServerExtendedBlobAuditingPoliciesClient connectionClient := meta.(*clients.Client).MSSQL.ServerConnectionPoliciesClient @@ -214,7 +219,7 @@ func resourceArmMsSqlServerCreateUpdate(d *schema.ResourceData, meta interface{} } if _, ok := d.GetOk("identity"); ok { - sqlServerIdentity := expandAzureRmSqlServerIdentity(d) + sqlServerIdentity := expandSqlServerIdentity(d) props.Identity = sqlServerIdentity } @@ -261,7 +266,7 @@ func resourceArmMsSqlServerCreateUpdate(d *schema.ResourceData, meta interface{} return fmt.Errorf("waiting for SQL Server %q AAD admin (Resource Group %q) to be deleted: %+v", name, resGroup, err) } - if adminParams := expandAzureRmMsSqlServerAdministrator(d.Get("azuread_administrator").([]interface{})); adminParams != nil { + if adminParams := expandMsSqlServerAdministrator(d.Get("azuread_administrator").([]interface{})); adminParams != nil { adminFuture, err := adminClient.CreateOrUpdate(ctx, resGroup, name, *adminParams) if err != nil { return fmt.Errorf("creating SQL Server %q AAD admin (Resource Group %q): %+v", name, resGroup, err) @@ -283,7 +288,7 @@ func resourceArmMsSqlServerCreateUpdate(d *schema.ResourceData, meta interface{} } auditingProps := sql.ExtendedServerBlobAuditingPolicy{ - ExtendedServerBlobAuditingPolicyProperties: helper.ExpandAzureRmSqlServerBlobAuditingPolicies(d.Get("extended_auditing_policy").([]interface{})), + ExtendedServerBlobAuditingPolicyProperties: helper.ExpandSqlServerBlobAuditingPolicies(d.Get("extended_auditing_policy").([]interface{})), } auditingFuture, err := auditingClient.CreateOrUpdate(ctx, resGroup, name, auditingProps) @@ -295,10 +300,10 @@ func resourceArmMsSqlServerCreateUpdate(d *schema.ResourceData, meta interface{} return fmt.Errorf("waiting for creation of SQL Server %q Blob Auditing Policies(Resource Group %q): %+v", name, resGroup, err) } - return resourceArmMsSqlServerRead(d, meta) + return resourceMsSqlServerRead(d, meta) } -func resourceArmMsSqlServerRead(d *schema.ResourceData, meta 
interface{}) error { +func resourceMsSqlServerRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSSQL.ServersClient auditingClient := meta.(*clients.Client).MSSQL.ServerExtendedBlobAuditingPoliciesClient connectionClient := meta.(*clients.Client).MSSQL.ServerConnectionPoliciesClient @@ -332,7 +337,7 @@ func resourceArmMsSqlServerRead(d *schema.ResourceData, meta interface{}) error d.Set("location", azure.NormalizeLocation(*location)) } - if err := d.Set("identity", flattenAzureRmSqlServerIdentity(resp.Identity)); err != nil { + if err := d.Set("identity", flattenSqlServerIdentity(resp.Identity)); err != nil { return fmt.Errorf("setting `identity`: %+v", err) } @@ -350,7 +355,7 @@ func resourceArmMsSqlServerRead(d *schema.ResourceData, meta interface{}) error return fmt.Errorf("reading SQL Server %s AAD admin: %v", name, err) } } else { - if err := d.Set("azuread_administrator", flatternAzureRmMsSqlServerAdministrator(adminResp)); err != nil { + if err := d.Set("azuread_administrator", flatternMsSqlServerAdministrator(adminResp)); err != nil { return fmt.Errorf("setting `azuread_administrator`: %+v", err) } } @@ -369,7 +374,7 @@ func resourceArmMsSqlServerRead(d *schema.ResourceData, meta interface{}) error return fmt.Errorf("reading SQL Server %s Blob Auditing Policies: %v ", name, err) } - if err := d.Set("extended_auditing_policy", helper.FlattenAzureRmSqlServerBlobAuditingPolicies(&auditingResp, d)); err != nil { + if err := d.Set("extended_auditing_policy", helper.FlattenSqlServerBlobAuditingPolicies(&auditingResp, d)); err != nil { return fmt.Errorf("setting `extended_auditing_policy`: %+v", err) } @@ -377,14 +382,14 @@ func resourceArmMsSqlServerRead(d *schema.ResourceData, meta interface{}) error if err != nil { return fmt.Errorf("listing SQL Server %s Restorable Dropped Databases: %v", name, err) } - if err := d.Set("restorable_dropped_database_ids", flattenAzureRmSqlServerRestorableDatabases(restorableResp)); err != nil { + if err := d.Set("restorable_dropped_database_ids", flattenSqlServerRestorableDatabases(restorableResp)); err != nil { return fmt.Errorf("setting `restorable_dropped_database_ids`: %+v", err) } return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmMsSqlServerDelete(d *schema.ResourceData, meta interface{}) error { +func resourceMsSqlServerDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSSQL.ServersClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() @@ -405,7 +410,7 @@ func resourceArmMsSqlServerDelete(d *schema.ResourceData, meta interface{}) erro return future.WaitForCompletionRef(ctx, client.Client) } -func expandAzureRmSqlServerIdentity(d *schema.ResourceData) *sql.ResourceIdentity { +func expandSqlServerIdentity(d *schema.ResourceData) *sql.ResourceIdentity { identities := d.Get("identity").([]interface{}) if len(identities) == 0 { return &sql.ResourceIdentity{} @@ -416,7 +421,8 @@ func expandAzureRmSqlServerIdentity(d *schema.ResourceData) *sql.ResourceIdentit Type: identityType, } } -func flattenAzureRmSqlServerIdentity(identity *sql.ResourceIdentity) []interface{} { + +func flattenSqlServerIdentity(identity *sql.ResourceIdentity) []interface{} { if identity == nil { return []interface{}{} } @@ -432,7 +438,7 @@ func flattenAzureRmSqlServerIdentity(identity *sql.ResourceIdentity) []interface return []interface{}{result} } -func expandAzureRmMsSqlServerAdministrator(input []interface{}) *sql.ServerAzureADAdministrator { +func 
expandMsSqlServerAdministrator(input []interface{}) *sql.ServerAzureADAdministrator { if len(input) == 0 || input[0] == nil { return nil } @@ -456,7 +462,7 @@ func expandAzureRmMsSqlServerAdministrator(input []interface{}) *sql.ServerAzure return &adminParams } -func flatternAzureRmMsSqlServerAdministrator(admin sql.ServerAzureADAdministrator) []interface{} { +func flatternMsSqlServerAdministrator(admin sql.ServerAzureADAdministrator) []interface{} { var login, sid, tid string if admin.Login != nil { login = *admin.Login @@ -479,7 +485,7 @@ func flatternAzureRmMsSqlServerAdministrator(admin sql.ServerAzureADAdministrato } } -func flattenAzureRmSqlServerRestorableDatabases(resp sql.RestorableDroppedDatabaseListResult) []string { +func flattenSqlServerRestorableDatabases(resp sql.RestorableDroppedDatabaseListResult) []string { if resp.Value == nil || len(*resp.Value) == 0 { return []string{} } @@ -494,7 +500,7 @@ func flattenAzureRmSqlServerRestorableDatabases(resp sql.RestorableDroppedDataba return res } -func mssqlMinimumTLSVersionDiff(d *schema.ResourceDiff, _ interface{}) (err error) { +func msSqlMinimumTLSVersionDiff(d *schema.ResourceDiff, _ interface{}) (err error) { old, new := d.GetChange("minimum_tls_version") if old != "" && new == "" { err = fmt.Errorf("`minimum_tls_version` cannot be removed once set, please set a valid value for this property") diff --git a/azurerm/internal/services/mssql/mssql_server_resource_test.go b/azurerm/internal/services/mssql/mssql_server_resource_test.go new file mode 100644 index 000000000000..3f551e7c06a5 --- /dev/null +++ b/azurerm/internal/services/mssql/mssql_server_resource_test.go @@ -0,0 +1,608 @@ +package mssql_test + +import ( + "context" + "fmt" + "os" + "regexp" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MsSqlServerResource struct{} + +func TestAccMsSqlServer_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_server", "test") + r := MsSqlServerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + }) +} + +func TestAccMsSqlServer_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_server", "test") + r := MsSqlServerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password", "extended_auditing_policy.0.storage_account_access_key"), + }) +} + +func TestAccMsSqlServer_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_server", "test") + r := MsSqlServerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + 
data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccMsSqlServer_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_server", "test") + r := MsSqlServerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password", "extended_auditing_policy.0.storage_account_access_key"), + { + Config: r.completeUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password", "extended_auditing_policy.0.storage_account_access_key"), + { + Config: r.basicWithMinimumTLSVersion(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + }) +} + +func TestAccMsSqlServer_identity(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_server", "test") + r := MsSqlServerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.identity(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password", "extended_auditing_policy.0.storage_account_access_key"), + }) +} + +func TestAccMsSqlServer_azureadAdmin(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_server", "test") + r := MsSqlServerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + { + Config: r.aadAdmin(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + { + Config: r.aadAdminUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + }) +} + +func TestAccMsSqlServer_blobAuditingPolicies_withFirewall(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_server", "test") + r := MsSqlServerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password", "extended_auditing_policy.0.storage_account_access_key"), + { + Config: r.blobAuditingPoliciesWithFirewall(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password", "extended_auditing_policy.0.storage_account_access_key"), + }) +} + +func TestAccMsSqlServer_customDiff(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_server", "test") + r := MsSqlServerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicWithMinimumTLSVersion(data), + Check: resource.ComposeTestCheckFunc( + 
check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + ExpectError: regexp.MustCompile("`minimum_tls_version` cannot be removed once set, please set a valid value for this property"), + }, + data.ImportStep("administrator_login_password"), + }) +} + +func (MsSqlServerResource) Exists(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ServerID(state.ID) + if err != nil { + return nil, err + } + + resp, err := client.MSSQL.ServersClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return nil, fmt.Errorf("SQL Server %q (Resource Group %q) does not exist", id.Name, id.ResourceGroup) + } + return nil, fmt.Errorf("reading SQL Server %q (Resource Group %q): %v", id.Name, id.ResourceGroup, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (MsSqlServerResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-mssql-%[1]d" + location = "%[2]s" +} + +resource "azurerm_mssql_server" "test" { + name = "acctestsqlserver%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + version = "12.0" + administrator_login = "missadministrator" + administrator_login_password = "thisIsKat11" + extended_auditing_policy = [] +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (MsSqlServerResource) basicWithMinimumTLSVersion(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-mssql-%[1]d" + location = "%[2]s" +} + +resource "azurerm_mssql_server" "test" { + name = "acctestsqlserver%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + version = "12.0" + administrator_login = "missadministrator" + administrator_login_password = "thisIsKat11" + minimum_tls_version = "1.2" + extended_auditing_policy = [] +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (r MsSqlServerResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_mssql_server" "import" { + name = azurerm_mssql_server.test.name + resource_group_name = azurerm_mssql_server.test.resource_group_name + location = azurerm_mssql_server.test.location + version = azurerm_mssql_server.test.version + administrator_login = azurerm_mssql_server.test.administrator_login + administrator_login_password = azurerm_mssql_server.test.administrator_login_password +} +`, r.basic(data)) +} + +func (MsSqlServerResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-mssql-%[1]d" + location = "%[2]s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestvnet-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + address_space = ["10.5.0.0/16"] +} + +resource "azurerm_subnet" "service" { + name = "acctestsnetservice-%[1]d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefixes = 
["10.5.1.0/24"] + + enforce_private_link_service_network_policies = true +} + +resource "azurerm_subnet" "endpoint" { + name = "acctestsnetendpoint-%[1]d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefixes = ["10.5.2.0/24"] + + enforce_private_link_endpoint_network_policies = true +} + +resource "azurerm_storage_account" "test" { + name = "acctesta%[3]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_mssql_server" "test" { + name = "acctestsqlserver%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + version = "12.0" + administrator_login = "missadministrator" + administrator_login_password = "thisIsKat11" + minimum_tls_version = "1.2" + + public_network_access_enabled = true + + extended_auditing_policy { + storage_account_access_key = azurerm_storage_account.test.primary_access_key + storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint + storage_account_access_key_is_secondary = true + retention_in_days = 6 + } + + tags = { + ENV = "Staging" + database = "NotProd" + } +} + +resource "azurerm_private_dns_zone" "finance" { + name = "privatelink.sql.database.azure.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_endpoint" "test" { + name = "acctest-privatelink-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + subnet_id = azurerm_subnet.endpoint.id + + private_service_connection { + name = "acctest-privatelink-mssc-%[1]d" + private_connection_resource_id = azurerm_mssql_server.test.id + subresource_names = ["sqlServer"] + is_manual_connection = false + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomIntOfLength(15)) +} + +func (MsSqlServerResource) completeUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-mssql-%[1]d" + location = "%[2]s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestvnet-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + address_space = ["10.5.0.0/16"] +} + +resource "azurerm_subnet" "service" { + name = "acctestsnetservice-%[1]d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefixes = ["10.5.1.0/24"] + + enforce_private_link_service_network_policies = true +} + +resource "azurerm_subnet" "endpoint" { + name = "acctestsnetendpoint-%[1]d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefixes = ["10.5.2.0/24"] + + enforce_private_link_endpoint_network_policies = true +} + +resource "azurerm_storage_account" "testb" { + name = "acctestb%[3]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_mssql_server" "test" { + name = "acctestsqlserver%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + version = "12.0" + administrator_login = "missadministrator" + administrator_login_password = 
"thisIsKat11" + minimum_tls_version = "1.0" + + public_network_access_enabled = false + + extended_auditing_policy { + storage_account_access_key = azurerm_storage_account.testb.primary_access_key + storage_endpoint = azurerm_storage_account.testb.primary_blob_endpoint + storage_account_access_key_is_secondary = false + retention_in_days = 11 + } + + tags = { + DB = "NotProd" + } +} + +resource "azurerm_private_dns_zone" "finance" { + name = "privatelink.sql.database.azure.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_endpoint" "test" { + name = "acctest-privatelink-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + subnet_id = azurerm_subnet.endpoint.id + + private_service_connection { + name = "acctest-privatelink-mssc-%[1]d" + private_connection_resource_id = azurerm_mssql_server.test.id + subresource_names = ["sqlServer"] + is_manual_connection = false + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomIntOfLength(15)) +} + +func (MsSqlServerResource) identity(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-mssql-%[1]d" + location = "%[2]s" +} + +resource "azurerm_mssql_server" "test" { + name = "acctestsqlserver%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + version = "12.0" + administrator_login = "missadministrator" + administrator_login_password = "thisIsKat11" + + identity { + type = "SystemAssigned" + } +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (MsSqlServerResource) aadAdmin(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-mssql-%[1]d" + location = "%[2]s" +} + +data "azuread_service_principal" "test" { + application_id = "%[3]s" +} + +resource "azurerm_mssql_server" "test" { + name = "acctestsqlserver%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + version = "12.0" + administrator_login = "missadministrator" + administrator_login_password = "thisIsKat11" + + azuread_administrator { + login_username = "AzureAD Admin" + object_id = data.azuread_service_principal.test.id + } +} +`, data.RandomInteger, data.Locations.Primary, os.Getenv("ARM_CLIENT_ID")) +} + +func (MsSqlServerResource) aadAdminUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-mssql-%[1]d" + location = "%[2]s" +} + +data "azuread_service_principal" "test" { + application_id = "%[3]s" +} + +resource "azurerm_mssql_server" "test" { + name = "acctestsqlserver%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + version = "12.0" + administrator_login = "missadministrator" + administrator_login_password = "thisIsKat11" + + azuread_administrator { + login_username = "AzureAD Admin2" + object_id = data.azuread_service_principal.test.id + } +} +`, data.RandomInteger, data.Locations.Primary, os.Getenv("ARM_CLIENT_ID")) +} + +func (MsSqlServerResource) blobAuditingPoliciesWithFirewall(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = 
"acctestRG-mssql-%[1]d" + location = "%[2]s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%[1]d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctestsubnet%[1]d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" + service_endpoints = ["Microsoft.Storage"] +} + +resource "azurerm_storage_account" "test" { + name = "unlikely23exst2acct%[3]s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" + + network_rules { + default_action = "Allow" + ip_rules = ["127.0.0.1"] + virtual_network_subnet_ids = [azurerm_subnet.test.id] + } +} + +data "azuread_service_principal" "test" { + application_id = "%[4]s" +} + +resource "azurerm_mssql_server" "test" { + name = "acctestsqlserver%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + version = "12.0" + administrator_login = "missadministrator" + administrator_login_password = "thisIsKat11" + + azuread_administrator { + login_username = "AzureAD Admin2" + object_id = data.azuread_service_principal.test.id + } + + extended_auditing_policy { + storage_account_access_key = azurerm_storage_account.test.primary_access_key + storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint + storage_account_access_key_is_secondary = true + retention_in_days = 6 + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, os.Getenv("ARM_CLIENT_ID")) +} diff --git a/azurerm/internal/services/mssql/mssql_server_security_alert_policy_resource.go b/azurerm/internal/services/mssql/mssql_server_security_alert_policy_resource.go index 4f2e9fc649fe..7d9de68d250a 100644 --- a/azurerm/internal/services/mssql/mssql_server_security_alert_policy_resource.go +++ b/azurerm/internal/services/mssql/mssql_server_security_alert_policy_resource.go @@ -8,23 +8,28 @@ import ( "github.com/Azure/azure-sdk-for-go/services/preview/sql/mgmt/v3.0/sql" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -// todo 3.0 - this may want to be put into the mssql_server resource now that it exists. +// TODO 3.0 - this may want to be put into the mssql_server resource now that it exists. 
-func resourceArmMssqlServerSecurityAlertPolicy() *schema.Resource { +func resourceMsSqlServerSecurityAlertPolicy() *schema.Resource { return &schema.Resource{ - Create: resourceArmMssqlServerSecurityAlertPolicyCreateUpdate, - Read: resourceArmMssqlServerSecurityAlertPolicyRead, - Update: resourceArmMssqlServerSecurityAlertPolicyCreateUpdate, - Delete: resourceArmMssqlServerSecurityAlertPolicyDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, + Create: resourceMsSqlServerSecurityAlertPolicyCreateUpdate, + Read: resourceMsSqlServerSecurityAlertPolicyRead, + Update: resourceMsSqlServerSecurityAlertPolicyCreateUpdate, + Delete: resourceMsSqlServerSecurityAlertPolicyDelete, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.ServerSecurityAlertPolicyID(id) + return err + }), Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -107,7 +112,7 @@ func resourceArmMssqlServerSecurityAlertPolicy() *schema.Resource { } } -func resourceArmMssqlServerSecurityAlertPolicyCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceMsSqlServerSecurityAlertPolicyCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSSQL.ServerSecurityAlertPoliciesClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -139,25 +144,22 @@ func resourceArmMssqlServerSecurityAlertPolicyCreateUpdate(d *schema.ResourceDat d.SetId(*result.ID) - return resourceArmMssqlServerSecurityAlertPolicyRead(d, meta) + return resourceMsSqlServerSecurityAlertPolicyRead(d, meta) } -func resourceArmMssqlServerSecurityAlertPolicyRead(d *schema.ResourceData, meta interface{}) error { +func resourceMsSqlServerSecurityAlertPolicyRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSSQL.ServerSecurityAlertPoliciesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() log.Printf("[INFO] reading mssql server security alert policy") - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.ServerSecurityAlertPolicyID(d.Id()) if err != nil { return err } - resourceGroupName := id.ResourceGroup - serverName := id.Path["servers"] - - result, err := client.Get(ctx, resourceGroupName, serverName) + result, err := client.Get(ctx, id.ResourceGroup, id.ServerName) if err != nil { if utils.ResponseWasNotFound(result.Response) { log.Printf("[WARN] mssql server security alert policy %v not found", id) @@ -168,8 +170,8 @@ func resourceArmMssqlServerSecurityAlertPolicyRead(d *schema.ResourceData, meta return fmt.Errorf("error making read request to mssql server security alert policy: %+v", err) } - d.Set("resource_group_name", resourceGroupName) - d.Set("server_name", serverName) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("server_name", id.ServerName) if props := result.SecurityAlertPolicyProperties; props != nil { d.Set("state", string(props.State)) @@ -216,37 +218,34 @@ func resourceArmMssqlServerSecurityAlertPolicyRead(d *schema.ResourceData, meta return nil } -func resourceArmMssqlServerSecurityAlertPolicyDelete(d *schema.ResourceData, meta interface{}) error { +func resourceMsSqlServerSecurityAlertPolicyDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSSQL.ServerSecurityAlertPoliciesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() log.Printf("[INFO] 
deleting mssql server security alert policy.") - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.ServerSecurityAlertPolicyID(d.Id()) if err != nil { return err } - resourceGroupName := id.ResourceGroup - serverName := id.Path["servers"] - disabledPolicy := sql.ServerSecurityAlertPolicy{ SecurityAlertPolicyProperties: &sql.SecurityAlertPolicyProperties{ State: sql.SecurityAlertPolicyStateDisabled, }, } - future, err := client.CreateOrUpdate(ctx, resourceGroupName, serverName, disabledPolicy) + future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.ServerName, disabledPolicy) if err != nil { return fmt.Errorf("error updataing mssql server security alert policy: %v", err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("error waiting for creation/update of mssql server security alert policy (server %q, resource group %q): %+v", serverName, resourceGroupName, err) + return fmt.Errorf("error waiting for creation/update of mssql server security alert policy (server %q, resource group %q): %+v", id.ServerName, id.ResourceGroup, err) } - if _, err = client.Get(ctx, resourceGroupName, serverName); err != nil { + if _, err = client.Get(ctx, id.ResourceGroup, id.ServerName); err != nil { return fmt.Errorf("error deleting mssql server security alert policy: %v", err) } diff --git a/azurerm/internal/services/mssql/mssql_server_security_alert_policy_resource_test.go b/azurerm/internal/services/mssql/mssql_server_security_alert_policy_resource_test.go new file mode 100644 index 000000000000..c350bb842120 --- /dev/null +++ b/azurerm/internal/services/mssql/mssql_server_security_alert_policy_resource_test.go @@ -0,0 +1,137 @@ +package mssql_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MsSqlServerSecurityAlertPolicyResource struct{} + +func TestAccMsSqlServerSecurityAlertPolicy_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_server_security_alert_policy", "test") + r := MsSqlServerSecurityAlertPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("storage_account_access_key"), + }) +} + +func TestAccMsSqlServerSecurityAlertPolicy_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_server_security_alert_policy", "test") + r := MsSqlServerSecurityAlertPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("storage_account_access_key"), + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("storage_account_access_key"), + }) +} + +func (MsSqlServerSecurityAlertPolicyResource) Exists(ctx context.Context, client 
*clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ServerSecurityAlertPolicyID(state.ID) + if err != nil { + return nil, err + } + + resp, err := client.MSSQL.ServerSecurityAlertPoliciesClient.Get(ctx, id.ResourceGroup, id.ServerName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return nil, fmt.Errorf("SQL Security Alert Policy for server %q (Resource Group %q) does not exist", id.ServerName, id.ResourceGroup) + } + return nil, fmt.Errorf("reading SQL Security Alert Policy for server %q (Resource Group %q): %v", id.ServerName, id.ResourceGroup, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (r MsSqlServerSecurityAlertPolicyResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_server_security_alert_policy" "test" { + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_sql_server.test.name + state = "Enabled" + storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint + storage_account_access_key = azurerm_storage_account.test.primary_access_key + retention_days = 20 + + disabled_alerts = [ + "Sql_Injection", + "Data_Exfiltration" + ] + +} +`, r.server(data)) +} + +func (r MsSqlServerSecurityAlertPolicyResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_server_security_alert_policy" "test" { + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_sql_server.test.name + state = "Enabled" + email_account_admins = true + retention_days = 30 +} +`, r.server(data)) +} + +func (MsSqlServerSecurityAlertPolicyResource) server(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-mssql-%[1]d" + location = "%[2]s" +} + +resource "azurerm_sql_server" "test" { + name = "acctestsqlserver%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + version = "12.0" + administrator_login = "mradministrator" + administrator_login_password = "thisIsDog11" +} + +resource "azurerm_storage_account" "test" { + name = "accsa%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "GRS" +} +`, data.RandomInteger, data.Locations.Primary) +} diff --git a/azurerm/internal/services/mssql/mssql_server_vulnerability_assessment_resource.go b/azurerm/internal/services/mssql/mssql_server_vulnerability_assessment_resource.go index c6e22a6a04cf..f96e06a896d0 100644 --- a/azurerm/internal/services/mssql/mssql_server_vulnerability_assessment_resource.go +++ b/azurerm/internal/services/mssql/mssql_server_vulnerability_assessment_resource.go @@ -8,21 +8,26 @@ import ( "github.com/Azure/azure-sdk-for-go/services/preview/sql/mgmt/v3.0/sql" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmMssqlServerVulnerabilityAssessment() *schema.Resource { +func resourceMsSqlServerVulnerabilityAssessment() *schema.Resource { return &schema.Resource{ - Create: resourceArmMssqlServerVulnerabilityAssessmentCreateUpdate, - Read: resourceArmMssqlServerVulnerabilityAssessmentRead, - Update: resourceArmMssqlServerVulnerabilityAssessmentCreateUpdate, - Delete: resourceArmMssqlServerVulnerabilityAssessmentDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, + Create: resourceMsSqlServerVulnerabilityAssessmentCreateUpdate, + Read: resourceMsSqlServerVulnerabilityAssessmentRead, + Update: resourceMsSqlServerVulnerabilityAssessmentCreateUpdate, + Delete: resourceMsSqlServerVulnerabilityAssessmentDelete, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.ServerVulnerabilityAssessmentID(id) + return err + }), Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -93,21 +98,19 @@ func resourceArmMssqlServerVulnerabilityAssessment() *schema.Resource { } } -func resourceArmMssqlServerVulnerabilityAssessmentCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceMsSqlServerVulnerabilityAssessmentCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSSQL.ServerVulnerabilityAssessmentsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - policyId, err := azure.ParseAzureResourceID(d.Get("server_security_alert_policy_id").(string)) + policyId, err := parse.ServerSecurityAlertPolicyID(d.Get("server_security_alert_policy_id").(string)) if err != nil { return err } - resourceGroupName := policyId.ResourceGroup - serverName := policyId.Path["servers"] policyClient := meta.(*clients.Client).MSSQL.ServerSecurityAlertPoliciesClient - policy, err := policyClient.Get(ctx, resourceGroupName, serverName) + policy, err := policyClient.Get(ctx, policyId.ResourceGroup, policyId.ServerName) if err != nil { return fmt.Errorf("Error retrieving Security Alert Policy: %+v", err) } @@ -139,36 +142,33 @@ func resourceArmMssqlServerVulnerabilityAssessmentCreateUpdate(d *schema.Resourc props.RecurringScans = expandRecurringScans(d) } - result, err := client.CreateOrUpdate(ctx, resourceGroupName, serverName, vulnerabilityAssessment) + result, err := client.CreateOrUpdate(ctx, policyId.ResourceGroup, policyId.ServerName, vulnerabilityAssessment) if err != nil { return fmt.Errorf("error updataing mssql server vulnerability assessment: %v", err) } if result.ID == nil { - return fmt.Errorf("error reading mssql server vulnerability assessment id (server %q, resource group %q)", serverName, resourceGroupName) + return fmt.Errorf("error reading mssql server vulnerability assessment id (server %q, resource group %q)", policyId.ServerName, policyId.ResourceGroup) } d.SetId(*result.ID) - return resourceArmMssqlServerVulnerabilityAssessmentRead(d, meta) + return resourceMsSqlServerVulnerabilityAssessmentRead(d, meta) } -func resourceArmMssqlServerVulnerabilityAssessmentRead(d *schema.ResourceData, meta interface{}) error { +func resourceMsSqlServerVulnerabilityAssessmentRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSSQL.ServerVulnerabilityAssessmentsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() log.Printf("[INFO] reading mssql server 
vulnerability assessment") - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.ServerVulnerabilityAssessmentID(d.Id()) if err != nil { return err } - resourceGroupName := id.ResourceGroup - serverName := id.Path["servers"] - - result, err := client.Get(ctx, resourceGroupName, serverName) + result, err := client.Get(ctx, id.ResourceGroup, id.ServerName) if err != nil { if utils.ResponseWasNotFound(result.Response) { log.Printf("[WARN] mssql server vulnerability assessment %v not found", id) @@ -180,7 +180,7 @@ func resourceArmMssqlServerVulnerabilityAssessmentRead(d *schema.ResourceData, m } policyClient := meta.(*clients.Client).MSSQL.ServerSecurityAlertPoliciesClient - policy, err := policyClient.Get(ctx, resourceGroupName, serverName) + policy, err := policyClient.Get(ctx, id.ResourceGroup, id.ServerName) if err != nil { return fmt.Errorf("Error retrieving Security Alert Policy by ID: %+v", err) } @@ -204,22 +204,19 @@ func resourceArmMssqlServerVulnerabilityAssessmentRead(d *schema.ResourceData, m return nil } -func resourceArmMssqlServerVulnerabilityAssessmentDelete(d *schema.ResourceData, meta interface{}) error { +func resourceMsSqlServerVulnerabilityAssessmentDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSSQL.ServerVulnerabilityAssessmentsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() log.Printf("[INFO] deleting mssql server vulnerability assessment.") - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.ServerVulnerabilityAssessmentID(d.Id()) if err != nil { return err } - resourceGroupName := id.ResourceGroup - serverName := id.Path["servers"] - - if _, err = client.Delete(ctx, resourceGroupName, serverName); err != nil { + if _, err = client.Delete(ctx, id.ResourceGroup, id.ServerName); err != nil { return fmt.Errorf("error deleting mssql server vulnerability assessment: %v", err) } diff --git a/azurerm/internal/services/mssql/mssql_server_vulnerability_assessment_resource_test.go b/azurerm/internal/services/mssql/mssql_server_vulnerability_assessment_resource_test.go new file mode 100644 index 000000000000..1cfca1f39003 --- /dev/null +++ b/azurerm/internal/services/mssql/mssql_server_vulnerability_assessment_resource_test.go @@ -0,0 +1,148 @@ +package mssql_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MsSqlServerVulnerabilityAssessmentResource struct{} + +func TestAccAzureRMMssqlServerVulnerabilityAssessment_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_server_vulnerability_assessment", "test") + r := MsSqlServerVulnerabilityAssessmentResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("storage_account_access_key"), + }) +} + +func TestAccAzureRMMssqlServerVulnerabilityAssessment_update(t *testing.T) { + 
data := acceptance.BuildTestData(t, "azurerm_mssql_server_vulnerability_assessment", "test") + r := MsSqlServerVulnerabilityAssessmentResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("storage_account_access_key"), + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("storage_account_access_key"), + }) +} + +func (MsSqlServerVulnerabilityAssessmentResource) Exists(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ServerVulnerabilityAssessmentID(state.ID) + if err != nil { + return nil, err + } + + resp, err := client.MSSQL.ServerVulnerabilityAssessmentsClient.Get(ctx, id.ResourceGroup, id.ServerName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return nil, fmt.Errorf("SQL Vulnerability Assessment for server %q (Resource Group %q) does not exist", id.ServerName, id.ResourceGroup) + } + + return nil, fmt.Errorf("reading SQL Vulnerability Assessment for server %q (Resource Group %q): %v", id.ServerName, id.ResourceGroup, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (r MsSqlServerVulnerabilityAssessmentResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_server_vulnerability_assessment" "test" { + server_security_alert_policy_id = azurerm_mssql_server_security_alert_policy.test.id + storage_container_path = "${azurerm_storage_account.test.primary_blob_endpoint}${azurerm_storage_container.test.name}/" + storage_account_access_key = azurerm_storage_account.test.primary_access_key +} +`, r.server(data)) +} + +func (r MsSqlServerVulnerabilityAssessmentResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_server_vulnerability_assessment" "test" { + server_security_alert_policy_id = azurerm_mssql_server_security_alert_policy.test.id + storage_container_path = "${azurerm_storage_account.test.primary_blob_endpoint}${azurerm_storage_container.test.name}/" + storage_account_access_key = azurerm_storage_account.test.primary_access_key + + recurring_scans { + enabled = true + email_subscription_admins = true + emails = [ + "email@example1.com", + "email@example2.com" + ] + } +} +`, r.server(data)) +} + +func (MsSqlServerVulnerabilityAssessmentResource) server(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-mssql-%[1]d" + location = "%[2]s" +} + +resource "azurerm_sql_server" "test" { + name = "acctestsqlserver%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + version = "12.0" + administrator_login = "mradministrator" + administrator_login_password = "thisIsDog11" +} + +resource "azurerm_storage_account" "test" { + name = "accsa%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = "%[2]s" + account_tier = "Standard" + account_replication_type = "GRS" +} + +resource "azurerm_storage_container" "test" { + name = "acctestsc%[1]d" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource "azurerm_mssql_server_security_alert_policy" "test" { + resource_group_name = azurerm_resource_group.test.name + 
server_name = azurerm_sql_server.test.name + state = "Enabled" +} +`, data.RandomInteger, data.Locations.Primary) +} diff --git a/azurerm/internal/services/mssql/mssql_virtual_machine_resource.go b/azurerm/internal/services/mssql/mssql_virtual_machine_resource.go index 04409e5c4b83..fe9419d7183b 100644 --- a/azurerm/internal/services/mssql/mssql_virtual_machine_resource.go +++ b/azurerm/internal/services/mssql/mssql_virtual_machine_resource.go @@ -10,9 +10,11 @@ import ( "github.com/hashicorp/go-azure-helpers/response" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" parseCompute "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" + computeValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/helper" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/validate" @@ -22,15 +24,15 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmMsSqlVirtualMachine() *schema.Resource { +func resourceMsSqlVirtualMachine() *schema.Resource { return &schema.Resource{ - Create: resourceArmMsSqlVirtualMachineCreateUpdate, - Read: resourceArmMsSqlVirtualMachineRead, - Update: resourceArmMsSqlVirtualMachineCreateUpdate, - Delete: resourceArmMsSqlVirtualMachineDelete, + Create: resourceMsSqlVirtualMachineCreateUpdate, + Read: resourceMsSqlVirtualMachineRead, + Update: resourceMsSqlVirtualMachineCreateUpdate, + Delete: resourceMsSqlVirtualMachineDelete, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.MssqlVmID(id) + _, err := parse.SqlVirtualMachineID(id) return err }), @@ -46,7 +48,7 @@ func resourceArmMsSqlVirtualMachine() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.VMID, + ValidateFunc: computeValidate.VirtualMachineID, }, "sql_license_type": { @@ -169,7 +171,7 @@ func resourceArmMsSqlVirtualMachine() *schema.Resource { Type: schema.TypeString, Optional: true, Sensitive: true, - ValidateFunc: validate.MsSqlVMLoginUserName, + ValidateFunc: validate.SqlVirtualMachineLoginUserName, }, "storage_configuration": { @@ -208,7 +210,7 @@ func resourceArmMsSqlVirtualMachine() *schema.Resource { } } -func resourceArmMsSqlVirtualMachineCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceMsSqlVirtualMachineCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSSQL.VirtualMachinesClient vmclient := meta.(*clients.Client).Compute.VMClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) @@ -248,8 +250,8 @@ func resourceArmMsSqlVirtualMachineCreateUpdate(d *schema.ResourceData, meta int VirtualMachineResourceID: utils.String(d.Get("virtual_machine_id").(string)), SQLServerLicenseType: sqlvirtualmachine.SQLServerLicenseType(d.Get("sql_license_type").(string)), SQLManagement: sqlvirtualmachine.Full, - AutoPatchingSettings: expandArmSqlVirtualMachineAutoPatchingSettings(d.Get("auto_patching").([]interface{})), - 
KeyVaultCredentialSettings: expandArmSqlVirtualMachineKeyVaultCredential(d.Get("key_vault_credential").([]interface{})), + AutoPatchingSettings: expandSqlVirtualMachineAutoPatchingSettings(d.Get("auto_patching").([]interface{})), + KeyVaultCredentialSettings: expandSqlVirtualMachineKeyVaultCredential(d.Get("key_vault_credential").([]interface{})), ServerConfigurationsManagementSettings: &sqlvirtualmachine.ServerConfigurationsManagementSettings{ AdditionalFeaturesServerConfigurations: &sqlvirtualmachine.AdditionalFeaturesServerConfigurations{ IsRServicesEnabled: utils.Bool(d.Get("r_services_enabled").(bool)), @@ -261,7 +263,7 @@ func resourceArmMsSqlVirtualMachineCreateUpdate(d *schema.ResourceData, meta int SQLAuthUpdateUserName: utils.String(d.Get("sql_connectivity_update_username").(string)), }, }, - StorageConfigurationSettings: expandArmSqlVirtualMachineStorageConfigurationSettings(d.Get("storage_configuration").([]interface{})), + StorageConfigurationSettings: expandSqlVirtualMachineStorageConfigurationSettings(d.Get("storage_configuration").([]interface{})), }, Tags: tags.Expand(d.Get("tags").(map[string]interface{})), } @@ -283,15 +285,15 @@ func resourceArmMsSqlVirtualMachineCreateUpdate(d *schema.ResourceData, meta int } d.SetId(*resp.ID) - return resourceArmMsSqlVirtualMachineRead(d, meta) + return resourceMsSqlVirtualMachineRead(d, meta) } -func resourceArmMsSqlVirtualMachineRead(d *schema.ResourceData, meta interface{}) error { +func resourceMsSqlVirtualMachineRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MSSQL.VirtualMachinesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.MssqlVmID(d.Id()) + id, err := parse.SqlVirtualMachineID(d.Id()) if err != nil { return err } @@ -309,11 +311,11 @@ func resourceArmMsSqlVirtualMachineRead(d *schema.ResourceData, meta interface{} if props := resp.Properties; props != nil { d.Set("virtual_machine_id", props.VirtualMachineResourceID) d.Set("sql_license_type", string(props.SQLServerLicenseType)) - if err := d.Set("auto_patching", flattenArmSqlVirtualMachineAutoPatching(props.AutoPatchingSettings)); err != nil { + if err := d.Set("auto_patching", flattenSqlVirtualMachineAutoPatching(props.AutoPatchingSettings)); err != nil { return fmt.Errorf("setting `auto_patching`: %+v", err) } - if err := d.Set("key_vault_credential", flattenArmSqlVirtualMachineKeyVaultCredential(props.KeyVaultCredentialSettings, d)); err != nil { + if err := d.Set("key_vault_credential", flattenSqlVirtualMachineKeyVaultCredential(props.KeyVaultCredentialSettings, d)); err != nil { return fmt.Errorf("setting `key_vault_credential`: %+v", err) } @@ -334,19 +336,19 @@ func resourceArmMsSqlVirtualMachineRead(d *schema.ResourceData, meta interface{} storageWorkloadType = string(props.ServerConfigurationsManagementSettings.SQLWorkloadTypeUpdateSettings.SQLWorkloadType) } - if err := d.Set("storage_configuration", flattenArmSqlVirtualMachineStorageConfigurationSettings(props.StorageConfigurationSettings, storageWorkloadType)); err != nil { + if err := d.Set("storage_configuration", flattenSqlVirtualMachineStorageConfigurationSettings(props.StorageConfigurationSettings, storageWorkloadType)); err != nil { return fmt.Errorf("error setting `storage_configuration`: %+v", err) } } return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmMsSqlVirtualMachineDelete(d *schema.ResourceData, meta interface{}) error { +func resourceMsSqlVirtualMachineDelete(d *schema.ResourceData, meta 
interface{}) error { client := meta.(*clients.Client).MSSQL.VirtualMachinesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.MssqlVmID(d.Id()) + id, err := parse.SqlVirtualMachineID(d.Id()) if err != nil { return err } @@ -365,7 +367,7 @@ func resourceArmMsSqlVirtualMachineDelete(d *schema.ResourceData, meta interface return nil } -func expandArmSqlVirtualMachineAutoPatchingSettings(input []interface{}) *sqlvirtualmachine.AutoPatchingSettings { +func expandSqlVirtualMachineAutoPatchingSettings(input []interface{}) *sqlvirtualmachine.AutoPatchingSettings { if len(input) == 0 { return nil } @@ -379,8 +381,8 @@ func expandArmSqlVirtualMachineAutoPatchingSettings(input []interface{}) *sqlvir } } -func flattenArmSqlVirtualMachineAutoPatching(autoPatching *sqlvirtualmachine.AutoPatchingSettings) []interface{} { - if autoPatching == nil || !*autoPatching.Enable { +func flattenSqlVirtualMachineAutoPatching(autoPatching *sqlvirtualmachine.AutoPatchingSettings) []interface{} { + if autoPatching == nil || autoPatching.Enable == nil || !*autoPatching.Enable { return []interface{}{} } @@ -403,7 +405,7 @@ func flattenArmSqlVirtualMachineAutoPatching(autoPatching *sqlvirtualmachine.Aut } } -func expandArmSqlVirtualMachineKeyVaultCredential(input []interface{}) *sqlvirtualmachine.KeyVaultCredentialSettings { +func expandSqlVirtualMachineKeyVaultCredential(input []interface{}) *sqlvirtualmachine.KeyVaultCredentialSettings { if len(input) == 0 { return nil } @@ -418,7 +420,7 @@ func expandArmSqlVirtualMachineKeyVaultCredential(input []interface{}) *sqlvirtu } } -func flattenArmSqlVirtualMachineKeyVaultCredential(keyVault *sqlvirtualmachine.KeyVaultCredentialSettings, d *schema.ResourceData) []interface{} { +func flattenSqlVirtualMachineKeyVaultCredential(keyVault *sqlvirtualmachine.KeyVaultCredentialSettings, d *schema.ResourceData) []interface{} { if keyVault == nil || !*keyVault.Enable { return []interface{}{} } @@ -464,7 +466,7 @@ func mssqlVMCredentialNameDiffSuppressFunc(_, old, new string, _ *schema.Resourc return false } -func expandArmSqlVirtualMachineStorageConfigurationSettings(input []interface{}) *sqlvirtualmachine.StorageConfigurationSettings { +func expandSqlVirtualMachineStorageConfigurationSettings(input []interface{}) *sqlvirtualmachine.StorageConfigurationSettings { if len(input) == 0 || input[0] == nil { return nil } @@ -473,13 +475,13 @@ func expandArmSqlVirtualMachineStorageConfigurationSettings(input []interface{}) return &sqlvirtualmachine.StorageConfigurationSettings{ DiskConfigurationType: sqlvirtualmachine.DiskConfigurationType(storageSettings["disk_type"].(string)), StorageWorkloadType: sqlvirtualmachine.StorageWorkloadType(storageSettings["storage_workload_type"].(string)), - SQLDataSettings: expandArmSqlVirtualMachineDataStorageSettings(storageSettings["data_settings"].([]interface{})), - SQLLogSettings: expandArmSqlVirtualMachineDataStorageSettings(storageSettings["log_settings"].([]interface{})), - SQLTempDbSettings: expandArmSqlVirtualMachineDataStorageSettings(storageSettings["temp_db_settings"].([]interface{})), + SQLDataSettings: expandSqlVirtualMachineDataStorageSettings(storageSettings["data_settings"].([]interface{})), + SQLLogSettings: expandSqlVirtualMachineDataStorageSettings(storageSettings["log_settings"].([]interface{})), + SQLTempDbSettings: expandSqlVirtualMachineDataStorageSettings(storageSettings["temp_db_settings"].([]interface{})), } } -func 
flattenArmSqlVirtualMachineStorageConfigurationSettings(input *sqlvirtualmachine.StorageConfigurationSettings, storageWorkloadType string) []interface{} { +func flattenSqlVirtualMachineStorageConfigurationSettings(input *sqlvirtualmachine.StorageConfigurationSettings, storageWorkloadType string) []interface{} { if input == nil { return []interface{}{} } @@ -488,26 +490,26 @@ func flattenArmSqlVirtualMachineStorageConfigurationSettings(input *sqlvirtualma map[string]interface{}{ "disk_type": string(input.DiskConfigurationType), "storage_workload_type": storageWorkloadType, - "data_settings": flattenArmSqlVirtualMachineStorageSettings(input.SQLDataSettings), - "log_settings": flattenArmSqlVirtualMachineStorageSettings(input.SQLLogSettings), - "temp_db_settings": flattenArmSqlVirtualMachineStorageSettings(input.SQLTempDbSettings), + "data_settings": flattenSqlVirtualMachineStorageSettings(input.SQLDataSettings), + "log_settings": flattenSqlVirtualMachineStorageSettings(input.SQLLogSettings), + "temp_db_settings": flattenSqlVirtualMachineStorageSettings(input.SQLTempDbSettings), }, } } -func expandArmSqlVirtualMachineDataStorageSettings(input []interface{}) *sqlvirtualmachine.SQLStorageSettings { +func expandSqlVirtualMachineDataStorageSettings(input []interface{}) *sqlvirtualmachine.SQLStorageSettings { if len(input) == 0 || input[0] == nil { return nil } dataStorageSettings := input[0].(map[string]interface{}) return &sqlvirtualmachine.SQLStorageSettings{ - Luns: expandArmSqlVirtualMachineStorageSettingsLuns(dataStorageSettings["luns"].([]interface{})), + Luns: expandSqlVirtualMachineStorageSettingsLuns(dataStorageSettings["luns"].([]interface{})), DefaultFilePath: utils.String(dataStorageSettings["default_file_path"].(string)), } } -func expandArmSqlVirtualMachineStorageSettingsLuns(input []interface{}) *[]int32 { +func expandSqlVirtualMachineStorageSettingsLuns(input []interface{}) *[]int32 { expandedLuns := make([]int32, len(input)) for i := range input { if input[i] != nil { @@ -518,7 +520,7 @@ func expandArmSqlVirtualMachineStorageSettingsLuns(input []interface{}) *[]int32 return &expandedLuns } -func flattenArmSqlVirtualMachineStorageSettings(input *sqlvirtualmachine.SQLStorageSettings) []interface{} { +func flattenSqlVirtualMachineStorageSettings(input *sqlvirtualmachine.SQLStorageSettings) []interface{} { if input == nil || input.Luns == nil { return []interface{}{} } diff --git a/azurerm/internal/services/mssql/mssql_virtual_machine_resource_test.go b/azurerm/internal/services/mssql/mssql_virtual_machine_resource_test.go new file mode 100644 index 000000000000..73d398cc841d --- /dev/null +++ b/azurerm/internal/services/mssql/mssql_virtual_machine_resource_test.go @@ -0,0 +1,577 @@ +package mssql_test + +import ( + "context" + "fmt" + "regexp" + "testing" + + "github.com/hashicorp/go-uuid" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MsSqlVirtualMachineResource struct{} + +func TestAccMsSqlVirtualMachine_basic(t *testing.T) { + data := 
acceptance.BuildTestData(t, "azurerm_mssql_virtual_machine", "test") + r := MsSqlVirtualMachineResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMsSqlVirtualMachine_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_virtual_machine", "test") + r := MsSqlVirtualMachineResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccMsSqlVirtualMachine_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_virtual_machine", "test") + r := MsSqlVirtualMachineResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("sql_connectivity_update_password", "sql_connectivity_update_username"), + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("sql_connectivity_update_password", "sql_connectivity_update_username"), + }) +} + +func TestAccMsSqlVirtualMachine_updateAutoPatching(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_virtual_machine", "test") + r := MsSqlVirtualMachineResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withAutoPatching(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.withAutoPatchingUpdated(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMsSqlVirtualMachine_updateKeyVault(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_virtual_machine", "test") + r := MsSqlVirtualMachineResource{} + value, err := uuid.GenerateUUID() + if err != nil { + t.Fatal(err) + } + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withKeyVault(data, value), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("r_services_enabled").MatchesRegex(regexp.MustCompile("/*:acctestkv")), + ), + }, + data.ImportStep("key_vault_credential.0.key_vault_url", "key_vault_credential.0.service_principal_name", "key_vault_credential.0.service_principal_secret"), + + { + Config: r.withKeyVaultUpdated(data, value), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("r_services_enabled").MatchesRegex(regexp.MustCompile("/*:acctestkv2")), + ), + }, + data.ImportStep("key_vault_credential.0.key_vault_url", "key_vault_credential.0.service_principal_name", "key_vault_credential.0.service_principal_secret"), + }) +} + +func TestAccMsSqlVirtualMachine_storageConfigurationSettings(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mssql_virtual_machine", "test") + r := MsSqlVirtualMachineResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.storageConfigurationSettings(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: 
r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (MsSqlVirtualMachineResource) Exists(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.SqlVirtualMachineID(state.ID) + if err != nil { + return nil, err + } + + resp, err := client.MSSQL.VirtualMachinesClient.Get(ctx, id.ResourceGroup, id.Name, "") + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return nil, fmt.Errorf("SQL Virtual Machine %q (Resource Group %q) does not exist", id.Name, id.ResourceGroup) + } + return nil, fmt.Errorf("reading SQL Virtual Machine %q (Resource Group %q): %v", id.Name, id.ResourceGroup, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (MsSqlVirtualMachineResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-mssql-%[1]d" + location = "%[2]s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctest-VN-%[1]d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctest-SN-%[1]d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.0.0/24" +} + +resource "azurerm_subnet_network_security_group_association" "test" { + subnet_id = azurerm_subnet.test.id + network_security_group_id = azurerm_network_security_group.test.id +} + +resource "azurerm_public_ip" "vm" { + name = "acctest-PIP-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Dynamic" +} + +resource "azurerm_network_security_group" "test" { + name = "acctest-NSG-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_network_security_rule" "RDPRule" { + name = "RDPRule" + resource_group_name = azurerm_resource_group.test.name + priority = 1000 + direction = "Inbound" + access = "Allow" + protocol = "Tcp" + source_port_range = "*" + destination_port_range = 3389 + source_address_prefix = "167.220.255.0/25" + destination_address_prefix = "*" + network_security_group_name = azurerm_network_security_group.test.name +} + +resource "azurerm_network_security_rule" "MSSQLRule" { + name = "MSSQLRule" + resource_group_name = azurerm_resource_group.test.name + priority = 1001 + direction = "Inbound" + access = "Allow" + protocol = "Tcp" + source_port_range = "*" + destination_port_range = 1433 + source_address_prefix = "167.220.255.0/25" + destination_address_prefix = "*" + network_security_group_name = azurerm_network_security_group.test.name +} + +resource "azurerm_network_interface" "test" { + name = "acctest-NIC-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + ip_configuration { + name = "testconfiguration1" + subnet_id = azurerm_subnet.test.id + private_ip_address_allocation = "Dynamic" + public_ip_address_id = azurerm_public_ip.vm.id + } +} + +resource "azurerm_virtual_machine" "test" { + name = "acctest-VM-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + network_interface_ids = 
[azurerm_network_interface.test.id] + vm_size = "Standard_F2s" + + storage_image_reference { + publisher = "MicrosoftSQLServer" + offer = "SQL2017-WS2016" + sku = "SQLDEV" + version = "latest" + } + + storage_os_disk { + name = "acctvm-%[1]dOSDisk" + caching = "ReadOnly" + create_option = "FromImage" + managed_disk_type = "Premium_LRS" + } + + os_profile { + computer_name = "winhost01" + admin_username = "testadmin" + admin_password = "Password1234!" + } + + os_profile_windows_config { + timezone = "Pacific Standard Time" + provision_vm_agent = true + enable_automatic_upgrades = true + } +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (r MsSqlVirtualMachineResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_virtual_machine" "test" { + virtual_machine_id = azurerm_virtual_machine.test.id + sql_license_type = "PAYG" +} +`, r.template(data)) +} + +func (r MsSqlVirtualMachineResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_virtual_machine" "import" { + virtual_machine_id = azurerm_mssql_virtual_machine.test.virtual_machine_id + sql_license_type = azurerm_mssql_virtual_machine.test.sql_license_type +} +`, r.basic(data)) +} + +func (r MsSqlVirtualMachineResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_virtual_machine" "test" { + virtual_machine_id = azurerm_virtual_machine.test.id + sql_license_type = "PAYG" + r_services_enabled = true + sql_connectivity_port = 1433 + sql_connectivity_type = "PRIVATE" + sql_connectivity_update_password = "Password1234!" + sql_connectivity_update_username = "sqllogin" +} +`, r.template(data)) +} + +func (r MsSqlVirtualMachineResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_virtual_machine" "test" { + virtual_machine_id = azurerm_virtual_machine.test.id + sql_license_type = "PAYG" + r_services_enabled = false + sql_connectivity_port = 1533 + sql_connectivity_type = "PUBLIC" + sql_connectivity_update_password = "Password12344321!" 
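+  # These connectivity values differ from the ones in r.complete so that the second
+  # step of TestAccMsSqlVirtualMachine_complete exercises an in-place update.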
+ sql_connectivity_update_username = "sqlloginupdate" +} +`, r.template(data)) +} + +func (r MsSqlVirtualMachineResource) withAutoPatching(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_virtual_machine" "test" { + virtual_machine_id = azurerm_virtual_machine.test.id + sql_license_type = "PAYG" + + auto_patching { + day_of_week = "Sunday" + maintenance_window_duration_in_minutes = 60 + maintenance_window_starting_hour = 2 + } +} +`, r.template(data)) +} + +func (r MsSqlVirtualMachineResource) withAutoPatchingUpdated(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_mssql_virtual_machine" "test" { + virtual_machine_id = azurerm_virtual_machine.test.id + sql_license_type = "PAYG" + + auto_patching { + day_of_week = "Monday" + maintenance_window_duration_in_minutes = 90 + maintenance_window_starting_hour = 4 + } +} +`, r.template(data)) +} + +func (r MsSqlVirtualMachineResource) withKeyVault(data acceptance.TestData, value string) string { + return fmt.Sprintf(` +%[1]s + +data "azurerm_client_config" "current" {} + +resource "azurerm_key_vault" "test" { + name = "acckv-%[2]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + + sku_name = "premium" + + access_policy { + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + + key_permissions = [ + "create", + "delete", + "get", + "update", + ] + + secret_permissions = [ + "get", + "delete", + "set", + ] + } + + tags = { + environment = "Production" + } +} + +resource "azurerm_key_vault_key" "generated" { + name = "key-%[2]d" + key_vault_id = azurerm_key_vault.test.id + key_type = "RSA" + key_size = 2048 + + key_opts = [ + "decrypt", + "encrypt", + "sign", + "unwrapKey", + "verify", + "wrapKey", + ] +} + +resource "azuread_application" "test" { + name = "acctestspa%[2]d" +} + +resource "azuread_service_principal" "test" { + application_id = azuread_application.test.application_id +} + +resource "azuread_service_principal_password" "test" { + service_principal_id = azuread_service_principal.test.id + value = "%[3]s" + end_date = "2021-01-01T01:02:03Z" +} + +resource "azurerm_mssql_virtual_machine" "test" { + virtual_machine_id = azurerm_virtual_machine.test.id + sql_license_type = "PAYG" + key_vault_credential { + name = "acctestkv" + key_vault_url = azurerm_key_vault_key.generated.id + service_principal_name = azuread_service_principal.test.display_name + service_principal_secret = azuread_service_principal_password.test.value + } +} +`, r.template(data), data.RandomInteger, value) +} + +func (r MsSqlVirtualMachineResource) withKeyVaultUpdated(data acceptance.TestData, value string) string { + return fmt.Sprintf(` +%[1]s + +data "azurerm_client_config" "current" {} + +resource "azurerm_key_vault" "test" { + name = "acckv-%[2]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + + sku_name = "premium" + + access_policy { + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + + key_permissions = [ + "create", + "delete", + "get", + "update", + ] + + secret_permissions = [ + "get", + "delete", + "set", + ] + } + + tags = { + environment = "Production" + } +} + +resource "azurerm_key_vault_key" "generated" { + 
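+  # Same key vault, key and service principal as withKeyVault; only the
+  # key_vault_credential name on the SQL virtual machine changes (acctestkv -> acctestkv2).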
name = "key-%[2]d" + key_vault_id = azurerm_key_vault.test.id + key_type = "RSA" + key_size = 2048 + + key_opts = [ + "decrypt", + "encrypt", + "sign", + "unwrapKey", + "verify", + "wrapKey", + ] +} + +resource "azuread_application" "test" { + name = "acctestspa%[2]d" +} + +resource "azuread_service_principal" "test" { + application_id = azuread_application.test.application_id +} + +resource "azuread_service_principal_password" "test" { + service_principal_id = azuread_service_principal.test.id + value = "%[3]s" + end_date = "2021-01-01T01:02:03Z" +} + +resource "azurerm_mssql_virtual_machine" "test" { + virtual_machine_id = azurerm_virtual_machine.test.id + sql_license_type = "PAYG" + key_vault_credential { + name = "acctestkv2" + key_vault_url = azurerm_key_vault_key.generated.id + service_principal_name = azuread_service_principal.test.display_name + service_principal_secret = azuread_service_principal_password.test.value + } +} +`, r.template(data), data.RandomInteger, value) +} + +func (r MsSqlVirtualMachineResource) storageConfigurationSettings(data acceptance.TestData) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_managed_disk" "test" { + name = "accmd-sqlvm-%[2]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + storage_account_type = "Standard_LRS" + create_option = "Empty" + disk_size_gb = 10 +} + +resource "azurerm_virtual_machine_data_disk_attachment" "test" { + managed_disk_id = azurerm_managed_disk.test.id + virtual_machine_id = azurerm_virtual_machine.test.id + lun = "0" + caching = "None" +} + +resource "azurerm_mssql_virtual_machine" "test" { + virtual_machine_id = azurerm_virtual_machine.test.id + sql_license_type = "PAYG" + + storage_configuration { + disk_type = "NEW" + storage_workload_type = "OLTP" + + data_settings { + luns = [0] + default_file_path = "F:\\SQLData" + } + + log_settings { + luns = [0] + default_file_path = "F:\\SQLLog" + } + + temp_db_settings { + luns = [0] + default_file_path = "F:\\SQLTemp" + } + } +} +`, r.template(data), data.RandomInteger) +} diff --git a/azurerm/internal/services/mssql/parse/database.go b/azurerm/internal/services/mssql/parse/database.go new file mode 100644 index 000000000000..0d55ca9f10b8 --- /dev/null +++ b/azurerm/internal/services/mssql/parse/database.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type DatabaseId struct { + SubscriptionId string + ResourceGroup string + ServerName string + Name string +} + +func NewDatabaseID(subscriptionId, resourceGroup, serverName, name string) DatabaseId { + return DatabaseId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ServerName: serverName, + Name: name, + } +} + +func (id DatabaseId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Server Name %q", id.ServerName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Database", segmentsStr) +} + +func (id DatabaseId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Sql/servers/%s/databases/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ServerName, id.Name) +} + +// DatabaseID parses a Database ID into an DatabaseId struct +func DatabaseID(input 
string) (*DatabaseId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := DatabaseId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ServerName, err = id.PopSegment("servers"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("databases"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/mssql/parse/database_extended_auditing_policy.go b/azurerm/internal/services/mssql/parse/database_extended_auditing_policy.go new file mode 100644 index 000000000000..5e9d1df2b489 --- /dev/null +++ b/azurerm/internal/services/mssql/parse/database_extended_auditing_policy.go @@ -0,0 +1,81 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type DatabaseExtendedAuditingPolicyId struct { + SubscriptionId string + ResourceGroup string + ServerName string + DatabaseName string + ExtendedAuditingSettingName string +} + +func NewDatabaseExtendedAuditingPolicyID(subscriptionId, resourceGroup, serverName, databaseName, extendedAuditingSettingName string) DatabaseExtendedAuditingPolicyId { + return DatabaseExtendedAuditingPolicyId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ServerName: serverName, + DatabaseName: databaseName, + ExtendedAuditingSettingName: extendedAuditingSettingName, + } +} + +func (id DatabaseExtendedAuditingPolicyId) String() string { + segments := []string{ + fmt.Sprintf("Extended Auditing Setting Name %q", id.ExtendedAuditingSettingName), + fmt.Sprintf("Database Name %q", id.DatabaseName), + fmt.Sprintf("Server Name %q", id.ServerName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Database Extended Auditing Policy", segmentsStr) +} + +func (id DatabaseExtendedAuditingPolicyId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Sql/servers/%s/databases/%s/extendedAuditingSettings/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ServerName, id.DatabaseName, id.ExtendedAuditingSettingName) +} + +// DatabaseExtendedAuditingPolicyID parses a DatabaseExtendedAuditingPolicy ID into an DatabaseExtendedAuditingPolicyId struct +func DatabaseExtendedAuditingPolicyID(input string) (*DatabaseExtendedAuditingPolicyId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := DatabaseExtendedAuditingPolicyId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ServerName, err = id.PopSegment("servers"); err != nil { + return nil, err + } + if resourceId.DatabaseName, err = id.PopSegment("databases"); err != nil { + return nil, err 
+ } + if resourceId.ExtendedAuditingSettingName, err = id.PopSegment("extendedAuditingSettings"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/mssql/parse/database_extended_auditing_policy_test.go b/azurerm/internal/services/mssql/parse/database_extended_auditing_policy_test.go new file mode 100644 index 000000000000..667efa6b6601 --- /dev/null +++ b/azurerm/internal/services/mssql/parse/database_extended_auditing_policy_test.go @@ -0,0 +1,144 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = DatabaseExtendedAuditingPolicyId{} + +func TestDatabaseExtendedAuditingPolicyIDFormatter(t *testing.T) { + actual := NewDatabaseExtendedAuditingPolicyID("12345678-1234-9876-4563-123456789012", "group1", "server1", "database1", "default").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/database1/extendedAuditingSettings/default" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestDatabaseExtendedAuditingPolicyID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *DatabaseExtendedAuditingPolicyId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/", + Error: true, + }, + + { + // missing value for ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/", + Error: true, + }, + + { + // missing DatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/", + Error: true, + }, + + { + // missing value for DatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/", + Error: true, + }, + + { + // missing ExtendedAuditingSettingName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/database1/", + Error: true, + }, + + { + // missing value for ExtendedAuditingSettingName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/database1/extendedAuditingSettings/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/database1/extendedAuditingSettings/default", + Expected: &DatabaseExtendedAuditingPolicyId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "group1", + ServerName: "server1", + DatabaseName: 
"database1", + ExtendedAuditingSettingName: "default", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.SQL/SERVERS/SERVER1/DATABASES/DATABASE1/EXTENDEDAUDITINGSETTINGS/DEFAULT", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := DatabaseExtendedAuditingPolicyID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ServerName != v.Expected.ServerName { + t.Fatalf("Expected %q but got %q for ServerName", v.Expected.ServerName, actual.ServerName) + } + if actual.DatabaseName != v.Expected.DatabaseName { + t.Fatalf("Expected %q but got %q for DatabaseName", v.Expected.DatabaseName, actual.DatabaseName) + } + if actual.ExtendedAuditingSettingName != v.Expected.ExtendedAuditingSettingName { + t.Fatalf("Expected %q but got %q for ExtendedAuditingSettingName", v.Expected.ExtendedAuditingSettingName, actual.ExtendedAuditingSettingName) + } + } +} diff --git a/azurerm/internal/services/mssql/parse/database_test.go b/azurerm/internal/services/mssql/parse/database_test.go new file mode 100644 index 000000000000..ee1313115270 --- /dev/null +++ b/azurerm/internal/services/mssql/parse/database_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = DatabaseId{} + +func TestDatabaseIDFormatter(t *testing.T) { + actual := NewDatabaseID("12345678-1234-9876-4563-123456789012", "group1", "server1", "database1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/database1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestDatabaseID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *DatabaseId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/", + Error: true, + }, + + { + // missing value for ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/", + Error: true, + }, + + { + // missing value for Name + 
Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/database1", + Expected: &DatabaseId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "group1", + ServerName: "server1", + Name: "database1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.SQL/SERVERS/SERVER1/DATABASES/DATABASE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := DatabaseID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ServerName != v.Expected.ServerName { + t.Fatalf("Expected %q but got %q for ServerName", v.Expected.ServerName, actual.ServerName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/mssql/parse/database_vulnerability_assessment_rule_baseline.go b/azurerm/internal/services/mssql/parse/database_vulnerability_assessment_rule_baseline.go new file mode 100644 index 000000000000..938de0b5dea3 --- /dev/null +++ b/azurerm/internal/services/mssql/parse/database_vulnerability_assessment_rule_baseline.go @@ -0,0 +1,93 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type DatabaseVulnerabilityAssessmentRuleBaselineId struct { + SubscriptionId string + ResourceGroup string + ServerName string + DatabaseName string + VulnerabilityAssessmentName string + RuleName string + BaselineName string +} + +func NewDatabaseVulnerabilityAssessmentRuleBaselineID(subscriptionId, resourceGroup, serverName, databaseName, vulnerabilityAssessmentName, ruleName, baselineName string) DatabaseVulnerabilityAssessmentRuleBaselineId { + return DatabaseVulnerabilityAssessmentRuleBaselineId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ServerName: serverName, + DatabaseName: databaseName, + VulnerabilityAssessmentName: vulnerabilityAssessmentName, + RuleName: ruleName, + BaselineName: baselineName, + } +} + +func (id DatabaseVulnerabilityAssessmentRuleBaselineId) String() string { + segments := []string{ + fmt.Sprintf("Baseline Name %q", id.BaselineName), + fmt.Sprintf("Rule Name %q", id.RuleName), + fmt.Sprintf("Vulnerability Assessment Name %q", id.VulnerabilityAssessmentName), + fmt.Sprintf("Database Name %q", id.DatabaseName), + fmt.Sprintf("Server Name %q", id.ServerName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Database Vulnerability Assessment Rule Baseline", segmentsStr) +} + +func (id 
DatabaseVulnerabilityAssessmentRuleBaselineId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Sql/servers/%s/databases/%s/vulnerabilityAssessments/%s/rules/%s/baselines/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ServerName, id.DatabaseName, id.VulnerabilityAssessmentName, id.RuleName, id.BaselineName) +} + +// DatabaseVulnerabilityAssessmentRuleBaselineID parses a DatabaseVulnerabilityAssessmentRuleBaseline ID into an DatabaseVulnerabilityAssessmentRuleBaselineId struct +func DatabaseVulnerabilityAssessmentRuleBaselineID(input string) (*DatabaseVulnerabilityAssessmentRuleBaselineId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := DatabaseVulnerabilityAssessmentRuleBaselineId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ServerName, err = id.PopSegment("servers"); err != nil { + return nil, err + } + if resourceId.DatabaseName, err = id.PopSegment("databases"); err != nil { + return nil, err + } + if resourceId.VulnerabilityAssessmentName, err = id.PopSegment("vulnerabilityAssessments"); err != nil { + return nil, err + } + if resourceId.RuleName, err = id.PopSegment("rules"); err != nil { + return nil, err + } + if resourceId.BaselineName, err = id.PopSegment("baselines"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/mssql/parse/database_vulnerability_assessment_rule_baseline_test.go b/azurerm/internal/services/mssql/parse/database_vulnerability_assessment_rule_baseline_test.go new file mode 100644 index 000000000000..6202b283bdb4 --- /dev/null +++ b/azurerm/internal/services/mssql/parse/database_vulnerability_assessment_rule_baseline_test.go @@ -0,0 +1,176 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = DatabaseVulnerabilityAssessmentRuleBaselineId{} + +func TestDatabaseVulnerabilityAssessmentRuleBaselineIDFormatter(t *testing.T) { + actual := NewDatabaseVulnerabilityAssessmentRuleBaselineID("12345678-1234-9876-4563-123456789012", "group1", "server1", "database1", "default", "rule1", "baseline1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/database1/vulnerabilityAssessments/default/rules/rule1/baselines/baseline1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestDatabaseVulnerabilityAssessmentRuleBaselineID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *DatabaseVulnerabilityAssessmentRuleBaselineId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing 
value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/", + Error: true, + }, + + { + // missing value for ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/", + Error: true, + }, + + { + // missing DatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/", + Error: true, + }, + + { + // missing value for DatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/", + Error: true, + }, + + { + // missing VulnerabilityAssessmentName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/database1/", + Error: true, + }, + + { + // missing value for VulnerabilityAssessmentName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/database1/vulnerabilityAssessments/", + Error: true, + }, + + { + // missing RuleName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/database1/vulnerabilityAssessments/default/", + Error: true, + }, + + { + // missing value for RuleName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/database1/vulnerabilityAssessments/default/rules/", + Error: true, + }, + + { + // missing BaselineName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/database1/vulnerabilityAssessments/default/rules/rule1/", + Error: true, + }, + + { + // missing value for BaselineName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/database1/vulnerabilityAssessments/default/rules/rule1/baselines/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/database1/vulnerabilityAssessments/default/rules/rule1/baselines/baseline1", + Expected: &DatabaseVulnerabilityAssessmentRuleBaselineId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "group1", + ServerName: "server1", + DatabaseName: "database1", + VulnerabilityAssessmentName: "default", + RuleName: "rule1", + BaselineName: "baseline1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.SQL/SERVERS/SERVER1/DATABASES/DATABASE1/VULNERABILITYASSESSMENTS/DEFAULT/RULES/RULE1/BASELINES/BASELINE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := DatabaseVulnerabilityAssessmentRuleBaselineID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, 
actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ServerName != v.Expected.ServerName { + t.Fatalf("Expected %q but got %q for ServerName", v.Expected.ServerName, actual.ServerName) + } + if actual.DatabaseName != v.Expected.DatabaseName { + t.Fatalf("Expected %q but got %q for DatabaseName", v.Expected.DatabaseName, actual.DatabaseName) + } + if actual.VulnerabilityAssessmentName != v.Expected.VulnerabilityAssessmentName { + t.Fatalf("Expected %q but got %q for VulnerabilityAssessmentName", v.Expected.VulnerabilityAssessmentName, actual.VulnerabilityAssessmentName) + } + if actual.RuleName != v.Expected.RuleName { + t.Fatalf("Expected %q but got %q for RuleName", v.Expected.RuleName, actual.RuleName) + } + if actual.BaselineName != v.Expected.BaselineName { + t.Fatalf("Expected %q but got %q for BaselineName", v.Expected.BaselineName, actual.BaselineName) + } + } +} diff --git a/azurerm/internal/services/mssql/parse/elastic_pool.go b/azurerm/internal/services/mssql/parse/elastic_pool.go new file mode 100644 index 000000000000..16bcd11ce1c1 --- /dev/null +++ b/azurerm/internal/services/mssql/parse/elastic_pool.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ElasticPoolId struct { + SubscriptionId string + ResourceGroup string + ServerName string + Name string +} + +func NewElasticPoolID(subscriptionId, resourceGroup, serverName, name string) ElasticPoolId { + return ElasticPoolId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ServerName: serverName, + Name: name, + } +} + +func (id ElasticPoolId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Server Name %q", id.ServerName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Elastic Pool", segmentsStr) +} + +func (id ElasticPoolId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Sql/servers/%s/elasticPools/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ServerName, id.Name) +} + +// ElasticPoolID parses a ElasticPool ID into an ElasticPoolId struct +func ElasticPoolID(input string) (*ElasticPoolId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ElasticPoolId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ServerName, err = id.PopSegment("servers"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("elasticPools"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/mssql/parse/elastic_pool_test.go b/azurerm/internal/services/mssql/parse/elastic_pool_test.go new file mode 100644 index 000000000000..967a29e494d1 --- /dev/null +++ 
b/azurerm/internal/services/mssql/parse/elastic_pool_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ElasticPoolId{} + +func TestElasticPoolIDFormatter(t *testing.T) { + actual := NewElasticPoolID("12345678-1234-9876-4563-123456789012", "group1", "server1", "pool1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/elasticPools/pool1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestElasticPoolID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ElasticPoolId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/", + Error: true, + }, + + { + // missing value for ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/elasticPools/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/elasticPools/pool1", + Expected: &ElasticPoolId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "group1", + ServerName: "server1", + Name: "pool1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.SQL/SERVERS/SERVER1/ELASTICPOOLS/POOL1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ElasticPoolID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ServerName != v.Expected.ServerName { + t.Fatalf("Expected %q but got %q for ServerName", v.Expected.ServerName, actual.ServerName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/mssql/parse/mssql.go 
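For illustration only: a minimal, hypothetical example test showing how the generated ElasticPool ID helpers above fit together. The test name is invented here, and the sketch assumes it sits alongside these files in the parse package; it is not part of this change set.

package parse

import "testing"

// Hypothetical example (not part of this change set): round-trips an elastic
// pool ID through the generated formatter and parser.
func TestElasticPoolIDRoundTripExample(t *testing.T) {
	id := NewElasticPoolID("12345678-1234-9876-4563-123456789012", "group1", "server1", "pool1")

	// ID() renders the canonical Azure resource ID string.
	raw := id.ID()

	// ElasticPoolID() parses it back into the typed form.
	parsed, err := ElasticPoolID(raw)
	if err != nil {
		t.Fatalf("parsing %q: %s", raw, err)
	}

	if parsed.Name != "pool1" || parsed.ServerName != "server1" {
		t.Fatalf("unexpected result: %s", parsed.String())
	}

	// String() produces the human-readable form, e.g.
	// `Elastic Pool: (Name "pool1" / Server Name "server1" / Resource Group "group1")`.
	t.Log(parsed.String())
}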
b/azurerm/internal/services/mssql/parse/mssql.go deleted file mode 100644 index fe715f64a2ae..000000000000 --- a/azurerm/internal/services/mssql/parse/mssql.go +++ /dev/null @@ -1,270 +0,0 @@ -package parse - -import ( - "fmt" - "strings" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type MsSqlDatabaseId struct { - Name string - MsSqlServer string - ResourceGroup string -} - -type MsSqlServerId struct { - Name string - ResourceGroup string -} - -type MsSqlElasticPoolId struct { - Name string - MsSqlServer string - ResourceGroup string -} - -type MsSqlDatabaseExtendedAuditingPolicyId struct { - MsDBName string - MsSqlServer string - ResourceGroup string -} - -type MsSqlServerExtendedAuditingPolicyId struct { - MsSqlServer string - ResourceGroup string -} - -type MsSqlRestorableDBId struct { - Name string - MsSqlServer string - ResourceGroup string - RestoreName string -} - -type MsSqlRecoverableDBId struct { - Name string - MsSqlServer string - ResourceGroup string -} - -func NewMsSqlDatabaseID(resourceGroup, msSqlServer, name string) MsSqlDatabaseId { - return MsSqlDatabaseId{ - ResourceGroup: resourceGroup, - MsSqlServer: msSqlServer, - Name: name, - } -} - -func (id MsSqlDatabaseId) ID(subscriptionId string) string { - return fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Sql/servers/%s/databases/%s", subscriptionId, id.ResourceGroup, id.MsSqlServer, id.Name) -} - -func MsSqlDatabaseID(input string) (*MsSqlDatabaseId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("Unable to parse MsSql Database ID %q: %+v", input, err) - } - - database := MsSqlDatabaseId{ - ResourceGroup: id.ResourceGroup, - } - - if database.MsSqlServer, err = id.PopSegment("servers"); err != nil { - return nil, err - } - - if database.Name, err = id.PopSegment("databases"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &database, nil -} - -func MsSqlServerID(input string) (*MsSqlServerId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("Unable to parse MsSql Server ID %q: %+v", input, err) - } - - server := MsSqlServerId{ - ResourceGroup: id.ResourceGroup, - } - - if server.Name, err = id.PopSegment("servers"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &server, nil -} - -func MSSqlElasticPoolID(input string) (*MsSqlElasticPoolId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("Unable to parse MsSql Elastic Pool ID %q: %+v", input, err) - } - - elasticPool := MsSqlElasticPoolId{ - ResourceGroup: id.ResourceGroup, - } - - if elasticPool.MsSqlServer, err = id.PopSegment("servers"); err != nil { - return nil, err - } - - if elasticPool.Name, err = id.PopSegment("elasticPools"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &elasticPool, nil -} - -type MssqlVmId struct { - ResourceGroup string - Name string -} - -func MssqlVmID(input string) (*MssqlVmId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Microsoft Sql VM ID %q: %+v", input, err) - } - - sqlvm := MssqlVmId{ - ResourceGroup: id.ResourceGroup, - } - - if sqlvm.Name, err = id.PopSegment("sqlVirtualMachines"); err != 
nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &sqlvm, nil -} - -func MssqlDatabaseExtendedAuditingPolicyID(input string) (*MsSqlDatabaseExtendedAuditingPolicyId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Microsoft Sql Database Extended Auditing Policy %q: %+v", input, err) - } - - sqlDatabaseExtendedAuditingPolicyId := MsSqlDatabaseExtendedAuditingPolicyId{ - ResourceGroup: id.ResourceGroup, - } - - if sqlDatabaseExtendedAuditingPolicyId.MsSqlServer, err = id.PopSegment("servers"); err != nil { - return nil, err - } - - if sqlDatabaseExtendedAuditingPolicyId.MsDBName, err = id.PopSegment("databases"); err != nil { - return nil, err - } - - if _, err = id.PopSegment("extendedAuditingSettings"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &sqlDatabaseExtendedAuditingPolicyId, nil -} - -func MssqlServerExtendedAuditingPolicyID(input string) (*MsSqlServerExtendedAuditingPolicyId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Microsoft Sql Server Extended Auditing Policy %q: %+v", input, err) - } - - sqlServerExtendedAuditingPolicyId := MsSqlServerExtendedAuditingPolicyId{ - ResourceGroup: id.ResourceGroup, - } - - if sqlServerExtendedAuditingPolicyId.MsSqlServer, err = id.PopSegment("servers"); err != nil { - return nil, err - } - - if _, err = id.PopSegment("extendedAuditingSettings"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &sqlServerExtendedAuditingPolicyId, nil -} - -func MssqlRestorableDBID(input string) (*MsSqlRestorableDBId, error) { - inputList := strings.Split(input, ",") - - if len(inputList) != 2 { - return nil, fmt.Errorf("[ERROR] Unable to parse Microsoft Sql Restorable DB ID %q, please refer to '/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1/restorableDroppedDatabases/sqlDB1,000000000000000000'", input) - } - - restorableDBId := MsSqlRestorableDBId{ - RestoreName: inputList[1], - } - - id, err := azure.ParseAzureResourceID(inputList[0]) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Microsoft Sql Restorable DB ID %q: %+v", input, err) - } - - restorableDBId.ResourceGroup = id.ResourceGroup - - if restorableDBId.MsSqlServer, err = id.PopSegment("servers"); err != nil { - return nil, err - } - - if restorableDBId.Name, err = id.PopSegment("restorableDroppedDatabases"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(inputList[0]); err != nil { - return nil, err - } - - return &restorableDBId, nil -} - -func MssqlRecoverableDBID(input string) (*MsSqlRecoverableDBId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Microsoft Sql Recoverable DB ID %q: %+v", input, err) - } - - recoverableDBId := MsSqlRecoverableDBId{ - ResourceGroup: id.ResourceGroup, - } - - if recoverableDBId.MsSqlServer, err = id.PopSegment("servers"); err != nil { - return nil, err - } - - if recoverableDBId.Name, err = id.PopSegment("recoverabledatabases"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &recoverableDBId, nil -} diff 
--git a/azurerm/internal/services/mssql/parse/mssql_test.go b/azurerm/internal/services/mssql/parse/mssql_test.go deleted file mode 100644 index 09b6b45a950f..000000000000 --- a/azurerm/internal/services/mssql/parse/mssql_test.go +++ /dev/null @@ -1,575 +0,0 @@ -package parse - -import ( - "testing" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" -) - -var _ resourceid.Formatter = MsSqlDatabaseId{} - -func TestMsSqlDatabaseID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *MsSqlDatabaseId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Sql Server Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/", - Expected: nil, - }, - { - Name: "Missing Sql Database", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1", - Expected: nil, - }, - { - Name: "Missing Sql Database Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1/databases", - Expected: nil, - }, - { - Name: "Sql Database ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1/databases/sqlDB1", - Expected: &MsSqlDatabaseId{ - Name: "sqlDB1", - MsSqlServer: "sqlServer1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1/Databases/sqlDB1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := MsSqlDatabaseID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.MsSqlServer != v.Expected.MsSqlServer { - t.Fatalf("Expected %q but got %q for Sql Server", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} - -func TestMsSqlServerID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *MsSqlServerId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Sql Server Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/", - 
Expected: nil, - }, - { - Name: "Sql Server", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1", - Expected: &MsSqlServerId{ - Name: "sqlServer1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/Servers/sqlServer1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := MsSqlServerID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} - -func TestMsSqlVmID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *MssqlVmId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Mssql VM Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/", - Expected: nil, - }, - { - Name: "Mssql VM ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/mssqlvm1", - Expected: &MssqlVmId{ - Name: "mssqlvm1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.SqlVirtualMachine/SqlVirtualMachines/mssqlvm1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := MssqlVmID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} - -func TestMssqlDatabaseExtendedAuditingPolicy(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *MsSqlDatabaseExtendedAuditingPolicyId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Sql Server Value", - Input: 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/", - Expected: nil, - }, - { - Name: "Missing Sql Database", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1", - Expected: nil, - }, - { - Name: "Missing Sql Database Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1/databases", - Expected: nil, - }, - { - Name: "Missing Extended Auditing Policy", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1/databases/db1", - Expected: nil, - }, - { - Name: "Missing Extended Auditing Policy Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1/databases/db1/extendedAuditingSettings", - Expected: nil, - }, - { - Name: "Extended Auditing Policy", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1/databases/db1/extendedAuditingSettings/default", - Expected: &MsSqlDatabaseExtendedAuditingPolicyId{ - ResourceGroup: "resGroup1", - MsSqlServer: "sqlServer1", - MsDBName: "db1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1/databases/db1/ExtendedAuditingSettings/default", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := MssqlDatabaseExtendedAuditingPolicyID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.MsDBName != v.Expected.MsDBName { - t.Fatalf("Expected %q but got %q for DB Name", v.Expected.MsDBName, actual.MsDBName) - } - - if actual.MsSqlServer != v.Expected.MsSqlServer { - t.Fatalf("Expected %q but got %q for Server Name", v.Expected.MsSqlServer, actual.MsSqlServer) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} - -func TestMssqlServerExtendedAuditingPolicy(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *MsSqlServerExtendedAuditingPolicyId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Sql Server Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/", - Expected: nil, - }, - { - Name: "Missing Extended Auditing Policy", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1", - Expected: nil, - }, - { - Name: "Missing Extended Auditing Policy Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1/extendedAuditingSettings", - Expected: 
nil, - }, - { - Name: "Extended Auditing Policy", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1/extendedAuditingSettings/default", - Expected: &MsSqlServerExtendedAuditingPolicyId{ - ResourceGroup: "resGroup1", - MsSqlServer: "sqlServer1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1/ExtendedAuditingSettings/default", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := MssqlServerExtendedAuditingPolicyID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.MsSqlServer != v.Expected.MsSqlServer { - t.Fatalf("Expected %q but got %q for Server Name", v.Expected.MsSqlServer, actual.MsSqlServer) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} - -func TestMsSqlRestoreDBID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *MsSqlRestorableDBId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Restore Name", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000,000000000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/,000000000000000000", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/,000000000000000000", - Expected: nil, - }, - { - Name: "Missing Sql Server Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/,000000000000000000", - Expected: nil, - }, - { - Name: "Missing Sql Restorable Database", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1,000000000000000000", - Expected: nil, - }, - { - Name: "Missing Sql Restorable Database Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1/restorableDroppedDatabases,000000000000000000", - Expected: nil, - }, - { - Name: "Sql Restorable Database ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1/restorableDroppedDatabases/sqlDB1,000000000000000000", - Expected: &MsSqlRestorableDBId{ - Name: "sqlDB1", - MsSqlServer: "sqlServer1", - ResourceGroup: "resGroup1", - RestoreName: "000000000000000000", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1/RestorableDroppedDatabases/sqlDB1,000000000000000000", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := MssqlRestorableDBID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.RestoreName != v.Expected.RestoreName { - t.Fatalf("Expected %q but got %q for Restore 
Name", v.Expected.Name, actual.Name) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.MsSqlServer != v.Expected.MsSqlServer { - t.Fatalf("Expected %q but got %q for Sql Server", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} - -func TestMssqlRecoverableDBID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *MsSqlRecoverableDBId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Sql Server Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/", - Expected: nil, - }, - { - Name: "Missing Sql Recoverable Database", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1", - Expected: nil, - }, - { - Name: "Missing Sql Recoverable Database Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1/recoverabledatabases", - Expected: nil, - }, - { - Name: "Sql Database ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1/recoverabledatabases/sqlDB1", - Expected: &MsSqlRecoverableDBId{ - Name: "sqlDB1", - MsSqlServer: "sqlServer1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1/Recoverabledatabases/sqlDB1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := MssqlRecoverableDBID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.MsSqlServer != v.Expected.MsSqlServer { - t.Fatalf("Expected %q but got %q for Sql Server", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/mssql/parse/recoverable_database.go b/azurerm/internal/services/mssql/parse/recoverable_database.go new file mode 100644 index 000000000000..4a4fcf1d3a05 --- /dev/null +++ b/azurerm/internal/services/mssql/parse/recoverable_database.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type RecoverableDatabaseId struct { + SubscriptionId string + ResourceGroup string + ServerName string + Name string +} + +func 
NewRecoverableDatabaseID(subscriptionId, resourceGroup, serverName, name string) RecoverableDatabaseId { + return RecoverableDatabaseId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ServerName: serverName, + Name: name, + } +} + +func (id RecoverableDatabaseId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Server Name %q", id.ServerName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Recoverable Database", segmentsStr) +} + +func (id RecoverableDatabaseId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Sql/servers/%s/recoverabledatabases/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ServerName, id.Name) +} + +// RecoverableDatabaseID parses a RecoverableDatabase ID into an RecoverableDatabaseId struct +func RecoverableDatabaseID(input string) (*RecoverableDatabaseId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := RecoverableDatabaseId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ServerName, err = id.PopSegment("servers"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("recoverabledatabases"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/mssql/parse/recoverable_database_test.go b/azurerm/internal/services/mssql/parse/recoverable_database_test.go new file mode 100644 index 000000000000..adf8627d75e5 --- /dev/null +++ b/azurerm/internal/services/mssql/parse/recoverable_database_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = RecoverableDatabaseId{} + +func TestRecoverableDatabaseIDFormatter(t *testing.T) { + actual := NewRecoverableDatabaseID("12345678-1234-9876-4563-123456789012", "group1", "server1", "database1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/recoverabledatabases/database1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestRecoverableDatabaseID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *RecoverableDatabaseId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/", + Error: true, + }, 
+ + { + // missing value for ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/recoverabledatabases/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/recoverabledatabases/database1", + Expected: &RecoverableDatabaseId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "group1", + ServerName: "server1", + Name: "database1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.SQL/SERVERS/SERVER1/RECOVERABLEDATABASES/DATABASE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := RecoverableDatabaseID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ServerName != v.Expected.ServerName { + t.Fatalf("Expected %q but got %q for ServerName", v.Expected.ServerName, actual.ServerName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/mssql/parse/restorable_dropped_database.go b/azurerm/internal/services/mssql/parse/restorable_dropped_database.go new file mode 100644 index 000000000000..5e4b2ce54578 --- /dev/null +++ b/azurerm/internal/services/mssql/parse/restorable_dropped_database.go @@ -0,0 +1,48 @@ +package parse + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type RestorableDroppedDatabaseId struct { + Name string + MsSqlServer string + ResourceGroup string + RestoreName string +} + +func RestorableDroppedDatabaseID(input string) (*RestorableDroppedDatabaseId, error) { + inputList := strings.Split(input, ",") + + if len(inputList) != 2 { + return nil, fmt.Errorf("[ERROR] Unable to parse Microsoft Sql Restorable DB ID %q, please refer to '/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1/restorableDroppedDatabases/sqlDB1,000000000000000000'", input) + } + + restorableDBId := RestorableDroppedDatabaseId{ + RestoreName: inputList[1], + } + + id, err := azure.ParseAzureResourceID(inputList[0]) + if err != nil { + return nil, fmt.Errorf("[ERROR] Unable to parse Microsoft Sql Restorable DB ID %q: %+v", input, err) + } + + restorableDBId.ResourceGroup = id.ResourceGroup + + if restorableDBId.MsSqlServer, err = id.PopSegment("servers"); err != nil { + return nil, err + } + + if restorableDBId.Name, err = id.PopSegment("restorableDroppedDatabases"); err != nil { + 
return nil, err + } + + if err := id.ValidateNoEmptySegments(inputList[0]); err != nil { + return nil, err + } + + return &restorableDBId, nil +} diff --git a/azurerm/internal/services/mssql/parse/restorable_dropped_database_test.go b/azurerm/internal/services/mssql/parse/restorable_dropped_database_test.go new file mode 100644 index 000000000000..0d67fd8380c7 --- /dev/null +++ b/azurerm/internal/services/mssql/parse/restorable_dropped_database_test.go @@ -0,0 +1,96 @@ +package parse + +import "testing" + +func TestMsSqlRestoreDBID(t *testing.T) { + testData := []struct { + Name string + Input string + Expected *RestorableDroppedDatabaseId + }{ + { + Name: "Empty", + Input: "", + Expected: nil, + }, + { + Name: "No Restore Name", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + Expected: nil, + }, + { + Name: "No Resource Groups Segment", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000,000000000000000000", + Expected: nil, + }, + { + Name: "No Resource Groups Value", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/,000000000000000000", + Expected: nil, + }, + { + Name: "Resource Group ID", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/,000000000000000000", + Expected: nil, + }, + { + Name: "Missing Sql Server Value", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/,000000000000000000", + Expected: nil, + }, + { + Name: "Missing Sql Restorable Database", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1,000000000000000000", + Expected: nil, + }, + { + Name: "Missing Sql Restorable Database Value", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1/restorableDroppedDatabases,000000000000000000", + Expected: nil, + }, + { + Name: "Sql Restorable Database ID", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1/restorableDroppedDatabases/sqlDB1,000000000000000000", + Expected: &RestorableDroppedDatabaseId{ + Name: "sqlDB1", + MsSqlServer: "sqlServer1", + ResourceGroup: "resGroup1", + RestoreName: "000000000000000000", + }, + }, + { + Name: "Wrong Casing", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1/RestorableDroppedDatabases/sqlDB1,000000000000000000", + Expected: nil, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Name) + + actual, err := RestorableDroppedDatabaseID(v.Input) + if err != nil { + if v.Expected == nil { + continue + } + + t.Fatalf("Expected a value but got an error: %s", err) + } + + if actual.RestoreName != v.Expected.RestoreName { + t.Fatalf("Expected %q but got %q for Restore Name", v.Expected.RestoreName, actual.RestoreName) + } + + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + + if actual.MsSqlServer != v.Expected.MsSqlServer { + t.Fatalf("Expected %q but got %q for Sql Server", v.Expected.MsSqlServer, actual.MsSqlServer) + } + + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) + } + } +} diff --git a/azurerm/internal/services/mssql/parse/server.go b/azurerm/internal/services/mssql/parse/server.go new file mode
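For illustration only: a hypothetical example test (same placement assumption as the sketch above) highlighting the composite format RestorableDroppedDatabaseID expects, where the restore name follows the resource ID after a comma.

package parse

import "testing"

// Hypothetical example (not part of this change set): the restorable dropped
// database ID is a normal resource ID plus a comma-separated restore name.
func TestRestorableDroppedDatabaseIDCompositeExample(t *testing.T) {
	input := "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Sql/servers/sqlServer1/restorableDroppedDatabases/sqlDB1,000000000000000000"

	id, err := RestorableDroppedDatabaseID(input)
	if err != nil {
		t.Fatalf("parsing %q: %s", input, err)
	}

	// The portion before the comma is parsed as a regular resource ID; the
	// portion after it becomes RestoreName.
	if id.Name != "sqlDB1" || id.MsSqlServer != "sqlServer1" || id.RestoreName != "000000000000000000" {
		t.Fatalf("unexpected result: %+v", *id)
	}
}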
100644 index 000000000000..c172dbb68570 --- /dev/null +++ b/azurerm/internal/services/mssql/parse/server.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ServerId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewServerID(subscriptionId, resourceGroup, name string) ServerId { + return ServerId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id ServerId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Server", segmentsStr) +} + +func (id ServerId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Sql/servers/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// ServerID parses a Server ID into an ServerId struct +func ServerID(input string) (*ServerId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ServerId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("servers"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/mssql/parse/server_extended_auditing_policy.go b/azurerm/internal/services/mssql/parse/server_extended_auditing_policy.go new file mode 100644 index 000000000000..d42ca3f66418 --- /dev/null +++ b/azurerm/internal/services/mssql/parse/server_extended_auditing_policy.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ServerExtendedAuditingPolicyId struct { + SubscriptionId string + ResourceGroup string + ServerName string + ExtendedAuditingSettingName string +} + +func NewServerExtendedAuditingPolicyID(subscriptionId, resourceGroup, serverName, extendedAuditingSettingName string) ServerExtendedAuditingPolicyId { + return ServerExtendedAuditingPolicyId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ServerName: serverName, + ExtendedAuditingSettingName: extendedAuditingSettingName, + } +} + +func (id ServerExtendedAuditingPolicyId) String() string { + segments := []string{ + fmt.Sprintf("Extended Auditing Setting Name %q", id.ExtendedAuditingSettingName), + fmt.Sprintf("Server Name %q", id.ServerName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Server Extended Auditing Policy", segmentsStr) +} + +func (id ServerExtendedAuditingPolicyId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Sql/servers/%s/extendedAuditingSettings/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ServerName, 
id.ExtendedAuditingSettingName) +} + +// ServerExtendedAuditingPolicyID parses a ServerExtendedAuditingPolicy ID into an ServerExtendedAuditingPolicyId struct +func ServerExtendedAuditingPolicyID(input string) (*ServerExtendedAuditingPolicyId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ServerExtendedAuditingPolicyId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ServerName, err = id.PopSegment("servers"); err != nil { + return nil, err + } + if resourceId.ExtendedAuditingSettingName, err = id.PopSegment("extendedAuditingSettings"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/mssql/parse/server_extended_auditing_policy_test.go b/azurerm/internal/services/mssql/parse/server_extended_auditing_policy_test.go new file mode 100644 index 000000000000..e93c4f90f577 --- /dev/null +++ b/azurerm/internal/services/mssql/parse/server_extended_auditing_policy_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ServerExtendedAuditingPolicyId{} + +func TestServerExtendedAuditingPolicyIDFormatter(t *testing.T) { + actual := NewServerExtendedAuditingPolicyID("12345678-1234-9876-4563-123456789012", "group1", "server1", "default").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/extendedAuditingSettings/default" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestServerExtendedAuditingPolicyID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ServerExtendedAuditingPolicyId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/", + Error: true, + }, + + { + // missing value for ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/", + Error: true, + }, + + { + // missing ExtendedAuditingSettingName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/", + Error: true, + }, + + { + // missing value for ExtendedAuditingSettingName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/extendedAuditingSettings/", + Error: true, + }, + + { + // valid + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/extendedAuditingSettings/default", + Expected: &ServerExtendedAuditingPolicyId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "group1", + ServerName: "server1", + ExtendedAuditingSettingName: "default", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.SQL/SERVERS/SERVER1/EXTENDEDAUDITINGSETTINGS/DEFAULT", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ServerExtendedAuditingPolicyID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ServerName != v.Expected.ServerName { + t.Fatalf("Expected %q but got %q for ServerName", v.Expected.ServerName, actual.ServerName) + } + if actual.ExtendedAuditingSettingName != v.Expected.ExtendedAuditingSettingName { + t.Fatalf("Expected %q but got %q for ExtendedAuditingSettingName", v.Expected.ExtendedAuditingSettingName, actual.ExtendedAuditingSettingName) + } + } +} diff --git a/azurerm/internal/services/mssql/parse/server_security_alert_policy.go b/azurerm/internal/services/mssql/parse/server_security_alert_policy.go new file mode 100644 index 000000000000..3361760b6801 --- /dev/null +++ b/azurerm/internal/services/mssql/parse/server_security_alert_policy.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ServerSecurityAlertPolicyId struct { + SubscriptionId string + ResourceGroup string + ServerName string + SecurityAlertPolicyName string +} + +func NewServerSecurityAlertPolicyID(subscriptionId, resourceGroup, serverName, securityAlertPolicyName string) ServerSecurityAlertPolicyId { + return ServerSecurityAlertPolicyId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ServerName: serverName, + SecurityAlertPolicyName: securityAlertPolicyName, + } +} + +func (id ServerSecurityAlertPolicyId) String() string { + segments := []string{ + fmt.Sprintf("Security Alert Policy Name %q", id.SecurityAlertPolicyName), + fmt.Sprintf("Server Name %q", id.ServerName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Server Security Alert Policy", segmentsStr) +} + +func (id ServerSecurityAlertPolicyId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Sql/servers/%s/securityAlertPolicies/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ServerName, id.SecurityAlertPolicyName) +} + +// ServerSecurityAlertPolicyID parses a ServerSecurityAlertPolicy ID into an ServerSecurityAlertPolicyId struct +func ServerSecurityAlertPolicyID(input string) (*ServerSecurityAlertPolicyId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != 
nil { + return nil, err + } + + resourceId := ServerSecurityAlertPolicyId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ServerName, err = id.PopSegment("servers"); err != nil { + return nil, err + } + if resourceId.SecurityAlertPolicyName, err = id.PopSegment("securityAlertPolicies"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/mssql/parse/server_security_alert_policy_test.go b/azurerm/internal/services/mssql/parse/server_security_alert_policy_test.go new file mode 100644 index 000000000000..231dc8a69381 --- /dev/null +++ b/azurerm/internal/services/mssql/parse/server_security_alert_policy_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ServerSecurityAlertPolicyId{} + +func TestServerSecurityAlertPolicyIDFormatter(t *testing.T) { + actual := NewServerSecurityAlertPolicyID("12345678-1234-9876-4563-123456789012", "group1", "server1", "Default").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/securityAlertPolicies/Default" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestServerSecurityAlertPolicyID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ServerSecurityAlertPolicyId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/", + Error: true, + }, + + { + // missing value for ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/", + Error: true, + }, + + { + // missing SecurityAlertPolicyName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/", + Error: true, + }, + + { + // missing value for SecurityAlertPolicyName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/securityAlertPolicies/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/securityAlertPolicies/Default", + Expected: &ServerSecurityAlertPolicyId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "group1", + ServerName: "server1", + SecurityAlertPolicyName: "Default", + }, + }, + + { + // upper-cased + Input: 
"/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.SQL/SERVERS/SERVER1/SECURITYALERTPOLICIES/DEFAULT", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ServerSecurityAlertPolicyID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ServerName != v.Expected.ServerName { + t.Fatalf("Expected %q but got %q for ServerName", v.Expected.ServerName, actual.ServerName) + } + if actual.SecurityAlertPolicyName != v.Expected.SecurityAlertPolicyName { + t.Fatalf("Expected %q but got %q for SecurityAlertPolicyName", v.Expected.SecurityAlertPolicyName, actual.SecurityAlertPolicyName) + } + } +} diff --git a/azurerm/internal/services/mssql/parse/server_test.go b/azurerm/internal/services/mssql/parse/server_test.go new file mode 100644 index 000000000000..05542cb96025 --- /dev/null +++ b/azurerm/internal/services/mssql/parse/server_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ServerId{} + +func TestServerIDFormatter(t *testing.T) { + actual := NewServerID("12345678-1234-9876-4563-123456789012", "group1", "server1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestServerID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ServerId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1", + Expected: &ServerId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "group1", + Name: "server1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.SQL/SERVERS/SERVER1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ServerID(v.Input) + if err != 
nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/mssql/parse/server_vulnerability_assessment.go b/azurerm/internal/services/mssql/parse/server_vulnerability_assessment.go new file mode 100644 index 000000000000..177d3e1b0169 --- /dev/null +++ b/azurerm/internal/services/mssql/parse/server_vulnerability_assessment.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ServerVulnerabilityAssessmentId struct { + SubscriptionId string + ResourceGroup string + ServerName string + VulnerabilityAssessmentName string +} + +func NewServerVulnerabilityAssessmentID(subscriptionId, resourceGroup, serverName, vulnerabilityAssessmentName string) ServerVulnerabilityAssessmentId { + return ServerVulnerabilityAssessmentId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ServerName: serverName, + VulnerabilityAssessmentName: vulnerabilityAssessmentName, + } +} + +func (id ServerVulnerabilityAssessmentId) String() string { + segments := []string{ + fmt.Sprintf("Vulnerability Assessment Name %q", id.VulnerabilityAssessmentName), + fmt.Sprintf("Server Name %q", id.ServerName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Server Vulnerability Assessment", segmentsStr) +} + +func (id ServerVulnerabilityAssessmentId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Sql/servers/%s/vulnerabilityAssessments/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ServerName, id.VulnerabilityAssessmentName) +} + +// ServerVulnerabilityAssessmentID parses a ServerVulnerabilityAssessment ID into an ServerVulnerabilityAssessmentId struct +func ServerVulnerabilityAssessmentID(input string) (*ServerVulnerabilityAssessmentId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ServerVulnerabilityAssessmentId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ServerName, err = id.PopSegment("servers"); err != nil { + return nil, err + } + if resourceId.VulnerabilityAssessmentName, err = id.PopSegment("vulnerabilityAssessments"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/mssql/parse/server_vulnerability_assessment_test.go 
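For illustration only: a hypothetical example test (same placement assumption as the earlier sketches) exercising the casing behaviour the generated tests assert, namely that an upper-cased ID is rejected.

package parse

import "testing"

// Hypothetical example (not part of this change set): an upper-cased ID is
// expected to be rejected rather than silently normalised, matching the
// "upper-cased" cases in the generated tests.
func TestServerVulnerabilityAssessmentIDCasingExample(t *testing.T) {
	upper := "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.SQL/SERVERS/SERVER1/VULNERABILITYASSESSMENTS/DEFAULT"
	if _, err := ServerVulnerabilityAssessmentID(upper); err == nil {
		t.Fatal("expected an error for an upper-cased ID but got none")
	}

	// The correctly-cased form parses cleanly.
	lower := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/vulnerabilityAssessments/default"
	id, err := ServerVulnerabilityAssessmentID(lower)
	if err != nil {
		t.Fatalf("parsing %q: %s", lower, err)
	}
	if id.VulnerabilityAssessmentName != "default" {
		t.Fatalf("expected %q but got %q for VulnerabilityAssessmentName", "default", id.VulnerabilityAssessmentName)
	}
}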
b/azurerm/internal/services/mssql/parse/server_vulnerability_assessment_test.go new file mode 100644 index 000000000000..00d079c2a3a5 --- /dev/null +++ b/azurerm/internal/services/mssql/parse/server_vulnerability_assessment_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ServerVulnerabilityAssessmentId{} + +func TestServerVulnerabilityAssessmentIDFormatter(t *testing.T) { + actual := NewServerVulnerabilityAssessmentID("12345678-1234-9876-4563-123456789012", "group1", "server1", "default").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/vulnerabilityAssessments/default" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestServerVulnerabilityAssessmentID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ServerVulnerabilityAssessmentId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/", + Error: true, + }, + + { + // missing value for ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/", + Error: true, + }, + + { + // missing VulnerabilityAssessmentName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/", + Error: true, + }, + + { + // missing value for VulnerabilityAssessmentName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/vulnerabilityAssessments/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/vulnerabilityAssessments/default", + Expected: &ServerVulnerabilityAssessmentId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "group1", + ServerName: "server1", + VulnerabilityAssessmentName: "default", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.SQL/SERVERS/SERVER1/VULNERABILITYASSESSMENTS/DEFAULT", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ServerVulnerabilityAssessmentID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for 
ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ServerName != v.Expected.ServerName { + t.Fatalf("Expected %q but got %q for ServerName", v.Expected.ServerName, actual.ServerName) + } + if actual.VulnerabilityAssessmentName != v.Expected.VulnerabilityAssessmentName { + t.Fatalf("Expected %q but got %q for VulnerabilityAssessmentName", v.Expected.VulnerabilityAssessmentName, actual.VulnerabilityAssessmentName) + } + } +} diff --git a/azurerm/internal/services/mssql/parse/sql_virtual_machine.go b/azurerm/internal/services/mssql/parse/sql_virtual_machine.go new file mode 100644 index 000000000000..9e9c6f9da383 --- /dev/null +++ b/azurerm/internal/services/mssql/parse/sql_virtual_machine.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type SqlVirtualMachineId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewSqlVirtualMachineID(subscriptionId, resourceGroup, name string) SqlVirtualMachineId { + return SqlVirtualMachineId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id SqlVirtualMachineId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Sql Virtual Machine", segmentsStr) +} + +func (id SqlVirtualMachineId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// SqlVirtualMachineID parses a SqlVirtualMachine ID into an SqlVirtualMachineId struct +func SqlVirtualMachineID(input string) (*SqlVirtualMachineId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := SqlVirtualMachineId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("sqlVirtualMachines"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/mssql/parse/sql_virtual_machine_test.go b/azurerm/internal/services/mssql/parse/sql_virtual_machine_test.go new file mode 100644 index 000000000000..645541a2a42d --- /dev/null +++ b/azurerm/internal/services/mssql/parse/sql_virtual_machine_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = SqlVirtualMachineId{} + +func TestSqlVirtualMachineIDFormatter(t *testing.T) { + actual := NewSqlVirtualMachineID("12345678-1234-9876-4563-123456789012", "group1", "virtualMachine1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/virtualMachine1" + if actual != 
expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestSqlVirtualMachineID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *SqlVirtualMachineId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.SqlVirtualMachine/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/virtualMachine1", + Expected: &SqlVirtualMachineId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "group1", + Name: "virtualMachine1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.SQLVIRTUALMACHINE/SQLVIRTUALMACHINES/VIRTUALMACHINE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := SqlVirtualMachineID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/mssql/registration.go b/azurerm/internal/services/mssql/registration.go index f7baed0e0e4b..2d18acf80ed3 100644 --- a/azurerm/internal/services/mssql/registration.go +++ b/azurerm/internal/services/mssql/registration.go @@ -21,8 +21,8 @@ func (r Registration) WebsiteCategories() []string { // SupportedDataSources returns the supported Data Sources supported by this Service func (r Registration) SupportedDataSources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_mssql_database": dataSourceArmMsSqlDatabase(), - "azurerm_mssql_elasticpool": dataSourceArmMsSqlElasticpool(), + "azurerm_mssql_database": dataSourceMsSqlDatabase(), + "azurerm_mssql_elasticpool": dataSourceMsSqlElasticpool(), "azurerm_mssql_server": dataSourceMsSqlServer(), } } @@ -30,14 +30,14 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource { // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_mssql_database": resourceArmMsSqlDatabase(), - "azurerm_mssql_database_extended_auditing_policy": 
resourceArmMsSqlDatabaseExtendedAuditingPolicy(),
-        "azurerm_mssql_database_vulnerability_assessment_rule_baseline": resourceArmMssqlDatabaseVulnerabilityAssessmentRuleBaseline(),
-        "azurerm_mssql_elasticpool": resourceArmMsSqlElasticPool(),
-        "azurerm_mssql_server": resourceArmMsSqlServer(),
-        "azurerm_mssql_server_extended_auditing_policy": resourceArmMsSqlServerExtendedAuditingPolicy(),
-        "azurerm_mssql_server_security_alert_policy": resourceArmMssqlServerSecurityAlertPolicy(),
-        "azurerm_mssql_server_vulnerability_assessment": resourceArmMssqlServerVulnerabilityAssessment(),
-        "azurerm_mssql_virtual_machine": resourceArmMsSqlVirtualMachine(),
+        "azurerm_mssql_database": resourceMsSqlDatabase(),
+        "azurerm_mssql_database_extended_auditing_policy": resourceMsSqlDatabaseExtendedAuditingPolicy(),
+        "azurerm_mssql_database_vulnerability_assessment_rule_baseline": resourceMsSqlDatabaseVulnerabilityAssessmentRuleBaseline(),
+        "azurerm_mssql_elasticpool": resourceMsSqlElasticPool(),
+        "azurerm_mssql_server": resourceMsSqlServer(),
+        "azurerm_mssql_server_extended_auditing_policy": resourceMsSqlServerExtendedAuditingPolicy(),
+        "azurerm_mssql_server_security_alert_policy": resourceMsSqlServerSecurityAlertPolicy(),
+        "azurerm_mssql_server_vulnerability_assessment": resourceMsSqlServerVulnerabilityAssessment(),
+        "azurerm_mssql_virtual_machine": resourceMsSqlVirtualMachine(),
     }
 }
diff --git a/azurerm/internal/services/mssql/resourceids.go b/azurerm/internal/services/mssql/resourceids.go
new file mode 100644
index 000000000000..81f03ada302f
--- /dev/null
+++ b/azurerm/internal/services/mssql/resourceids.go
@@ -0,0 +1,12 @@
+package mssql
+
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Database -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/database1
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=DatabaseExtendedAuditingPolicy -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/database1/extendedAuditingSettings/default
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=DatabaseVulnerabilityAssessmentRuleBaseline -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/database1/vulnerabilityAssessments/default/rules/rule1/baselines/baseline1
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=ElasticPool -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/elasticPools/pool1
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=RecoverableDatabase -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/recoverabledatabases/database1
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Server -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=ServerExtendedAuditingPolicy -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/extendedAuditingSettings/default
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=ServerSecurityAlertPolicy -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/securityAlertPolicies/Default
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=ServerVulnerabilityAssessment -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/vulnerabilityAssessments/default
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=SqlVirtualMachine -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/virtualMachine1
diff --git a/azurerm/internal/services/mssql/tests/mssql_database_data_source_test.go b/azurerm/internal/services/mssql/tests/mssql_database_data_source_test.go
deleted file mode 100644
index dbb90fa44392..000000000000
--- a/azurerm/internal/services/mssql/tests/mssql_database_data_source_test.go
+++ /dev/null
@@ -1,77 +0,0 @@
-package tests
-
-import (
-    "fmt"
-    "testing"
-
-    "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
-    "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance"
-)
-
-func TestAccDataSourceAzureRMMsSqlDatabase_basic(t *testing.T) {
-    data := acceptance.BuildTestData(t, "data.azurerm_mssql_database", "test")
-
-    resource.ParallelTest(t, resource.TestCase{
-        PreCheck:     func() { acceptance.PreCheck(t) },
-        Providers:    acceptance.SupportedProviders,
-        CheckDestroy: testCheckAzureRMMsSqlDatabaseDestroy,
-        Steps: []resource.TestStep{
-            {
-                Config: testAccDataSourceAzureRMMsSqlDatabase_basic(data),
-                Check: resource.ComposeTestCheckFunc(
-                    testCheckAzureRMMsSqlDatabaseExists(data.ResourceName),
-                ),
-            },
-        },
-    })
-}
-
-func TestAccDataSourceAzureRMMsSqlDatabase_complete(t *testing.T) {
-    data := acceptance.BuildTestData(t, "data.azurerm_mssql_database", "test")
-
-    resource.ParallelTest(t, resource.TestCase{
-        PreCheck:     func() { acceptance.PreCheck(t) },
-        Providers:    acceptance.SupportedProviders,
-        CheckDestroy: testCheckAzureRMMsSqlDatabaseDestroy,
-        Steps: []resource.TestStep{
-            {
-                Config: testAccDataSourceAzureRMMsSqlDatabase_complete(data),
-                Check: resource.ComposeTestCheckFunc(
-                    testCheckAzureRMMsSqlDatabaseExists(data.ResourceName),
-                    resource.TestCheckResourceAttr(data.ResourceName, "collation", "SQL_AltDiction_CP850_CI_AI"),
-                    resource.TestCheckResourceAttr(data.ResourceName, "license_type", "BasePrice"),
-                    resource.TestCheckResourceAttr(data.ResourceName, "max_size_gb", "1"),
-                    resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "GP_Gen5_2"),
-                    resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"),
-                    resource.TestCheckResourceAttr(data.ResourceName, "tags.ENV", "Test"),
-                ),
-            },
-        },
-    })
-}
-
-func testAccDataSourceAzureRMMsSqlDatabase_basic(data acceptance.TestData) string {
-    template := testAccAzureRMMsSqlDatabase_basic(data)
-    return fmt.Sprintf(`
-%s
-
-data "azurerm_mssql_database" "test" {
-  name      = azurerm_mssql_database.test.name
-  server_id = azurerm_sql_server.test.id
-}
-
-`, template)
-}
-
-func testAccDataSourceAzureRMMsSqlDatabase_complete(data acceptance.TestData) string {
-    template := testAccAzureRMMsSqlDatabase_complete(data)
-    return fmt.Sprintf(`
-%s
-
-data "azurerm_mssql_database" "test" {
-  name      = azurerm_mssql_database.test.name
-  server_id = azurerm_sql_server.test.id
-}
-
-`, template)
-}
diff --git a/azurerm/internal/services/mssql/tests/mssql_database_extended_auditing_policy_resource_test.go
b/azurerm/internal/services/mssql/tests/mssql_database_extended_auditing_policy_resource_test.go deleted file mode 100644 index 1a181359d1fd..000000000000 --- a/azurerm/internal/services/mssql/tests/mssql_database_extended_auditing_policy_resource_test.go +++ /dev/null @@ -1,353 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/preview/sql/mgmt/v3.0/sql" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMMsSqlDatabaseExtendedAuditingPolicy_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_database_extended_auditing_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlDatabaseExtendedAuditingPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlDatabaseExtendedAuditingPolicy_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExtendedAuditingPolicyExists(data.ResourceName), - ), - }, - data.ImportStep("storage_account_access_key"), - }, - }) -} - -func TestAccAzureRMMsSqlDatabaseExtendedAuditingPolicy_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_database_extended_auditing_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlDatabaseExtendedAuditingPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlDatabaseExtendedAuditingPolicy_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExtendedAuditingPolicyExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMMsSqlDatabaseExtendedAuditingPolicy_requiresImport), - }, - }) -} - -func TestAccAzureRMMsSqlDatabaseExtendedAuditingPolicy_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_database_extended_auditing_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlDatabaseExtendedAuditingPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlDatabaseExtendedAuditingPolicy_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExtendedAuditingPolicyExists(data.ResourceName), - ), - }, - data.ImportStep("storage_account_access_key"), - }, - }) -} - -func TestAccAzureRMMsSqlDatabaseExtendedAuditingPolicy_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_database_extended_auditing_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlDatabaseExtendedAuditingPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlDatabaseExtendedAuditingPolicy_basic(data), - Check: 
resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExtendedAuditingPolicyExists(data.ResourceName), - ), - }, - data.ImportStep("storage_account_access_key"), - { - Config: testAccAzureRMMsSqlDatabaseExtendedAuditingPolicy_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExtendedAuditingPolicyExists(data.ResourceName), - ), - }, - data.ImportStep("storage_account_access_key"), - { - Config: testAccAzureRMMsSqlDatabaseExtendedAuditingPolicy_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExtendedAuditingPolicyExists(data.ResourceName), - ), - }, - data.ImportStep("storage_account_access_key"), - }, - }) -} - -func TestAccAzureRMMsSqlDatabaseExtendedAuditingPolicy_storageAccBehindFireWall(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_database_extended_auditing_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlDatabaseExtendedAuditingPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlDatabaseExtendedAuditingPolicy_storageAccountBehindFireWall(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExtendedAuditingPolicyExists(data.ResourceName), - ), - }, - data.ImportStep("storage_account_access_key"), - }, - }) -} - -func testCheckAzureRMMsSqlDatabaseExtendedAuditingPolicyExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MSSQL.DatabaseExtendedBlobAuditingPoliciesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.MssqlDatabaseExtendedAuditingPolicyID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.MsSqlServer, id.MsDBName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("MsSql Database ExtendedAuditingPolicy %q (resource group: %q) does not exist", id.MsDBName, id.ResourceGroup) - } - - return fmt.Errorf("Get on MsSql Database ExtendedAuditingPolicy Client: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMMsSqlDatabaseExtendedAuditingPolicyDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MSSQL.DatabaseExtendedBlobAuditingPoliciesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_mssql_database_extended_auditing_policy" { - continue - } - - id, err := parse.MssqlDatabaseExtendedAuditingPolicyID(rs.Primary.ID) - if err != nil { - return err - } - - if resp, err := client.Get(ctx, id.ResourceGroup, id.MsSqlServer, id.MsDBName); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Get on MsSql Database ExtendedAuditingPolicy Client: %+v", err) - } - - if resp.ExtendedDatabaseBlobAuditingPolicyProperties != nil && resp.ExtendedDatabaseBlobAuditingPolicyProperties.State == sql.BlobAuditingPolicyStateEnabled { - return fmt.Errorf("`azurerm_mssql_database_extended_auditing_policy` is still enabled") - } - } - return nil - } - - return nil -} - -func testAccAzureRMMsSqlDatabaseExtendedAuditingPolicy_template(data acceptance.TestData) string { - return 
fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-mssql-%[1]d" - location = "%[2]s" -} - -resource "azurerm_mssql_server" "test" { - name = "acctest-sqlserver-%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - version = "12.0" - administrator_login = "missadministrator" - administrator_login_password = "AdminPassword123!" -} - -resource "azurerm_mssql_database" "test" { - name = "acctest-db-%[1]d" - server_id = azurerm_mssql_server.test.id -} - -resource "azurerm_storage_account" "test" { - name = "unlikely23exst2acct%[3]s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func testAccAzureRMMsSqlDatabaseExtendedAuditingPolicy_basic(data acceptance.TestData) string { - template := testAccAzureRMMsSqlDatabaseExtendedAuditingPolicy_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_database_extended_auditing_policy" "test" { - database_id = azurerm_mssql_database.test.id - storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint - storage_account_access_key = azurerm_storage_account.test.primary_access_key -} -`, template) -} - -func testAccAzureRMMsSqlDatabaseExtendedAuditingPolicy_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMMsSqlDatabaseExtendedAuditingPolicy_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_database_extended_auditing_policy" "import" { - database_id = azurerm_mssql_database.test.id - storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint - storage_account_access_key = azurerm_storage_account.test.primary_access_key -} -`, template) -} - -func testAccAzureRMMsSqlDatabaseExtendedAuditingPolicy_complete(data acceptance.TestData) string { - template := testAccAzureRMMsSqlDatabaseExtendedAuditingPolicy_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_database_extended_auditing_policy" "test" { - database_id = azurerm_mssql_database.test.id - storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint - storage_account_access_key = azurerm_storage_account.test.primary_access_key - storage_account_access_key_is_secondary = false - retention_in_days = 6 -} -`, template) -} - -func testAccAzureRMMsSqlDatabaseExtendedAuditingPolicy_update(data acceptance.TestData) string { - template := testAccAzureRMMsSqlDatabaseExtendedAuditingPolicy_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_storage_account" "test2" { - name = "unlikely23exst2acc2%[2]s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_mssql_database_extended_auditing_policy" "test" { - database_id = azurerm_mssql_database.test.id - storage_endpoint = azurerm_storage_account.test2.primary_blob_endpoint - storage_account_access_key = azurerm_storage_account.test2.primary_access_key - storage_account_access_key_is_secondary = true - retention_in_days = 3 -} -`, template, data.RandomString) -} - -func testAccAzureRMMsSqlDatabaseExtendedAuditingPolicy_storageAccountBehindFireWall(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - 
name = "acctestRG-mssql-%[1]d" - location = "%[2]s" -} - -resource "azurerm_mssql_server" "test" { - name = "acctest-sqlserver-%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - version = "12.0" - administrator_login = "missadministrator" - administrator_login_password = "AdminPassword123!" - identity { - type = "SystemAssigned" - } -} - -resource "azurerm_mssql_database" "test" { - name = "acctest-db-%[1]d" - server_id = azurerm_mssql_server.test.id -} - -resource "azurerm_virtual_network" "test" { - name = "acctestvirtnet%[1]d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctestsubnet%[1]d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.2.0/24" - service_endpoints = ["Microsoft.Storage"] -} - -resource "azurerm_storage_account" "test" { - name = "unlikely23exst2acct%[3]s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" - - network_rules { - default_action = "Deny" - ip_rules = ["127.0.0.1"] - virtual_network_subnet_ids = [azurerm_subnet.test.id] - } -} - -resource "azurerm_role_assignment" "test" { - scope = azurerm_storage_account.test.id - role_definition_name = "Storage Blob Data Contributor" - principal_id = azurerm_mssql_server.test.identity.0.principal_id -} - -resource "azurerm_mssql_database_extended_auditing_policy" "test" { - database_id = azurerm_mssql_database.test.id - storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint - - depends_on = [ - azurerm_role_assignment.test, - ] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} diff --git a/azurerm/internal/services/mssql/tests/mssql_database_resource_test.go b/azurerm/internal/services/mssql/tests/mssql_database_resource_test.go deleted file mode 100644 index a03f3811dfa0..000000000000 --- a/azurerm/internal/services/mssql/tests/mssql_database_resource_test.go +++ /dev/null @@ -1,1312 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - "time" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMMsSqlDatabase_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlDatabase_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMsSqlDatabase_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { 
acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlDatabase_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMMsSqlDatabase_requiresImport), - }, - }) -} - -func TestAccAzureRMMsSqlDatabase_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlDatabase_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "collation", "SQL_AltDiction_CP850_CI_AI"), - resource.TestCheckResourceAttr(data.ResourceName, "license_type", "BasePrice"), - resource.TestCheckResourceAttr(data.ResourceName, "max_size_gb", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "GP_Gen5_2"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.ENV", "Test"), - ), - }, - data.ImportStep("sample_name"), - { - Config: testAccAzureRMMsSqlDatabase_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "license_type", "LicenseIncluded"), - resource.TestCheckResourceAttr(data.ResourceName, "max_size_gb", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.ENV", "Staging"), - ), - }, - data.ImportStep("sample_name"), - }, - }) -} - -func TestAccAzureRMMsSqlDatabase_elasticPool(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlDatabase_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMMsSqlDatabase_elasticPool(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "elastic_pool_id"), - resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "ElasticPool"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMMsSqlDatabase_elasticPoolDisassociation(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMsSqlDatabase_GP(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlDatabase_GP(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - 
resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "GP_Gen5_2"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMsSqlDatabase_GP_Serverless(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlDatabase_GPServerless(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "auto_pause_delay_in_minutes", "70"), - resource.TestCheckResourceAttr(data.ResourceName, "min_capacity", "0.75"), - resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "GP_S_Gen5_2"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMMsSqlDatabase_GPServerlessUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "auto_pause_delay_in_minutes", "90"), - resource.TestCheckResourceAttr(data.ResourceName, "min_capacity", "1.25"), - resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "GP_S_Gen5_2"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMsSqlDatabase_BC(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlDatabase_BC(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "read_scale", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "BC_Gen5_2"), - resource.TestCheckResourceAttr(data.ResourceName, "zone_redundant", "true"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMMsSqlDatabase_BCUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "read_scale", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "BC_Gen5_2"), - resource.TestCheckResourceAttr(data.ResourceName, "zone_redundant", "false"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMsSqlDatabase_HS(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlDatabase_HS(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "read_replica_count", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "HS_Gen5_2"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMMsSqlDatabase_HSUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "read_replica_count", "4"), - resource.TestCheckResourceAttr(data.ResourceName, 
"sku_name", "HS_Gen5_2"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMsSqlDatabase_createCopyMode(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_database", "copy") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlDatabase_createCopyMode(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "collation", "SQL_AltDiction_CP850_CI_AI"), - resource.TestCheckResourceAttr(data.ResourceName, "license_type", "BasePrice"), - resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "GP_Gen5_2"), - ), - }, - data.ImportStep("create_mode", "creation_source_database_id"), - }, - }) -} - -func TestAccAzureRMMsSqlDatabase_createPITRMode(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlDatabase_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - ), - }, - data.ImportStep(), - - { - PreConfig: func() { time.Sleep(7 * time.Minute) }, - Config: testAccAzureRMMsSqlDatabase_createPITRMode(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists("azurerm_mssql_database.pitr"), - ), - }, - - data.ImportStep("create_mode", "creation_source_database_id", "restore_point_in_time"), - }, - }) -} - -func TestAccAzureRMMsSqlDatabase_createSecondaryMode(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_database", "secondary") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlDatabase_createSecondaryMode(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "collation", "SQL_AltDiction_CP850_CI_AI"), - resource.TestCheckResourceAttr(data.ResourceName, "license_type", "BasePrice"), - resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "GP_Gen5_2"), - ), - }, - data.ImportStep("create_mode", "creation_source_database_id", "sample_name"), - }, - }) -} - -func TestAccAzureRMMsSqlDatabase_createRestoreMode(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlDatabase_createRestoreMode(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - ), - }, - data.ImportStep("create_mode", "creation_source_database_id"), - - { - PreConfig: func() { time.Sleep(8 * time.Minute) }, - Config: testAccAzureRMMsSqlDatabase_createRestoreModeDBDeleted(data), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - ), - }, - - data.ImportStep(), - - { - PreConfig: func() { time.Sleep(8 * time.Minute) }, - Config: testAccAzureRMMsSqlDatabase_createRestoreModeDBRestored(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - testCheckAzureRMMsSqlDatabaseExists("azurerm_mssql_database.restore"), - ), - }, - - data.ImportStep("create_mode", "restore_dropped_database_id"), - }, - }) -} - -func TestAccAzureRMMsSqlDatabase_threatDetectionPolicy(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlDatabase_threatDetectionPolicy(data, "Enabled"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "threat_detection_policy.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "threat_detection_policy.0.state", "Enabled"), - resource.TestCheckResourceAttr(data.ResourceName, "threat_detection_policy.0.retention_days", "15"), - resource.TestCheckResourceAttr(data.ResourceName, "threat_detection_policy.0.disabled_alerts.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "threat_detection_policy.0.email_account_admins", "Enabled"), - ), - }, - data.ImportStep("sample_name", "threat_detection_policy.0.storage_account_access_key"), - { - Config: testAccAzureRMMsSqlDatabase_threatDetectionPolicy(data, "Disabled"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "threat_detection_policy.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "threat_detection_policy.0.state", "Disabled"), - ), - }, - data.ImportStep("sample_name", "threat_detection_policy.0.storage_account_access_key"), - }, - }) -} - -func TestAccAzureRMMsSqlDatabase_withBlobAuditingPolices(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlDatabase_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMMsSqlDatabase_withBlobAuditingPolices(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - ), - }, - data.ImportStep("extended_auditing_policy.0.storage_account_access_key"), - { - Config: testAccAzureRMMsSqlDatabase_withBlobAuditingPolicesUpdated(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - ), - }, - data.ImportStep("extended_auditing_policy.0.storage_account_access_key"), - { - Config: testAccAzureRMMsSqlDatabase_withBlobAuditingPolicesDisabled(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMsSqlDatabase_updateSku(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") - 
- resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlDatabase_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMMsSqlDatabase_updateSku(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMMsSqlDatabase_updateSku2(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMsSqlDatabase_minCapacity0(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlDatabase_minCapacity0(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMsSqlDatabase_withLongTermRetentionPolicy(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlDatabase_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMMsSqlDatabase_withLongTermRetentionPolicy(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMMsSqlDatabase_withLongTermRetentionPolicyUpdated(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMsSqlDatabase_withShortTermRetentionPolicy(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlDatabase_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMMsSqlDatabase_withShortTermRetentionPolicy(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMMsSqlDatabase_withShortTermRetentionPolicyUpdated(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlDatabaseExists(data.ResourceName), - ), - }, - }, - }) -} - -func testCheckAzureRMMsSqlDatabaseExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MSSQL.DatabasesClient - ctx := 
acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.MsSqlDatabaseID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.MsSqlServer, id.Name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("MsSql Database %q (resource group: %q) does not exist", id.Name, id.ResourceGroup) - } - - return fmt.Errorf("Get on MsSql Database Client: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMMsSqlDatabaseDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MSSQL.DatabasesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_mssql_database" { - continue - } - - id, err := parse.MsSqlDatabaseID(rs.Primary.ID) - if err != nil { - return err - } - - if resp, err := client.Get(ctx, id.ResourceGroup, id.MsSqlServer, id.Name); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Get on MsSql Database Client: %+v", err) - } - } - return nil - } - - return nil -} - -func testAccAzureRMMsSqlDatabase_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-mssql-%[1]d" - location = "%[2]s" -} - -resource "azurerm_sql_server" "test" { - name = "acctest-sqlserver-%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - version = "12.0" - administrator_login = "mradministrator" - administrator_login_password = "thisIsDog11" -} -`, data.RandomInteger, data.Locations.Primary) -} - -func testAccAzureRMMsSqlDatabase_basic(data acceptance.TestData) string { - template := testAccAzureRMMsSqlDatabase_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_database" "test" { - name = "acctest-db-%d" - server_id = azurerm_sql_server.test.id -} -`, template, data.RandomInteger) -} - -func testAccAzureRMMsSqlDatabase_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMMsSqlDatabase_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_database" "import" { - name = azurerm_mssql_database.test.name - server_id = azurerm_sql_server.test.id -} -`, template) -} - -func testAccAzureRMMsSqlDatabase_complete(data acceptance.TestData) string { - template := testAccAzureRMMsSqlDatabase_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_database" "test" { - name = "acctest-db-%[2]d" - server_id = azurerm_sql_server.test.id - collation = "SQL_AltDiction_CP850_CI_AI" - license_type = "BasePrice" - max_size_gb = 1 - sample_name = "AdventureWorksLT" - sku_name = "GP_Gen5_2" - - tags = { - ENV = "Test" - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMMsSqlDatabase_update(data acceptance.TestData) string { - template := testAccAzureRMMsSqlDatabase_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_database" "test" { - name = "acctest-db-%[2]d" - server_id = azurerm_sql_server.test.id - collation = "SQL_AltDiction_CP850_CI_AI" - license_type = "LicenseIncluded" - max_size_gb = 2 - sku_name = "GP_Gen5_2" - - tags = { - ENV = "Staging" - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMMsSqlDatabase_elasticPool(data acceptance.TestData) string 
{ - template := testAccAzureRMMsSqlDatabase_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_elasticpool" "test" { - name = "acctest-pool-%[2]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - max_size_gb = 5 - - sku { - name = "GP_Gen5" - tier = "GeneralPurpose" - capacity = 4 - family = "Gen5" - } - - per_database_settings { - min_capacity = 0.25 - max_capacity = 4 - } -} - -resource "azurerm_mssql_database" "test" { - name = "acctest-db-%[2]d" - server_id = azurerm_sql_server.test.id - elastic_pool_id = azurerm_mssql_elasticpool.test.id - sku_name = "ElasticPool" -} -`, template, data.RandomInteger) -} - -func testAccAzureRMMsSqlDatabase_elasticPoolDisassociation(data acceptance.TestData) string { - template := testAccAzureRMMsSqlDatabase_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_elasticpool" "test" { - name = "acctest-pool-%[2]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - max_size_gb = 5 - - sku { - name = "GP_Gen5" - tier = "GeneralPurpose" - capacity = 4 - family = "Gen5" - } - - per_database_settings { - min_capacity = 0.25 - max_capacity = 4 - } -} - -resource "azurerm_mssql_database" "test" { - name = "acctest-db-%[2]d" - server_id = azurerm_sql_server.test.id - sku_name = "GP_Gen5_2" -} -`, template, data.RandomInteger) -} - -func testAccAzureRMMsSqlDatabase_GP(data acceptance.TestData) string { - template := testAccAzureRMMsSqlDatabase_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_database" "test" { - name = "acctest-db-%d" - server_id = azurerm_sql_server.test.id - sku_name = "GP_Gen5_2" -} -`, template, data.RandomInteger) -} - -func testAccAzureRMMsSqlDatabase_GPServerless(data acceptance.TestData) string { - template := testAccAzureRMMsSqlDatabase_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_database" "test" { - name = "acctest-db-%d" - server_id = azurerm_sql_server.test.id - auto_pause_delay_in_minutes = 70 - min_capacity = 0.75 - sku_name = "GP_S_Gen5_2" -} -`, template, data.RandomInteger) -} - -func testAccAzureRMMsSqlDatabase_GPServerlessUpdate(data acceptance.TestData) string { - template := testAccAzureRMMsSqlDatabase_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_database" "test" { - name = "acctest-db-%d" - server_id = azurerm_sql_server.test.id - auto_pause_delay_in_minutes = 90 - min_capacity = 1.25 - sku_name = "GP_S_Gen5_2" -} -`, template, data.RandomInteger) -} - -func testAccAzureRMMsSqlDatabase_HS(data acceptance.TestData) string { - template := testAccAzureRMMsSqlDatabase_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_database" "test" { - name = "acctest-db-%d" - server_id = azurerm_sql_server.test.id - read_replica_count = 2 - sku_name = "HS_Gen5_2" -} -`, template, data.RandomInteger) -} - -func testAccAzureRMMsSqlDatabase_HSUpdate(data acceptance.TestData) string { - template := testAccAzureRMMsSqlDatabase_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_database" "test" { - name = "acctest-db-%d" - server_id = azurerm_sql_server.test.id - read_replica_count = 4 - sku_name = "HS_Gen5_2" -} -`, template, data.RandomInteger) -} - -func testAccAzureRMMsSqlDatabase_BC(data acceptance.TestData) string { - template := testAccAzureRMMsSqlDatabase_template(data) - return fmt.Sprintf(` -%s - 
-resource "azurerm_mssql_database" "test" { - name = "acctest-db-%d" - server_id = azurerm_sql_server.test.id - read_scale = true - sku_name = "BC_Gen5_2" - zone_redundant = true -} -`, template, data.RandomInteger) -} - -func testAccAzureRMMsSqlDatabase_BCUpdate(data acceptance.TestData) string { - template := testAccAzureRMMsSqlDatabase_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_database" "test" { - name = "acctest-db-%d" - server_id = azurerm_sql_server.test.id - read_scale = false - sku_name = "BC_Gen5_2" - zone_redundant = false -} -`, template, data.RandomInteger) -} - -func testAccAzureRMMsSqlDatabase_createCopyMode(data acceptance.TestData) string { - template := testAccAzureRMMsSqlDatabase_complete(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_database" "copy" { - name = "acctest-dbc-%d" - server_id = azurerm_sql_server.test.id - create_mode = "Copy" - creation_source_database_id = azurerm_mssql_database.test.id -} -`, template, data.RandomInteger) -} - -func testAccAzureRMMsSqlDatabase_createPITRMode(data acceptance.TestData) string { - template := testAccAzureRMMsSqlDatabase_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_database" "pitr" { - name = "acctest-dbp-%d" - server_id = azurerm_sql_server.test.id - create_mode = "PointInTimeRestore" - restore_point_in_time = "%s" - creation_source_database_id = azurerm_mssql_database.test.id - -} -`, template, data.RandomInteger, time.Now().Add(time.Duration(7)*time.Minute).UTC().Format(time.RFC3339)) -} - -func testAccAzureRMMsSqlDatabase_createSecondaryMode(data acceptance.TestData) string { - template := testAccAzureRMMsSqlDatabase_complete(data) - return fmt.Sprintf(` -%s - -resource "azurerm_resource_group" "second" { - name = "acctestRG-mssql2-%[2]d" - location = "%[3]s" -} - -resource "azurerm_sql_server" "second" { - name = "acctest-sqlserver2-%[2]d" - resource_group_name = azurerm_resource_group.second.name - location = azurerm_resource_group.second.location - version = "12.0" - administrator_login = "mradministrator" - administrator_login_password = "thisIsDog11" -} - -resource "azurerm_mssql_database" "secondary" { - name = "acctest-dbs-%[2]d" - server_id = azurerm_sql_server.second.id - create_mode = "Secondary" - creation_source_database_id = azurerm_mssql_database.test.id - -} -`, template, data.RandomInteger, data.Locations.Secondary) -} - -func testAccAzureRMMsSqlDatabase_createRestoreMode(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-mssql-%[1]d" - location = "%[2]s" -} - -resource "azurerm_mssql_server" "test" { - name = "acctest-sqlserver-%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - version = "12.0" - administrator_login = "mradministrator" - administrator_login_password = "thisIsDog11" -} - - -resource "azurerm_mssql_database" "test" { - name = "acctest-db-%[1]d" - server_id = azurerm_mssql_server.test.id -} - -resource "azurerm_mssql_database" "copy" { - name = "acctest-dbc-%[1]d" - server_id = azurerm_mssql_server.test.id - create_mode = "Copy" - creation_source_database_id = azurerm_mssql_database.test.id -} -`, data.RandomInteger, data.Locations.Primary) -} - -func testAccAzureRMMsSqlDatabase_createRestoreModeDBDeleted(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = 
"acctestRG-mssql-%[1]d" - location = "%[2]s" -} - -resource "azurerm_mssql_server" "test" { - name = "acctest-sqlserver-%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - version = "12.0" - administrator_login = "mradministrator" - administrator_login_password = "thisIsDog11" -} - - -resource "azurerm_mssql_database" "test" { - name = "acctest-db-%[1]d" - server_id = azurerm_mssql_server.test.id -} - -`, data.RandomInteger, data.Locations.Primary) -} - -func testAccAzureRMMsSqlDatabase_createRestoreModeDBRestored(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-mssql-%[1]d" - location = "%[2]s" -} - -resource "azurerm_mssql_server" "test" { - name = "acctest-sqlserver-%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - version = "12.0" - administrator_login = "mradministrator" - administrator_login_password = "thisIsDog11" -} - - -resource "azurerm_mssql_database" "test" { - name = "acctest-db-%[1]d" - server_id = azurerm_mssql_server.test.id -} - -resource "azurerm_mssql_database" "restore" { - name = "acctest-dbr-%[1]d" - server_id = azurerm_mssql_server.test.id - create_mode = "Restore" - restore_dropped_database_id = azurerm_mssql_server.test.restorable_dropped_database_ids[0] -} - -`, data.RandomInteger, data.Locations.Primary) -} - -func testAccAzureRMMsSqlDatabase_threatDetectionPolicy(data acceptance.TestData, state string) string { - template := testAccAzureRMMsSqlDatabase_template(data) - return fmt.Sprintf(` -%[1]s - -resource "azurerm_storage_account" "test" { - name = "test%[2]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "GRS" -} - -resource "azurerm_mssql_database" "test" { - name = "acctest-db-%[2]d" - server_id = azurerm_sql_server.test.id - collation = "SQL_AltDiction_CP850_CI_AI" - license_type = "BasePrice" - max_size_gb = 1 - sample_name = "AdventureWorksLT" - sku_name = "GP_Gen5_2" - - threat_detection_policy { - retention_days = 15 - state = "%[3]s" - disabled_alerts = ["Sql_Injection"] - email_account_admins = "Enabled" - storage_account_access_key = azurerm_storage_account.test.primary_access_key - storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint - use_server_default = "Disabled" - } - - tags = { - ENV = "Test" - } -} -`, template, data.RandomInteger, state) -} - -func testAccAzureRMMsSqlDatabase_withBlobAuditingPolices(data acceptance.TestData) string { - template := testAccAzureRMMsSqlDatabase_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_storage_account" "test" { - name = "acctest%[2]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_storage_account" "test2" { - name = "acctest2%[2]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_mssql_database" "test" { - name = "acctest-db-%[3]d" - server_id = azurerm_sql_server.test.id - extended_auditing_policy { - storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint - storage_account_access_key = 
azurerm_storage_account.test.primary_access_key - storage_account_access_key_is_secondary = true - retention_in_days = 6 - } -} -`, template, data.RandomIntOfLength(15), data.RandomInteger) -} - -func testAccAzureRMMsSqlDatabase_withBlobAuditingPolicesUpdated(data acceptance.TestData) string { - template := testAccAzureRMMsSqlDatabase_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_storage_account" "test" { - name = "acctest%[2]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_storage_account" "test2" { - name = "acctest2%[2]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_mssql_database" "test" { - name = "acctest-db-%[3]d" - server_id = azurerm_sql_server.test.id - extended_auditing_policy { - storage_endpoint = azurerm_storage_account.test2.primary_blob_endpoint - storage_account_access_key = azurerm_storage_account.test2.primary_access_key - storage_account_access_key_is_secondary = false - retention_in_days = 3 - } -} -`, template, data.RandomIntOfLength(15), data.RandomInteger) -} - -func testAccAzureRMMsSqlDatabase_withBlobAuditingPolicesDisabled(data acceptance.TestData) string { - template := testAccAzureRMMsSqlDatabase_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_storage_account" "test" { - name = "acctest%[2]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_storage_account" "test2" { - name = "acctest2%[2]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_mssql_database" "test" { - name = "acctest-db-%[3]d" - server_id = azurerm_sql_server.test.id - extended_auditing_policy = [] -} -`, template, data.RandomIntOfLength(15), data.RandomInteger) -} - -func testAccAzureRMMsSqlDatabase_updateSku(data acceptance.TestData) string { - template := testAccAzureRMMsSqlDatabase_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_database" "test" { - name = "acctest-db-%d" - server_id = azurerm_sql_server.test.id - sku_name = "HS_Gen5_2" -} -`, template, data.RandomInteger) -} - -func testAccAzureRMMsSqlDatabase_updateSku2(data acceptance.TestData) string { - template := testAccAzureRMMsSqlDatabase_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_database" "test" { - name = "acctest-db-%d" - server_id = azurerm_sql_server.test.id - sku_name = "HS_Gen5_4" -} -`, template, data.RandomInteger) -} - -func testAccAzureRMMsSqlDatabase_minCapacity0(data acceptance.TestData) string { - template := testAccAzureRMMsSqlDatabase_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_database" "test" { - name = "acctest-db-%d" - server_id = azurerm_sql_server.test.id - - min_capacity = 0 -} -`, template, data.RandomInteger) -} - -func testAccAzureRMMsSqlDatabase_withLongTermRetentionPolicy(data acceptance.TestData) string { - template := testAccAzureRMMsSqlDatabase_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_storage_account" "test" { - name = "acctest%[2]d" - resource_group_name = azurerm_resource_group.test.name - location = 
azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_storage_account" "test2" { - name = "acctest2%[2]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_mssql_database" "test" { - name = "acctest-db-%[3]d" - server_id = azurerm_sql_server.test.id - long_term_retention_policy { - weekly_retention = "P1W" - monthly_retention = "P1M" - yearly_retention = "P1Y" - week_of_year = 1 - } -} -`, template, data.RandomIntOfLength(15), data.RandomInteger) -} - -func testAccAzureRMMsSqlDatabase_withLongTermRetentionPolicyUpdated(data acceptance.TestData) string { - template := testAccAzureRMMsSqlDatabase_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_storage_account" "test" { - name = "acctest%[2]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_storage_account" "test2" { - name = "acctest2%[2]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_mssql_database" "test" { - name = "acctest-db-%[3]d" - server_id = azurerm_sql_server.test.id - long_term_retention_policy { - weekly_retention = "P1W" - yearly_retention = "P1Y" - week_of_year = 2 - } -} -`, template, data.RandomIntOfLength(15), data.RandomInteger) -} - -func testAccAzureRMMsSqlDatabase_withShortTermRetentionPolicy(data acceptance.TestData) string { - template := testAccAzureRMMsSqlDatabase_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_storage_account" "test" { - name = "acctest%[2]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_storage_account" "test2" { - name = "acctest2%[2]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_mssql_database" "test" { - name = "acctest-db-%[3]d" - server_id = azurerm_sql_server.test.id - short_term_retention_policy { - retention_days = 8 - } -} -`, template, data.RandomIntOfLength(15), data.RandomInteger) -} - -func testAccAzureRMMsSqlDatabase_withShortTermRetentionPolicyUpdated(data acceptance.TestData) string { - template := testAccAzureRMMsSqlDatabase_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_storage_account" "test" { - name = "acctest%[2]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_storage_account" "test2" { - name = "acctest2%[2]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_mssql_database" "test" { - name = "acctest-db-%[3]d" - server_id = azurerm_sql_server.test.id - short_term_retention_policy { - retention_days = 10 - } -} -`, template, data.RandomIntOfLength(15), data.RandomInteger) -} diff --git 
a/azurerm/internal/services/mssql/tests/mssql_database_vulnerability_assessment_rule_baseline_resource_test.go b/azurerm/internal/services/mssql/tests/mssql_database_vulnerability_assessment_rule_baseline_resource_test.go deleted file mode 100644 index 25c659be03a0..000000000000 --- a/azurerm/internal/services/mssql/tests/mssql_database_vulnerability_assessment_rule_baseline_resource_test.go +++ /dev/null @@ -1,277 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/preview/sql/mgmt/v3.0/sql" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMMssqlDatabaseVulnerabilityAssessmentRuleBaseline_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_database_vulnerability_assessment_rule_baseline", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMssqlDatabaseVulnerabilityAssessmentRuleBaselineDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMssqlDatabaseVulnerabilityAssessmentRuleBaseline_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMssqlDatabaseVulnerabilityAssessmentRuleBaselineExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "rule_id", "VA2002"), - resource.TestCheckResourceAttr(data.ResourceName, "baseline_name", "default"), - resource.TestCheckResourceAttr(data.ResourceName, "baseline_result.#", "2"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMssqlDatabaseVulnerabilityAssessmentRuleBaseline_primary(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_database_vulnerability_assessment_rule_baseline", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMssqlDatabaseVulnerabilityAssessmentRuleBaselineDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMssqlDatabaseVulnerabilityAssessmentRuleBaseline_primary(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMssqlDatabaseVulnerabilityAssessmentRuleBaselineExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "rule_id", "VA2065"), - resource.TestCheckResourceAttr(data.ResourceName, "baseline_name", "master"), - resource.TestCheckResourceAttr(data.ResourceName, "baseline_result.#", "1"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMDatabaseVulnerabilityAssessmentRuleBaseline_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMssqlDatabaseVulnerabilityAssessmentRuleBaselineExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "rule_id", "VA2065"), - resource.TestCheckResourceAttr(data.ResourceName, "baseline_name", "master"), - resource.TestCheckResourceAttr(data.ResourceName, "baseline_result.#", "2"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMMssqlDatabaseVulnerabilityAssessmentRuleBaselineExists(resourceName string) resource.TestCheckFunc { 
- return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MSSQL.DatabaseVulnerabilityAssessmentRuleBaselinesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("resource not found: %s", resourceName) - } - - id, err := azure.ParseAzureResourceID(rs.Primary.ID) - if err != nil { - return err - } - - resourceGroupName := id.ResourceGroup - serverName := id.Path["servers"] - databaseName := rs.Primary.Attributes["database_name"] - ruleId := rs.Primary.Attributes["rule_id"] - baselineName := sql.VulnerabilityAssessmentPolicyBaselineName(rs.Primary.Attributes["baseline_name"]) - - resp, err := client.Get(ctx, resourceGroupName, serverName, databaseName, ruleId, baselineName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("baseline was not found for "+ - "resource group %q, sql server %q, database %q, rule id %q, baseline name %q", - resourceGroupName, serverName, databaseName, ruleId, baselineName) - } - - return err - } - - return nil - } -} - -func testCheckAzureRMMssqlDatabaseVulnerabilityAssessmentRuleBaselineDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MSSQL.DatabaseVulnerabilityAssessmentRuleBaselinesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_mssql_database_vulnerability_assessment_rule_baseline" { - continue - } - - id, err := azure.ParseAzureResourceID(rs.Primary.ID) - if err != nil { - return err - } - - resourceGroupName := id.ResourceGroup - serverName := id.Path["servers"] - databaseName := rs.Primary.Attributes["database_name"] - ruleId := rs.Primary.Attributes["rule_id"] - baselineName := sql.VulnerabilityAssessmentPolicyBaselineName(rs.Primary.Attributes["baseline_name"]) - - resp, err := client.Get(ctx, resourceGroupName, serverName, databaseName, ruleId, baselineName) - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Database Vulnerability Assessment Rule Baseline still exists:\n%#v", resp.DatabaseVulnerabilityAssessmentRuleBaselineProperties) - } - } - - return nil -} - -func testAccAzureRMMssqlDatabaseVulnerabilityAssessmentRuleBaseline_basic(data acceptance.TestData) string { - database := testAccAzureRMMssqlDatabaseVulnerabilityAssessmentRuleBaseline_database(data) - - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_database_vulnerability_assessment_rule_baseline" "test" { - server_vulnerability_assessment_id = azurerm_mssql_server_vulnerability_assessment.test.id - database_name = azurerm_sql_database.test.name - rule_id = "VA2002" - baseline_name = "default" - baseline_result { - result = [ - "SCHEMA", - "dbo", - "CONTROL", - "SQL_USER", - "adminuser1" - ] - } - baseline_result { - result = [ - "SCHEMA", - "dbo", - "CONTROL", - "SQL_USER", - "adminuser2" - ] - } -} -`, database) -} - -func testAccAzureRMMssqlDatabaseVulnerabilityAssessmentRuleBaseline_primary(data acceptance.TestData) string { - database := testAccAzureRMMssqlDatabaseVulnerabilityAssessmentRuleBaseline_database(data) - - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_database_vulnerability_assessment_rule_baseline" "test" { - server_vulnerability_assessment_id = azurerm_mssql_server_vulnerability_assessment.test.id - database_name = azurerm_sql_database.test.name - rule_id = "VA2065" - 
baseline_name = "master" - baseline_result { - result = [ - "clientip", - "255.255.255.255", - "255.255.255.255" - ] - } -} -`, database) -} - -func testAccAzureRMDatabaseVulnerabilityAssessmentRuleBaseline_update(data acceptance.TestData) string { - database := testAccAzureRMMssqlDatabaseVulnerabilityAssessmentRuleBaseline_database(data) - - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_database_vulnerability_assessment_rule_baseline" "test" { - server_vulnerability_assessment_id = azurerm_mssql_server_vulnerability_assessment.test.id - database_name = azurerm_sql_database.test.name - rule_id = "VA2065" - baseline_name = "master" - baseline_result { - result = [ - "clientips", - "255.255.255.255", - "255.255.255.255" - ] - } - baseline_result { - result = [ - "myip", - "255.255.255.0", - "255.255.255.0" - ] - } -} -`, database) -} - -func testAccAzureRMMssqlDatabaseVulnerabilityAssessmentRuleBaseline_database(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-mssql-%d" - location = "%s" -} - -resource "azurerm_sql_server" "test" { - name = "acctestsqlserver%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - version = "12.0" - administrator_login = "mradministrator" - administrator_login_password = "thisIsDog11" -} - -resource "azurerm_storage_account" "test" { - name = "accsa%d" - resource_group_name = azurerm_resource_group.test.name - location = "%s" - account_tier = "Standard" - account_replication_type = "GRS" -} - -resource "azurerm_storage_container" "test" { - name = "acctestsc%d" - storage_account_name = azurerm_storage_account.test.name - container_access_type = "private" -} - -resource "azurerm_mssql_server_security_alert_policy" "test" { - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_sql_server.test.name - state = "Enabled" -} - -resource "azurerm_mssql_server_vulnerability_assessment" "test" { - server_security_alert_policy_id = azurerm_mssql_server_security_alert_policy.test.id - storage_container_path = "${azurerm_storage_account.test.primary_blob_endpoint}${azurerm_storage_container.test.name}/" - storage_account_access_key = azurerm_storage_account.test.primary_access_key -} - -resource "azurerm_sql_database" "test" { - name = "acctestdb%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_sql_server.test.name - location = azurerm_resource_group.test.location - edition = "Standard" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/mssql/tests/mssql_elasticpool_data_source_test.go b/azurerm/internal/services/mssql/tests/mssql_elasticpool_data_source_test.go deleted file mode 100644 index e22e8ad3218e..000000000000 --- a/azurerm/internal/services/mssql/tests/mssql_elasticpool_data_source_test.go +++ /dev/null @@ -1,104 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMMsSqlElasticPool_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_mssql_elasticpool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: 
acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlElasticPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMMsSqlElasticPool_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlElasticPoolExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "resource_group_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "server_name"), - resource.TestCheckResourceAttr(data.ResourceName, "location", data.Locations.Primary), - resource.TestCheckResourceAttr(data.ResourceName, "max_size_gb", "50"), - resource.TestCheckResourceAttr(data.ResourceName, "per_db_min_capacity", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "per_db_max_capacity", "4"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "zone_redundant", "false"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMMsSqlElasticPool_licenseType(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_mssql_elasticpool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlElasticPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMMsSqlElasticPool_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlElasticPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "license_type", "LicenseIncluded"), - ), - }, - }, - }) -} - -func testAccDataSourceAzureRMMsSqlElasticPool_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%[1]d" - location = "%s" -} - -resource "azurerm_sql_server" "test" { - name = "acctest%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - version = "12.0" - administrator_login = "4dm1n157r470r" - administrator_login_password = "4-v3ry-53cr37-p455w0rd" -} - -resource "azurerm_mssql_elasticpool" "test" { - name = "acctest-pool-dtu-%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - max_size_gb = 50 - zone_redundant = false - - sku { - name = "GP_Gen5" - tier = "GeneralPurpose" - capacity = 4 - family = "Gen5" - } - - per_database_settings { - min_capacity = 0 - max_capacity = 4 - } -} - -data "azurerm_mssql_elasticpool" "test" { - name = azurerm_mssql_elasticpool.test.name - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_sql_server.test.name -} -`, data.RandomInteger, data.Locations.Primary) -} diff --git a/azurerm/internal/services/mssql/tests/mssql_elasticpool_resource_test.go b/azurerm/internal/services/mssql/tests/mssql_elasticpool_resource_test.go deleted file mode 100644 index 2c7b5a1e3ad5..000000000000 --- a/azurerm/internal/services/mssql/tests/mssql_elasticpool_resource_test.go +++ /dev/null @@ -1,572 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -// TODO: add import tests -func TestAccAzureRMMsSqlElasticPool_basic_DTU(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_elasticpool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlElasticPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlElasticPool_basic_DTU(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlElasticPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku.0.name", "BasicPool"), - resource.TestCheckResourceAttr(data.ResourceName, "sku.0.capacity", "50"), - resource.TestCheckResourceAttr(data.ResourceName, "per_database_settings.0.min_capacity", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "per_database_settings.0.max_capacity", "5"), - resource.TestCheckResourceAttrSet(data.ResourceName, "max_size_gb"), - resource.TestCheckResourceAttrSet(data.ResourceName, "zone_redundant"), - ), - }, - }, - }) -} - -func TestAccAzureRMMsSqlElasticPool_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_elasticpool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlElasticPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlElasticPool_basic_DTU(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlElasticPoolExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMMsSqlElasticPool_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_mssql_elasticpool"), - }, - }, - }) -} - -func TestAccAzureRMMsSqlElasticPool_standard_DTU(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_elasticpool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlElasticPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlElasticPool_standard_DTU(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlElasticPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku.0.name", "StandardPool"), - resource.TestCheckResourceAttr(data.ResourceName, "sku.0.capacity", "50"), - resource.TestCheckResourceAttr(data.ResourceName, "per_database_settings.0.min_capacity", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "per_database_settings.0.max_capacity", "50"), - resource.TestCheckResourceAttrSet(data.ResourceName, "max_size_gb"), - resource.TestCheckResourceAttrSet(data.ResourceName, "zone_redundant"), - ), - }, - data.ImportStep("max_size_gb"), - }, - }) -} - -func TestAccAzureRMMsSqlElasticPool_premium_DTU_zone_redundant(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_elasticpool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlElasticPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlElasticPool_premium_DTU_zone_redundant(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlElasticPoolExists(data.ResourceName), - 
resource.TestCheckResourceAttr(data.ResourceName, "sku.0.name", "PremiumPool"), - resource.TestCheckResourceAttr(data.ResourceName, "zone_redundant", "true"), - ), - }, - data.ImportStep("max_size_gb"), - }, - }) -} - -func TestAccAzureRMMsSqlElasticPool_basic_vCore(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_elasticpool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlElasticPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlElasticPool_basic_vCore(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlElasticPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku.0.name", "GP_Gen5"), - resource.TestCheckResourceAttr(data.ResourceName, "sku.0.capacity", "4"), - resource.TestCheckResourceAttr(data.ResourceName, "per_database_settings.0.min_capacity", "0.25"), - resource.TestCheckResourceAttr(data.ResourceName, "per_database_settings.0.max_capacity", "4"), - resource.TestCheckResourceAttrSet(data.ResourceName, "max_size_gb"), - resource.TestCheckResourceAttrSet(data.ResourceName, "zone_redundant"), - ), - }, - data.ImportStep("max_size_gb"), - }, - }) -} - -func TestAccAzureRMMsSqlElasticPool_basic_vCore_MaxSizeBytes(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_elasticpool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlElasticPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlElasticPool_basic_vCore_MaxSizeBytes(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlElasticPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku.0.name", "GP_Gen5"), - resource.TestCheckResourceAttr(data.ResourceName, "sku.0.capacity", "4"), - resource.TestCheckResourceAttr(data.ResourceName, "per_database_settings.0.min_capacity", "0.25"), - resource.TestCheckResourceAttr(data.ResourceName, "per_database_settings.0.max_capacity", "4"), - resource.TestCheckResourceAttr(data.ResourceName, "max_size_bytes", "214748364800"), - resource.TestCheckResourceAttrSet(data.ResourceName, "max_size_gb"), - resource.TestCheckResourceAttrSet(data.ResourceName, "zone_redundant"), - ), - }, - data.ImportStep("max_size_gb"), - }, - }) -} -func TestAccAzureRMMsSqlElasticPool_disappears(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_elasticpool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlElasticPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlElasticPool_standard_DTU(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlElasticPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku.0.name", "StandardPool"), - resource.TestCheckResourceAttr(data.ResourceName, "sku.0.capacity", "50"), - resource.TestCheckResourceAttr(data.ResourceName, "per_database_settings.0.min_capacity", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "per_database_settings.0.max_capacity", "50"), - testCheckAzureRMMsSqlElasticPoolDisappears(data.ResourceName), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func 
TestAccAzureRMMsSqlElasticPool_resize_DTU(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_elasticpool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlElasticPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlElasticPool_standard_DTU(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlElasticPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku.0.name", "StandardPool"), - resource.TestCheckResourceAttr(data.ResourceName, "sku.0.capacity", "50"), - resource.TestCheckResourceAttr(data.ResourceName, "per_database_settings.0.min_capacity", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "per_database_settings.0.max_capacity", "50"), - ), - }, - { - Config: testAccAzureRMMsSqlElasticPool_resize_DTU(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlElasticPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku.0.name", "StandardPool"), - resource.TestCheckResourceAttr(data.ResourceName, "sku.0.capacity", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "per_database_settings.0.min_capacity", "50"), - resource.TestCheckResourceAttr(data.ResourceName, "per_database_settings.0.max_capacity", "100"), - ), - }, - }, - }) -} - -func TestAccAzureRMMsSqlElasticPool_resize_vCore(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_elasticpool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlElasticPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlElasticPool_basic_vCore(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlElasticPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku.0.name", "GP_Gen5"), - resource.TestCheckResourceAttr(data.ResourceName, "sku.0.capacity", "4"), - resource.TestCheckResourceAttr(data.ResourceName, "per_database_settings.0.min_capacity", "0.25"), - resource.TestCheckResourceAttr(data.ResourceName, "per_database_settings.0.max_capacity", "4"), - ), - }, - { - Config: testAccAzureRMMsSqlElasticPool_resize_vCore(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlElasticPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku.0.name", "GP_Gen5"), - resource.TestCheckResourceAttr(data.ResourceName, "sku.0.capacity", "8"), - resource.TestCheckResourceAttr(data.ResourceName, "per_database_settings.0.min_capacity", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "per_database_settings.0.max_capacity", "8"), - ), - }, - }, - }) -} - -func TestAccAzureRMMsSqlElasticPool_licenseType(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_elasticpool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlElasticPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlElasticPool_licenseType_Template(data, "LicenseIncluded"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlElasticPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "license_type", "LicenseIncluded"), - ), - }, - data.ImportStep(), 
- { - Config: testAccAzureRMMsSqlElasticPool_licenseType_Template(data, "BasePrice"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlElasticPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "license_type", "BasePrice"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMMsSqlElasticPoolExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MSSQL.ElasticPoolsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serverName := rs.Primary.Attributes["server_name"] - poolName := rs.Primary.Attributes["name"] - - resp, err := client.Get(ctx, resourceGroup, serverName, poolName) - if err != nil { - return fmt.Errorf("Bad: Get on msSqlElasticPoolsClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: MsSql Elastic Pool %q on server: %q (resource group: %q) does not exist", poolName, serverName, resourceGroup) - } - - return nil - } -} - -func testCheckAzureRMMsSqlElasticPoolDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MSSQL.ElasticPoolsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_mssql_elasticpool" { - continue - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serverName := rs.Primary.Attributes["server_name"] - poolName := rs.Primary.Attributes["name"] - - resp, err := client.Get(ctx, resourceGroup, serverName, poolName) - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("MsSql Elastic Pool still exists:\n%#v", resp.ElasticPoolProperties) - } - } - - return nil -} - -func testCheckAzureRMMsSqlElasticPoolDisappears(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MSSQL.ElasticPoolsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serverName := rs.Primary.Attributes["server_name"] - poolName := rs.Primary.Attributes["name"] - - if _, err := client.Delete(ctx, resourceGroup, serverName, poolName); err != nil { - return fmt.Errorf("Bad: Delete on msSqlElasticPoolsClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMMsSqlElasticPool_basic_DTU(data acceptance.TestData) string { - return testAccAzureRMMsSqlElasticPool_DTU_Template(data, "BasicPool", "Basic", 50, 4.8828125, 0, 5, false) -} - -func testAccAzureRMMsSqlElasticPool_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMMsSqlElasticPool_DTU_Template(data, "BasicPool", "Basic", 50, 4.8828125, 0, 5, false) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_elasticpool" "import" { - name = azurerm_mssql_elasticpool.test.name - resource_group_name = azurerm_mssql_elasticpool.test.resource_group_name - location = azurerm_mssql_elasticpool.test.location - server_name = 
azurerm_mssql_elasticpool.test.server_name - max_size_gb = 4.8828125 - - sku { - name = "BasicPool" - tier = "Basic" - capacity = 50 - } - - per_database_settings { - min_capacity = 0 - max_capacity = 5 - } -} -`, template) -} - -func testAccAzureRMMsSqlElasticPool_premium_DTU_zone_redundant(data acceptance.TestData) string { - return testAccAzureRMMsSqlElasticPool_DTU_Template(data, "PremiumPool", "Premium", 125, 50, 0, 50, true) -} - -func testAccAzureRMMsSqlElasticPool_standard_DTU(data acceptance.TestData) string { - return testAccAzureRMMsSqlElasticPool_DTU_Template(data, "StandardPool", "Standard", 50, 50, 0, 50, false) -} - -func testAccAzureRMMsSqlElasticPool_resize_DTU(data acceptance.TestData) string { - return testAccAzureRMMsSqlElasticPool_DTU_Template(data, "StandardPool", "Standard", 100, 100, 50, 100, false) -} - -func testAccAzureRMMsSqlElasticPool_basic_vCore(data acceptance.TestData) string { - return testAccAzureRMMsSqlElasticPool_vCore_Template(data, "GP_Gen5", "GeneralPurpose", 4, "Gen5", 0.25, 4) -} - -func testAccAzureRMMsSqlElasticPool_basic_vCore_MaxSizeBytes(data acceptance.TestData) string { - return testAccAzureRMMsSqlElasticPool_vCore_MaxSizeBytes_Template(data, "GP_Gen5", "GeneralPurpose", 4, "Gen5", 0.25, 4) -} - -func testAccAzureRMMsSqlElasticPool_resize_vCore(data acceptance.TestData) string { - return testAccAzureRMMsSqlElasticPool_vCore_Template(data, "GP_Gen5", "GeneralPurpose", 8, "Gen5", 0, 8) -} - -func testAccAzureRMMsSqlElasticPool_vCore_Template(data acceptance.TestData, skuName string, skuTier string, skuCapacity int, skuFamily string, databaseSettingsMin float64, databaseSettingsMax float64) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%[1]d" - location = "%s" -} - -resource "azurerm_sql_server" "test" { - name = "acctest%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - version = "12.0" - administrator_login = "4dm1n157r470r" - administrator_login_password = "4-v3ry-53cr37-p455w0rd" -} - -resource "azurerm_mssql_elasticpool" "test" { - name = "acctest-pool-vcore-%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - max_size_gb = 5 - - sku { - name = "%[3]s" - tier = "%[4]s" - capacity = %[5]d - family = "%[6]s" - } - - per_database_settings { - min_capacity = %.2[7]f - max_capacity = %.2[8]f - } -} -`, data.RandomInteger, data.Locations.Primary, skuName, skuTier, skuCapacity, skuFamily, databaseSettingsMin, databaseSettingsMax) -} - -func testAccAzureRMMsSqlElasticPool_vCore_MaxSizeBytes_Template(data acceptance.TestData, skuName string, skuTier string, skuCapacity int, skuFamily string, databaseSettingsMin float64, databaseSettingsMax float64) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%[1]d" - location = "%s" -} - -resource "azurerm_sql_server" "test" { - name = "acctest%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - version = "12.0" - administrator_login = "4dm1n157r470r" - administrator_login_password = "4-v3ry-53cr37-p455w0rd" -} - -resource "azurerm_mssql_elasticpool" "test" { - name = "acctest-pool-vcore-%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - 
server_name = azurerm_sql_server.test.name - max_size_bytes = 214748364800 - - sku { - name = "%[3]s" - tier = "%[4]s" - capacity = %[5]d - family = "%[6]s" - } - - per_database_settings { - min_capacity = %.2[7]f - max_capacity = %.2[8]f - } -} -`, data.RandomInteger, data.Locations.Primary, skuName, skuTier, skuCapacity, skuFamily, databaseSettingsMin, databaseSettingsMax) -} - -func testAccAzureRMMsSqlElasticPool_DTU_Template(data acceptance.TestData, skuName string, skuTier string, skuCapacity int, maxSizeGB float64, databaseSettingsMin int, databaseSettingsMax int, zoneRedundant bool) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%[1]d" - location = "%s" -} - -resource "azurerm_sql_server" "test" { - name = "acctest%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - version = "12.0" - administrator_login = "4dm1n157r470r" - administrator_login_password = "4-v3ry-53cr37-p455w0rd" -} - -resource "azurerm_mssql_elasticpool" "test" { - name = "acctest-pool-dtu-%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - max_size_gb = %.7[6]f - zone_redundant = %[9]t - - sku { - name = "%[3]s" - tier = "%[4]s" - capacity = %[5]d - } - - per_database_settings { - min_capacity = %[7]d - max_capacity = %[8]d - } -} -`, data.RandomInteger, data.Locations.Primary, skuName, skuTier, skuCapacity, maxSizeGB, databaseSettingsMin, databaseSettingsMax, zoneRedundant) -} - -func testAccAzureRMMsSqlElasticPool_licenseType_Template(data acceptance.TestData, licenseType string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%[1]d" - location = "%s" -} - -resource "azurerm_sql_server" "test" { - name = "acctest%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - version = "12.0" - administrator_login = "4dm1n157r470r" - administrator_login_password = "4-v3ry-53cr37-p455w0rd" -} - -resource "azurerm_mssql_elasticpool" "test" { - name = "acctest-pool-dtu-%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - server_name = azurerm_sql_server.test.name - max_size_gb = 50 - zone_redundant = false - license_type = "%[3]s" - - sku { - name = "GP_Gen5" - tier = "GeneralPurpose" - capacity = 4 - family = "Gen5" - } - - per_database_settings { - min_capacity = 0 - max_capacity = 4 - } - -} -`, data.RandomInteger, data.Locations.Primary, licenseType) -} diff --git a/azurerm/internal/services/mssql/tests/mssql_server_data_source_test.go b/azurerm/internal/services/mssql/tests/mssql_server_data_source_test.go deleted file mode 100644 index 90a983ef4a23..000000000000 --- a/azurerm/internal/services/mssql/tests/mssql_server_data_source_test.go +++ /dev/null @@ -1,75 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMMsSqlServer_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_mssql_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - 
CheckDestroy: testCheckAzureRMMsSqlServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMMsSqlServer_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlServerExists(data.ResourceName), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMMsSqlServer_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_mssql_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMMsSqlServer_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlServerExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "version"), - resource.TestCheckResourceAttrSet(data.ResourceName, "administrator_login"), - resource.TestCheckResourceAttrSet(data.ResourceName, "fully_qualified_domain_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "tags.%"), - ), - }, - }, - }) -} - -func testAccDataSourceAzureRMMsSqlServer_basic(data acceptance.TestData) string { - template := testAccAzureRMMsSqlServer_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_mssql_server" "test" { - name = azurerm_mssql_server.test.name - resource_group_name = azurerm_resource_group.test.name -} - -`, template) -} - -func testAccDataSourceAzureRMMsSqlServer_complete(data acceptance.TestData) string { - template := testAccAzureRMMsSqlServer_complete(data) - return fmt.Sprintf(` -%s - -data "azurerm_mssql_server" "test" { - name = azurerm_mssql_server.test.name - resource_group_name = azurerm_resource_group.test.name -} - -`, template) -} diff --git a/azurerm/internal/services/mssql/tests/mssql_server_extended_auditing_policy_resource_test.go b/azurerm/internal/services/mssql/tests/mssql_server_extended_auditing_policy_resource_test.go deleted file mode 100644 index 117a60893e46..000000000000 --- a/azurerm/internal/services/mssql/tests/mssql_server_extended_auditing_policy_resource_test.go +++ /dev/null @@ -1,343 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/preview/sql/mgmt/v3.0/sql" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMMsSqlServerExtendedAuditingPolicy_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_server_extended_auditing_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlServerExtendedAuditingPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlServerExtendedAuditingPolicy_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlServerExtendedAuditingPolicyExists(data.ResourceName), - ), - }, - data.ImportStep("storage_account_access_key"), - }, - }) -} - -func TestAccAzureRMMsSqlServerExtendedAuditingPolicy_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, 
"azurerm_mssql_server_extended_auditing_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlServerExtendedAuditingPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlServerExtendedAuditingPolicy_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlServerExtendedAuditingPolicyExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMMsSqlServerExtendedAuditingPolicy_requiresImport), - }, - }) -} - -func TestAccAzureRMMsSqlServerExtendedAuditingPolicy_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_server_extended_auditing_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlServerExtendedAuditingPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlServerExtendedAuditingPolicy_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlServerExtendedAuditingPolicyExists(data.ResourceName), - ), - }, - data.ImportStep("storage_account_access_key"), - }, - }) -} - -func TestAccAzureRMMsSqlServerExtendedAuditingPolicy_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_server_extended_auditing_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlServerExtendedAuditingPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlServerExtendedAuditingPolicy_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlServerExtendedAuditingPolicyExists(data.ResourceName), - ), - }, - data.ImportStep("storage_account_access_key"), - { - Config: testAccAzureRMMsSqlServerExtendedAuditingPolicy_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlServerExtendedAuditingPolicyExists(data.ResourceName), - ), - }, - data.ImportStep("storage_account_access_key"), - { - Config: testAccAzureRMMsSqlServerExtendedAuditingPolicy_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlServerExtendedAuditingPolicyExists(data.ResourceName), - ), - }, - data.ImportStep("storage_account_access_key"), - }, - }) -} - -func TestAccAzureRMMsSqlServerExtendedAuditingPolicy_storageAccBehindFireWall(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_server_extended_auditing_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlServerExtendedAuditingPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlServerExtendedAuditingPolicy_storageAccountBehindFireWall(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlServerExtendedAuditingPolicyExists(data.ResourceName), - ), - }, - data.ImportStep("storage_account_access_key"), - }, - }) -} - -func testCheckAzureRMMsSqlServerExtendedAuditingPolicyExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MSSQL.ServerExtendedBlobAuditingPoliciesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok 
:= s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.MssqlServerExtendedAuditingPolicyID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.MsSqlServer) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("msSql Server ExtendedAuditingPolicy %q (resource group: %q) does not exist", id.MsSqlServer, id.ResourceGroup) - } - - return fmt.Errorf("get on MsSql Server ExtendedAuditingPolicy Client: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMMsSqlServerExtendedAuditingPolicyDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MSSQL.ServerExtendedBlobAuditingPoliciesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_mssql_server_extended_auditing_policy" { - continue - } - - id, err := parse.MssqlServerExtendedAuditingPolicyID(rs.Primary.ID) - if err != nil { - return err - } - - if resp, err := client.Get(ctx, id.ResourceGroup, id.MsSqlServer); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("get on MsSql Server ExtendedAuditingPolicy Client: %+v", err) - } - - if resp.ExtendedServerBlobAuditingPolicyProperties != nil && resp.ExtendedServerBlobAuditingPolicyProperties.State == sql.BlobAuditingPolicyStateEnabled { - return fmt.Errorf("`azurerm_mssql_server_extended_auditing_policy` is still enabled") - } - } - return nil - } - - return nil -} - -func testAccAzureRMMsSqlServerExtendedAuditingPolicy_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-mssql-%[1]d" - location = "%[2]s" -} - -resource "azurerm_mssql_server" "test" { - name = "acctest-sqlserver-%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - version = "12.0" - administrator_login = "missadministrator" - administrator_login_password = "AdminPassword123!" 
-} - -resource "azurerm_storage_account" "test" { - name = "unlikely23exst2acct%[3]s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func testAccAzureRMMsSqlServerExtendedAuditingPolicy_basic(data acceptance.TestData) string { - template := testAccAzureRMMsSqlServerExtendedAuditingPolicy_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_server_extended_auditing_policy" "test" { - server_id = azurerm_mssql_server.test.id - storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint - storage_account_access_key = azurerm_storage_account.test.primary_access_key -} -`, template) -} - -func testAccAzureRMMsSqlServerExtendedAuditingPolicy_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMMsSqlServerExtendedAuditingPolicy_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_server_extended_auditing_policy" "import" { - server_id = azurerm_mssql_server.test.id - storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint - storage_account_access_key = azurerm_storage_account.test.primary_access_key -} -`, template) -} - -func testAccAzureRMMsSqlServerExtendedAuditingPolicy_complete(data acceptance.TestData) string { - template := testAccAzureRMMsSqlServerExtendedAuditingPolicy_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_server_extended_auditing_policy" "test" { - server_id = azurerm_mssql_server.test.id - storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint - storage_account_access_key = azurerm_storage_account.test.primary_access_key - storage_account_access_key_is_secondary = false - retention_in_days = 6 -} -`, template) -} - -func testAccAzureRMMsSqlServerExtendedAuditingPolicy_update(data acceptance.TestData) string { - template := testAccAzureRMMsSqlServerExtendedAuditingPolicy_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_storage_account" "test2" { - name = "unlikely23exst2acc2%[2]s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_mssql_server_extended_auditing_policy" "test" { - server_id = azurerm_mssql_server.test.id - storage_endpoint = azurerm_storage_account.test2.primary_blob_endpoint - storage_account_access_key = azurerm_storage_account.test2.primary_access_key - storage_account_access_key_is_secondary = true - retention_in_days = 3 -} -`, template, data.RandomString) -} - -func testAccAzureRMMsSqlServerExtendedAuditingPolicy_storageAccountBehindFireWall(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-mssql-%[1]d" - location = "%[2]s" -} - -resource "azurerm_mssql_server" "test" { - name = "acctest-sqlserver-%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - version = "12.0" - administrator_login = "missadministrator" - administrator_login_password = "AdminPassword123!" 
- identity { - type = "SystemAssigned" - } -} - -resource "azurerm_virtual_network" "test" { - name = "acctestvirtnet%[1]d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctestsubnet%[1]d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.2.0/24" - service_endpoints = ["Microsoft.Storage"] -} - -resource "azurerm_storage_account" "test" { - name = "unlikely23exst2acct%[3]s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" - - network_rules { - default_action = "Deny" - ip_rules = ["127.0.0.1"] - virtual_network_subnet_ids = [azurerm_subnet.test.id] - } -} - -resource "azurerm_role_assignment" "test" { - scope = azurerm_storage_account.test.id - role_definition_name = "Storage Blob Data Contributor" - principal_id = azurerm_mssql_server.test.identity.0.principal_id -} - -resource "azurerm_mssql_server_extended_auditing_policy" "test" { - server_id = azurerm_mssql_server.test.id - storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint - - depends_on = [ - azurerm_role_assignment.test, - ] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} diff --git a/azurerm/internal/services/mssql/tests/mssql_server_resource_test.go b/azurerm/internal/services/mssql/tests/mssql_server_resource_test.go deleted file mode 100644 index 615c7ea3f893..000000000000 --- a/azurerm/internal/services/mssql/tests/mssql_server_resource_test.go +++ /dev/null @@ -1,722 +0,0 @@ -package tests - -import ( - "fmt" - "os" - "regexp" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMMsSqlServer_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlServer_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - }, - }) -} - -func TestAccAzureRMMsSqlServer_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlServer_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password", "extended_auditing_policy.0.storage_account_access_key"), - }, - }) -} - -func TestAccAzureRMMsSqlServer_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_server", "test") - - 
resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlServer_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlServerExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMMsSqlServer_requiresImport), - }, - }) -} - -func TestAccAzureRMMsSqlServer_disappears(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlServer_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlServerExists(data.ResourceName), - testCheckAzureRMMsSqlServerDisappears(data.ResourceName), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func TestAccAzureRMMsSqlServer_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlServer_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - { - Config: testAccAzureRMMsSqlServer_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password", "extended_auditing_policy.0.storage_account_access_key"), - { - Config: testAccAzureRMMsSqlServer_complete2(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password", "extended_auditing_policy.0.storage_account_access_key"), - { - Config: testAccAzureRMMsSqlServer_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - }, - }) -} - -func TestAccAzureRMMsSqlServer_identity(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlServer_identity(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password", "extended_auditing_policy.0.storage_account_access_key"), - }, - }) -} - -func TestAccAzureRMMsSqlServer_azureadAdmin(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlServer_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlServerExists(data.ResourceName), - ), - }, - 
data.ImportStep("administrator_login_password"), - { - Config: testAccAzureRMMsSqlServer_azureadAdmin(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - { - Config: testAccAzureRMMsSqlServer_azureadAdminUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - { - Config: testAccAzureRMMsSqlServer_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - }, - }) -} - -func TestAccAzureRMMsSqlServer_blobAuditingPolicies_withFirewall(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlServer_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password", "extended_auditing_policy.0.storage_account_access_key"), - { - Config: testAccAzureRMMsSqlServer_blobAuditingPolicies_withFirewall(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password", "extended_auditing_policy.0.storage_account_access_key"), - { - Config: testAccAzureRMMsSqlServer_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password", "extended_auditing_policy.0.storage_account_access_key"), - }, - }) -} - -func TestAccAzureRMMsSqlServer_customDiff(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlServer_basicWithMinimumTLSVersion(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - { - Config: testAccAzureRMMsSqlServer_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlServerExists(data.ResourceName), - ), - ExpectError: regexp.MustCompile("`minimum_tls_version` cannot be removed once set, please set a valid value for this property"), - }, - data.ImportStep("administrator_login_password"), - }, - }) -} - -func testCheckAzureRMMsSqlServerExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Sql.ServersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - sqlServerName := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for SQL 
Server: %s", sqlServerName) - } - - resp, err := conn.Get(ctx, resourceGroup, sqlServerName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: SQL Server %s (resource group: %s) does not exist", sqlServerName, resourceGroup) - } - return fmt.Errorf("Bad: Get SQL Server: %v", err) - } - - return nil - } -} - -func testCheckAzureRMMsSqlServerDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).MSSQL.ServersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_mssql_server" { - continue - } - - sqlServerName := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, sqlServerName) - - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return fmt.Errorf("Bad: Get Server: %+v", err) - } - - return fmt.Errorf("SQL Server %s still exists", sqlServerName) - } - - return nil -} - -func testCheckAzureRMMsSqlServerDisappears(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MSSQL.ServersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serverName := rs.Primary.Attributes["name"] - - future, err := client.Delete(ctx, resourceGroup, serverName) - if err != nil { - return err - } - - return future.WaitForCompletionRef(ctx, client.Client) - } -} - -func testAccAzureRMMsSqlServer_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-mssql-%d" - location = "%s" -} - -resource "azurerm_mssql_server" "test" { - name = "acctestsqlserver%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - version = "12.0" - administrator_login = "missadministrator" - administrator_login_password = "thisIsKat11" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMMsSqlServer_basicWithMinimumTLSVersion(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-mssql-%d" - location = "%s" -} - -resource "azurerm_mssql_server" "test" { - name = "acctestsqlserver%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - version = "12.0" - administrator_login = "missadministrator" - administrator_login_password = "thisIsKat11" - minimum_tls_version = "1.2" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMMsSqlServer_requiresImport(data acceptance.TestData) string { - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_server" "import" { - name = azurerm_mssql_server.test.name - resource_group_name = azurerm_mssql_server.test.resource_group_name - location = azurerm_mssql_server.test.location - version = azurerm_mssql_server.test.version - administrator_login = azurerm_mssql_server.test.administrator_login - administrator_login_password = 
azurerm_mssql_server.test.administrator_login_password -} -`, testAccAzureRMMsSqlServer_basic(data)) -} - -func testAccAzureRMMsSqlServer_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-mssql-%[1]d" - location = "%[2]s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctestvnet-%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - address_space = ["10.5.0.0/16"] -} - -resource "azurerm_subnet" "service" { - name = "acctestsnetservice-%[1]d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefixes = ["10.5.1.0/24"] - - enforce_private_link_service_network_policies = true -} - -resource "azurerm_subnet" "endpoint" { - name = "acctestsnetendpoint-%[1]d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefixes = ["10.5.2.0/24"] - - enforce_private_link_endpoint_network_policies = true -} - -resource "azurerm_storage_account" "test" { - name = "acctesta%[3]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_mssql_server" "test" { - name = "acctestsqlserver%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - version = "12.0" - administrator_login = "missadministrator" - administrator_login_password = "thisIsKat11" - minimum_tls_version = "1.2" - - public_network_access_enabled = true - - extended_auditing_policy { - storage_account_access_key = azurerm_storage_account.test.primary_access_key - storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint - storage_account_access_key_is_secondary = true - retention_in_days = 6 - } - - tags = { - ENV = "Staging" - database = "NotProd" - } -} - -resource "azurerm_private_dns_zone" "finance" { - name = "privatelink.sql.database.azure.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_endpoint" "test" { - name = "acctest-privatelink-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - subnet_id = azurerm_subnet.endpoint.id - - private_service_connection { - name = "acctest-privatelink-mssc-%[1]d" - private_connection_resource_id = azurerm_mssql_server.test.id - subresource_names = ["sqlServer"] - is_manual_connection = false - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomIntOfLength(15)) -} - -func testAccAzureRMMsSqlServer_complete2(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-mssql-%[1]d" - location = "%[2]s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctestvnet-%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - address_space = ["10.5.0.0/16"] -} - -resource "azurerm_subnet" "service" { - name = "acctestsnetservice-%[1]d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefixes = ["10.5.1.0/24"] - - enforce_private_link_service_network_policies = true -} - -resource "azurerm_subnet" 
"endpoint" { - name = "acctestsnetendpoint-%[1]d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefixes = ["10.5.2.0/24"] - - enforce_private_link_endpoint_network_policies = true -} - -resource "azurerm_storage_account" "testb" { - name = "acctestb%[3]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_mssql_server" "test" { - name = "acctestsqlserver%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - version = "12.0" - administrator_login = "missadministrator" - administrator_login_password = "thisIsKat11" - minimum_tls_version = "1.0" - - public_network_access_enabled = false - - extended_auditing_policy { - storage_account_access_key = azurerm_storage_account.testb.primary_access_key - storage_endpoint = azurerm_storage_account.testb.primary_blob_endpoint - storage_account_access_key_is_secondary = false - retention_in_days = 11 - } - - tags = { - DB = "NotProd" - } -} - -resource "azurerm_private_dns_zone" "finance" { - name = "privatelink.sql.database.azure.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_endpoint" "test" { - name = "acctest-privatelink-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - subnet_id = azurerm_subnet.endpoint.id - - private_service_connection { - name = "acctest-privatelink-mssc-%[1]d" - private_connection_resource_id = azurerm_mssql_server.test.id - subresource_names = ["sqlServer"] - is_manual_connection = false - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomIntOfLength(15)) -} - -func testAccAzureRMMsSqlServer_identity(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-mssql-%d" - location = "%s" -} - -resource "azurerm_mssql_server" "test" { - name = "acctestsqlserver%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - version = "12.0" - administrator_login = "missadministrator" - administrator_login_password = "thisIsKat11" - - identity { - type = "SystemAssigned" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMMsSqlServer_azureadAdmin(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-mssql-%[1]d" - location = "%[2]s" -} - -data "azuread_service_principal" "test" { - application_id = "%[3]s" -} - -resource "azurerm_mssql_server" "test" { - name = "acctestsqlserver%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - version = "12.0" - administrator_login = "missadministrator" - administrator_login_password = "thisIsKat11" - - azuread_administrator { - login_username = "AzureAD Admin" - object_id = data.azuread_service_principal.test.id - } -} -`, data.RandomInteger, data.Locations.Primary, os.Getenv("ARM_CLIENT_ID")) -} - -func testAccAzureRMMsSqlServer_azureadAdminUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-mssql-%[1]d" - 
location = "%[2]s" -} - -data "azuread_service_principal" "test" { - application_id = "%[3]s" -} - -resource "azurerm_mssql_server" "test" { - name = "acctestsqlserver%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - version = "12.0" - administrator_login = "missadministrator" - administrator_login_password = "thisIsKat11" - - azuread_administrator { - login_username = "AzureAD Admin2" - object_id = data.azuread_service_principal.test.id - } -} -`, data.RandomInteger, data.Locations.Primary, os.Getenv("ARM_CLIENT_ID")) -} - -func testAccAzureRMMsSqlServer_blobAuditingPolicies_withFirewall(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-mssql-%[1]d" - location = "%[2]s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctestvirtnet%[1]d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctestsubnet%[1]d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.2.0/24" - service_endpoints = ["Microsoft.Storage"] -} - -resource "azurerm_storage_account" "test" { - name = "unlikely23exst2acct%[3]s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" - - network_rules { - default_action = "Allow" - ip_rules = ["127.0.0.1"] - virtual_network_subnet_ids = [azurerm_subnet.test.id] - } -} - -data "azuread_service_principal" "test" { - application_id = "%[4]s" -} - -resource "azurerm_mssql_server" "test" { - name = "acctestsqlserver%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - version = "12.0" - administrator_login = "missadministrator" - administrator_login_password = "thisIsKat11" - - azuread_administrator { - login_username = "AzureAD Admin2" - object_id = data.azuread_service_principal.test.id - } - - extended_auditing_policy { - storage_account_access_key = azurerm_storage_account.test.primary_access_key - storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint - storage_account_access_key_is_secondary = true - retention_in_days = 6 - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, os.Getenv("ARM_CLIENT_ID")) -} diff --git a/azurerm/internal/services/mssql/tests/mssql_server_security_alert_policy_resource_test.go b/azurerm/internal/services/mssql/tests/mssql_server_security_alert_policy_resource_test.go deleted file mode 100644 index a2c063ec9e3d..000000000000 --- a/azurerm/internal/services/mssql/tests/mssql_server_security_alert_policy_resource_test.go +++ /dev/null @@ -1,192 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMMssqlServerSecurityAlertPolicy_basic(t *testing.T) { - data := acceptance.BuildTestData(t, 
"azurerm_mssql_server_security_alert_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMssqlServerSecurityAlertPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMssqlServerSecurityAlertPolicy_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMssqlServerSecurityAlertPolicyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "state", "Enabled"), - resource.TestCheckResourceAttr(data.ResourceName, "disabled_alerts.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "email_account_admins", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_days", "20"), - resource.TestCheckResourceAttr(data.ResourceName, "email_addresses.#", "0"), - ), - }, - data.ImportStep("storage_account_access_key"), - }, - }) -} - -func TestAccAzureRMMssqlServerSecurityAlertPolicy_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_server_security_alert_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMssqlServerSecurityAlertPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMssqlServerSecurityAlertPolicy_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMssqlServerSecurityAlertPolicyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "state", "Enabled"), - resource.TestCheckResourceAttr(data.ResourceName, "disabled_alerts.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "email_account_admins", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_days", "20"), - resource.TestCheckResourceAttr(data.ResourceName, "email_addresses.#", "0"), - ), - }, - data.ImportStep("storage_account_access_key"), - { - Config: testAccAzureRMMssqlServerSecurityAlertPolicy_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMssqlServerSecurityAlertPolicyExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "state", "Enabled"), - resource.TestCheckResourceAttr(data.ResourceName, "disabled_alerts.#", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "email_account_admins", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_days", "30"), - resource.TestCheckResourceAttr(data.ResourceName, "email_addresses.#", "0"), - ), - }, - data.ImportStep("storage_account_access_key"), - }, - }) -} - -func testCheckAzureRMMssqlServerSecurityAlertPolicyExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MSSQL.ServerSecurityAlertPoliciesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("resource not found: %s", resourceName) - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serverName := rs.Primary.Attributes["server_name"] - - resp, err := client.Get(ctx, resourceGroup, serverName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("security alert policy was not found for resource group %q, sql server %q", - resourceGroup, serverName) - } - - return err - } - - return nil - } -} - -func 
testCheckAzureRMMssqlServerSecurityAlertPolicyDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MSSQL.ServerSecurityAlertPoliciesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_mssql_server_security_alert_policy" { - continue - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serverName := rs.Primary.Attributes["server_name"] - - resp, err := client.Get(ctx, resourceGroup, serverName) - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Security Alert Policy still exists:\n%#v", resp.SecurityAlertPolicyProperties) - } - } - - return nil -} - -func testAccAzureRMMssqlServerSecurityAlertPolicy_basic(data acceptance.TestData) string { - server := testAccAzureRMMssqlServerSecurityAlertPolicy_server(data) - - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_server_security_alert_policy" "test" { - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_sql_server.test.name - state = "Enabled" - storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint - storage_account_access_key = azurerm_storage_account.test.primary_access_key - disabled_alerts = [ - "Sql_Injection", - "Data_Exfiltration" - ] - retention_days = 20 -} -`, server) -} - -func testAccAzureRMMssqlServerSecurityAlertPolicy_update(data acceptance.TestData) string { - server := testAccAzureRMMssqlServerSecurityAlertPolicy_server(data) - - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_server_security_alert_policy" "test" { - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_sql_server.test.name - state = "Enabled" - email_account_admins = true - retention_days = 30 -} -`, server) -} - -func testAccAzureRMMssqlServerSecurityAlertPolicy_server(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-mssql-%d" - location = "%s" -} - -resource "azurerm_sql_server" "test" { - name = "acctestsqlserver%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - version = "12.0" - administrator_login = "mradministrator" - administrator_login_password = "thisIsDog11" -} - -resource "azurerm_storage_account" "test" { - name = "accsa%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "GRS" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/mssql/tests/mssql_server_vulnerability_assessment_resource_test.go b/azurerm/internal/services/mssql/tests/mssql_server_vulnerability_assessment_resource_test.go deleted file mode 100644 index 61c0f702403d..000000000000 --- a/azurerm/internal/services/mssql/tests/mssql_server_vulnerability_assessment_resource_test.go +++ /dev/null @@ -1,203 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMMssqlServerVulnerabilityAssessment_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_server_vulnerability_assessment", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMssqlServerVulnerabilityAssessmentDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMssqlServerVulnerabilityAssessment_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMssqlServerVulnerabilityAssessmentExists(data.ResourceName), - ), - }, - data.ImportStep("storage_account_access_key"), - }, - }) -} - -func TestAccAzureRMMssqlServerVulnerabilityAssessment_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_server_vulnerability_assessment", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMssqlServerVulnerabilityAssessmentDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMssqlServerVulnerabilityAssessment_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMssqlServerVulnerabilityAssessmentExists(data.ResourceName), - ), - }, - data.ImportStep("storage_account_access_key"), - { - Config: testAccAzureRMMssqlServerVulnerabilityAssessment_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMssqlServerVulnerabilityAssessmentExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "recurring_scans.0.enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "recurring_scans.0.emails.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "recurring_scans.0.email_subscription_admins", "true"), - ), - }, - data.ImportStep("storage_account_access_key"), - }, - }) -} - -func testCheckAzureRMMssqlServerVulnerabilityAssessmentExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MSSQL.ServerVulnerabilityAssessmentsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("resource not found: %s", resourceName) - } - - id, err := azure.ParseAzureResourceID(rs.Primary.ID) - if err != nil { - return err - } - - resourceGroupName := id.ResourceGroup - serverName := id.Path["servers"] - - result, err := client.Get(ctx, resourceGroupName, serverName) - if err != nil { - if utils.ResponseWasNotFound(result.Response) { - return fmt.Errorf("vulnerability assessment was not found for resource group %q, sql server %q", - resourceGroupName, serverName) - } - - return err - } - - return nil - } -} - -func testCheckAzureRMMssqlServerVulnerabilityAssessmentDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MSSQL.ServerVulnerabilityAssessmentsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_mssql_server_vulnerability_assessment" { - continue - } - - id, err := azure.ParseAzureResourceID(rs.Primary.ID) - if err != nil { - return err - } - - resourceGroupName := id.ResourceGroup - 
serverName := id.Path["servers"] - - result, err := client.Get(ctx, resourceGroupName, serverName) - if err != nil { - return nil - } - - if result.StatusCode != http.StatusNotFound { - return fmt.Errorf("Server Vulnerability Assessment still exists:\n%#v", result.ServerVulnerabilityAssessmentProperties) - } - } - - return nil -} - -func testAccAzureRMMssqlServerVulnerabilityAssessment_basic(data acceptance.TestData) string { - server := testAccAzureRMMssqlServerVulnerabilityAssessment_server(data) - - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_server_vulnerability_assessment" "test" { - server_security_alert_policy_id = azurerm_mssql_server_security_alert_policy.test.id - storage_container_path = "${azurerm_storage_account.test.primary_blob_endpoint}${azurerm_storage_container.test.name}/" - storage_account_access_key = azurerm_storage_account.test.primary_access_key -} -`, server) -} - -func testAccAzureRMMssqlServerVulnerabilityAssessment_update(data acceptance.TestData) string { - server := testAccAzureRMMssqlServerVulnerabilityAssessment_server(data) - - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_server_vulnerability_assessment" "test" { - server_security_alert_policy_id = azurerm_mssql_server_security_alert_policy.test.id - storage_container_path = "${azurerm_storage_account.test.primary_blob_endpoint}${azurerm_storage_container.test.name}/" - storage_account_access_key = azurerm_storage_account.test.primary_access_key - - recurring_scans { - enabled = true - email_subscription_admins = true - emails = [ - "email@example1.com", - "email@example2.com" - ] - } -} -`, server) -} - -func testAccAzureRMMssqlServerVulnerabilityAssessment_server(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-mssql-%d" - location = "%s" -} - -resource "azurerm_sql_server" "test" { - name = "acctestsqlserver%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - version = "12.0" - administrator_login = "mradministrator" - administrator_login_password = "thisIsDog11" -} - -resource "azurerm_storage_account" "test" { - name = "accsa%d" - resource_group_name = azurerm_resource_group.test.name - location = "%s" - account_tier = "Standard" - account_replication_type = "GRS" -} - -resource "azurerm_storage_container" "test" { - name = "acctestsc%d" - storage_account_name = azurerm_storage_account.test.name - container_access_type = "private" -} - -resource "azurerm_mssql_server_security_alert_policy" "test" { - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_sql_server.test.name - state = "Enabled" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/mssql/tests/mssql_virtual_machine_resource_test.go b/azurerm/internal/services/mssql/tests/mssql_virtual_machine_resource_test.go deleted file mode 100644 index 7d9b9f0721d6..000000000000 --- a/azurerm/internal/services/mssql/tests/mssql_virtual_machine_resource_test.go +++ /dev/null @@ -1,651 +0,0 @@ -package tests - -import ( - "fmt" - "regexp" - "testing" - - "github.com/hashicorp/go-uuid" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMMsSqlVirtualMachine_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_virtual_machine", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlVirtualMachineDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlVirtualMachine_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlVirtualMachineExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMsSqlVirtualMachine_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_virtual_machine", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlVirtualMachineDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlVirtualMachine_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlVirtualMachineExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMMsSqlVirtualMachine_requiresImport), - }, - }) -} - -func TestAccAzureRMMsSqlVirtualMachine_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_virtual_machine", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlVirtualMachineDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlVirtualMachine_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlVirtualMachineExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "r_services_enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "sql_connectivity_type", "PRIVATE"), - resource.TestCheckResourceAttr(data.ResourceName, "sql_connectivity_port", "1433"), - ), - }, - data.ImportStep("sql_connectivity_update_password", "sql_connectivity_update_username"), - { - Config: testAccAzureRMMsSqlVirtualMachine_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlVirtualMachineExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "r_services_enabled", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "sql_connectivity_type", "PUBLIC"), - resource.TestCheckResourceAttr(data.ResourceName, "sql_connectivity_port", "1533"), - ), - }, - data.ImportStep("sql_connectivity_update_password", "sql_connectivity_update_username"), - }, - }) -} - -func TestAccAzureRMMsSqlVirtualMachine_updateAutoPatching(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_virtual_machine", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlVirtualMachineDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlVirtualMachine_withAutoPatching(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlVirtualMachineExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, 
"auto_patching.0.day_of_week", "Sunday"), - resource.TestCheckResourceAttr(data.ResourceName, "auto_patching.0.maintenance_window_duration_in_minutes", "60"), - resource.TestCheckResourceAttr(data.ResourceName, "auto_patching.0.maintenance_window_starting_hour", "2"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMMsSqlVirtualMachine_withAutoPatchingUpdated(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlVirtualMachineExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "auto_patching.0.day_of_week", "Monday"), - resource.TestCheckResourceAttr(data.ResourceName, "auto_patching.0.maintenance_window_duration_in_minutes", "90"), - resource.TestCheckResourceAttr(data.ResourceName, "auto_patching.0.maintenance_window_starting_hour", "4"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMsSqlVirtualMachine_updateKeyVault(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_virtual_machine", "test") - value, err := uuid.GenerateUUID() - if err != nil { - t.Fatal(err) - } - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlVirtualMachineDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlVirtualMachine_withKeyVault(data, value), - Check: resource.ComposeTestCheckFunc( - resource.TestMatchResourceAttr(data.ResourceName, "key_vault_credential.0.name", regexp.MustCompile("/*:acctestkv")), - ), - }, - data.ImportStep("key_vault_credential.0.key_vault_url", "key_vault_credential.0.service_principal_name", "key_vault_credential.0.service_principal_secret"), - - { - Config: testAccAzureRMMsSqlVirtualMachine_withKeyVaultUpdated(data, value), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlVirtualMachineExists(data.ResourceName), - resource.TestMatchResourceAttr(data.ResourceName, "key_vault_credential.0.name", regexp.MustCompile("/*:acctestkv2")), - ), - }, - data.ImportStep("key_vault_credential.0.key_vault_url", "key_vault_credential.0.service_principal_name", "key_vault_credential.0.service_principal_secret"), - }, - }) -} - -func TestAccAzureRMMsSqlVirtualMachine_storageConfigurationSettings(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mssql_virtual_machine", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMsSqlVirtualMachineDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMsSqlVirtualMachine_storageConfigurationSettings(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlVirtualMachineExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMMsSqlVirtualMachine_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMsSqlVirtualMachineExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMMsSqlVirtualMachineExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Sql Virtual Machine not found: %s", resourceName) - } - - id, err := parse.MssqlVmID(rs.Primary.ID) - if err != nil { - return err - } - - client := acceptance.AzureProvider.Meta().(*clients.Client).MSSQL.VirtualMachinesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - if resp, err := 
client.Get(ctx, id.ResourceGroup, id.Name, ""); err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Sql Virtual Machine (Sql Virtual Machine Name %q / Resource Group %q) does not exist", id.Name, id.ResourceGroup) - } - return fmt.Errorf("Bad: Get on VirtualMachinesClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMMsSqlVirtualMachineDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MSSQL.VirtualMachinesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_mssql_virtual_machine" { - continue - } - - id, err := parse.MssqlVmID(rs.Primary.ID) - if err != nil { - return err - } - - if resp, err := client.Get(ctx, id.ResourceGroup, id.Name, ""); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Get on VirtualMachinesClient: %+v", err) - } - } - - return nil - } - - return nil -} - -func testAccAzureRMVirtualMachine_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-mssql-%[1]d" - location = "%[2]s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctest-VN-%[1]d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctest-SN-%[1]d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.0.0/24" -} - -resource "azurerm_subnet_network_security_group_association" "test" { - subnet_id = azurerm_subnet.test.id - network_security_group_id = azurerm_network_security_group.test.id -} - -resource "azurerm_public_ip" "vm" { - name = "acctest-PIP-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Dynamic" -} - -resource "azurerm_network_security_group" "test" { - name = "acctest-NSG-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_network_security_rule" "RDPRule" { - name = "RDPRule" - resource_group_name = azurerm_resource_group.test.name - priority = 1000 - direction = "Inbound" - access = "Allow" - protocol = "Tcp" - source_port_range = "*" - destination_port_range = 3389 - source_address_prefix = "167.220.255.0/25" - destination_address_prefix = "*" - network_security_group_name = azurerm_network_security_group.test.name -} - -resource "azurerm_network_security_rule" "MSSQLRule" { - name = "MSSQLRule" - resource_group_name = azurerm_resource_group.test.name - priority = 1001 - direction = "Inbound" - access = "Allow" - protocol = "Tcp" - source_port_range = "*" - destination_port_range = 1433 - source_address_prefix = "167.220.255.0/25" - destination_address_prefix = "*" - network_security_group_name = azurerm_network_security_group.test.name -} - -resource "azurerm_network_interface" "test" { - name = "acctest-NIC-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - ip_configuration { - name = "testconfiguration1" - subnet_id = azurerm_subnet.test.id - private_ip_address_allocation = "Dynamic" - public_ip_address_id = azurerm_public_ip.vm.id - } -} - -resource 
"azurerm_virtual_machine" "test" { - name = "acctest-VM-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - network_interface_ids = [azurerm_network_interface.test.id] - vm_size = "Standard_F2s" - - storage_image_reference { - publisher = "MicrosoftSQLServer" - offer = "SQL2017-WS2016" - sku = "SQLDEV" - version = "latest" - } - - storage_os_disk { - name = "acctvm-%[1]dOSDisk" - caching = "ReadOnly" - create_option = "FromImage" - managed_disk_type = "Premium_LRS" - } - - os_profile { - computer_name = "winhost01" - admin_username = "testadmin" - admin_password = "Password1234!" - } - - os_profile_windows_config { - timezone = "Pacific Standard Time" - provision_vm_agent = true - enable_automatic_upgrades = true - } -} -`, data.RandomInteger, data.Locations.Primary) -} - -func testAccAzureRMMsSqlVirtualMachine_basic(data acceptance.TestData) string { - vmconfig := testAccAzureRMVirtualMachine_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_virtual_machine" "test" { - virtual_machine_id = azurerm_virtual_machine.test.id - sql_license_type = "PAYG" -} -`, vmconfig) -} - -func testAccAzureRMMsSqlVirtualMachine_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMMsSqlVirtualMachine_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_virtual_machine" "import" { - virtual_machine_id = azurerm_mssql_virtual_machine.test.virtual_machine_id - sql_license_type = azurerm_mssql_virtual_machine.test.sql_license_type -} -`, template) -} - -func testAccAzureRMMsSqlVirtualMachine_complete(data acceptance.TestData) string { - vmconfig := testAccAzureRMVirtualMachine_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_virtual_machine" "test" { - virtual_machine_id = azurerm_virtual_machine.test.id - sql_license_type = "PAYG" - r_services_enabled = true - sql_connectivity_port = 1433 - sql_connectivity_type = "PRIVATE" - sql_connectivity_update_password = "Password1234!" - sql_connectivity_update_username = "sqllogin" -} -`, vmconfig) -} - -func testAccAzureRMMsSqlVirtualMachine_update(data acceptance.TestData) string { - vmconfig := testAccAzureRMVirtualMachine_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_virtual_machine" "test" { - virtual_machine_id = azurerm_virtual_machine.test.id - sql_license_type = "PAYG" - r_services_enabled = false - sql_connectivity_port = 1533 - sql_connectivity_type = "PUBLIC" - sql_connectivity_update_password = "Password12344321!" 
- sql_connectivity_update_username = "sqlloginupdate" -} -`, vmconfig) -} - -func testAccAzureRMMsSqlVirtualMachine_withAutoPatching(data acceptance.TestData) string { - vmconfig := testAccAzureRMVirtualMachine_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_virtual_machine" "test" { - virtual_machine_id = azurerm_virtual_machine.test.id - sql_license_type = "PAYG" - - auto_patching { - day_of_week = "Sunday" - maintenance_window_duration_in_minutes = 60 - maintenance_window_starting_hour = 2 - } -} -`, vmconfig) -} - -func testAccAzureRMMsSqlVirtualMachine_withAutoPatchingUpdated(data acceptance.TestData) string { - vmconfig := testAccAzureRMVirtualMachine_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mssql_virtual_machine" "test" { - virtual_machine_id = azurerm_virtual_machine.test.id - sql_license_type = "PAYG" - - auto_patching { - day_of_week = "Monday" - maintenance_window_duration_in_minutes = 90 - maintenance_window_starting_hour = 4 - } -} -`, vmconfig) -} - -func testAccAzureRMMsSqlVirtualMachine_withKeyVault(data acceptance.TestData, value string) string { - vmconfig := testAccAzureRMVirtualMachine_template(data) - return fmt.Sprintf(` -%s - -data "azurerm_client_config" "current" {} - -resource "azurerm_key_vault" "test" { - name = "acckv-%[2]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "premium" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - key_permissions = [ - "create", - "delete", - "get", - "update", - ] - - secret_permissions = [ - "get", - "delete", - "set", - ] - } - - tags = { - environment = "Production" - } -} - -resource "azurerm_key_vault_key" "generated" { - name = "key-%[2]d" - key_vault_id = azurerm_key_vault.test.id - key_type = "RSA" - key_size = 2048 - - key_opts = [ - "decrypt", - "encrypt", - "sign", - "unwrapKey", - "verify", - "wrapKey", - ] -} - -resource "azuread_application" "test" { - name = "acctestspa%[2]d" -} - -resource "azuread_service_principal" "test" { - application_id = azuread_application.test.application_id -} - -resource "azuread_service_principal_password" "test" { - service_principal_id = azuread_service_principal.test.id - value = "%s" - end_date = "2021-01-01T01:02:03Z" -} - -resource "azurerm_mssql_virtual_machine" "test" { - virtual_machine_id = azurerm_virtual_machine.test.id - sql_license_type = "PAYG" - key_vault_credential { - name = "acctestkv" - key_vault_url = azurerm_key_vault_key.generated.id - service_principal_name = azuread_service_principal.test.display_name - service_principal_secret = azuread_service_principal_password.test.value - } -} -`, vmconfig, data.RandomInteger, value) -} - -func testAccAzureRMMsSqlVirtualMachine_withKeyVaultUpdated(data acceptance.TestData, value string) string { - vmconfig := testAccAzureRMVirtualMachine_template(data) - return fmt.Sprintf(` -%s - -data "azurerm_client_config" "current" {} - -resource "azurerm_key_vault" "test" { - name = "acckv-%[2]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - - sku_name = "premium" - - access_policy { - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - key_permissions = [ - "create", - "delete", - 
"get", - "update", - ] - - secret_permissions = [ - "get", - "delete", - "set", - ] - } - - tags = { - environment = "Production" - } -} - -resource "azurerm_key_vault_key" "generated" { - name = "key-%[2]d" - key_vault_id = azurerm_key_vault.test.id - key_type = "RSA" - key_size = 2048 - - key_opts = [ - "decrypt", - "encrypt", - "sign", - "unwrapKey", - "verify", - "wrapKey", - ] -} - -resource "azuread_application" "test" { - name = "acctestspa%[2]d" -} - -resource "azuread_service_principal" "test" { - application_id = azuread_application.test.application_id -} - -resource "azuread_service_principal_password" "test" { - service_principal_id = azuread_service_principal.test.id - value = "%s" - end_date = "2021-01-01T01:02:03Z" -} - -resource "azurerm_mssql_virtual_machine" "test" { - virtual_machine_id = azurerm_virtual_machine.test.id - sql_license_type = "PAYG" - key_vault_credential { - name = "acctestkv2" - key_vault_url = azurerm_key_vault_key.generated.id - service_principal_name = azuread_service_principal.test.display_name - service_principal_secret = azuread_service_principal_password.test.value - } -} -`, vmconfig, data.RandomInteger, value) -} - -func testAccAzureRMMsSqlVirtualMachine_storageConfigurationSettings(data acceptance.TestData) string { - vmconfig := testAccAzureRMVirtualMachine_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_managed_disk" "test" { - name = "accmd-sqlvm-%[2]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - storage_account_type = "Standard_LRS" - create_option = "Empty" - disk_size_gb = 10 -} - -resource "azurerm_virtual_machine_data_disk_attachment" "test" { - managed_disk_id = azurerm_managed_disk.test.id - virtual_machine_id = azurerm_virtual_machine.test.id - lun = "0" - caching = "None" -} - -resource "azurerm_mssql_virtual_machine" "test" { - virtual_machine_id = azurerm_virtual_machine.test.id - sql_license_type = "PAYG" - - storage_configuration { - disk_type = "NEW" - storage_workload_type = "OLTP" - - data_settings { - luns = [0] - default_file_path = "F:\\SQLData" - } - - log_settings { - luns = [0] - default_file_path = "F:\\SQLLog" - } - - temp_db_settings { - luns = [0] - default_file_path = "F:\\SQLTemp" - } - } -} -`, vmconfig, data.RandomInteger) -} diff --git a/azurerm/internal/services/mssql/validate/database_auto_pause_delay.go b/azurerm/internal/services/mssql/validate/database_auto_pause_delay.go new file mode 100644 index 000000000000..8caf20e78b60 --- /dev/null +++ b/azurerm/internal/services/mssql/validate/database_auto_pause_delay.go @@ -0,0 +1,19 @@ +package validate + +import "fmt" + +func DatabaseAutoPauseDelay(i interface{}, k string) (warnings []string, errors []error) { + v, ok := i.(int) + if !ok { + errors = append(errors, fmt.Errorf("expected type of %s to be integer", k)) + return warnings, errors + } + min := 60 + max := 10080 + if (v < min || v > max) && v%10 != 0 && v != -1 { + errors = append(errors, fmt.Errorf("expected %s to be in the range (%d - %d) and divisible by 10 or -1, got %d", k, min, max, v)) + return warnings, errors + } + + return warnings, errors +} diff --git a/azurerm/internal/services/mssql/validate/database_auto_pause_delay_test.go b/azurerm/internal/services/mssql/validate/database_auto_pause_delay_test.go new file mode 100644 index 000000000000..65385a922e73 --- /dev/null +++ b/azurerm/internal/services/mssql/validate/database_auto_pause_delay_test.go @@ -0,0 +1,26 @@ +package validate + +import "testing" + +func 
TestDatabaseAutoPauseDelay(t *testing.T) { + testCases := []struct { + input string + shouldError bool + }{ + {"-1", false}, + {"-2", true}, + {"30", true}, + {"60", false}, + {"65", true}, + {"360", false}, + {"19900", true}, + } + + for _, test := range testCases { + _, es := DatabaseAutoPauseDelay(test.input, "name") + + if test.shouldError && len(es) == 0 { + t.Fatalf("Expected validating name %q to fail", test.input) + } + } +} diff --git a/azurerm/internal/services/mssql/validate/database_collation.go b/azurerm/internal/services/mssql/validate/database_collation.go new file mode 100644 index 000000000000..a7037ee78253 --- /dev/null +++ b/azurerm/internal/services/mssql/validate/database_collation.go @@ -0,0 +1,16 @@ +package validate + +import ( + "regexp" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" +) + +func DatabaseCollation() schema.SchemaValidateFunc { + return validation.StringMatch( + regexp.MustCompile(`(^[A-Z]+)([A-Za-z0-9]+_)+((BIN|BIN2|CI_AI|CI_AI_KS|CI_AI_KS_WS|CI_AI_WS|CI_AS|CI_AS_KS|CI_AS_KS_WS|CS_AI|CS_AI_KS|CS_AI_KS_WS|CS_AI_WS|CS_AS|CS_AS_KS|CS_AS_KS_WS|CS_AS_WS)+)((_[A-Za-z0-9]+)+$)*`), + + `This is not a valid collation.`, + ) +} diff --git a/azurerm/internal/services/mssql/validate/database_collation_test.go b/azurerm/internal/services/mssql/validate/database_collation_test.go new file mode 100644 index 000000000000..a139c195e350 --- /dev/null +++ b/azurerm/internal/services/mssql/validate/database_collation_test.go @@ -0,0 +1,74 @@ +package validate + +import ( + "testing" +) + +func TestDatabaseCollation(t *testing.T) { + tests := []struct { + name string + input string + valid bool + }{ + { + name: "SQL Collation", + input: "SQL_Latin1_General_CP1_CI_AS", + valid: true, + }, + { + name: "Windows Collation", + input: "Latin1_General_100_CI_AS_SC", + valid: true, + }, + { + name: "SQL Collation", + input: "SQL_AltDiction_CP850_CI_AI", + valid: true, + }, + { + name: "SQL Collation", + input: "SQL_Croatian_CP1250_CI_AS", + valid: true, + }, + { + name: "Windows Collation", + input: "Chinese_Hong_Kong_Stroke_90_CI_AI", + valid: true, + }, + { + name: "Windows Collation", + input: "Japanese_BIN", + valid: true, + }, + { + name: "lowercase", + input: "sql_croatian_cp1250_ci_as", + valid: false, + }, + { + name: "extra dot", + input: "SQL_Croatian_CP1250.", + valid: false, + }, + { + name: "Invalid collation", + input: "CDD", + valid: false, + }, + { + name: "Double definition", + input: "Latin1_General_100_CI_CS", + valid: false, + }, + } + validationFunction := DatabaseCollation() + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + _, err := validationFunction(tt.input, "") + valid := err == nil + if valid != tt.valid { + t.Errorf("Expected valid status %t but got %t for input %s", tt.valid, valid, tt.input) + } + }) + } +} diff --git a/azurerm/internal/services/mssql/validate/database_extended_auditing_policy_id.go b/azurerm/internal/services/mssql/validate/database_extended_auditing_policy_id.go new file mode 100644 index 000000000000..96ed77e9b7f4 --- /dev/null +++ b/azurerm/internal/services/mssql/validate/database_extended_auditing_policy_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" +) + +func DatabaseExtendedAuditingPolicyID(input interface{}, key string) 
(warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.DatabaseExtendedAuditingPolicyID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/mssql/validate/database_extended_auditing_policy_id_test.go b/azurerm/internal/services/mssql/validate/database_extended_auditing_policy_id_test.go new file mode 100644 index 000000000000..141b869cdee5 --- /dev/null +++ b/azurerm/internal/services/mssql/validate/database_extended_auditing_policy_id_test.go @@ -0,0 +1,100 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestDatabaseExtendedAuditingPolicyID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/", + Valid: false, + }, + + { + // missing value for ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/", + Valid: false, + }, + + { + // missing DatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/", + Valid: false, + }, + + { + // missing value for DatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/", + Valid: false, + }, + + { + // missing ExtendedAuditingSettingName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/database1/", + Valid: false, + }, + + { + // missing value for ExtendedAuditingSettingName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/database1/extendedAuditingSettings/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/database1/extendedAuditingSettings/default", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.SQL/SERVERS/SERVER1/DATABASES/DATABASE1/EXTENDEDAUDITINGSETTINGS/DEFAULT", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := DatabaseExtendedAuditingPolicyID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/mssql/validate/database_id.go b/azurerm/internal/services/mssql/validate/database_id.go new file mode 100644 index 000000000000..64a842544976 --- /dev/null +++ b/azurerm/internal/services/mssql/validate/database_id.go @@ -0,0 +1,23 @@ +package validate + 
+// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" +) + +func DatabaseID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.DatabaseID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/mssql/validate/database_id_test.go b/azurerm/internal/services/mssql/validate/database_id_test.go new file mode 100644 index 000000000000..562d6d4cc840 --- /dev/null +++ b/azurerm/internal/services/mssql/validate/database_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestDatabaseID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/", + Valid: false, + }, + + { + // missing value for ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/database1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.SQL/SERVERS/SERVER1/DATABASES/DATABASE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := DatabaseID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/mssql/validate/database_sku_name.go b/azurerm/internal/services/mssql/validate/database_sku_name.go new file mode 100644 index 000000000000..ea4d673859ed --- /dev/null +++ b/azurerm/internal/services/mssql/validate/database_sku_name.go @@ -0,0 +1,16 @@ +package validate + +import ( + "regexp" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" +) + +func DatabaseSkuName() schema.SchemaValidateFunc { + return validation.StringMatch( + 
regexp.MustCompile(`(?i)(^(GP_S_Gen5_(1|2|4|6|8|10|12|14|16|18|20|24|32|40))$|^((GP|HS|BC)_Gen4_(1|2|3|4|5|6|7|8|9|10|16|24))$|^((GP|HS|BC)_Gen5_(2|4|6|8|10|12|14|16|18|20|24|32|40|80))$|^(BC_M_(8|10|12|14|16|18|20|24|32|64|128))$|^(Basic)$|^(ElasticPool)$|^(S(0|1|2|3|4|6|7|9|12))$|^(P(1|2|4|6|11|15))$|^(DW(1|2|3|4|5|6|7|8|9)000*c)$|^(DS(1|2|3|4|5|6|10|12|15|20)00)$)`), + + `This is not a valid sku name. For example, a valid sku name is 'GP_S_Gen5_1','HS_Gen4_1','BC_Gen5_2', 'ElasticPool', 'Basic', 'S0', 'P1'.`, + ) +} diff --git a/azurerm/internal/services/mssql/validate/database_sku_name_test.go b/azurerm/internal/services/mssql/validate/database_sku_name_test.go new file mode 100644 index 000000000000..e7f1a3369963 --- /dev/null +++ b/azurerm/internal/services/mssql/validate/database_sku_name_test.go @@ -0,0 +1,122 @@ +package validate + +import "testing" + +func TestDatabaseSkuName(t *testing.T) { + tests := []struct { + name string + input string + valid bool + }{ + { + name: "DataWarehouse", + input: "DW100c", + valid: true, + }, + { + name: "DataWarehouse", + input: "DW102c", + valid: false, + }, + { + name: "Stretch", + input: "DS100", + valid: true, + }, + { + name: "Stretch", + input: "DS1001", + valid: false, + }, + { + name: "Valid GP", + input: "GP_Gen4_3", + valid: true, + }, + { + name: "Valid Serverless GP", + input: "GP_S_Gen5_2", + valid: true, + }, + { + name: "Valid HS", + input: "HS_Gen5_2", + valid: true, + }, + { + name: "Valid BC", + input: "BC_Gen4_5", + valid: true, + }, + { + name: "Valid BC", + input: "BC_M_12", + valid: true, + }, + { + name: "Valid BC", + input: "BC_Gen5_14", + valid: true, + }, + { + name: "Valid Standard", + input: "S3", + valid: true, + }, + { + name: "Valid Basic", + input: "Basic", + valid: true, + }, + { + name: "Valid Premium", + input: "P15", + valid: true, + }, + { + name: "empty", + input: "", + valid: false, + }, + { + name: "Extra dot", + input: "BC_Gen5_3.", + valid: false, + }, + { + name: "Wrong capacity", + input: "BC_Gen5_3", + valid: false, + }, + { + name: "Wrong Family", + input: "BC_Inv_2", + valid: false, + }, + { + name: "Wrong Serverless", + input: "GP_S_Gen4_2", + valid: false, + }, + { + name: "Wrong Serverless", + input: "BC_S_Gen5_2", + valid: false, + }, + { + name: "Lower case", + input: "bc_gen5_2", + valid: true, + }, + } + validationFunction := DatabaseSkuName() + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + _, err := validationFunction(tt.input, "") + valid := err == nil + if valid != tt.valid { + t.Errorf("Expected valid status %t but got %t for input %s", tt.valid, valid, tt.input) + } + }) + } +} diff --git a/azurerm/internal/services/mssql/validate/database_vulnerability_assessment_rule_baseline_id.go b/azurerm/internal/services/mssql/validate/database_vulnerability_assessment_rule_baseline_id.go new file mode 100644 index 000000000000..5250417542a2 --- /dev/null +++ b/azurerm/internal/services/mssql/validate/database_vulnerability_assessment_rule_baseline_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" +) + +func DatabaseVulnerabilityAssessmentRuleBaselineID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := 
parse.DatabaseVulnerabilityAssessmentRuleBaselineID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/mssql/validate/database_vulnerability_assessment_rule_baseline_id_test.go b/azurerm/internal/services/mssql/validate/database_vulnerability_assessment_rule_baseline_id_test.go new file mode 100644 index 000000000000..a39d7b47e109 --- /dev/null +++ b/azurerm/internal/services/mssql/validate/database_vulnerability_assessment_rule_baseline_id_test.go @@ -0,0 +1,124 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestDatabaseVulnerabilityAssessmentRuleBaselineID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/", + Valid: false, + }, + + { + // missing value for ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/", + Valid: false, + }, + + { + // missing DatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/", + Valid: false, + }, + + { + // missing value for DatabaseName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/", + Valid: false, + }, + + { + // missing VulnerabilityAssessmentName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/database1/", + Valid: false, + }, + + { + // missing value for VulnerabilityAssessmentName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/database1/vulnerabilityAssessments/", + Valid: false, + }, + + { + // missing RuleName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/database1/vulnerabilityAssessments/default/", + Valid: false, + }, + + { + // missing value for RuleName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/database1/vulnerabilityAssessments/default/rules/", + Valid: false, + }, + + { + // missing BaselineName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/database1/vulnerabilityAssessments/default/rules/rule1/", + Valid: false, + }, + + { + // missing value for BaselineName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/database1/vulnerabilityAssessments/default/rules/rule1/baselines/", + Valid: false, + }, + + { + // valid + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/databases/database1/vulnerabilityAssessments/default/rules/rule1/baselines/baseline1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.SQL/SERVERS/SERVER1/DATABASES/DATABASE1/VULNERABILITYASSESSMENTS/DEFAULT/RULES/RULE1/BASELINES/BASELINE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := DatabaseVulnerabilityAssessmentRuleBaselineID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/mssql/validate/elastic_pool_id.go b/azurerm/internal/services/mssql/validate/elastic_pool_id.go new file mode 100644 index 000000000000..0333e00f7b1f --- /dev/null +++ b/azurerm/internal/services/mssql/validate/elastic_pool_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" +) + +func ElasticPoolID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ElasticPoolID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/mssql/validate/elastic_pool_id_test.go b/azurerm/internal/services/mssql/validate/elastic_pool_id_test.go new file mode 100644 index 000000000000..e6985eac7858 --- /dev/null +++ b/azurerm/internal/services/mssql/validate/elastic_pool_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestElasticPoolID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/", + Valid: false, + }, + + { + // missing value for ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/elasticPools/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/elasticPools/pool1", + Valid: true, + }, + + { + // upper-cased + Input: 
"/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.SQL/SERVERS/SERVER1/ELASTICPOOLS/POOL1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ElasticPoolID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/mssql/validate/mssql_database.go b/azurerm/internal/services/mssql/validate/mssql_database.go deleted file mode 100644 index cd23a035a9c2..000000000000 --- a/azurerm/internal/services/mssql/validate/mssql_database.go +++ /dev/null @@ -1,84 +0,0 @@ -package validate - -import ( - "fmt" - "regexp" - - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" -) - -func MsSqlDatabaseID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return warnings, errors - } - - if _, err := parse.MsSqlDatabaseID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a MsSql Database resource id: %v", k, err)) - } - - return warnings, errors -} - -func MsSqlDatabaseAutoPauseDelay(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(int) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %s to be integer", k)) - return warnings, errors - } - min := 60 - max := 10080 - if (v < min || v > max) && v%10 != 0 && v != -1 { - errors = append(errors, fmt.Errorf("expected %s to be in the range (%d - %d) and divisible by 10 or -1, got %d", k, min, max, v)) - return warnings, errors - } - - return warnings, errors -} - -func MsSqlDBSkuName() schema.SchemaValidateFunc { - return validation.StringMatch( - regexp.MustCompile(`(?i)(^(GP_S_Gen5_(1|2|4|6|8|10|12|14|16|18|20|24|32|40))$|^((GP|HS|BC)_Gen4_(1|2|3|4|5|6|7|8|9|10|16|24))$|^((GP|HS|BC)_Gen5_(2|4|6|8|10|12|14|16|18|20|24|32|40|80))$|^(BC_M_(8|10|12|14|16|18|20|24|32|64|128))$|^(Basic)$|^(ElasticPool)$|^(S(0|1|2|3|4|6|7|9|12))$|^(P(1|2|4|6|11|15))$|^(DW(1|2|3|4|5|10|15|20)00c)$|^(DS(1|2|3|4|5|6|10|12|15|20)00)$)`), - - `This is not a valid sku name. 
For example, a valid sku name is 'GP_S_Gen5_1','HS_Gen4_1','BC_Gen5_2', 'ElasticPool', 'Basic', 'S0', 'P1'.`, - ) -} - -func MsSqlDBCollation() schema.SchemaValidateFunc { - return validation.StringMatch( - regexp.MustCompile(`(^[A-Z]+)([A-Za-z0-9]+_)+((BIN|BIN2|CI_AI|CI_AI_KS|CI_AI_KS_WS|CI_AI_WS|CI_AS|CI_AS_KS|CI_AS_KS_WS|CS_AI|CS_AI_KS|CS_AI_KS_WS|CS_AI_WS|CS_AS|CS_AS_KS|CS_AS_KS_WS|CS_AS_WS)+)((_[A-Za-z0-9]+)+$)*`), - - `This is not a valid collation.`, - ) -} - -func MsSqlRestorableDatabaseID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return warnings, errors - } - - if _, err := parse.MssqlRestorableDBID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a MsSql Restorable Database resource id: %v", k, err)) - } - - return warnings, errors -} - -func MsSqlRecoverableDatabaseID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return warnings, errors - } - - if _, err := parse.MssqlRecoverableDBID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a MsSql Recoverable Database resource id: %v", k, err)) - } - - return warnings, errors -} diff --git a/azurerm/internal/services/mssql/validate/mssql_database_test.go b/azurerm/internal/services/mssql/validate/mssql_database_test.go deleted file mode 100644 index 96d71079e235..000000000000 --- a/azurerm/internal/services/mssql/validate/mssql_database_test.go +++ /dev/null @@ -1,216 +0,0 @@ -package validate - -import ( - "testing" -) - -func TestMsSqlDatabaseAutoPauseDelay(t *testing.T) { - testCases := []struct { - input string - shouldError bool - }{ - {"-1", false}, - {"-2", true}, - {"30", true}, - {"60", false}, - {"65", true}, - {"360", false}, - {"19900", true}, - } - - for _, test := range testCases { - _, es := MsSqlDatabaseAutoPauseDelay(test.input, "name") - - if test.shouldError && len(es) == 0 { - t.Fatalf("Expected validating name %q to fail", test.input) - } - } -} - -func TestMsSqlDBSkuName(t *testing.T) { - tests := []struct { - name string - input string - valid bool - }{ - { - name: "DataWarehouse", - input: "DW100c", - valid: true, - }, - { - name: "DataWarehouse", - input: "DW102c", - valid: false, - }, - { - name: "Stretch", - input: "DS100", - valid: true, - }, - { - name: "Stretch", - input: "DS1001", - valid: false, - }, - { - name: "Valid GP", - input: "GP_Gen4_3", - valid: true, - }, - { - name: "Valid Serverless GP", - input: "GP_S_Gen5_2", - valid: true, - }, - { - name: "Valid HS", - input: "HS_Gen5_2", - valid: true, - }, - { - name: "Valid BC", - input: "BC_Gen4_5", - valid: true, - }, - { - name: "Valid BC", - input: "BC_M_12", - valid: true, - }, - { - name: "Valid BC", - input: "BC_Gen5_14", - valid: true, - }, - { - name: "Valid Standard", - input: "S3", - valid: true, - }, - { - name: "Valid Basic", - input: "Basic", - valid: true, - }, - { - name: "Valid Premium", - input: "P15", - valid: true, - }, - { - name: "empty", - input: "", - valid: false, - }, - { - name: "Extra dot", - input: "BC_Gen5_3.", - valid: false, - }, - { - name: "Wrong capacity", - input: "BC_Gen5_3", - valid: false, - }, - { - name: "Wrong Family", - input: "BC_Inv_2", - valid: false, - }, - { - name: "Wrong Serverless", - input: "GP_S_Gen4_2", - valid: false, - }, - { - name: "Wrong Serverless", - input: "BC_S_Gen5_2", - valid: 
false, - }, - { - name: "Lower case", - input: "bc_gen5_2", - valid: true, - }, - } - var validationFunction = MsSqlDBSkuName() - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - _, err := validationFunction(tt.input, "") - valid := err == nil - if valid != tt.valid { - t.Errorf("Expected valid status %t but got %t for input %s", tt.valid, valid, tt.input) - } - }) - } -} - -func TestMsSqlDBCollation(t *testing.T) { - tests := []struct { - name string - input string - valid bool - }{ - { - name: "SQL Collation", - input: "SQL_Latin1_General_CP1_CI_AS", - valid: true, - }, - { - name: "Windows Collation", - input: "Latin1_General_100_CI_AS_SC", - valid: true, - }, - { - name: "SQL Collation", - input: "SQL_AltDiction_CP850_CI_AI", - valid: true, - }, - { - name: "SQL Collation", - input: "SQL_Croatian_CP1250_CI_AS", - valid: true, - }, - { - name: "Windows Collation", - input: "Chinese_Hong_Kong_Stroke_90_CI_AI", - valid: true, - }, - { - name: "Windows Collation", - input: "Japanese_BIN", - valid: true, - }, - { - name: "lowercase", - input: "sql_croatian_cp1250_ci_as", - valid: false, - }, - { - name: "extra dot", - input: "SQL_Croatian_CP1250.", - valid: false, - }, - { - name: "Invalid collation", - input: "CDD", - valid: false, - }, - { - name: "Double definition", - input: "Latin1_General_100_CI_CS", - valid: false, - }, - } - var validationFunction = MsSqlDBCollation() - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - _, err := validationFunction(tt.input, "") - valid := err == nil - if valid != tt.valid { - t.Errorf("Expected valid status %t but got %t for input %s", tt.valid, valid, tt.input) - } - }) - } -} diff --git a/azurerm/internal/services/mssql/validate/mssql_elastic_pool.go b/azurerm/internal/services/mssql/validate/mssql_elastic_pool.go deleted file mode 100644 index 74033fc29af3..000000000000 --- a/azurerm/internal/services/mssql/validate/mssql_elastic_pool.go +++ /dev/null @@ -1,24 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" -) - -func MsSqlElasticPoolID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return warnings, errors - } - if v == "" { - return warnings, errors - } - - if _, err := parse.MSSqlElasticPoolID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a MsSql Elastic Pool resource id: %v", k, err)) - } - - return warnings, errors -} diff --git a/azurerm/internal/services/mssql/validate/mssql_server.go b/azurerm/internal/services/mssql/validate/mssql_server.go deleted file mode 100644 index 0ab924cf6acd..000000000000 --- a/azurerm/internal/services/mssql/validate/mssql_server.go +++ /dev/null @@ -1,21 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" -) - -func MsSqlServerID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return warnings, errors - } - - if _, err := parse.MsSqlServerID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a MsSql Server resource id: %v", k, err)) - } - - return warnings, errors -} diff --git a/azurerm/internal/services/mssql/validate/mssql_virtual_machine.go 
b/azurerm/internal/services/mssql/validate/mssql_virtual_machine.go deleted file mode 100644 index 6a58f772bda8..000000000000 --- a/azurerm/internal/services/mssql/validate/mssql_virtual_machine.go +++ /dev/null @@ -1,36 +0,0 @@ -package validate - -import ( - "fmt" - "regexp" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/parse" -) - -func VMID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.VirtualMachineID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a virtual machine id: %v", k, err)) - } - - return warnings, errors -} - -func MsSqlVMLoginUserName(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if !regexp.MustCompile(`^[^\\/"\[\]:|<>+=;,?* .]{2,128}$`).MatchString(v) { - errors = append(errors, fmt.Errorf("%v cannot contain special characters '\\/\"[]:|<>+=;,?* .'", k)) - } - - return warnings, errors -} diff --git a/azurerm/internal/services/mssql/validate/mssql_virtual_machine_test.go b/azurerm/internal/services/mssql/validate/mssql_virtual_machine_test.go deleted file mode 100644 index e0a90decdc08..000000000000 --- a/azurerm/internal/services/mssql/validate/mssql_virtual_machine_test.go +++ /dev/null @@ -1,26 +0,0 @@ -package validate - -import "testing" - -func TestMsSqlVMLoginUserName(t *testing.T) { - testCases := []struct { - input string - shouldError bool - }{ - {"dfasdlk", false}, - {"sdfs@ ", false}, - {"dfsjsiajfiweangfvnjaksdflaklsdjdjskfamlkcsdflamkldfklafamsdklfmlaksjfdkadklsfmklamdklsfakldsflamkslfmlkeamkldmfkamfmdkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk", true}, - {"60", false}, - {"7.d", true}, - {"u i", true}, - {"a", true}, - } - - for _, test := range testCases { - _, es := MsSqlVMLoginUserName(test.input, "name") - - if test.shouldError && len(es) == 0 { - t.Fatalf("Expected validating name %q to fail", test.input) - } - } -} diff --git a/azurerm/internal/services/mssql/validate/recoverable_database_id.go b/azurerm/internal/services/mssql/validate/recoverable_database_id.go new file mode 100644 index 000000000000..45679937200b --- /dev/null +++ b/azurerm/internal/services/mssql/validate/recoverable_database_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" +) + +func RecoverableDatabaseID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.RecoverableDatabaseID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/mssql/validate/recoverable_database_id_test.go b/azurerm/internal/services/mssql/validate/recoverable_database_id_test.go new file mode 100644 index 000000000000..b986c478c64f --- /dev/null +++ b/azurerm/internal/services/mssql/validate/recoverable_database_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestRecoverableDatabaseID(t *testing.T) { + cases := 
[]struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/", + Valid: false, + }, + + { + // missing value for ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/recoverabledatabases/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/recoverabledatabases/database1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.SQL/SERVERS/SERVER1/RECOVERABLEDATABASES/DATABASE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := RecoverableDatabaseID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/mssql/validate/restorable_dropped_database_id.go b/azurerm/internal/services/mssql/validate/restorable_dropped_database_id.go new file mode 100644 index 000000000000..a441d7504006 --- /dev/null +++ b/azurerm/internal/services/mssql/validate/restorable_dropped_database_id.go @@ -0,0 +1,21 @@ +package validate + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" +) + +func RestorableDatabaseID(i interface{}, k string) (warnings []string, errors []error) { + v, ok := i.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) + return warnings, errors + } + + if _, err := parse.RestorableDroppedDatabaseID(v); err != nil { + errors = append(errors, fmt.Errorf("Can not parse %q as a MsSql Restorable Database resource id: %v", k, err)) + } + + return warnings, errors +} diff --git a/azurerm/internal/services/mssql/validate/server_extended_auditing_policy_id.go b/azurerm/internal/services/mssql/validate/server_extended_auditing_policy_id.go new file mode 100644 index 000000000000..9f0719196cf4 --- /dev/null +++ b/azurerm/internal/services/mssql/validate/server_extended_auditing_policy_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" +) + +func ServerExtendedAuditingPolicyID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = 
append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ServerExtendedAuditingPolicyID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/mssql/validate/server_extended_auditing_policy_id_test.go b/azurerm/internal/services/mssql/validate/server_extended_auditing_policy_id_test.go new file mode 100644 index 000000000000..f6159080a2aa --- /dev/null +++ b/azurerm/internal/services/mssql/validate/server_extended_auditing_policy_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestServerExtendedAuditingPolicyID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/", + Valid: false, + }, + + { + // missing value for ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/", + Valid: false, + }, + + { + // missing ExtendedAuditingSettingName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/", + Valid: false, + }, + + { + // missing value for ExtendedAuditingSettingName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/extendedAuditingSettings/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/extendedAuditingSettings/default", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.SQL/SERVERS/SERVER1/EXTENDEDAUDITINGSETTINGS/DEFAULT", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ServerExtendedAuditingPolicyID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/mssql/validate/server_id.go b/azurerm/internal/services/mssql/validate/server_id.go new file mode 100644 index 000000000000..7c9454b5d3c0 --- /dev/null +++ b/azurerm/internal/services/mssql/validate/server_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" +) + +func ServerID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ServerID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git 
a/azurerm/internal/services/mssql/validate/server_id_test.go b/azurerm/internal/services/mssql/validate/server_id_test.go new file mode 100644 index 000000000000..dd2fa59d38f8 --- /dev/null +++ b/azurerm/internal/services/mssql/validate/server_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestServerID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.SQL/SERVERS/SERVER1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ServerID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/mssql/validate/server_security_alert_policy_id.go b/azurerm/internal/services/mssql/validate/server_security_alert_policy_id.go new file mode 100644 index 000000000000..e4d665f6b450 --- /dev/null +++ b/azurerm/internal/services/mssql/validate/server_security_alert_policy_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" +) + +func ServerSecurityAlertPolicyID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ServerSecurityAlertPolicyID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/mssql/validate/server_security_alert_policy_id_test.go b/azurerm/internal/services/mssql/validate/server_security_alert_policy_id_test.go new file mode 100644 index 000000000000..43c6ba3bdc4e --- /dev/null +++ b/azurerm/internal/services/mssql/validate/server_security_alert_policy_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestServerSecurityAlertPolicyID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: 
"/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/", + Valid: false, + }, + + { + // missing value for ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/", + Valid: false, + }, + + { + // missing SecurityAlertPolicyName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/", + Valid: false, + }, + + { + // missing value for SecurityAlertPolicyName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/securityAlertPolicies/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/securityAlertPolicies/Default", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.SQL/SERVERS/SERVER1/SECURITYALERTPOLICIES/DEFAULT", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ServerSecurityAlertPolicyID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/mssql/validate/server_vulnerability_assessment_id.go b/azurerm/internal/services/mssql/validate/server_vulnerability_assessment_id.go new file mode 100644 index 000000000000..941efdd47b10 --- /dev/null +++ b/azurerm/internal/services/mssql/validate/server_vulnerability_assessment_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" +) + +func ServerVulnerabilityAssessmentID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ServerVulnerabilityAssessmentID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/mssql/validate/server_vulnerability_assessment_id_test.go b/azurerm/internal/services/mssql/validate/server_vulnerability_assessment_id_test.go new file mode 100644 index 000000000000..c5ad32452200 --- /dev/null +++ b/azurerm/internal/services/mssql/validate/server_vulnerability_assessment_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestServerVulnerabilityAssessmentID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", 
+ Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/", + Valid: false, + }, + + { + // missing value for ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/", + Valid: false, + }, + + { + // missing VulnerabilityAssessmentName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/", + Valid: false, + }, + + { + // missing value for VulnerabilityAssessmentName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/vulnerabilityAssessments/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Sql/servers/server1/vulnerabilityAssessments/default", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.SQL/SERVERS/SERVER1/VULNERABILITYASSESSMENTS/DEFAULT", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ServerVulnerabilityAssessmentID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/mssql/validate/sql_virtual_machine_id.go b/azurerm/internal/services/mssql/validate/sql_virtual_machine_id.go new file mode 100644 index 000000000000..108ddf7bece5 --- /dev/null +++ b/azurerm/internal/services/mssql/validate/sql_virtual_machine_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mssql/parse" +) + +func SqlVirtualMachineID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.SqlVirtualMachineID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/mssql/validate/sql_virtual_machine_id_test.go b/azurerm/internal/services/mssql/validate/sql_virtual_machine_id_test.go new file mode 100644 index 000000000000..0ae95ffc362f --- /dev/null +++ b/azurerm/internal/services/mssql/validate/sql_virtual_machine_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestSqlVirtualMachineID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.SqlVirtualMachine/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/virtualMachine1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.SQLVIRTUALMACHINE/SQLVIRTUALMACHINES/VIRTUALMACHINE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := SqlVirtualMachineID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/mssql/validate/sql_virtual_machine_login_username.go b/azurerm/internal/services/mssql/validate/sql_virtual_machine_login_username.go new file mode 100644 index 000000000000..439673842146 --- /dev/null +++ b/azurerm/internal/services/mssql/validate/sql_virtual_machine_login_username.go @@ -0,0 +1,20 @@ +package validate + +import ( + "fmt" + "regexp" +) + +func SqlVirtualMachineLoginUserName(i interface{}, k string) (warnings []string, errors []error) { + v, ok := i.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) + return + } + + if !regexp.MustCompile(`^[^\\/"\[\]:|<>+=;,?* .]{2,128}$`).MatchString(v) { + errors = append(errors, fmt.Errorf("%v cannot contain special characters '\\/\"[]:|<>+=;,?* .'", k)) + } + + return warnings, errors +} diff --git a/azurerm/internal/services/mssql/validate/sql_virtual_machine_login_username_test.go b/azurerm/internal/services/mssql/validate/sql_virtual_machine_login_username_test.go new file mode 100644 index 000000000000..acdfe405e1cd --- /dev/null +++ b/azurerm/internal/services/mssql/validate/sql_virtual_machine_login_username_test.go @@ -0,0 +1,26 @@ +package validate + +import "testing" + +func TestSqlVirtualMachineLoginUserName(t *testing.T) { + testCases := []struct { + input string + shouldError bool + }{ + {"dfasdlk", false}, + {"sdfs@ ", false}, + {"dfsjsiajfiweangfvnjaksdflaklsdjdjskfamlkcsdflamkldfklafamsdklfmlaksjfdkadklsfmklamdklsfakldsflamkslfmlkeamkldmfkamfmdkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk", true}, + {"60", false}, + {"7.d", true}, + {"u i", true}, + {"a", true}, + } + + for _, test := range testCases { + _, es := SqlVirtualMachineLoginUserName(test.input, "name") + + if test.shouldError && len(es) == 0 { + t.Fatalf("Expected validating name %q to fail", test.input) + } + } +} diff --git a/azurerm/internal/services/mysql/mysql_aad_administrator_resource.go b/azurerm/internal/services/mysql/mysql_aad_administrator_resource.go index 0c096b4ac216..db476567c2a9 100644 --- a/azurerm/internal/services/mysql/mysql_aad_administrator_resource.go +++ b/azurerm/internal/services/mysql/mysql_aad_administrator_resource.go @@ -16,12 +16,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmMySQLAdministrator() *schema.Resource { +func resourceMySQLAdministrator() *schema.Resource { return &schema.Resource{ - Create: resourceArmMySQLAdministratorCreateUpdate, - Read: resourceArmMySQLAdministratorRead, - Update: 
resourceArmMySQLAdministratorCreateUpdate, - Delete: resourceArmMySQLAdministratorDelete, + Create: resourceMySQLAdministratorCreateUpdate, + Read: resourceMySQLAdministratorRead, + Update: resourceMySQLAdministratorCreateUpdate, + Delete: resourceMySQLAdministratorDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -62,7 +62,7 @@ func resourceArmMySQLAdministrator() *schema.Resource { } } -func resourceArmMySQLAdministratorCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceMySQLAdministratorCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MySQL.ServerAdministratorsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -114,7 +114,7 @@ func resourceArmMySQLAdministratorCreateUpdate(d *schema.ResourceData, meta inte return nil } -func resourceArmMySQLAdministratorRead(d *schema.ResourceData, meta interface{}) error { +func resourceMySQLAdministratorRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MySQL.ServerAdministratorsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -147,7 +147,7 @@ func resourceArmMySQLAdministratorRead(d *schema.ResourceData, meta interface{}) return nil } -func resourceArmMySQLAdministratorDelete(d *schema.ResourceData, meta interface{}) error { +func resourceMySQLAdministratorDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MySQL.ServerAdministratorsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/mysql/mysql_administrator_resource_test.go b/azurerm/internal/services/mysql/mysql_administrator_resource_test.go new file mode 100644 index 000000000000..45900077dfa2 --- /dev/null +++ b/azurerm/internal/services/mysql/mysql_administrator_resource_test.go @@ -0,0 +1,214 @@ +package mysql_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MySqlAdministratorResource struct { +} + +func TestAccMySqlAdministrator_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_active_directory_administrator", "test") + r := MySqlAdministratorResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("login").HasValue("sqladmin"), + ), + }, + data.ImportStep(), + { + Config: r.withUpdates(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("login").HasValue("sqladmin2"), + ), + }, + }) +} + +func TestAccMySqlAdministrator_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_active_directory_administrator", "test") + r := MySqlAdministratorResource{} + + data.ResourceTest(t, r, 
[]resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("login").HasValue("sqladmin"), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_mysql_active_directory_administrator"), + }, + }) +} + +func TestAccMySqlAdministrator_disappears(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_active_directory_administrator", "test") + r := MySqlAdministratorResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + testCheckAzureMySqlAdministratorDisappears(data.ResourceName), + ), + ExpectNonEmptyPlan: true, + }, + }) +} + +func (t MySqlAdministratorResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + resourceGroup := id.ResourceGroup + serverName := id.Path["servers"] + + resp, err := clients.MySQL.ServerAdministratorsClient.Get(ctx, resourceGroup, serverName) + if err != nil { + return nil, fmt.Errorf("reading MySQL Administrator (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func testCheckAzureMySqlAdministratorDisappears(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).MySQL.ServerAdministratorsClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + + resourceGroup := rs.Primary.Attributes["resource_group_name"] + serverName := rs.Primary.Attributes["server_name"] + + if _, err := client.Delete(ctx, resourceGroup, serverName); err != nil { + return fmt.Errorf("Bad: Delete on mysqlAdministratorClient: %+v", err) + } + + return nil + } +} + +func (MySqlAdministratorResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_mysql_server" "test" { + name = "acctestmysqlsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" 
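+  # editorial note on the test fixture: the azurerm_mysql_active_directory_administrator block further below reuses the running service principal, passing data.azurerm_client_config.current.client_id as its object_id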
+ version = "5.7" + ssl_enforcement_enabled = true +} + +resource "azurerm_mysql_active_directory_administrator" "test" { + server_name = azurerm_mysql_server.test.name + resource_group_name = azurerm_resource_group.test.name + login = "sqladmin" + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.client_id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r MySqlAdministratorResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_mysql_active_directory_administrator" "import" { + server_name = azurerm_mysql_active_directory_administrator.test.server_name + resource_group_name = azurerm_mysql_active_directory_administrator.test.resource_group_name + login = azurerm_mysql_active_directory_administrator.test.login + tenant_id = azurerm_mysql_active_directory_administrator.test.tenant_id + object_id = azurerm_mysql_active_directory_administrator.test.object_id +} +`, r.basic(data)) +} + +func (MySqlAdministratorResource) withUpdates(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_mysql_server" "test" { + name = "acctestmysqlsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + version = "5.7" + ssl_enforcement_enabled = true +} + +resource "azurerm_mysql_active_directory_administrator" "test" { + server_name = azurerm_mysql_server.test.name + resource_group_name = azurerm_resource_group.test.name + login = "sqladmin2" + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.client_id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/mysql/mysql_configuration_resource.go b/azurerm/internal/services/mysql/mysql_configuration_resource.go index 2c5f443da8be..e143db59ea8c 100644 --- a/azurerm/internal/services/mysql/mysql_configuration_resource.go +++ b/azurerm/internal/services/mysql/mysql_configuration_resource.go @@ -14,11 +14,11 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmMySQLConfiguration() *schema.Resource { +func resourceMySQLConfiguration() *schema.Resource { return &schema.Resource{ - Create: resourceArmMySQLConfigurationCreate, - Read: resourceArmMySQLConfigurationRead, - Delete: resourceArmMySQLConfigurationDelete, + Create: resourceMySQLConfigurationCreate, + Read: resourceMySQLConfigurationRead, + Delete: resourceMySQLConfigurationDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -44,7 +44,7 @@ func resourceArmMySQLConfiguration() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.MySQLServerName, + ValidateFunc: validate.ServerName, }, "value": { @@ -56,7 +56,7 @@ func resourceArmMySQLConfiguration() *schema.Resource { } } -func resourceArmMySQLConfigurationCreate(d *schema.ResourceData, meta interface{}) error { +func resourceMySQLConfigurationCreate(d *schema.ResourceData, meta interface{}) error { 
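 	// descriptive note: the create path below resolves the MySQL Configurations client, applies the requested configuration value within the create timeout, stores the returned resource ID in state, and then delegates to resourceMySQLConfigurationRead to populate the remaining attributes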
client := meta.(*clients.Client).MySQL.ConfigurationsClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -96,10 +96,10 @@ func resourceArmMySQLConfigurationCreate(d *schema.ResourceData, meta interface{ d.SetId(*read.ID) - return resourceArmMySQLConfigurationRead(d, meta) + return resourceMySQLConfigurationRead(d, meta) } -func resourceArmMySQLConfigurationRead(d *schema.ResourceData, meta interface{}) error { +func resourceMySQLConfigurationRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MySQL.ConfigurationsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -131,7 +131,7 @@ func resourceArmMySQLConfigurationRead(d *schema.ResourceData, meta interface{}) return nil } -func resourceArmMySQLConfigurationDelete(d *schema.ResourceData, meta interface{}) error { +func resourceMySQLConfigurationDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MySQL.ConfigurationsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/mysql/mysql_configuration_resource_test.go b/azurerm/internal/services/mysql/mysql_configuration_resource_test.go new file mode 100644 index 000000000000..13028a955a14 --- /dev/null +++ b/azurerm/internal/services/mysql/mysql_configuration_resource_test.go @@ -0,0 +1,218 @@ +package mysql_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MySQLConfigurationResource struct { +} + +func TestAccMySQLConfiguration_characterSetServer(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_configuration", "test") + r := MySQLConfigurationResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.characterSetServer(data), + Check: resource.ComposeTestCheckFunc( + testCheckMySQLConfigurationValue(data.ResourceName, "hebrew"), + ), + }, + data.ImportStep(), + { + Config: r.empty(data), + Check: resource.ComposeTestCheckFunc( + // "delete" resets back to the default value + testCheckMySQLConfigurationValueReset(data, "character_set_server"), + ), + }, + }) +} + +func TestAccMySQLConfiguration_interactiveTimeout(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_configuration", "test") + r := MySQLConfigurationResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.interactiveTimeout(data), + Check: resource.ComposeTestCheckFunc( + testCheckMySQLConfigurationValue(data.ResourceName, "30"), + ), + }, + data.ImportStep(), + { + Config: r.empty(data), + Check: resource.ComposeTestCheckFunc( + // "delete" resets back to the default value + testCheckMySQLConfigurationValueReset(data, "interactive_timeout"), + ), + }, + }) +} + +func TestAccMySQLConfiguration_logSlowAdminStatements(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_configuration", "test") + r := MySQLConfigurationResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.logSlowAdminStatements(data), + Check: 
resource.ComposeTestCheckFunc( + testCheckMySQLConfigurationValue(data.ResourceName, "on"), + ), + }, + data.ImportStep(), + { + Config: r.empty(data), + Check: resource.ComposeTestCheckFunc( + // "delete" resets back to the default value + testCheckMySQLConfigurationValueReset(data, "log_slow_admin_statements"), + ), + }, + }) +} + +func (t MySQLConfigurationResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + serverName := id.Path["servers"] + name := id.Path["configurations"] + + resp, err := clients.MySQL.ConfigurationsClient.Get(ctx, resourceGroup, serverName, name) + if err != nil { + return nil, fmt.Errorf("reading MySQL Configuration (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func testCheckMySQLConfigurationValue(resourceName string, value string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).MySQL.ConfigurationsClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + // Ensure we have enough information in state to look up in API + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + + name := rs.Primary.Attributes["name"] + serverName := rs.Primary.Attributes["server_name"] + resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] + if !hasResourceGroup { + return fmt.Errorf("Bad: no resource group found in state for MySQL Configuration: %s", name) + } + + resp, err := client.Get(ctx, resourceGroup, serverName, name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Bad: MySQL Configuration %q (server %q resource group: %q) does not exist", name, serverName, resourceGroup) + } + + return fmt.Errorf("Bad: Get on mysqlConfigurationsClient: %+v", err) + } + + if *resp.Value != value { + return fmt.Errorf("MySQL Configuration wasn't set. Expected '%s' - got '%s': \n%+v", value, *resp.Value, resp) + } + + return nil + } +} + +func testCheckMySQLConfigurationValueReset(data acceptance.TestData, configurationName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).MySQL.ConfigurationsClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + resourceGroup := fmt.Sprintf("acctestRG-%d", data.RandomInteger) + serverName := fmt.Sprintf("acctestmysqlsvr-%d", data.RandomInteger) + + resp, err := client.Get(ctx, resourceGroup, serverName, configurationName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Bad: MySQL Configuration %q (server %q resource group: %q) does not exist", configurationName, serverName, resourceGroup) + } + return fmt.Errorf("Bad: Get on mysqlConfigurationsClient: %+v", err) + } + + actualValue := *resp.Value + defaultValue := *resp.DefaultValue + + if defaultValue != actualValue { + return fmt.Errorf("MySQL Configuration wasn't set to the default value. 
Expected '%s' - got '%s': \n%+v", defaultValue, actualValue, resp) + } + + return nil + } +} + +func (r MySQLConfigurationResource) characterSetServer(data acceptance.TestData) string { + return r.template(data, "character_set_server", "hebrew") +} + +func (r MySQLConfigurationResource) interactiveTimeout(data acceptance.TestData) string { + return r.template(data, "interactive_timeout", "30") +} + +func (r MySQLConfigurationResource) logSlowAdminStatements(data acceptance.TestData) string { + return r.template(data, "log_slow_admin_statements", "on") +} + +func (r MySQLConfigurationResource) template(data acceptance.TestData, name string, value string) string { + config := fmt.Sprintf(` +resource "azurerm_mysql_configuration" "test" { + name = "%s" + resource_group_name = "${azurerm_resource_group.test.name}" + server_name = "${azurerm_mysql_server.test.name}" + value = "%s" +} +`, name, value) + return r.empty(data) + config +} + +func (MySQLConfigurationResource) empty(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_mysql_server" "test" { + name = "acctestmysqlsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + version = "5.7" + ssl_enforcement_enabled = true +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/mysql/mysql_database_resource.go b/azurerm/internal/services/mysql/mysql_database_resource.go index 6fb9148380c2..88dfa82ffc65 100644 --- a/azurerm/internal/services/mysql/mysql_database_resource.go +++ b/azurerm/internal/services/mysql/mysql_database_resource.go @@ -16,11 +16,11 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmMySqlDatabase() *schema.Resource { +func resourceMySqlDatabase() *schema.Resource { return &schema.Resource{ - Create: resourceArmMySqlDatabaseCreate, - Read: resourceArmMySqlDatabaseRead, - Delete: resourceArmMySqlDatabaseDelete, + Create: resourceMySqlDatabaseCreate, + Read: resourceMySqlDatabaseRead, + Delete: resourceMySqlDatabaseDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -46,7 +46,7 @@ func resourceArmMySqlDatabase() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.MySQLServerName, + ValidateFunc: validate.ServerName, }, "charset": { @@ -65,7 +65,7 @@ func resourceArmMySqlDatabase() *schema.Resource { } } -func resourceArmMySqlDatabaseCreate(d *schema.ResourceData, meta interface{}) error { +func resourceMySqlDatabaseCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MySQL.DatabasesClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -118,10 +118,10 @@ func resourceArmMySqlDatabaseCreate(d *schema.ResourceData, meta interface{}) er d.SetId(*read.ID) - return resourceArmMySqlDatabaseRead(d, meta) + return resourceMySqlDatabaseRead(d, meta) } -func resourceArmMySqlDatabaseRead(d *schema.ResourceData, meta interface{}) error { +func resourceMySqlDatabaseRead(d *schema.ResourceData, meta interface{}) error { client := 
meta.(*clients.Client).MySQL.DatabasesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -153,7 +153,7 @@ func resourceArmMySqlDatabaseRead(d *schema.ResourceData, meta interface{}) erro return nil } -func resourceArmMySqlDatabaseDelete(d *schema.ResourceData, meta interface{}) error { +func resourceMySqlDatabaseDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MySQL.DatabasesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/mysql/mysql_database_resource_test.go b/azurerm/internal/services/mysql/mysql_database_resource_test.go new file mode 100644 index 000000000000..fedb014d4505 --- /dev/null +++ b/azurerm/internal/services/mysql/mysql_database_resource_test.go @@ -0,0 +1,235 @@ +package mysql_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MySQLDatabaseResource struct { +} + +func TestAccMySQLDatabase_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_database", "test") + r := MySQLDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMySQLDatabase_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_database", "test") + r := MySQLDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_mysql_database"), + }, + }) +} + +func TestAccMySQLDatabase_charsetUppercase(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_database", "test") + r := MySQLDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.charsetUppercase(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("charset").HasValue("utf8"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMySQLDatabase_charsetMixedcase(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_database", "test") + r := MySQLDatabaseResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.charsetMixedcase(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("charset").HasValue("utf8"), + ), + }, + data.ImportStep(), + }) +} + +func (t MySQLDatabaseResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + resourceGroup := id.ResourceGroup + serverName := id.Path["servers"] + name := 
id.Path["databases"] + + resp, err := clients.MySQL.DatabasesClient.Get(ctx, resourceGroup, serverName, name) + if err != nil { + return nil, fmt.Errorf("reading MySQL Database (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (MySQLDatabaseResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_mysql_server" "test" { + name = "acctestpsqlsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + version = "5.6" + ssl_enforcement_enabled = true +} + +resource "azurerm_mysql_database" "test" { + name = "acctestdb_%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_mysql_server.test.name + charset = "utf8" + collation = "utf8_unicode_ci" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r MySQLDatabaseResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_mysql_database" "import" { + name = azurerm_mysql_database.test.name + resource_group_name = azurerm_mysql_database.test.resource_group_name + server_name = azurerm_mysql_database.test.server_name + charset = azurerm_mysql_database.test.charset + collation = azurerm_mysql_database.test.collation +} +`, r.basic(data)) +} + +func (MySQLDatabaseResource) charsetUppercase(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_mysql_server" "test" { + name = "acctestpsqlsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + version = "5.6" + ssl_enforcement_enabled = true +} + +resource "azurerm_mysql_database" "test" { + name = "acctestdb_%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_mysql_server.test.name + charset = "UTF8" + collation = "utf8_unicode_ci" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (MySQLDatabaseResource) charsetMixedcase(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_mysql_server" "test" { + name = "acctestpsqlsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" 
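+  # editorial note on the test fixture: the azurerm_mysql_database block below supplies a mixed-case charset ("Utf8"); the charsetMixedcase test asserts it is read back as "utf8"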
+ version = "5.6" + ssl_enforcement_enabled = true +} + +resource "azurerm_mysql_database" "test" { + name = "acctestdb_%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_mysql_server.test.name + charset = "Utf8" + collation = "utf8_unicode_ci" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/mysql/mysql_firewall_rule_resource.go b/azurerm/internal/services/mysql/mysql_firewall_rule_resource.go index b2e109c9a4e9..bf683adaacd8 100644 --- a/azurerm/internal/services/mysql/mysql_firewall_rule_resource.go +++ b/azurerm/internal/services/mysql/mysql_firewall_rule_resource.go @@ -16,12 +16,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmMySqlFirewallRule() *schema.Resource { +func resourceMySqlFirewallRule() *schema.Resource { return &schema.Resource{ - Create: resourceArmMySqlFirewallRuleCreateUpdate, - Read: resourceArmMySqlFirewallRuleRead, - Update: resourceArmMySqlFirewallRuleCreateUpdate, - Delete: resourceArmMySqlFirewallRuleDelete, + Create: resourceMySqlFirewallRuleCreateUpdate, + Read: resourceMySqlFirewallRuleRead, + Update: resourceMySqlFirewallRuleCreateUpdate, + Delete: resourceMySqlFirewallRuleDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -47,7 +47,7 @@ func resourceArmMySqlFirewallRule() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.MySQLServerName, + ValidateFunc: validate.ServerName, }, "start_ip_address": { @@ -65,7 +65,7 @@ func resourceArmMySqlFirewallRule() *schema.Resource { } } -func resourceArmMySqlFirewallRuleCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceMySqlFirewallRuleCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MySQL.FirewallRulesClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -117,10 +117,10 @@ func resourceArmMySqlFirewallRuleCreateUpdate(d *schema.ResourceData, meta inter d.SetId(*read.ID) - return resourceArmMySqlFirewallRuleRead(d, meta) + return resourceMySqlFirewallRuleRead(d, meta) } -func resourceArmMySqlFirewallRuleRead(d *schema.ResourceData, meta interface{}) error { +func resourceMySqlFirewallRuleRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MySQL.FirewallRulesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -151,7 +151,7 @@ func resourceArmMySqlFirewallRuleRead(d *schema.ResourceData, meta interface{}) return nil } -func resourceArmMySqlFirewallRuleDelete(d *schema.ResourceData, meta interface{}) error { +func resourceMySqlFirewallRuleDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MySQL.FirewallRulesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/mysql/mysql_firewall_rule_resource_test.go b/azurerm/internal/services/mysql/mysql_firewall_rule_resource_test.go new file mode 100644 index 000000000000..aec2f05e0a53 --- /dev/null +++ b/azurerm/internal/services/mysql/mysql_firewall_rule_resource_test.go @@ -0,0 +1,122 @@ +package mysql_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MySQLFirewallRuleResource struct { +} + +func TestAccMySQLFirewallRule_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_firewall_rule", "test") + r := MySQLFirewallRuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMySQLFirewallRule_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_firewall_rule", "test") + r := MySQLFirewallRuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_mysql_firewall_rule"), + }, + }) +} + +func (t MySQLFirewallRuleResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + serverName := id.Path["servers"] + name := id.Path["firewallRules"] + + resp, err := clients.MySQL.FirewallRulesClient.Get(ctx, resourceGroup, serverName, name) + if err != nil { + return nil, fmt.Errorf("reading MySQL Firewall Rule (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (MySQLFirewallRuleResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_mysql_server" "test" { + name = "acctestmysqlsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" 
+ version = "5.6" + ssl_enforcement_enabled = true +} + +resource "azurerm_mysql_firewall_rule" "test" { + name = "acctestfwrule-%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_mysql_server.test.name + start_ip_address = "0.0.0.0" + end_ip_address = "255.255.255.255" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r MySQLFirewallRuleResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_mysql_firewall_rule" "import" { + name = azurerm_mysql_firewall_rule.test.name + resource_group_name = azurerm_mysql_firewall_rule.test.resource_group_name + server_name = azurerm_mysql_firewall_rule.test.server_name + start_ip_address = azurerm_mysql_firewall_rule.test.start_ip_address + end_ip_address = azurerm_mysql_firewall_rule.test.end_ip_address +} +`, r.basic(data)) +} diff --git a/azurerm/internal/services/mysql/mysql_server_data_source.go b/azurerm/internal/services/mysql/mysql_server_data_source.go index ebe23dd9db78..41396db2fc3b 100644 --- a/azurerm/internal/services/mysql/mysql_server_data_source.go +++ b/azurerm/internal/services/mysql/mysql_server_data_source.go @@ -14,9 +14,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmMySqlServer() *schema.Resource { +func dataSourceMySqlServer() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmMySqlServerRead, + Read: dataSourceMySqlServerRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -26,7 +26,7 @@ func dataSourceArmMySqlServer() *schema.Resource { "name": { Type: schema.TypeString, Required: true, - ValidateFunc: validate.MySQLServerName, + ValidateFunc: validate.ServerName, }, "administrator_login": { @@ -178,7 +178,7 @@ func dataSourceArmMySqlServer() *schema.Resource { } } -func dataSourceArmMySqlServerRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceMySqlServerRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MySQL.ServersClient securityClient := meta.(*clients.Client).MySQL.ServerSecurityAlertPoliciesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) diff --git a/azurerm/internal/services/mysql/mysql_server_data_source_test.go b/azurerm/internal/services/mysql/mysql_server_data_source_test.go new file mode 100644 index 000000000000..5e7c17f074c1 --- /dev/null +++ b/azurerm/internal/services/mysql/mysql_server_data_source_test.go @@ -0,0 +1,178 @@ +package mysql_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type MySQLServerDataSource struct { +} + +func TestAccDataSourceMySQLServerDataSourceMySQLServer_basicFiveSix(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_mysql_server", "test") + r := MySQLServerDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data, "5.6"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("sku_name").HasValue("GP_Gen5_2"), + check.That(data.ResourceName).Key("administrator_login").HasValue("acctestun"), + check.That(data.ResourceName).Key("auto_grow_enabled").HasValue("false"), + 
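+				// descriptive note: the assertions in this step mirror the server configuration created by MySQLServerResource{}.basic, which backs the data source under test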
check.That(data.ResourceName).Key("ssl_minimal_tls_version_enforced").HasValue("TLS1_1"), + check.That(data.ResourceName).Key("storage_mb").HasValue("51200"), + check.That(data.ResourceName).Key("version").HasValue("5.6"), + ), + }, + }) +} + +func TestAccDataSourceMySQLServerDataSourceMySQLServer_basicFiveSixWithIdentity(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_mysql_server", "test") + r := MySQLServerDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basicWithIdentity(data, "5.6"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("sku_name").HasValue("GP_Gen5_2"), + check.That(data.ResourceName).Key("administrator_login").HasValue("acctestun"), + check.That(data.ResourceName).Key("auto_grow_enabled").HasValue("false"), + check.That(data.ResourceName).Key("ssl_minimal_tls_version_enforced").HasValue("TLS1_1"), + check.That(data.ResourceName).Key("storage_mb").HasValue("51200"), + check.That(data.ResourceName).Key("version").HasValue("5.6"), + check.That(data.ResourceName).Key("identity.0.type").HasValue("SystemAssigned"), + check.That(data.ResourceName).Key("identity.0.principal_id").Exists(), + check.That(data.ResourceName).Key("identity.0.tenant_id").Exists(), + ), + }, + }) +} + +func TestAccDataSourceMySQLServerDataSourceMySQLServer_basicFiveSeven(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_mysql_server", "test") + r := MySQLServerDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data, "5.7"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("sku_name").HasValue("GP_Gen5_2"), + check.That(data.ResourceName).Key("administrator_login").HasValue("acctestun"), + check.That(data.ResourceName).Key("auto_grow_enabled").HasValue("false"), + check.That(data.ResourceName).Key("ssl_minimal_tls_version_enforced").HasValue("TLS1_1"), + check.That(data.ResourceName).Key("storage_mb").HasValue("51200"), + check.That(data.ResourceName).Key("version").HasValue("5.7"), + ), + }, + }) +} + +func TestAccDataSourceMySQLServerDataSourceMySQLServer_basicEightZero(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_mysql_server", "test") + r := MySQLServerDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data, "8.0"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("sku_name").HasValue("GP_Gen5_2"), + check.That(data.ResourceName).Key("administrator_login").HasValue("acctestun"), + check.That(data.ResourceName).Key("auto_grow_enabled").HasValue("false"), + check.That(data.ResourceName).Key("ssl_minimal_tls_version_enforced").HasValue("TLS1_1"), + check.That(data.ResourceName).Key("storage_mb").HasValue("51200"), + check.That(data.ResourceName).Key("version").HasValue("8.0"), + ), + }, + }) +} + +func TestAccDataSourceMySQLServerDataSourceMySQLServer_autogrowOnly(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_mysql_server", "test") + r := MySQLServerDataSource{} + mysqlVersion := "5.7" + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.autogrow(data, mysqlVersion), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("sku_name").HasValue("GP_Gen5_2"), + check.That(data.ResourceName).Key("administrator_login").HasValue("acctestun"), + check.That(data.ResourceName).Key("auto_grow_enabled").HasValue("true"), + check.That(data.ResourceName).Key("storage_mb").HasValue("51200"), + 
check.That(data.ResourceName).Key("version").HasValue("5.7"), + ), + }, + }) +} + +func TestAccDataSourceMySQLServerDataSourceMySQLServer_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_mysql_server", "test") + r := MySQLServerDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.complete(data, "8.0"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("sku_name").HasValue("GP_Gen5_2"), + check.That(data.ResourceName).Key("administrator_login").HasValue("acctestun"), + check.That(data.ResourceName).Key("auto_grow_enabled").HasValue("true"), + check.That(data.ResourceName).Key("ssl_minimal_tls_version_enforced").HasValue("TLS1_2"), + check.That(data.ResourceName).Key("storage_mb").HasValue("51200"), + check.That(data.ResourceName).Key("version").HasValue("8.0"), + check.That(data.ResourceName).Key("threat_detection_policy.#").HasValue("1"), + check.That(data.ResourceName).Key("threat_detection_policy.0.enabled").HasValue("true"), + check.That(data.ResourceName).Key("threat_detection_policy.0.email_account_admins").HasValue("true"), + check.That(data.ResourceName).Key("threat_detection_policy.0.retention_days").HasValue("7"), + ), + }, + }) +} + +func (MySQLServerDataSource) basic(data acceptance.TestData, version string) string { + return fmt.Sprintf(` +%s + +data "azurerm_mysql_server" "test" { + name = azurerm_mysql_server.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, MySQLServerResource{}.basic(data, version)) +} + +func (MySQLServerDataSource) basicWithIdentity(data acceptance.TestData, version string) string { + return fmt.Sprintf(` +%s + +data "azurerm_mysql_server" "test" { + name = azurerm_mysql_server.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, MySQLServerResource{}.basicWithIdentity(data, version)) +} + +func (MySQLServerDataSource) autogrow(data acceptance.TestData, version string) string { + return fmt.Sprintf(` +%s + +data "azurerm_mysql_server" "test" { + name = azurerm_mysql_server.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, MySQLServerResource{}.autogrow(data, version)) +} + +func (MySQLServerDataSource) complete(data acceptance.TestData, version string) string { + return fmt.Sprintf(` +%s + +data "azurerm_mysql_server" "test" { + name = azurerm_mysql_server.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, MySQLServerResource{}.complete(data, version)) +} diff --git a/azurerm/internal/services/mysql/mysql_server_key_resource.go b/azurerm/internal/services/mysql/mysql_server_key_resource.go index 6b916847321d..5156e27de20f 100644 --- a/azurerm/internal/services/mysql/mysql_server_key_resource.go +++ b/azurerm/internal/services/mysql/mysql_server_key_resource.go @@ -21,15 +21,15 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmMySQLServerKey() *schema.Resource { +func resourceMySQLServerKey() *schema.Resource { return &schema.Resource{ - Create: resourceArmMySQLServerKeyCreateUpdate, - Read: resourceArmMySQLServerKeyRead, - Update: resourceArmMySQLServerKeyCreateUpdate, - Delete: resourceArmMySQLServerKeyDelete, + Create: resourceMySQLServerKeyCreateUpdate, + Read: resourceMySQLServerKeyRead, + Update: resourceMySQLServerKeyCreateUpdate, + Delete: resourceMySQLServerKeyDelete, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.MySQLServerKeyID(id) + _, err := parse.KeyID(id) return err }), @@ 
-45,7 +45,7 @@ func resourceArmMySQLServerKey() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.MySQLServerID, + ValidateFunc: validate.ServerID, }, "key_vault_key_id": { @@ -66,20 +66,20 @@ func getMySQLServerKeyName(ctx context.Context, vaultsClient *keyvault.VaultsCli if err != nil { return nil, err } - keyVaultID, err := keyVaultParse.KeyVaultID(*keyVaultIDRaw) + keyVaultID, err := keyVaultParse.VaultID(*keyVaultIDRaw) if err != nil { return nil, err } return utils.String(fmt.Sprintf("%s_%s_%s", keyVaultID.Name, keyVaultKeyID.Name, keyVaultKeyID.Version)), nil } -func resourceArmMySQLServerKeyCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceMySQLServerKeyCreateUpdate(d *schema.ResourceData, meta interface{}) error { keysClient := meta.(*clients.Client).MySQL.ServerKeysClient vaultsClient := meta.(*clients.Client).KeyVault.VaultsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - serverID, err := parse.MySQLServerID(d.Get("server_id").(string)) + serverID, err := parse.ServerID(d.Get("server_id").(string)) if err != nil { return err } @@ -134,16 +134,16 @@ func resourceArmMySQLServerKeyCreateUpdate(d *schema.ResourceData, meta interfac d.SetId(*resp.ID) - return resourceArmMySQLServerKeyRead(d, meta) + return resourceMySQLServerKeyRead(d, meta) } -func resourceArmMySQLServerKeyRead(d *schema.ResourceData, meta interface{}) error { +func resourceMySQLServerKeyRead(d *schema.ResourceData, meta interface{}) error { serversClient := meta.(*clients.Client).MySQL.ServersClient keysClient := meta.(*clients.Client).MySQL.ServerKeysClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.MySQLServerKeyID(d.Id()) + id, err := parse.KeyID(d.Id()) if err != nil { return err } @@ -172,12 +172,12 @@ func resourceArmMySQLServerKeyRead(d *schema.ResourceData, meta interface{}) err return nil } -func resourceArmMySQLServerKeyDelete(d *schema.ResourceData, meta interface{}) error { +func resourceMySQLServerKeyDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MySQL.ServerKeysClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.MySQLServerKeyID(d.Id()) + id, err := parse.KeyID(d.Id()) if err != nil { return err } diff --git a/azurerm/internal/services/mysql/mysql_server_key_resource_test.go b/azurerm/internal/services/mysql/mysql_server_key_resource_test.go new file mode 100644 index 000000000000..3ae13fbc2489 --- /dev/null +++ b/azurerm/internal/services/mysql/mysql_server_key_resource_test.go @@ -0,0 +1,200 @@ +package mysql_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mysql/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MySQLServerKeyResource struct { +} + +func TestAccMySQLServerKey_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_server_key", "test") + r := 
MySQLServerKeyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + }) +} + +func TestAccMySQLServerKey_updateKey(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_server_key", "test") + r := MySQLServerKeyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.updated(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccMySQLServerKey_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_server_key", "test") + r := MySQLServerKeyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (t MySQLServerKeyResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.KeyID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.MySQL.ServerKeysClient.Get(ctx, id.ResourceGroup, id.ServerName, id.Name) + if err != nil { + return nil, fmt.Errorf("reading MySQL Server Key (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (MySQLServerKeyResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features { + key_vault { + purge_soft_delete_on_destroy = false + } + } +} + +data "azurerm_client_config" "current" {} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "acctestkv%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "standard" + soft_delete_enabled = true + purge_protection_enabled = true +} + +resource "azurerm_key_vault_access_policy" "server" { + key_vault_id = azurerm_key_vault.test.id + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = azurerm_mysql_server.test.identity.0.principal_id + key_permissions = ["get", "unwrapkey", "wrapkey"] + secret_permissions = ["get"] +} + +resource "azurerm_key_vault_access_policy" "client" { + key_vault_id = azurerm_key_vault.test.id + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + key_permissions = ["get", "create", "delete", "list", "restore", "recover", "unwrapkey", "wrapkey", "purge", "encrypt", "decrypt", "sign", "verify"] + secret_permissions = ["get"] +} + +resource "azurerm_key_vault_key" "first" { + name = "first" + key_vault_id = azurerm_key_vault.test.id + key_type = "RSA" + key_size = 2048 + key_opts = ["decrypt", "encrypt", "sign", "unwrapKey", "verify", "wrapKey"] + depends_on = [ + azurerm_key_vault_access_policy.client, + azurerm_key_vault_access_policy.server, + ] +} + +resource "azurerm_mysql_server" "test" { + name = "acctestmysqlsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "GP_Gen5_2" + administrator_login = "acctestun" + administrator_login_password 
= "H@Sh1CoR3!" + ssl_enforcement_enabled = true + ssl_minimal_tls_version_enforced = "TLS1_1" + storage_mb = 51200 + version = "5.6" + + identity { + type = "SystemAssigned" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) +} + +func (r MySQLServerKeyResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_mysql_server_key" "test" { + server_id = azurerm_mysql_server.test.id + key_vault_key_id = azurerm_key_vault_key.first.id +} +`, r.template(data)) +} + +func (r MySQLServerKeyResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_mysql_server_key" "import" { + server_id = azurerm_mysql_server_key.test.server_id + key_vault_key_id = azurerm_mysql_server_key.test.key_vault_key_id +} +`, r.template(data)) +} + +func (r MySQLServerKeyResource) updated(data acceptance.TestData) string { + return fmt.Sprintf(` +%s +resource "azurerm_key_vault_key" "second" { + name = "second" + key_vault_id = azurerm_key_vault.test.id + key_type = "RSA" + key_size = 2048 + key_opts = ["decrypt", "encrypt", "sign", "unwrapKey", "verify", "wrapKey"] + depends_on = [ + azurerm_key_vault_access_policy.client, + azurerm_key_vault_access_policy.server, + ] +} +resource "azurerm_mysql_server_key" "test" { + server_id = azurerm_mysql_server.test.id + key_vault_key_id = azurerm_key_vault_key.second.id +} +`, r.template(data)) +} diff --git a/azurerm/internal/services/mysql/mysql_server_resource.go b/azurerm/internal/services/mysql/mysql_server_resource.go index 84342914b594..e51f8e27cb8b 100644 --- a/azurerm/internal/services/mysql/mysql_server_resource.go +++ b/azurerm/internal/services/mysql/mysql_server_resource.go @@ -27,16 +27,16 @@ const ( mySQLServerResourceName = "azurerm_mysql_server" ) -func resourceArmMySqlServer() *schema.Resource { +func resourceMySqlServer() *schema.Resource { return &schema.Resource{ - Create: resourceArmMySqlServerCreate, - Read: resourceArmMySqlServerRead, - Update: resourceArmMySqlServerUpdate, - Delete: resourceArmMySqlServerDelete, + Create: resourceMySqlServerCreate, + Read: resourceMySqlServerRead, + Update: resourceMySqlServerUpdate, + Delete: resourceMySqlServerDelete, Importer: &schema.ResourceImporter{ State: func(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { - if _, err := parse.MySQLServerID(d.Id()); err != nil { + if _, err := parse.ServerID(d.Id()); err != nil { return []*schema.ResourceData{d}, err } @@ -61,7 +61,7 @@ func resourceArmMySqlServer() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.MySQLServerName, + ValidateFunc: validate.ServerName, }, "administrator_login": { @@ -107,7 +107,7 @@ func resourceArmMySqlServer() *schema.Resource { "creation_source_server_id": { Type: schema.TypeString, Optional: true, - ValidateFunc: validate.MySQLServerID, + ValidateFunc: validate.ServerID, }, "fqdn": { @@ -392,7 +392,7 @@ func resourceArmMySqlServer() *schema.Resource { } } -func resourceArmMySqlServerCreate(d *schema.ResourceData, meta interface{}) error { +func resourceMySqlServerCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MySQL.ServersClient securityClient := meta.(*clients.Client).MySQL.ServerSecurityAlertPoliciesClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) @@ -562,10 +562,10 @@ func resourceArmMySqlServerCreate(d *schema.ResourceData, meta interface{}) erro } } - return 
resourceArmMySqlServerRead(d, meta) + return resourceMySqlServerRead(d, meta) } -func resourceArmMySqlServerUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceMySqlServerUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MySQL.ServersClient securityClient := meta.(*clients.Client).MySQL.ServerSecurityAlertPoliciesClient ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) @@ -575,7 +575,7 @@ func resourceArmMySqlServerUpdate(d *schema.ResourceData, meta interface{}) erro log.Printf("[INFO] preparing arguments for AzureRM MySQL Server update.") - id, err := parse.MySQLServerID(d.Id()) + id, err := parse.ServerID(d.Id()) if err != nil { return fmt.Errorf("parsing MySQL Server ID : %v", err) } @@ -645,16 +645,16 @@ func resourceArmMySqlServerUpdate(d *schema.ResourceData, meta interface{}) erro } } - return resourceArmMySqlServerRead(d, meta) + return resourceMySqlServerRead(d, meta) } -func resourceArmMySqlServerRead(d *schema.ResourceData, meta interface{}) error { +func resourceMySqlServerRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MySQL.ServersClient securityClient := meta.(*clients.Client).MySQL.ServerSecurityAlertPoliciesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.MySQLServerID(d.Id()) + id, err := parse.ServerID(d.Id()) if err != nil { return fmt.Errorf("parsing MySQL Server ID : %v", err) } @@ -729,12 +729,12 @@ func resourceArmMySqlServerRead(d *schema.ResourceData, meta interface{}) error return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmMySqlServerDelete(d *schema.ResourceData, meta interface{}) error { +func resourceMySqlServerDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MySQL.ServersClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.MySQLServerID(d.Id()) + id, err := parse.ServerID(d.Id()) if err != nil { return fmt.Errorf("parsing MySQL Server ID : %v", err) } diff --git a/azurerm/internal/services/mysql/mysql_server_resource_test.go b/azurerm/internal/services/mysql/mysql_server_resource_test.go new file mode 100644 index 000000000000..1ad3afdb0916 --- /dev/null +++ b/azurerm/internal/services/mysql/mysql_server_resource_test.go @@ -0,0 +1,715 @@ +package mysql_test + +import ( + "context" + "fmt" + "testing" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mysql/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MySQLServerResource struct { +} + +func TestAccMySQLServer_basicFiveSix(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") + r := MySQLServerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "5.6"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + }) +} + +func 
TestAccMySQLServer_basicFiveSixWithIdentity(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") + r := MySQLServerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicWithIdentity(data, "5.6"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + }) +} + +func TestAccMySQLServer_basicFiveSixWithIdentityUpdate(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") + r := MySQLServerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "5.6"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + { + Config: r.basicWithIdentity(data, "5.6"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + }) +} + +func TestAccMySQLServer_basicFiveSixDeprecated(t *testing.T) { // remove in v3.0 + data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") + r := MySQLServerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicDeprecated(data, "5.6"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + }) +} + +func TestAccMySQLServer_basicFiveSeven(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") + r := MySQLServerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "5.7"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + }) +} + +func TestAccMySQLServer_basicEightZero(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") + r := MySQLServerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "8.0"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + }) +} + +func TestAccMySQLServer_autogrowOnly(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") + r := MySQLServerResource{} + mysqlVersion := "5.7" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.autogrow(data, mysqlVersion), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + { + Config: r.basic(data, mysqlVersion), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + }) +} + +func TestAccMySQLServer_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") + r := MySQLServerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "5.7"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func 
TestAccMySQLServer_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") + r := MySQLServerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data, "8.0"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + }) +} + +func TestAccMySQLServer_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") + r := MySQLServerResource{} + mysqlVersion := "8.0" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, mysqlVersion), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + { + Config: r.complete(data, mysqlVersion), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + { + Config: r.complete2(data, mysqlVersion), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password", "threat_detection_policy.0.storage_account_access_key"), + { + Config: r.complete3(data, mysqlVersion), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password", "threat_detection_policy.0.storage_account_access_key"), + { + Config: r.basic(data, mysqlVersion), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + }) +} + +func TestAccMySQLServer_completeDeprecatedMigrate(t *testing.T) { // remove in v3.0 + data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") + r := MySQLServerResource{} + mysqlVersion := "5.6" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.completeDeprecated(data, mysqlVersion), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + { + Config: r.complete(data, mysqlVersion), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + }) +} + +func TestAccMySQLServer_updateDeprecated(t *testing.T) { // remove in v3.0 + data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") + r := MySQLServerResource{} + mysqlVersion := "5.6" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicDeprecated(data, mysqlVersion), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + { + Config: r.completeDeprecated(data, mysqlVersion), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + { + Config: r.basicDeprecated(data, mysqlVersion), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + }) +} + +func 
TestAccMySQLServer_updateSKU(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") + r := MySQLServerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.sku(data, "GP_Gen5_2"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + { + Config: r.sku(data, "MO_Gen5_16"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), // not returned as sensitive + }) +} + +func TestAccMySQLServer_createReplica(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") + r := MySQLServerResource{} + mysqlVersion := "8.0" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, mysqlVersion), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + { + Config: r.createReplica(data, mysqlVersion), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + }) +} + +func TestAccMySQLServer_createPointInTimeRestore(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") + r := MySQLServerResource{} + restoreTime := time.Now().Add(11 * time.Minute) + mysqlVersion := "8.0" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, mysqlVersion), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + { + PreConfig: func() { time.Sleep(restoreTime.Sub(time.Now().Add(-7 * time.Minute))) }, + Config: r.createPointInTimeRestore(data, mysqlVersion, restoreTime.Format(time.RFC3339)), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + }) +} + +func (t MySQLServerResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ServerID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.MySQL.ServersClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("reading MySQL Server (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (MySQLServerResource) basic(data acceptance.TestData, version string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_mysql_server" "test" { + name = "acctestmysqlsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "GP_Gen5_2" + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" 
+ ssl_enforcement_enabled = true + ssl_minimal_tls_version_enforced = "TLS1_1" + storage_mb = 51200 + version = "%s" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, version) +} + +func (MySQLServerResource) basicDeprecated(data acceptance.TestData, version string) string { // remove in v3.0 + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_mysql_server" "test" { + name = "acctestmysqlsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + } + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + version = "%s" + ssl_enforcement_enabled = true + ssl_minimal_tls_version_enforced = "TLS1_1" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, version) +} + +func (MySQLServerResource) basicWithIdentity(data acceptance.TestData, version string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_mysql_server" "test" { + name = "acctestmysqlsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "GP_Gen5_2" + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + ssl_enforcement_enabled = true + ssl_minimal_tls_version_enforced = "TLS1_1" + storage_mb = 51200 + version = "%s" + + identity { + type = "SystemAssigned" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, version) +} + +func (MySQLServerResource) complete(data acceptance.TestData, version string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%[1]d" + location = "%[2]s" +} + +resource "azurerm_mysql_server" "test" { + name = "acctestmysqlsvr-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "GP_Gen5_2" + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" 
+ auto_grow_enabled = true + backup_retention_days = 7 + create_mode = "Default" + geo_redundant_backup_enabled = false + ssl_enforcement_enabled = true + ssl_minimal_tls_version_enforced = "TLS1_2" + storage_mb = 51200 + version = "%[3]s" + threat_detection_policy { + enabled = true + disabled_alerts = ["Sql_Injection", "Data_Exfiltration"] + email_account_admins = true + email_addresses = ["pearcec@example.com", "admin@example.com"] + retention_days = 7 + } +} +`, data.RandomInteger, data.Locations.Primary, version) +} + +func (MySQLServerResource) complete2(data acceptance.TestData, version string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-mysql-%[1]d" + location = "%[2]s" +} + +resource "azurerm_storage_account" "test" { + name = "accsa%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "GRS" +} + +resource "azurerm_mysql_server" "test" { + name = "acctestmysqlsvr-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "GP_Gen5_2" + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!updated" + auto_grow_enabled = true + backup_retention_days = 7 + create_mode = "Default" + geo_redundant_backup_enabled = false + ssl_enforcement_enabled = false + storage_mb = 51200 + version = "%[3]s" + threat_detection_policy { + enabled = true + disabled_alerts = ["Sql_Injection"] + email_account_admins = true + email_addresses = ["pearcec@example.com"] + retention_days = 7 + storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint + storage_account_access_key = azurerm_storage_account.test.primary_access_key + } +} +`, data.RandomInteger, data.Locations.Primary, version) +} + +func (MySQLServerResource) complete3(data acceptance.TestData, version string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-mysql-%[1]d" + location = "%[2]s" +} + +resource "azurerm_storage_account" "test" { + name = "accsa%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "GRS" +} + +resource "azurerm_mysql_server" "test" { + name = "acctestmysqlsvr-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "GP_Gen5_2" + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!updated" + auto_grow_enabled = true + backup_retention_days = 7 + create_mode = "Default" + geo_redundant_backup_enabled = false + ssl_enforcement_enabled = false + storage_mb = 51200 + version = "%[3]s" + threat_detection_policy { + enabled = true + email_account_admins = true + retention_days = 7 + storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint + storage_account_access_key = azurerm_storage_account.test.primary_access_key + } +} +`, data.RandomInteger, data.Locations.Primary, version) +} + +func (MySQLServerResource) completeDeprecated(data acceptance.TestData, version string) string { // remove in v3.0 + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_mysql_server" "test" { + name = 
"acctestmysqlsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + auto_grow = "Enabled" + } + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + version = "%s" + ssl_enforcement_enabled = true + ssl_minimal_tls_version_enforced = "TLS1_2" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, version) +} + +func (r MySQLServerResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_mysql_server" "import" { + name = azurerm_mysql_server.test.name + location = azurerm_mysql_server.test.location + resource_group_name = azurerm_mysql_server.test.resource_group_name + sku_name = "GP_Gen5_2" + version = "5.7" + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + backup_retention_days = 7 + geo_redundant_backup_enabled = false + ssl_enforcement_enabled = true + storage_mb = 51200 +} +`, r.basic(data, "5.7")) +} + +func (MySQLServerResource) sku(data acceptance.TestData, sku string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_mysql_server" "test" { + name = "acctestmysqlsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "%s" + version = "5.7" + + storage_mb = 4194304 + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + ssl_enforcement_enabled = true +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, sku) +} + +func (MySQLServerResource) autogrow(data acceptance.TestData, version string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_mysql_server" "test" { + name = "acctestmysqlsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "GP_Gen5_2" + version = "%s" + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" 
+ auto_grow_enabled = true + backup_retention_days = 7 + geo_redundant_backup_enabled = false + ssl_enforcement_enabled = true + storage_mb = 51200 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, version) +} + +func (r MySQLServerResource) createReplica(data acceptance.TestData, version string) string { + return fmt.Sprintf(` +%s + +resource "azurerm_mysql_server" "replica" { + name = "acctestmysqlsvr-%d-replica" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "GP_Gen5_2" + version = "%s" + storage_mb = 51200 + + create_mode = "Replica" + creation_source_server_id = azurerm_mysql_server.test.id + ssl_enforcement_enabled = true + ssl_minimal_tls_version_enforced = "TLS1_1" +} +`, r.basic(data, version), data.RandomInteger, version) +} + +func (r MySQLServerResource) createPointInTimeRestore(data acceptance.TestData, version, restoreTime string) string { + return fmt.Sprintf(` +%s + +resource "azurerm_mysql_server" "restore" { + name = "acctestmysqlsvr-%d-restore" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku_name = "GP_Gen5_2" + version = "%s" + + create_mode = "PointInTimeRestore" + creation_source_server_id = azurerm_mysql_server.test.id + restore_point_in_time = "%s" + ssl_enforcement_enabled = true + ssl_minimal_tls_version_enforced = "TLS1_1" + storage_mb = 51200 +} +`, r.basic(data, version), data.RandomInteger, version, restoreTime) +} diff --git a/azurerm/internal/services/mysql/mysql_virtual_network_rule_resource.go b/azurerm/internal/services/mysql/mysql_virtual_network_rule_resource.go index 274145e2491c..fbd0b57a09f2 100644 --- a/azurerm/internal/services/mysql/mysql_virtual_network_rule_resource.go +++ b/azurerm/internal/services/mysql/mysql_virtual_network_rule_resource.go @@ -19,12 +19,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmMySQLVirtualNetworkRule() *schema.Resource { +func resourceMySQLVirtualNetworkRule() *schema.Resource { return &schema.Resource{ - Create: resourceArmMySQLVirtualNetworkRuleCreateUpdate, - Read: resourceArmMySQLVirtualNetworkRuleRead, - Update: resourceArmMySQLVirtualNetworkRuleCreateUpdate, - Delete: resourceArmMySQLVirtualNetworkRuleDelete, + Create: resourceMySQLVirtualNetworkRuleCreateUpdate, + Read: resourceMySQLVirtualNetworkRuleRead, + Update: resourceMySQLVirtualNetworkRuleCreateUpdate, + Delete: resourceMySQLVirtualNetworkRuleDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, @@ -50,7 +50,7 @@ func resourceArmMySQLVirtualNetworkRule() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.MySQLServerName, + ValidateFunc: validate.ServerName, }, "subnet_id": { @@ -62,7 +62,7 @@ func resourceArmMySQLVirtualNetworkRule() *schema.Resource { } } -func resourceArmMySQLVirtualNetworkRuleCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceMySQLVirtualNetworkRuleCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MySQL.VirtualNetworkRulesClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -122,10 +122,10 @@ func resourceArmMySQLVirtualNetworkRuleCreateUpdate(d *schema.ResourceData, meta d.SetId(*resp.ID) - return resourceArmMySQLVirtualNetworkRuleRead(d, meta) + return resourceMySQLVirtualNetworkRuleRead(d, meta) } -func 
resourceArmMySQLVirtualNetworkRuleRead(d *schema.ResourceData, meta interface{}) error { +func resourceMySQLVirtualNetworkRuleRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MySQL.VirtualNetworkRulesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -161,7 +161,7 @@ func resourceArmMySQLVirtualNetworkRuleRead(d *schema.ResourceData, meta interfa return nil } -func resourceArmMySQLVirtualNetworkRuleDelete(d *schema.ResourceData, meta interface{}) error { +func resourceMySQLVirtualNetworkRuleDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MySQL.VirtualNetworkRulesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() @@ -196,7 +196,6 @@ func resourceArmMySQLVirtualNetworkRuleDelete(d *schema.ResourceData, meta inter func mySQLVirtualNetworkStateStatusCodeRefreshFunc(ctx context.Context, client *mysql.VirtualNetworkRulesClient, resourceGroup string, serverName string, name string) resource.StateRefreshFunc { return func() (interface{}, string, error) { resp, err := client.Get(ctx, resourceGroup, serverName, name) - if err != nil { if utils.ResponseWasNotFound(resp.Response) { log.Printf("[DEBUG] Retrieving MySQL Virtual Network Rule %q (MySQL Server: %q, Resource Group: %q) returned 404.", resourceGroup, serverName, name) diff --git a/azurerm/internal/services/mysql/mysql_virtual_network_rule_resource_test.go b/azurerm/internal/services/mysql/mysql_virtual_network_rule_resource_test.go new file mode 100644 index 000000000000..845c0b7a4ea5 --- /dev/null +++ b/azurerm/internal/services/mysql/mysql_virtual_network_rule_resource_test.go @@ -0,0 +1,511 @@ +package mysql_test + +import ( + "context" + "fmt" + "regexp" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + + "github.com/hashicorp/go-azure-helpers/response" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type MySQLVirtualNetworkRuleResource struct { +} + +func TestAccMySQLVirtualNetworkRule_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_virtual_network_rule", "test") + r := MySQLVirtualNetworkRuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + }) +} + +func TestAccMySQLVirtualNetworkRule_badsubnet(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_virtual_network_rule", "test") + r := MySQLVirtualNetworkRuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.badsubnet(data), + /* + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ),*/ + }, + }) +} + +func TestAccMySQLVirtualNetworkRule_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_virtual_network_rule", "test") + r := MySQLVirtualNetworkRuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + 
check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_mysql_virtual_network_rule"), + }, + }) +} + +func TestAccMySQLVirtualNetworkRule_switchSubnets(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_virtual_network_rule", "test") + r := MySQLVirtualNetworkRuleResource{} + + // Create regex strings that will ensure that one subnet name exists, but not the other + preConfigRegex := regexp.MustCompile(fmt.Sprintf("(subnet1%d)$|(subnet[^2]%d)$", data.RandomInteger, data.RandomInteger)) // subnet 1 but not 2 + postConfigRegex := regexp.MustCompile(fmt.Sprintf("(subnet2%d)$|(subnet[^1]%d)$", data.RandomInteger, data.RandomInteger)) // subnet 2 but not 1 + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.subnetSwitchPre(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + resource.TestMatchResourceAttr(data.ResourceName, "subnet_id", preConfigRegex), + ), + }, + { + Config: r.subnetSwitchPost(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + resource.TestMatchResourceAttr(data.ResourceName, "subnet_id", postConfigRegex), + ), + }, + }) +} + +func TestAccMySQLVirtualNetworkRule_disappears(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_virtual_network_rule", "test") + r := MySQLVirtualNetworkRuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + testCheckMySQLVirtualNetworkRuleDisappears(data.ResourceName), + ), + ExpectNonEmptyPlan: true, + }, + }) +} + +func TestAccMySQLVirtualNetworkRule_multipleSubnets(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_mysql_virtual_network_rule", "rule1") + r := MySQLVirtualNetworkRuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.multipleSubnets(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + }) +} + +func (t MySQLVirtualNetworkRuleResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + serverName := id.Path["servers"] + name := id.Path["virtualNetworkRules"] + + resp, err := clients.MySQL.VirtualNetworkRulesClient.Get(ctx, resourceGroup, serverName, name) + if err != nil { + return nil, fmt.Errorf("reading MySQL Virtual Network Rule (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func testCheckMySQLVirtualNetworkRuleDisappears(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).MySQL.VirtualNetworkRulesClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + // Ensure we have enough information in state to look up in API + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + + resourceGroup := rs.Primary.Attributes["resource_group_name"] + serverName := rs.Primary.Attributes["server_name"] + ruleName := rs.Primary.Attributes["name"] + + future, err := client.Delete(ctx, resourceGroup, serverName, ruleName) + if err != nil { + // If the error is that the resource we want to delete does not exist in 
the first + // place (404), then just return with no error. + if response.WasNotFound(future.Response()) { + return nil + } + + return fmt.Errorf("Error deleting MySql Virtual Network Rule: %+v", err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + // Same deal as before. Just in case. + if response.WasNotFound(future.Response()) { + return nil + } + + return fmt.Errorf("Error deleting MySql Virtual Network Rule: %+v", err) + } + + return nil + } +} + +func (MySQLVirtualNetworkRuleResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestvnet%d" + address_space = ["10.7.29.0/29"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctestsubnet%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.7.29.0/29" + service_endpoints = ["Microsoft.Sql"] +} + +resource "azurerm_mysql_server" "test" { + name = "acctestmysqlsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + version = "5.6" + ssl_enforcement_enabled = true + + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } +} + +resource "azurerm_mysql_virtual_network_rule" "test" { + name = "acctestmysqlvnetrule%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_mysql_server.test.name + subnet_id = azurerm_subnet.test.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (MySQLVirtualNetworkRuleResource) badsubnet(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestvnet%d" + address_space = ["10.7.29.0/29"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctestsubnet%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.7.29.0/29" + service_endpoints = ["Microsoft.Sql"] +} + +resource "azurerm_mysql_server" "test" { + name = "acctestmysqlsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" 
+ version = "5.6" + ssl_enforcement_enabled = true + + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } +} + +resource "azurerm_mysql_virtual_network_rule" "test" { + name = "acctestmysqlvnetrule%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_mysql_server.test.name + subnet_id = azurerm_subnet.test.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r MySQLVirtualNetworkRuleResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_mysql_virtual_network_rule" "import" { + name = azurerm_mysql_virtual_network_rule.test.name + resource_group_name = azurerm_mysql_virtual_network_rule.test.resource_group_name + server_name = azurerm_mysql_virtual_network_rule.test.server_name + subnet_id = azurerm_mysql_virtual_network_rule.test.subnet_id +} +`, r.basic(data)) +} + +func (MySQLVirtualNetworkRuleResource) subnetSwitchPre(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestvnet%d" + address_space = ["10.7.29.0/24"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test1" { + name = "subnet1%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.7.29.0/25" + service_endpoints = ["Microsoft.Sql"] +} + +resource "azurerm_subnet" "test2" { + name = "subnet2%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.7.29.128/25" + service_endpoints = ["Microsoft.Sql"] +} + +resource "azurerm_mysql_server" "test" { + name = "acctestmysqlsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" 
+ version = "5.6" + ssl_enforcement_enabled = true + + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } +} + +resource "azurerm_mysql_virtual_network_rule" "test" { + name = "acctestmysqlvnetrule%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_mysql_server.test.name + subnet_id = azurerm_subnet.test1.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (MySQLVirtualNetworkRuleResource) subnetSwitchPost(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestvnet%d" + address_space = ["10.7.29.0/24"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test1" { + name = "subnet1%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.7.29.0/25" + service_endpoints = ["Microsoft.Sql"] +} + +resource "azurerm_subnet" "test2" { + name = "subnet2%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.7.29.128/25" + service_endpoints = ["Microsoft.Sql"] +} + +resource "azurerm_mysql_server" "test" { + name = "acctestmysqlsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" 
+ version = "5.6" + ssl_enforcement_enabled = true + + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } +} + +resource "azurerm_mysql_virtual_network_rule" "test" { + name = "acctestmysqlvnetrule%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_mysql_server.test.name + subnet_id = azurerm_subnet.test2.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (MySQLVirtualNetworkRuleResource) multipleSubnets(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "vnet1" { + name = "acctestvnet1%d" + address_space = ["10.7.29.0/24"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_virtual_network" "vnet2" { + name = "acctestvnet2%d" + address_space = ["10.1.29.0/29"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "vnet1_subnet1" { + name = "acctestsubnet1%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.vnet1.name + address_prefix = "10.7.29.0/29" + service_endpoints = ["Microsoft.Sql"] +} + +resource "azurerm_subnet" "vnet1_subnet2" { + name = "acctestsubnet2%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.vnet1.name + address_prefix = "10.7.29.128/29" + service_endpoints = ["Microsoft.Sql"] +} + +resource "azurerm_subnet" "vnet2_subnet1" { + name = "acctestsubnet3%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.vnet2.name + address_prefix = "10.1.29.0/29" + service_endpoints = ["Microsoft.Sql"] +} + +resource "azurerm_mysql_server" "test" { + name = "acctestmysqlsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" 
+ version = "5.6" + ssl_enforcement_enabled = true + + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } +} + +resource "azurerm_mysql_virtual_network_rule" "rule1" { + name = "acctestmysqlvnetrule1%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_mysql_server.test.name + subnet_id = azurerm_subnet.vnet1_subnet1.id +} + +resource "azurerm_mysql_virtual_network_rule" "rule2" { + name = "acctestmysqlvnetrule2%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_mysql_server.test.name + subnet_id = azurerm_subnet.vnet1_subnet2.id +} + +resource "azurerm_mysql_virtual_network_rule" "rule3" { + name = "acctestmysqlvnetrule3%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_mysql_server.test.name + subnet_id = azurerm_subnet.vnet2_subnet1.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/mysql/parse/key.go b/azurerm/internal/services/mysql/parse/key.go new file mode 100644 index 000000000000..71658b7e54ed --- /dev/null +++ b/azurerm/internal/services/mysql/parse/key.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type KeyId struct { + SubscriptionId string + ResourceGroup string + ServerName string + Name string +} + +func NewKeyID(subscriptionId, resourceGroup, serverName, name string) KeyId { + return KeyId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ServerName: serverName, + Name: name, + } +} + +func (id KeyId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Server Name %q", id.ServerName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Key", segmentsStr) +} + +func (id KeyId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DBforMySQL/servers/%s/keys/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ServerName, id.Name) +} + +// KeyID parses a Key ID into an KeyId struct +func KeyID(input string) (*KeyId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := KeyId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ServerName, err = id.PopSegment("servers"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("keys"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/mysql/parse/key_test.go b/azurerm/internal/services/mysql/parse/key_test.go new file mode 100644 index 000000000000..417ccd30ec91 --- /dev/null +++ b/azurerm/internal/services/mysql/parse/key_test.go @@ -0,0 +1,128 @@ 
+package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = KeyId{} + +func TestKeyIDFormatter(t *testing.T) { + actual := NewKeyID("12345678-1234-9876-4563-123456789012", "resGroup1", "server1", "key1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforMySQL/servers/server1/keys/key1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestKeyID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *KeyId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforMySQL/", + Error: true, + }, + + { + // missing value for ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforMySQL/servers/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforMySQL/servers/server1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforMySQL/servers/server1/keys/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforMySQL/servers/server1/keys/key1", + Expected: &KeyId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + ServerName: "server1", + Name: "key1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DBFORMYSQL/SERVERS/SERVER1/KEYS/KEY1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := KeyID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ServerName != v.Expected.ServerName { + t.Fatalf("Expected %q but got %q for ServerName", v.Expected.ServerName, actual.ServerName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/mysql/parse/mysql_server.go b/azurerm/internal/services/mysql/parse/mysql_server.go deleted file mode 100644 index d1d14f12a25e..000000000000 --- 
a/azurerm/internal/services/mysql/parse/mysql_server.go +++ /dev/null @@ -1,33 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type MySQLServerId struct { - ResourceGroup string - Name string -} - -func MySQLServerID(input string) (*MySQLServerId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse MySQL Server ID %q: %+v", input, err) - } - - server := MySQLServerId{ - ResourceGroup: id.ResourceGroup, - } - - if server.Name, err = id.PopSegment("servers"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &server, nil -} diff --git a/azurerm/internal/services/mysql/parse/mysql_server_key.go b/azurerm/internal/services/mysql/parse/mysql_server_key.go deleted file mode 100644 index 70f76b7ece8f..000000000000 --- a/azurerm/internal/services/mysql/parse/mysql_server_key.go +++ /dev/null @@ -1,37 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type MySQLServerKeyId struct { - ResourceGroup string - ServerName string - Name string -} - -func MySQLServerKeyID(input string) (*MySQLServerKeyId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("unable to parse MySQL Server Key ID %q: %+v", input, err) - } - - key := MySQLServerKeyId{ - ResourceGroup: id.ResourceGroup, - } - - if key.ServerName, err = id.PopSegment("servers"); err != nil { - return nil, err - } - if key.Name, err = id.PopSegment("keys"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &key, nil -} diff --git a/azurerm/internal/services/mysql/parse/mysql_server_key_test.go b/azurerm/internal/services/mysql/parse/mysql_server_key_test.go deleted file mode 100644 index 976fb53f3ad0..000000000000 --- a/azurerm/internal/services/mysql/parse/mysql_server_key_test.go +++ /dev/null @@ -1,81 +0,0 @@ -package parse - -import "testing" - -func TestMySQLServerKeyID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *MySQLServerKeyId - }{ - { - Name: "Empty resource ID", - Input: "", - Expected: nil, - }, - { - Name: "No resourceGroups segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No resource group name", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource group", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/test-rg/", - Expected: nil, - }, - { - Name: "Missing server name", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/test-rg/providers/Microsoft.DBforMySQL/servers/", - Expected: nil, - }, - { - Name: "MySQL Server ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/test-rg/providers/Microsoft.DBforMySQL/servers/test-mysql/", - Expected: nil, - }, - { - Name: "Missing key name", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/test-rg/providers/Microsoft.DBforMySQL/servers/test-mysql/keys/", - Expected: nil, - }, - { - Name: "Valid", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/test-rg/providers/Microsoft.DBforMySQL/servers/test-mysql/keys/key1", - Expected: &MySQLServerKeyId{ - 
Name: "key1", - ResourceGroup: "test-rg", - ServerName: "test-mysql", - }, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := MySQLServerKeyID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ServerName != v.Expected.ServerName { - t.Fatalf("Expected %q but got %q for ServerName", v.Expected.ServerName, actual.ServerName) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/mysql/parse/mysql_server_test.go b/azurerm/internal/services/mysql/parse/mysql_server_test.go deleted file mode 100644 index c01099bf2ba4..000000000000 --- a/azurerm/internal/services/mysql/parse/mysql_server_test.go +++ /dev/null @@ -1,68 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestMySQLServerID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *MySQLServerId - }{ - { - Name: "Empty resource ID", - Input: "", - Expected: nil, - }, - { - Name: "No resourceGroups segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No resource group name", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource group", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/test-rg/", - Expected: nil, - }, - { - Name: "Missing server name", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/test-rg/providers/Microsoft.DBforMySQL/servers/", - Expected: nil, - }, - { - Name: "Valid", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/test-rg/providers/Microsoft.DBforMySQL/servers/test-mysql", - Expected: &MySQLServerId{ - Name: "test-mysql", - ResourceGroup: "test-rg", - }, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := MySQLServerID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/mysql/parse/server.go b/azurerm/internal/services/mysql/parse/server.go new file mode 100644 index 000000000000..2a00b8f49dc5 --- /dev/null +++ b/azurerm/internal/services/mysql/parse/server.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ServerId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewServerID(subscriptionId, resourceGroup, name string) ServerId { + return ServerId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id ServerId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", 
id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Server", segmentsStr) +} + +func (id ServerId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DBforMySQL/servers/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// ServerID parses a Server ID into an ServerId struct +func ServerID(input string) (*ServerId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ServerId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("servers"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/mysql/parse/server_test.go b/azurerm/internal/services/mysql/parse/server_test.go new file mode 100644 index 000000000000..a786eab6f7a3 --- /dev/null +++ b/azurerm/internal/services/mysql/parse/server_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ServerId{} + +func TestServerIDFormatter(t *testing.T) { + actual := NewServerID("12345678-1234-9876-4563-123456789012", "resGroup1", "server1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforMySQL/servers/server1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestServerID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ServerId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforMySQL/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforMySQL/servers/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforMySQL/servers/server1", + Expected: &ServerId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "server1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DBFORMYSQL/SERVERS/SERVER1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ServerID(v.Input) + if err != nil { + if v.Error { + continue + } + + 
t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/mysql/registration.go b/azurerm/internal/services/mysql/registration.go index 5ca0b516589a..f31b8985285a 100644 --- a/azurerm/internal/services/mysql/registration.go +++ b/azurerm/internal/services/mysql/registration.go @@ -21,18 +21,19 @@ func (r Registration) WebsiteCategories() []string { // SupportedDataSources returns the supported Data Sources supported by this Service func (r Registration) SupportedDataSources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_mysql_server": dataSourceArmMySqlServer(), + "azurerm_mysql_server": dataSourceMySqlServer(), } } // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_mysql_configuration": resourceArmMySQLConfiguration(), - "azurerm_mysql_database": resourceArmMySqlDatabase(), - "azurerm_mysql_firewall_rule": resourceArmMySqlFirewallRule(), - "azurerm_mysql_server": resourceArmMySqlServer(), - "azurerm_mysql_server_key": resourceArmMySQLServerKey(), - "azurerm_mysql_virtual_network_rule": resourceArmMySQLVirtualNetworkRule(), - "azurerm_mysql_active_directory_administrator": resourceArmMySQLAdministrator()} + "azurerm_mysql_configuration": resourceMySQLConfiguration(), + "azurerm_mysql_database": resourceMySqlDatabase(), + "azurerm_mysql_firewall_rule": resourceMySqlFirewallRule(), + "azurerm_mysql_server": resourceMySqlServer(), + "azurerm_mysql_server_key": resourceMySQLServerKey(), + "azurerm_mysql_virtual_network_rule": resourceMySQLVirtualNetworkRule(), + "azurerm_mysql_active_directory_administrator": resourceMySQLAdministrator(), + } } diff --git a/azurerm/internal/services/mysql/resourceids.go b/azurerm/internal/services/mysql/resourceids.go new file mode 100644 index 000000000000..27c9818a7558 --- /dev/null +++ b/azurerm/internal/services/mysql/resourceids.go @@ -0,0 +1,4 @@ +package mysql + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Key -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforMySQL/servers/server1/keys/key1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Server -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforMySQL/servers/server1 diff --git a/azurerm/internal/services/mysql/tests/mysql_administrator_resource_test.go b/azurerm/internal/services/mysql/tests/mysql_administrator_resource_test.go deleted file mode 100644 index d3926296f962..000000000000 --- a/azurerm/internal/services/mysql/tests/mysql_administrator_resource_test.go +++ /dev/null @@ -1,247 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureMySqlAdministrator_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_active_directory_administrator", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureMySqlAdministratorDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureMySqlAdministrator_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureMySqlAdministratorExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "login", "sqladmin"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureMySqlAdministrator_withUpdates(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureMySqlAdministratorExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "login", "sqladmin2"), - ), - }, - }, - }) -} -func TestAccAzureMySqlAdministrator_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_active_directory_administrator", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureMySqlAdministratorDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureMySqlAdministrator_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureMySqlAdministratorExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "login", "sqladmin"), - ), - }, - { - Config: testAccAzureMySqlAdministrator_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_mysql_active_directory_administrator"), - }, - }, - }) -} - -func TestAccAzureMySqlAdministrator_disappears(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_active_directory_administrator", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureMySqlAdministratorDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureMySqlAdministrator_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureMySqlAdministratorExists(data.ResourceName), - testCheckAzureMySqlAdministratorDisappears(data.ResourceName), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func testCheckAzureMySqlAdministratorExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MySQL.ServerAdministratorsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serverName := rs.Primary.Attributes["server_name"] - - _, err := client.Get(ctx, resourceGroup, serverName) - return err - } -} - -func testCheckAzureMySqlAdministratorDisappears(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MySQL.ServerAdministratorsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext 
- - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serverName := rs.Primary.Attributes["server_name"] - - if _, err := client.Delete(ctx, resourceGroup, serverName); err != nil { - return fmt.Errorf("Bad: Delete on mysqlAdministratorClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureMySqlAdministratorDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MySQL.ServerAdministratorsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_mysql_active_directory_administrator" { - continue - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serverName := rs.Primary.Attributes["server_name"] - - resp, err := client.Get(ctx, resourceGroup, serverName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - - return fmt.Errorf("MySQL AD Administrator (server %q / resource group %q) still exists: %+v", serverName, resourceGroup, resp) - } - - return nil -} - -func testAccAzureMySqlAdministrator_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_mysql_server" "test" { - name = "acctestmysqlsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- version = "5.7" - ssl_enforcement_enabled = true -} - -resource "azurerm_mysql_active_directory_administrator" "test" { - server_name = azurerm_mysql_server.test.name - resource_group_name = azurerm_resource_group.test.name - login = "sqladmin" - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.client_id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureMySqlAdministrator_requiresImport(data acceptance.TestData) string { - return fmt.Sprintf(` -%s - -resource "azurerm_mysql_active_directory_administrator" "import" { - server_name = azurerm_mysql_active_directory_administrator.test.server_name - resource_group_name = azurerm_mysql_active_directory_administrator.test.resource_group_name - login = azurerm_mysql_active_directory_administrator.test.login - tenant_id = azurerm_mysql_active_directory_administrator.test.tenant_id - object_id = azurerm_mysql_active_directory_administrator.test.object_id -} -`, testAccAzureMySqlAdministrator_basic(data)) -} - -func testAccAzureMySqlAdministrator_withUpdates(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_mysql_server" "test" { - name = "acctestmysqlsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" - version = "5.7" - ssl_enforcement_enabled = true -} - -resource "azurerm_mysql_active_directory_administrator" "test" { - server_name = azurerm_mysql_server.test.name - resource_group_name = azurerm_resource_group.test.name - login = "sqladmin2" - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.client_id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/mysql/tests/mysql_configuration_resource_test.go b/azurerm/internal/services/mysql/tests/mysql_configuration_resource_test.go deleted file mode 100644 index 53a55165999f..000000000000 --- a/azurerm/internal/services/mysql/tests/mysql_configuration_resource_test.go +++ /dev/null @@ -1,236 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMMySQLConfiguration_characterSetServer(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_configuration", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLConfigurationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLConfiguration_characterSetServer(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLConfigurationValue(data.ResourceName, "hebrew"), - ), - }, - data.ImportStep(), - { 
- Config: testAccAzureRMMySQLConfiguration_empty(data), - Check: resource.ComposeTestCheckFunc( - // "delete" resets back to the default value - testCheckAzureRMMySQLConfigurationValueReset(data, "character_set_server"), - ), - }, - }, - }) -} - -func TestAccAzureRMMySQLConfiguration_interactiveTimeout(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_configuration", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLConfigurationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLConfiguration_interactiveTimeout(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLConfigurationValue(data.ResourceName, "30"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMMySQLConfiguration_empty(data), - Check: resource.ComposeTestCheckFunc( - // "delete" resets back to the default value - testCheckAzureRMMySQLConfigurationValueReset(data, "interactive_timeout"), - ), - }, - }, - }) -} - -func TestAccAzureRMMySQLConfiguration_logSlowAdminStatements(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_configuration", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLConfigurationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLConfiguration_logSlowAdminStatements(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLConfigurationValue(data.ResourceName, "on"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMMySQLConfiguration_empty(data), - Check: resource.ComposeTestCheckFunc( - // "delete" resets back to the default value - testCheckAzureRMMySQLConfigurationValueReset(data, "log_slow_admin_statements"), - ), - }, - }, - }) -} - -func testCheckAzureRMMySQLConfigurationValue(resourceName string, value string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MySQL.ConfigurationsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - serverName := rs.Primary.Attributes["server_name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for MySQL Configuration: %s", name) - } - - resp, err := client.Get(ctx, resourceGroup, serverName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: MySQL Configuration %q (server %q resource group: %q) does not exist", name, serverName, resourceGroup) - } - - return fmt.Errorf("Bad: Get on mysqlConfigurationsClient: %+v", err) - } - - if *resp.Value != value { - return fmt.Errorf("MySQL Configuration wasn't set. 
Expected '%s' - got '%s': \n%+v", value, *resp.Value, resp) - } - - return nil - } -} - -func testCheckAzureRMMySQLConfigurationValueReset(data acceptance.TestData, configurationName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MySQL.ConfigurationsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - resourceGroup := fmt.Sprintf("acctestRG-%d", data.RandomInteger) - serverName := fmt.Sprintf("acctestmysqlsvr-%d", data.RandomInteger) - - resp, err := client.Get(ctx, resourceGroup, serverName, configurationName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: MySQL Configuration %q (server %q resource group: %q) does not exist", configurationName, serverName, resourceGroup) - } - return fmt.Errorf("Bad: Get on mysqlConfigurationsClient: %+v", err) - } - - actualValue := *resp.Value - defaultValue := *resp.DefaultValue - - if defaultValue != actualValue { - return fmt.Errorf("MySQL Configuration wasn't set to the default value. Expected '%s' - got '%s': \n%+v", defaultValue, actualValue, resp) - } - - return nil - } -} - -func testCheckAzureRMMySQLConfigurationDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MySQL.ConfigurationsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_mysql_configuration" { - continue - } - - name := rs.Primary.Attributes["name"] - serverName := rs.Primary.Attributes["server_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, serverName, name) - - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - } - - return nil -} - -func testAccAzureRMMySQLConfiguration_characterSetServer(data acceptance.TestData) string { - return testAccAzureRMMySQLConfiguration_template(data, "character_set_server", "hebrew") -} - -func testAccAzureRMMySQLConfiguration_interactiveTimeout(data acceptance.TestData) string { - return testAccAzureRMMySQLConfiguration_template(data, "interactive_timeout", "30") -} - -func testAccAzureRMMySQLConfiguration_logSlowAdminStatements(data acceptance.TestData) string { - return testAccAzureRMMySQLConfiguration_template(data, "log_slow_admin_statements", "on") -} - -func testAccAzureRMMySQLConfiguration_template(data acceptance.TestData, name string, value string) string { - server := testAccAzureRMMySQLConfiguration_empty(data) - config := fmt.Sprintf(` -resource "azurerm_mysql_configuration" "test" { - name = "%s" - resource_group_name = "${azurerm_resource_group.test.name}" - server_name = "${azurerm_mysql_server.test.name}" - value = "%s" -} -`, name, value) - return server + config -} - -func testAccAzureRMMySQLConfiguration_empty(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_mysql_server" "test" { - name = "acctestmysqlsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- version = "5.7" - ssl_enforcement_enabled = true -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/mysql/tests/mysql_database_resource_test.go b/azurerm/internal/services/mysql/tests/mysql_database_resource_test.go deleted file mode 100644 index 8a9676324475..000000000000 --- a/azurerm/internal/services/mysql/tests/mysql_database_resource_test.go +++ /dev/null @@ -1,283 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMMySQLDatabase_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLDatabase_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLDatabaseExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMySQLDatabase_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLDatabase_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLDatabaseExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMMySQLDatabase_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_mysql_database"), - }, - }, - }) -} - -func TestAccAzureRMMySQLDatabase_charsetUppercase(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLDatabase_charsetUppercase(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "charset", "utf8"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMySQLDatabase_charsetMixedcase(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_database", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLDatabase_charsetMixedcase(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "charset", "utf8"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMMySQLDatabaseExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := 
acceptance.AzureProvider.Meta().(*clients.Client).MySQL.DatabasesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - serverName := rs.Primary.Attributes["server_name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for MySQL Database: %s", name) - } - - resp, err := client.Get(ctx, resourceGroup, serverName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: MySQL Database %q (server %q resource group: %q) does not exist", name, serverName, resourceGroup) - } - return fmt.Errorf("Bad: Get on mysqlDatabasesClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMMySQLDatabaseDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MySQL.DatabasesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_mysql_database" { - continue - } - - name := rs.Primary.Attributes["name"] - serverName := rs.Primary.Attributes["server_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, serverName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return fmt.Errorf("MySQL Database still exists:\n%#v", resp) - } - } - - return nil -} - -func testAccAzureRMMySQLDatabase_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_mysql_server" "test" { - name = "acctestpsqlsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- version = "5.6" - ssl_enforcement_enabled = true -} - -resource "azurerm_mysql_database" "test" { - name = "acctestdb_%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_mysql_server.test.name - charset = "utf8" - collation = "utf8_unicode_ci" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMMySQLDatabase_requiresImport(data acceptance.TestData) string { - return fmt.Sprintf(` -%s - -resource "azurerm_mysql_database" "import" { - name = azurerm_mysql_database.test.name - resource_group_name = azurerm_mysql_database.test.resource_group_name - server_name = azurerm_mysql_database.test.server_name - charset = azurerm_mysql_database.test.charset - collation = azurerm_mysql_database.test.collation -} -`, testAccAzureRMMySQLDatabase_basic(data)) -} - -func testAccAzureRMMySQLDatabase_charsetUppercase(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_mysql_server" "test" { - name = "acctestpsqlsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" - version = "5.6" - ssl_enforcement_enabled = true -} - -resource "azurerm_mysql_database" "test" { - name = "acctestdb_%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_mysql_server.test.name - charset = "UTF8" - collation = "utf8_unicode_ci" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMMySQLDatabase_charsetMixedcase(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_mysql_server" "test" { - name = "acctestpsqlsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- version = "5.6" - ssl_enforcement_enabled = true -} - -resource "azurerm_mysql_database" "test" { - name = "acctestdb_%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_mysql_server.test.name - charset = "Utf8" - collation = "utf8_unicode_ci" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/mysql/tests/mysql_firewall_rule_resource_test.go b/azurerm/internal/services/mysql/tests/mysql_firewall_rule_resource_test.go deleted file mode 100644 index facf173ae737..000000000000 --- a/azurerm/internal/services/mysql/tests/mysql_firewall_rule_resource_test.go +++ /dev/null @@ -1,164 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMMySQLFirewallRule_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_firewall_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLFirewallRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLFirewallRule_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLFirewallRuleExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMySQLFirewallRule_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_firewall_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLFirewallRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLFirewallRule_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLFirewallRuleExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMMySQLFirewallRule_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_mysql_firewall_rule"), - }, - }, - }) -} - -func testCheckAzureRMMySQLFirewallRuleExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MySQL.FirewallRulesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - serverName := rs.Primary.Attributes["server_name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for MySQL Firewall Rule: %s", name) - } - - resp, err := client.Get(ctx, resourceGroup, serverName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: MySQL Firewall Rule %q (server %q resource group: %q) does not exist", name, serverName, resourceGroup) - } - return fmt.Errorf("Bad: Get on mysqlFirewallRulesClient: %s", err) - } - - return nil - } -} 
- -func testCheckAzureRMMySQLFirewallRuleDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MySQL.DatabasesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_mysql_firewall_rule" { - continue - } - - name := rs.Primary.Attributes["name"] - serverName := rs.Primary.Attributes["server_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, serverName, name) - - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return fmt.Errorf("MySQL Firewall Rule still exists:\n%#v", resp) - } - } - - return nil -} - -func testAccAzureRMMySQLFirewallRule_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_mysql_server" "test" { - name = "acctestmysqlsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" - version = "5.6" - ssl_enforcement_enabled = true -} - -resource "azurerm_mysql_firewall_rule" "test" { - name = "acctestfwrule-%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_mysql_server.test.name - start_ip_address = "0.0.0.0" - end_ip_address = "255.255.255.255" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMMySQLFirewallRule_requiresImport(data acceptance.TestData) string { - return fmt.Sprintf(` -%s - -resource "azurerm_mysql_firewall_rule" "import" { - name = azurerm_mysql_firewall_rule.test.name - resource_group_name = azurerm_mysql_firewall_rule.test.resource_group_name - server_name = azurerm_mysql_firewall_rule.test.server_name - start_ip_address = azurerm_mysql_firewall_rule.test.start_ip_address - end_ip_address = azurerm_mysql_firewall_rule.test.end_ip_address -} -`, testAccAzureRMMySQLFirewallRule_basic(data)) -} diff --git a/azurerm/internal/services/mysql/tests/mysql_server_data_source_test.go b/azurerm/internal/services/mysql/tests/mysql_server_data_source_test.go deleted file mode 100644 index 5d0f95a9f074..000000000000 --- a/azurerm/internal/services/mysql/tests/mysql_server_data_source_test.go +++ /dev/null @@ -1,204 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMMySQLServer_basicFiveSix(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_mysql_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMMySQLServer_basic(data, "5.6"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "GP_Gen5_2"), - resource.TestCheckResourceAttr(data.ResourceName, "administrator_login", 
"acctestun"), - resource.TestCheckResourceAttr(data.ResourceName, "auto_grow_enabled", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "ssl_minimal_tls_version_enforced", "TLS1_1"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_mb", "51200"), - resource.TestCheckResourceAttr(data.ResourceName, "version", "5.6"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMMySQLServer_basicFiveSixWithIdentity(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_mysql_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMMySQLServer_basicWithIdentity(data, "5.6"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "GP_Gen5_2"), - resource.TestCheckResourceAttr(data.ResourceName, "administrator_login", "acctestun"), - resource.TestCheckResourceAttr(data.ResourceName, "auto_grow_enabled", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "ssl_minimal_tls_version_enforced", "TLS1_1"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_mb", "51200"), - resource.TestCheckResourceAttr(data.ResourceName, "version", "5.6"), - resource.TestCheckResourceAttr(data.ResourceName, "identity.0.type", "SystemAssigned"), - resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.principal_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.tenant_id"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMMySQLServer_basicFiveSeven(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_mysql_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMMySQLServer_basic(data, "5.7"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "GP_Gen5_2"), - resource.TestCheckResourceAttr(data.ResourceName, "administrator_login", "acctestun"), - resource.TestCheckResourceAttr(data.ResourceName, "auto_grow_enabled", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "ssl_minimal_tls_version_enforced", "TLS1_1"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_mb", "51200"), - resource.TestCheckResourceAttr(data.ResourceName, "version", "5.7"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMMySQLServer_basicEightZero(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_mysql_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMMySQLServer_basic(data, "8.0"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "GP_Gen5_2"), - resource.TestCheckResourceAttr(data.ResourceName, "administrator_login", "acctestun"), - resource.TestCheckResourceAttr(data.ResourceName, "auto_grow_enabled", "false"), - 
resource.TestCheckResourceAttr(data.ResourceName, "ssl_minimal_tls_version_enforced", "TLS1_1"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_mb", "51200"), - resource.TestCheckResourceAttr(data.ResourceName, "version", "8.0"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMMySQLServer_autogrowOnly(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_mysql_server", "test") - mysqlVersion := "5.7" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMMySQLServer_autogrow(data, mysqlVersion), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "GP_Gen5_2"), - resource.TestCheckResourceAttr(data.ResourceName, "administrator_login", "acctestun"), - resource.TestCheckResourceAttr(data.ResourceName, "auto_grow_enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_mb", "51200"), - resource.TestCheckResourceAttr(data.ResourceName, "version", "5.7"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMMySQLServer_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_mysql_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMMySQLServer_complete(data, "8.0"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "GP_Gen5_2"), - resource.TestCheckResourceAttr(data.ResourceName, "administrator_login", "acctestun"), - resource.TestCheckResourceAttr(data.ResourceName, "auto_grow_enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "ssl_minimal_tls_version_enforced", "TLS1_2"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_mb", "51200"), - resource.TestCheckResourceAttr(data.ResourceName, "version", "8.0"), - resource.TestCheckResourceAttr(data.ResourceName, "threat_detection_policy.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "threat_detection_policy.0.enabled", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "threat_detection_policy.0.email_account_admins", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "threat_detection_policy.0.retention_days", "7"), - ), - }, - }, - }) -} - -func testAccDataSourceAzureRMMySQLServer_basic(data acceptance.TestData, version string) string { - return fmt.Sprintf(` -%s - -data "azurerm_mysql_server" "test" { - name = azurerm_mysql_server.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, testAccAzureRMMySQLServer_basic(data, version)) -} - -func testAccDataSourceAzureRMMySQLServer_basicWithIdentity(data acceptance.TestData, version string) string { - return fmt.Sprintf(` -%s - -data "azurerm_mysql_server" "test" { - name = azurerm_mysql_server.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, testAccAzureRMMySQLServer_basicWithIdentity(data, version)) -} - -func testAccDataSourceAzureRMMySQLServer_autogrow(data acceptance.TestData, version string) string { - return fmt.Sprintf(` -%s - -data "azurerm_mysql_server" "test" { - name = 
azurerm_mysql_server.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, testAccAzureRMMySQLServer_autogrow(data, version)) -} - -func testAccDataSourceAzureRMMySQLServer_complete(data acceptance.TestData, version string) string { - return fmt.Sprintf(` -%s - -data "azurerm_mysql_server" "test" { - name = azurerm_mysql_server.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, testAccAzureRMMySQLServer_complete(data, version)) -} diff --git a/azurerm/internal/services/mysql/tests/mysql_server_key_resource_test.go b/azurerm/internal/services/mysql/tests/mysql_server_key_resource_test.go deleted file mode 100644 index 584dbf7acb4c..000000000000 --- a/azurerm/internal/services/mysql/tests/mysql_server_key_resource_test.go +++ /dev/null @@ -1,251 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mysql/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMMySQLServerKey_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_server_key", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLServerKeyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLServerKey_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerKeyExists(data.ResourceName), - ), - }, - }, - }) -} - -func TestAccAzureRMMySQLServerKey_updateKey(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_server_key", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLServerKeyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLServerKey_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerKeyExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMMySQLServerKey_updated(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerKeyExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMMySQLServerKey_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_server_key", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLServerKeyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLServerKey_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerKeyExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMMySQLServerKey_requiresImport), - }, - }) -} - -func testCheckAzureRMMySQLServerKeyDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MySQL.ServerKeysClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_mysql_server_key" 
{ - continue - } - - id, err := parse.MySQLServerKeyID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.ServerName, id.Name) - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("retrieving MySQL Server Key: %+v", err) - } - return nil - } - - return fmt.Errorf("MySQL Server Key still exists:\n%#v", resp) - } - - return nil -} - -func testCheckAzureRMMySQLServerKeyExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MySQL.ServerKeysClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.MySQLServerKeyID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.ServerName, id.Name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: MySQL Server Key %q (Resource Group %q / Server %q) does not exist", id.Name, id.ResourceGroup, id.ServerName) - } - return fmt.Errorf("Bad: Get on MySQLServerKeysClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMMySQLServerKey_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features { - key_vault { - purge_soft_delete_on_destroy = false - } - } -} - -data "azurerm_client_config" "current" {} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkv%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - sku_name = "standard" - soft_delete_enabled = true - purge_protection_enabled = true -} - -resource "azurerm_key_vault_access_policy" "server" { - key_vault_id = azurerm_key_vault.test.id - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = azurerm_mysql_server.test.identity.0.principal_id - key_permissions = ["get", "unwrapkey", "wrapkey"] - secret_permissions = ["get"] -} - -resource "azurerm_key_vault_access_policy" "client" { - key_vault_id = azurerm_key_vault.test.id - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - key_permissions = ["get", "create", "delete", "list", "restore", "recover", "unwrapkey", "wrapkey", "purge", "encrypt", "decrypt", "sign", "verify"] - secret_permissions = ["get"] -} - -resource "azurerm_key_vault_key" "first" { - name = "first" - key_vault_id = azurerm_key_vault.test.id - key_type = "RSA" - key_size = 2048 - key_opts = ["decrypt", "encrypt", "sign", "unwrapKey", "verify", "wrapKey"] - depends_on = [ - azurerm_key_vault_access_policy.client, - azurerm_key_vault_access_policy.server, - ] -} - -resource "azurerm_mysql_server" "test" { - name = "acctestmysqlsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku_name = "GP_Gen5_2" - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- ssl_enforcement_enabled = true - ssl_minimal_tls_version_enforced = "TLS1_1" - storage_mb = 51200 - version = "5.6" - - identity { - type = "SystemAssigned" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) -} - -func testAccAzureRMMySQLServerKey_basic(data acceptance.TestData) string { - template := testAccAzureRMMySQLServerKey_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mysql_server_key" "test" { - server_id = azurerm_mysql_server.test.id - key_vault_key_id = azurerm_key_vault_key.first.id -} -`, template) -} - -func testAccAzureRMMySQLServerKey_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMMySQLServerKey_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_mysql_server_key" "import" { - server_id = azurerm_mysql_server_key.test.server_id - key_vault_key_id = azurerm_mysql_server_key.test.key_vault_key_id -} -`, template) -} - -func testAccAzureRMMySQLServerKey_updated(data acceptance.TestData) string { - template := testAccAzureRMMySQLServerKey_template(data) - return fmt.Sprintf(` -%s -resource "azurerm_key_vault_key" "second" { - name = "second" - key_vault_id = azurerm_key_vault.test.id - key_type = "RSA" - key_size = 2048 - key_opts = ["decrypt", "encrypt", "sign", "unwrapKey", "verify", "wrapKey"] - depends_on = [ - azurerm_key_vault_access_policy.client, - azurerm_key_vault_access_policy.server, - ] -} -resource "azurerm_mysql_server_key" "test" { - server_id = azurerm_mysql_server.test.id - key_vault_key_id = azurerm_key_vault_key.second.id -} -`, template) -} diff --git a/azurerm/internal/services/mysql/tests/mysql_server_resource_test.go b/azurerm/internal/services/mysql/tests/mysql_server_resource_test.go deleted file mode 100644 index b22b8bbadc19..000000000000 --- a/azurerm/internal/services/mysql/tests/mysql_server_resource_test.go +++ /dev/null @@ -1,812 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - "time" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMMySQLServer_basicFiveSix(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLServer_basic(data, "5.6"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - }, - }) -} - -func TestAccAzureRMMySQLServer_basicFiveSixWithIdentity(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLServer_basicWithIdentity(data, "5.6"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), 
// not returned as sensitive - }, - }) -} - -func TestAccAzureRMMySQLServer_basicFiveSixWithIdentityUpdate(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLServer_basic(data, "5.6"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - { - Config: testAccAzureRMMySQLServer_basicWithIdentity(data, "5.6"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - }, - }) -} - -func TestAccAzureRMMySQLServer_basicFiveSixDeprecated(t *testing.T) { // remove in v3.0 - data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLServer_basicDeprecated(data, "5.6"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - }, - }) -} - -func TestAccAzureRMMySQLServer_basicFiveSeven(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLServer_basic(data, "5.7"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - }, - }) -} - -func TestAccAzureRMMySQLServer_basicEightZero(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLServer_basic(data, "8.0"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - }, - }) -} - -func TestAccAzureRMMySQLServer_autogrowOnly(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") - mysqlVersion := "5.7" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLServer_autogrow(data, mysqlVersion), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - { - Config: testAccAzureRMMySQLServer_basic(data, mysqlVersion), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMMySQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - }, - }) -} - -func TestAccAzureRMMySQLServer_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLServer_basic(data, "5.7"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMMySQLServer_requiresImport), - }, - }) -} - -func TestAccAzureRMMySQLServer_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLServer_complete(data, "8.0"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - }, - }) -} - -func TestAccAzureRMMySQLServer_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") - mysqlVersion := "8.0" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLServer_basic(data, mysqlVersion), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - { - Config: testAccAzureRMMySQLServer_complete(data, mysqlVersion), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - { - Config: testAccAzureRMMySQLServer_complete2(data, mysqlVersion), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password", "threat_detection_policy.0.storage_account_access_key"), - { - Config: testAccAzureRMMySQLServer_complete3(data, mysqlVersion), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password", "threat_detection_policy.0.storage_account_access_key"), - { - Config: testAccAzureRMMySQLServer_basic(data, mysqlVersion), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - }, - }) -} - -func TestAccAzureRMMySQLServer_completeDeprecatedMigrate(t *testing.T) { // remove in v3.0 - data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") - mysqlVersion := "5.6" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: 
testAccAzureRMMySQLServer_completeDeprecated(data, mysqlVersion), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - { - Config: testAccAzureRMMySQLServer_complete(data, mysqlVersion), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - }, - }) -} - -func TestAccAzureRMMySQLServer_updateDeprecated(t *testing.T) { // remove in v3.0 - data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") - mysqlVersion := "5.6" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLServer_basicDeprecated(data, mysqlVersion), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - { - Config: testAccAzureRMMySQLServer_completeDeprecated(data, mysqlVersion), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - { - Config: testAccAzureRMMySQLServer_basicDeprecated(data, mysqlVersion), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - }, - }) -} - -func TestAccAzureRMMySQLServer_updateSKU(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLServer_sku(data, "GP_Gen5_2"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - { - Config: testAccAzureRMMySQLServer_sku(data, "MO_Gen5_16"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), // not returned as sensitive - }, - }) -} - -func TestAccAzureRMMySQLServer_createReplica(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") - mysqlVersion := "8.0" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLServer_basic(data, mysqlVersion), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - { - Config: testAccAzureRMMySQLServer_createReplica(data, mysqlVersion), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - testCheckAzureRMMySQLServerExists("azurerm_mysql_server.replica"), - ), - }, - data.ImportStep("administrator_login_password"), - }, - }) -} - -func 
TestAccAzureRMMySQLServer_createPointInTimeRestore(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_server", "test") - restoreTime := time.Now().Add(11 * time.Minute) - mysqlVersion := "8.0" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLServer_basic(data, mysqlVersion), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - { - PreConfig: func() { time.Sleep(restoreTime.Sub(time.Now().Add(-7 * time.Minute))) }, - Config: testAccAzureRMMySQLServer_createPointInTimeRestore(data, mysqlVersion, restoreTime.Format(time.RFC3339)), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLServerExists(data.ResourceName), - testCheckAzureRMMySQLServerExists("azurerm_mysql_server.restore"), - ), - }, - data.ImportStep("administrator_login_password"), - }, - }) -} - -func testCheckAzureRMMySQLServerExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MySQL.ServersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for MySQL Server: %s", name) - } - - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: MySQL Server %q (resource group: %q) does not exist", name, resourceGroup) - } - - return fmt.Errorf("Bad: Get on mysqlServersClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMMySQLServerDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MySQL.ServersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_mysql_server" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return fmt.Errorf("MySQL Server still exists:\n%#v", resp) - } - } - - return nil -} - -func testAccAzureRMMySQLServer_basic(data acceptance.TestData, version string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_mysql_server" "test" { - name = "acctestmysqlsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku_name = "GP_Gen5_2" - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- ssl_enforcement_enabled = true - ssl_minimal_tls_version_enforced = "TLS1_1" - storage_mb = 51200 - version = "%s" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, version) -} - -func testAccAzureRMMySQLServer_basicDeprecated(data acceptance.TestData, version string) string { // remove in v3.0 - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_mysql_server" "test" { - name = "acctestmysqlsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - } - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" - version = "%s" - ssl_enforcement_enabled = true - ssl_minimal_tls_version_enforced = "TLS1_1" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, version) -} - -func testAccAzureRMMySQLServer_basicWithIdentity(data acceptance.TestData, version string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_mysql_server" "test" { - name = "acctestmysqlsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku_name = "GP_Gen5_2" - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" - ssl_enforcement_enabled = true - ssl_minimal_tls_version_enforced = "TLS1_1" - storage_mb = 51200 - version = "%s" - - identity { - type = "SystemAssigned" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, version) -} - -func testAccAzureRMMySQLServer_complete(data acceptance.TestData, version string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%[1]d" - location = "%[2]s" -} - -resource "azurerm_mysql_server" "test" { - name = "acctestmysqlsvr-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku_name = "GP_Gen5_2" - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
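The config builders in this test file compose by string interpolation: testAccAzureRMMySQLServer_requiresImport, _createReplica and _createPointInTimeRestore further below splice the already rendered output of testAccAzureRMMySQLServer_basic in through a leading %s verb, so every step reuses the same resource group and server. A small sketch of that pattern, with stand-in helper names and a deliberately trimmed config, is shown here.

package main

import "fmt"

// baseConfig stands in for helpers such as testAccAzureRMMySQLServer_basic above:
// it renders a provider block plus shared resources from test data.
func baseConfig(randomInteger int, location string) string {
	return fmt.Sprintf(`
provider "azurerm" {
  features {}
}

resource "azurerm_resource_group" "test" {
  name     = "acctestRG-%d"
  location = "%s"
}
`, randomInteger, location)
}

// replicaConfig shows the composition idiom: the rendered base configuration
// is passed through the leading %s verb, so later steps build on the same
// resources. The resource body is trimmed for illustration.
func replicaConfig(randomInteger int, location string) string {
	return fmt.Sprintf(`
%s

resource "azurerm_mysql_server" "replica" {
  name = "acctestmysqlsvr-%d-replica"
}
`, baseConfig(randomInteger, location), randomInteger)
}

func main() {
	fmt.Println(replicaConfig(12345, "westeurope"))
}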
- auto_grow_enabled = true - backup_retention_days = 7 - create_mode = "Default" - geo_redundant_backup_enabled = false - ssl_enforcement_enabled = true - ssl_minimal_tls_version_enforced = "TLS1_2" - storage_mb = 51200 - version = "%[3]s" - threat_detection_policy { - enabled = true - disabled_alerts = ["Sql_Injection", "Data_Exfiltration"] - email_account_admins = true - email_addresses = ["pearcec@example.com", "admin@example.com"] - retention_days = 7 - } -} -`, data.RandomInteger, data.Locations.Primary, version) -} - -func testAccAzureRMMySQLServer_complete2(data acceptance.TestData, version string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-mysql-%[1]d" - location = "%[2]s" -} - -resource "azurerm_storage_account" "test" { - name = "accsa%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "GRS" -} - -resource "azurerm_mysql_server" "test" { - name = "acctestmysqlsvr-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku_name = "GP_Gen5_2" - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!updated" - auto_grow_enabled = true - backup_retention_days = 7 - create_mode = "Default" - geo_redundant_backup_enabled = false - ssl_enforcement_enabled = false - storage_mb = 51200 - version = "%[3]s" - threat_detection_policy { - enabled = true - disabled_alerts = ["Sql_Injection"] - email_account_admins = true - email_addresses = ["pearcec@example.com"] - retention_days = 7 - storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint - storage_account_access_key = azurerm_storage_account.test.primary_access_key - } -} -`, data.RandomInteger, data.Locations.Primary, version) -} - -func testAccAzureRMMySQLServer_complete3(data acceptance.TestData, version string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-mysql-%[1]d" - location = "%[2]s" -} - -resource "azurerm_storage_account" "test" { - name = "accsa%[1]d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "GRS" -} - -resource "azurerm_mysql_server" "test" { - name = "acctestmysqlsvr-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku_name = "GP_Gen5_2" - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!updated" - auto_grow_enabled = true - backup_retention_days = 7 - create_mode = "Default" - geo_redundant_backup_enabled = false - ssl_enforcement_enabled = false - storage_mb = 51200 - version = "%[3]s" - threat_detection_policy { - enabled = true - email_account_admins = true - retention_days = 7 - storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint - storage_account_access_key = azurerm_storage_account.test.primary_access_key - } -} -`, data.RandomInteger, data.Locations.Primary, version) -} - -func testAccAzureRMMySQLServer_completeDeprecated(data acceptance.TestData, version string) string { // remove in v3.0 - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_mysql_server" "test" { - name = 
"acctestmysqlsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - auto_grow = "Enabled" - } - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" - version = "%s" - ssl_enforcement_enabled = true - ssl_minimal_tls_version_enforced = "TLS1_2" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, version) -} - -func testAccAzureRMMySQLServer_requiresImport(data acceptance.TestData) string { - return fmt.Sprintf(` -%s - -resource "azurerm_mysql_server" "import" { - name = azurerm_mysql_server.test.name - location = azurerm_mysql_server.test.location - resource_group_name = azurerm_mysql_server.test.resource_group_name - sku_name = "GP_Gen5_2" - version = "5.7" - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" - backup_retention_days = 7 - geo_redundant_backup_enabled = false - ssl_enforcement_enabled = true - storage_mb = 51200 -} -`, testAccAzureRMMySQLServer_basic(data, "5.7")) -} - -func testAccAzureRMMySQLServer_sku(data acceptance.TestData, sku string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_mysql_server" "test" { - name = "acctestmysqlsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku_name = "%s" - version = "5.7" - - storage_mb = 4194304 - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" - ssl_enforcement_enabled = true -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, sku) -} - -func testAccAzureRMMySQLServer_autogrow(data acceptance.TestData, version string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_mysql_server" "test" { - name = "acctestmysqlsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku_name = "GP_Gen5_2" - version = "%s" - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- auto_grow_enabled = true - backup_retention_days = 7 - geo_redundant_backup_enabled = false - ssl_enforcement_enabled = true - storage_mb = 51200 -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, version) -} - -func testAccAzureRMMySQLServer_createReplica(data acceptance.TestData, version string) string { - return fmt.Sprintf(` -%s - -resource "azurerm_mysql_server" "replica" { - name = "acctestmysqlsvr-%d-replica" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku_name = "GP_Gen5_2" - version = "%s" - storage_mb = 51200 - - create_mode = "Replica" - creation_source_server_id = azurerm_mysql_server.test.id - ssl_enforcement_enabled = true - ssl_minimal_tls_version_enforced = "TLS1_1" -} -`, testAccAzureRMMySQLServer_basic(data, version), data.RandomInteger, version) -} - -func testAccAzureRMMySQLServer_createPointInTimeRestore(data acceptance.TestData, version, restoreTime string) string { - return fmt.Sprintf(` -%s - -resource "azurerm_mysql_server" "restore" { - name = "acctestmysqlsvr-%d-restore" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku_name = "GP_Gen5_2" - version = "%s" - - create_mode = "PointInTimeRestore" - creation_source_server_id = azurerm_mysql_server.test.id - restore_point_in_time = "%s" - ssl_enforcement_enabled = true - ssl_minimal_tls_version_enforced = "TLS1_1" - storage_mb = 51200 -} -`, testAccAzureRMMySQLServer_basic(data, version), data.RandomInteger, version, restoreTime) -} diff --git a/azurerm/internal/services/mysql/tests/mysql_virtual_network_rule_resource_test.go b/azurerm/internal/services/mysql/tests/mysql_virtual_network_rule_resource_test.go deleted file mode 100644 index afacafd0bf9a..000000000000 --- a/azurerm/internal/services/mysql/tests/mysql_virtual_network_rule_resource_test.go +++ /dev/null @@ -1,571 +0,0 @@ -package tests - -import ( - "fmt" - "regexp" - "testing" - - "github.com/hashicorp/go-azure-helpers/response" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMMySQLVirtualNetworkRule_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_virtual_network_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLVirtualNetworkRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLVirtualNetworkRule_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLVirtualNetworkRuleExists(data.ResourceName), - ), - }, - }, - }) -} - -func TestAccAzureRMMySQLVirtualNetworkRule_badsubnet(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_virtual_network_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLVirtualNetworkRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLVirtualNetworkRule_badsubnet(data), - /* - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMMySQLVirtualNetworkRuleExists(data.ResourceName), - ),*/ - }, - }, - }) -} - -func TestAccAzureRMMySQLVirtualNetworkRule_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_virtual_network_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLVirtualNetworkRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLVirtualNetworkRule_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLVirtualNetworkRuleExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMMySQLVirtualNetworkRule_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_mysql_virtual_network_rule"), - }, - }, - }) -} - -func TestAccAzureRMMySQLVirtualNetworkRule_switchSubnets(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_virtual_network_rule", "test") - - // Create regex strings that will ensure that one subnet name exists, but not the other - preConfigRegex := regexp.MustCompile(fmt.Sprintf("(subnet1%d)$|(subnet[^2]%d)$", data.RandomInteger, data.RandomInteger)) // subnet 1 but not 2 - postConfigRegex := regexp.MustCompile(fmt.Sprintf("(subnet2%d)$|(subnet[^1]%d)$", data.RandomInteger, data.RandomInteger)) // subnet 2 but not 1 - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLVirtualNetworkRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLVirtualNetworkRule_subnetSwitchPre(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLVirtualNetworkRuleExists(data.ResourceName), - resource.TestMatchResourceAttr(data.ResourceName, "subnet_id", preConfigRegex), - ), - }, - { - Config: testAccAzureRMMySQLVirtualNetworkRule_subnetSwitchPost(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLVirtualNetworkRuleExists(data.ResourceName), - resource.TestMatchResourceAttr(data.ResourceName, "subnet_id", postConfigRegex), - ), - }, - }, - }) -} - -func TestAccAzureRMMySQLVirtualNetworkRule_disappears(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_virtual_network_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLVirtualNetworkRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLVirtualNetworkRule_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMMySQLVirtualNetworkRuleExists(data.ResourceName), - testCheckAzureRMMySQLVirtualNetworkRuleDisappears(data.ResourceName), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func TestAccAzureRMMySQLVirtualNetworkRule_multipleSubnets(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_mysql_virtual_network_rule", "rule1") - - resourceName2 := "azurerm_mysql_virtual_network_rule.rule2" - resourceName3 := "azurerm_mysql_virtual_network_rule.rule3" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMMySQLVirtualNetworkRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMMySQLVirtualNetworkRule_multipleSubnets(data), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMMySQLVirtualNetworkRuleExists(data.ResourceName), - testCheckAzureRMMySQLVirtualNetworkRuleExists(resourceName2), - testCheckAzureRMMySQLVirtualNetworkRuleExists(resourceName3), - ), - }, - }, - }) -} - -func testCheckAzureRMMySQLVirtualNetworkRuleExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MySQL.VirtualNetworkRulesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serverName := rs.Primary.Attributes["server_name"] - ruleName := rs.Primary.Attributes["name"] - - resp, err := client.Get(ctx, resourceGroup, serverName, ruleName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: MySql Virtual Network Rule %q (Server %q / Resource Group %q) was not found", ruleName, serverName, resourceGroup) - } - - return err - } - - return nil - } -} - -func testCheckAzureRMMySQLVirtualNetworkRuleDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MySQL.VirtualNetworkRulesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_mysql_virtual_network_rule" { - continue - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serverName := rs.Primary.Attributes["server_name"] - ruleName := rs.Primary.Attributes["name"] - - resp, err := client.Get(ctx, resourceGroup, serverName, ruleName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - - return fmt.Errorf("Bad: MySql Firewall Rule %q (Server %q / Resource Group %q) still exists: %+v", ruleName, serverName, resourceGroup, resp) - } - - return nil -} - -func testCheckAzureRMMySQLVirtualNetworkRuleDisappears(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).MySQL.VirtualNetworkRulesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serverName := rs.Primary.Attributes["server_name"] - ruleName := rs.Primary.Attributes["name"] - - future, err := client.Delete(ctx, resourceGroup, serverName, ruleName) - if err != nil { - // If the error is that the resource we want to delete does not exist in the first - // place (404), then just return with no error. - if response.WasNotFound(future.Response()) { - return nil - } - - return fmt.Errorf("Error deleting MySql Virtual Network Rule: %+v", err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - // Same deal as before. Just in case. 
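The disappears helper above deletes the rule outside of Terraform and deliberately swallows 404s at both stages (issuing the delete and waiting for it), so a rule that is already gone still counts as having disappeared; the test step then asserts ExpectNonEmptyPlan because state is now stale. A compact sketch of the same two-stage guard, using stand-in types rather than the Azure SDK's response.WasNotFound, which inspects an *http.Response:

package main

import (
	"errors"
	"fmt"
	"net/http"
)

// notFoundError is a stand-in for the SDK errors inspected by
// response.WasNotFound in the helper above.
type notFoundError struct{ status int }

func (e notFoundError) Error() string { return fmt.Sprintf("status %d", e.status) }

// wasNotFound mirrors the intent of that check: a 404 means the resource is
// already gone, which the disappears helper treats as success.
func wasNotFound(err error) bool {
	var nf notFoundError
	return errors.As(err, &nf) && nf.status == http.StatusNotFound
}

// deleteIgnoringNotFound applies the guard at both stages of the helper:
// when issuing the delete and when waiting for it to complete.
func deleteIgnoringNotFound(del func() error, wait func() error) error {
	if err := del(); err != nil {
		if wasNotFound(err) {
			return nil
		}
		return fmt.Errorf("deleting: %+v", err)
	}
	if err := wait(); err != nil {
		if wasNotFound(err) {
			return nil
		}
		return fmt.Errorf("waiting for deletion: %+v", err)
	}
	return nil
}

func main() {
	err := deleteIgnoringNotFound(
		func() error { return notFoundError{status: http.StatusNotFound} },
		func() error { return nil },
	)
	fmt.Println("already deleted is treated as success:", err)
}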
- if response.WasNotFound(future.Response()) { - return nil - } - - return fmt.Errorf("Error deleting MySql Virtual Network Rule: %+v", err) - } - - return nil - } -} - -func testAccAzureRMMySQLVirtualNetworkRule_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctestvnet%d" - address_space = ["10.7.29.0/29"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctestsubnet%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.7.29.0/29" - service_endpoints = ["Microsoft.Sql"] -} - -resource "azurerm_mysql_server" "test" { - name = "acctestmysqlsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" - version = "5.6" - ssl_enforcement_enabled = true - - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } -} - -resource "azurerm_mysql_virtual_network_rule" "test" { - name = "acctestmysqlvnetrule%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_mysql_server.test.name - subnet_id = azurerm_subnet.test.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMMySQLVirtualNetworkRule_badsubnet(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctestvnet%d" - address_space = ["10.7.29.0/29"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctestsubnet%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.7.29.0/29" - service_endpoints = ["Microsoft.Sql"] -} - -resource "azurerm_mysql_server" "test" { - name = "acctestmysqlsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- version = "5.6" - ssl_enforcement_enabled = true - - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } -} - -resource "azurerm_mysql_virtual_network_rule" "test" { - name = "acctestmysqlvnetrule%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_mysql_server.test.name - subnet_id = azurerm_subnet.test.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMMySQLVirtualNetworkRule_requiresImport(data acceptance.TestData) string { - return fmt.Sprintf(` -%s - -resource "azurerm_mysql_virtual_network_rule" "import" { - name = azurerm_mysql_virtual_network_rule.test.name - resource_group_name = azurerm_mysql_virtual_network_rule.test.resource_group_name - server_name = azurerm_mysql_virtual_network_rule.test.server_name - subnet_id = azurerm_mysql_virtual_network_rule.test.subnet_id -} -`, testAccAzureRMMySQLVirtualNetworkRule_basic(data)) -} - -func testAccAzureRMMySQLVirtualNetworkRule_subnetSwitchPre(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctestvnet%d" - address_space = ["10.7.29.0/24"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test1" { - name = "subnet1%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.7.29.0/25" - service_endpoints = ["Microsoft.Sql"] -} - -resource "azurerm_subnet" "test2" { - name = "subnet2%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.7.29.128/25" - service_endpoints = ["Microsoft.Sql"] -} - -resource "azurerm_mysql_server" "test" { - name = "acctestmysqlsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- version = "5.6" - ssl_enforcement_enabled = true - - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } -} - -resource "azurerm_mysql_virtual_network_rule" "test" { - name = "acctestmysqlvnetrule%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_mysql_server.test.name - subnet_id = azurerm_subnet.test1.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMMySQLVirtualNetworkRule_subnetSwitchPost(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctestvnet%d" - address_space = ["10.7.29.0/24"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test1" { - name = "subnet1%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.7.29.0/25" - service_endpoints = ["Microsoft.Sql"] -} - -resource "azurerm_subnet" "test2" { - name = "subnet2%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.7.29.128/25" - service_endpoints = ["Microsoft.Sql"] -} - -resource "azurerm_mysql_server" "test" { - name = "acctestmysqlsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- version = "5.6" - ssl_enforcement_enabled = true - - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } -} - -resource "azurerm_mysql_virtual_network_rule" "test" { - name = "acctestmysqlvnetrule%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_mysql_server.test.name - subnet_id = azurerm_subnet.test2.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMMySQLVirtualNetworkRule_multipleSubnets(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "vnet1" { - name = "acctestvnet1%d" - address_space = ["10.7.29.0/24"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_virtual_network" "vnet2" { - name = "acctestvnet2%d" - address_space = ["10.1.29.0/29"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "vnet1_subnet1" { - name = "acctestsubnet1%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.vnet1.name - address_prefix = "10.7.29.0/29" - service_endpoints = ["Microsoft.Sql"] -} - -resource "azurerm_subnet" "vnet1_subnet2" { - name = "acctestsubnet2%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.vnet1.name - address_prefix = "10.7.29.128/29" - service_endpoints = ["Microsoft.Sql"] -} - -resource "azurerm_subnet" "vnet2_subnet1" { - name = "acctestsubnet3%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.vnet2.name - address_prefix = "10.1.29.0/29" - service_endpoints = ["Microsoft.Sql"] -} - -resource "azurerm_mysql_server" "test" { - name = "acctestmysqlsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- version = "5.6" - ssl_enforcement_enabled = true - - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } -} - -resource "azurerm_mysql_virtual_network_rule" "rule1" { - name = "acctestmysqlvnetrule1%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_mysql_server.test.name - subnet_id = azurerm_subnet.vnet1_subnet1.id -} - -resource "azurerm_mysql_virtual_network_rule" "rule2" { - name = "acctestmysqlvnetrule2%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_mysql_server.test.name - subnet_id = azurerm_subnet.vnet1_subnet2.id -} - -resource "azurerm_mysql_virtual_network_rule" "rule3" { - name = "acctestmysqlvnetrule3%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_mysql_server.test.name - subnet_id = azurerm_subnet.vnet2_subnet1.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/mysql/validate/key_id.go b/azurerm/internal/services/mysql/validate/key_id.go new file mode 100644 index 000000000000..bc31b253dc51 --- /dev/null +++ b/azurerm/internal/services/mysql/validate/key_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mysql/parse" +) + +func KeyID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.KeyID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/mysql/validate/key_id_test.go b/azurerm/internal/services/mysql/validate/key_id_test.go new file mode 100644 index 000000000000..fc9765675971 --- /dev/null +++ b/azurerm/internal/services/mysql/validate/key_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestKeyID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforMySQL/", + Valid: false, + }, + + { + // missing value for ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforMySQL/servers/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforMySQL/servers/server1/", + Valid: false, + }, + + { + // missing value for Name + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforMySQL/servers/server1/keys/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforMySQL/servers/server1/keys/key1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DBFORMYSQL/SERVERS/SERVER1/KEYS/KEY1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := KeyID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/mysql/validate/mysql_server.go b/azurerm/internal/services/mysql/validate/mysql_server.go deleted file mode 100644 index fb9579230a66..000000000000 --- a/azurerm/internal/services/mysql/validate/mysql_server.go +++ /dev/null @@ -1,30 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mysql/parse" -) - -func MySQLServerID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return warnings, errors - } - - if _, err := parse.MySQLServerID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a MySQL Server resource id: %v", k, err)) - } - - return warnings, errors -} - -func MySQLServerName(i interface{}, k string) (_ []string, errors []error) { - if m, regexErrs := validate.RegExHelper(i, k, `^[0-9a-z][-0-9a-z]{1,61}[0-9a-z]$`); !m { - return nil, append(regexErrs, fmt.Errorf("%q can contain only lowercase letters, numbers, and '-', but can't start or end with '-', and must be at least 3 characters and no more than 63 characters long.", k)) - } - - return nil, nil -} diff --git a/azurerm/internal/services/mysql/validate/mysql_server_test.go b/azurerm/internal/services/mysql/validate/mysql_server_test.go deleted file mode 100644 index 4c198d842b85..000000000000 --- a/azurerm/internal/services/mysql/validate/mysql_server_test.go +++ /dev/null @@ -1,106 +0,0 @@ -package validate - -import ( - "testing" -) - -func TestValidateMysqlServerServerID(t *testing.T) { - testData := []struct { - input string - expected bool - }{ - { - // empty - input: "", - expected: false, - }, - { - // invalid - input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/test-rg", - expected: false, - }, - { - // valid - input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/test-rg/providers/Microsoft.DBforMySQL/servers/test-mysql", - expected: true, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q..", v.input) - - _, errors := MySQLServerID(v.input, "name") - actual := len(errors) == 0 - if v.expected != actual { - t.Fatalf("Expected %t but got %t", v.expected, actual) - } - } -} - -func TestValidateMysqlServerServerName(t *testing.T) { - testData := []struct { - input string - expected bool - }{ - { - // empty - input: "", - expected: false, - }, - { - // basic example - input: "ab-c", - expected: true, - }, - { - // can't contain upper case letter - input: "AbcD", - expected: false, - }, - { - // can't start with a hyphen - input: "-abc", - expected: false, - 
}, - { - // can't contain underscore - input: "ab_c", - expected: false, - }, - { - // can't end with hyphen - input: "abc-", - expected: false, - }, - { - // can not be shorter than 3 characters - input: "ab", - expected: false, - }, - { - // can not be shorter than 3 characters (catching bad regex) - input: "a", - expected: false, - }, - { - // 63 chars - input: "abcdefghijklmnopqrstuvwxyzabcdefabcdefghijklmnopqrstuvwxyzabcde", - expected: true, - }, - { - // 64 chars - input: "abcdefghijklmnopqrstuvwxyzabcdefabcdefghijklmnopqrstuvwxyzabcdef", - expected: false, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q..", v.input) - - _, errors := MySQLServerName(v.input, "name") - actual := len(errors) == 0 - if v.expected != actual { - t.Fatalf("Expected %t but got %t", v.expected, actual) - } - } -} diff --git a/azurerm/internal/services/mysql/validate/server_id.go b/azurerm/internal/services/mysql/validate/server_id.go new file mode 100644 index 000000000000..775cddf6ba68 --- /dev/null +++ b/azurerm/internal/services/mysql/validate/server_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/mysql/parse" +) + +func ServerID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ServerID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/mysql/validate/server_id_test.go b/azurerm/internal/services/mysql/validate/server_id_test.go new file mode 100644 index 000000000000..50807f8eeefc --- /dev/null +++ b/azurerm/internal/services/mysql/validate/server_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestServerID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforMySQL/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforMySQL/servers/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforMySQL/servers/server1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DBFORMYSQL/SERVERS/SERVER1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ServerID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } 
+ } +} diff --git a/azurerm/internal/services/mysql/validate/server_name.go b/azurerm/internal/services/mysql/validate/server_name.go new file mode 100644 index 000000000000..75d51248bb67 --- /dev/null +++ b/azurerm/internal/services/mysql/validate/server_name.go @@ -0,0 +1,15 @@ +package validate + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" +) + +func ServerName(i interface{}, k string) (_ []string, errors []error) { + if m, regexErrs := validate.RegExHelper(i, k, `^[0-9a-z][-0-9a-z]{1,61}[0-9a-z]$`); !m { + return nil, append(regexErrs, fmt.Errorf("%q can contain only lowercase letters, numbers, and '-', but can't start or end with '-', and must be at least 3 characters and no more than 63 characters long.", k)) + } + + return nil, nil +} diff --git a/azurerm/internal/services/mysql/validate/server_name_test.go b/azurerm/internal/services/mysql/validate/server_name_test.go new file mode 100644 index 000000000000..80126b27b209 --- /dev/null +++ b/azurerm/internal/services/mysql/validate/server_name_test.go @@ -0,0 +1,73 @@ +package validate + +import ( + "testing" +) + +func TestValidateServerName(t *testing.T) { + testData := []struct { + input string + expected bool + }{ + { + // empty + input: "", + expected: false, + }, + { + // basic example + input: "ab-c", + expected: true, + }, + { + // can't contain upper case letter + input: "AbcD", + expected: false, + }, + { + // can't start with a hyphen + input: "-abc", + expected: false, + }, + { + // can't contain underscore + input: "ab_c", + expected: false, + }, + { + // can't end with hyphen + input: "abc-", + expected: false, + }, + { + // can not be shorter than 3 characters + input: "ab", + expected: false, + }, + { + // can not be shorter than 3 characters (catching bad regex) + input: "a", + expected: false, + }, + { + // 63 chars + input: "abcdefghijklmnopqrstuvwxyzabcdefabcdefghijklmnopqrstuvwxyzabcde", + expected: true, + }, + { + // 64 chars + input: "abcdefghijklmnopqrstuvwxyzabcdefabcdefghijklmnopqrstuvwxyzabcdef", + expected: false, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q..", v.input) + + _, errors := ServerName(v.input, "name") + actual := len(errors) == 0 + if v.expected != actual { + t.Fatalf("Expected %t but got %t", v.expected, actual) + } + } +} diff --git a/azurerm/internal/services/netapp/netapp_account_data_source.go b/azurerm/internal/services/netapp/netapp_account_data_source.go index 983fa777b296..649fd1d70afc 100644 --- a/azurerm/internal/services/netapp/netapp_account_data_source.go +++ b/azurerm/internal/services/netapp/netapp_account_data_source.go @@ -11,9 +11,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmNetAppAccount() *schema.Resource { +func dataSourceNetAppAccount() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmNetAppAccountRead, + Read: dataSourceNetAppAccountRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -35,7 +35,7 @@ func dataSourceArmNetAppAccount() *schema.Resource { } } -func dataSourceArmNetAppAccountRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceNetAppAccountRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).NetApp.AccountClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/netapp/netapp_account_data_source_test.go 
b/azurerm/internal/services/netapp/netapp_account_data_source_test.go new file mode 100644 index 000000000000..99a18150ffae --- /dev/null +++ b/azurerm/internal/services/netapp/netapp_account_data_source_test.go @@ -0,0 +1,39 @@ +package netapp_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type NetAppAccountDataSource struct { +} + +func testAccDataSourceNetAppAccount_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_netapp_account", "test") + r := NetAppAccountDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basicConfig(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("resource_group_name").Exists(), + check.That(data.ResourceName).Key("name").Exists(), + ), + }, + }) +} + +func (r NetAppAccountDataSource) basicConfig(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_netapp_account" "test" { + resource_group_name = azurerm_netapp_account.test.resource_group_name + name = azurerm_netapp_account.test.name +} +`, NetAppAccountResource{}.basicConfig(data)) +} diff --git a/azurerm/internal/services/netapp/netapp_account_resource.go b/azurerm/internal/services/netapp/netapp_account_resource.go index ab47ebf1b9ef..46fc694785b1 100644 --- a/azurerm/internal/services/netapp/netapp_account_resource.go +++ b/azurerm/internal/services/netapp/netapp_account_resource.go @@ -22,12 +22,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmNetAppAccount() *schema.Resource { +func resourceNetAppAccount() *schema.Resource { return &schema.Resource{ - Create: resourceArmNetAppAccountCreateUpdate, - Read: resourceArmNetAppAccountRead, - Update: resourceArmNetAppAccountCreateUpdate, - Delete: resourceArmNetAppAccountDelete, + Create: resourceNetAppAccountCreateUpdate, + Read: resourceNetAppAccountRead, + Update: resourceNetAppAccountCreateUpdate, + Delete: resourceNetAppAccountDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -36,7 +36,7 @@ func resourceArmNetAppAccount() *schema.Resource { Delete: schema.DefaultTimeout(30 * time.Minute), }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.NetAppAccountID(id) + _, err := parse.AccountID(id) return err }), @@ -106,7 +106,7 @@ func resourceArmNetAppAccount() *schema.Resource { } } -func resourceArmNetAppAccountCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceNetAppAccountCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).NetApp.AccountClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -132,7 +132,7 @@ func resourceArmNetAppAccountCreateUpdate(d *schema.ResourceData, meta interface accountParameters := netapp.Account{ Location: utils.String(location), AccountProperties: &netapp.AccountProperties{ - ActiveDirectories: expandArmNetAppActiveDirectories(activeDirectories), + ActiveDirectories: expandNetAppActiveDirectories(activeDirectories), }, Tags: tags.Expand(d.Get("tags").(map[string]interface{})), } @@ -154,27 +154,27 @@ func resourceArmNetAppAccountCreateUpdate(d *schema.ResourceData, meta interface } d.SetId(*resp.ID) - return 
resourceArmNetAppAccountRead(d, meta) + return resourceNetAppAccountRead(d, meta) } -func resourceArmNetAppAccountRead(d *schema.ResourceData, meta interface{}) error { +func resourceNetAppAccountRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).NetApp.AccountClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.NetAppAccountID(d.Id()) + id, err := parse.AccountID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.Name) + resp, err := client.Get(ctx, id.ResourceGroup, id.NetAppAccountName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { log.Printf("[INFO] NetApp Accounts %q does not exist - removing from state", d.Id()) d.SetId("") return nil } - return fmt.Errorf("Error reading NetApp Accounts %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("Error reading NetApp Accounts %q (Resource Group %q): %+v", id.NetAppAccountName, id.ResourceGroup, err) } d.Set("name", resp.Name) @@ -186,31 +186,31 @@ func resourceArmNetAppAccountRead(d *schema.ResourceData, meta interface{}) erro return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmNetAppAccountDelete(d *schema.ResourceData, meta interface{}) error { +func resourceNetAppAccountDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).NetApp.AccountClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.NetAppAccountID(d.Id()) + id, err := parse.AccountID(d.Id()) if err != nil { return err } - future, err := client.Delete(ctx, id.ResourceGroup, id.Name) + future, err := client.Delete(ctx, id.ResourceGroup, id.NetAppAccountName) if err != nil { - return fmt.Errorf("Error deleting NetApp Account %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("Error deleting NetApp Account %q (Resource Group %q): %+v", id.NetAppAccountName, id.ResourceGroup, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { if !response.WasNotFound(future.Response()) { - return fmt.Errorf("Error waiting for deleting NetApp Account %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("Error waiting for deleting NetApp Account %q (Resource Group %q): %+v", id.NetAppAccountName, id.ResourceGroup, err) } } return nil } -func expandArmNetAppActiveDirectories(input []interface{}) *[]netapp.ActiveDirectory { +func expandNetAppActiveDirectories(input []interface{}) *[]netapp.ActiveDirectory { results := make([]netapp.ActiveDirectory, 0) for _, item := range input { v := item.(map[string]interface{}) diff --git a/azurerm/internal/services/netapp/netapp_account_resource_test.go b/azurerm/internal/services/netapp/netapp_account_resource_test.go new file mode 100644 index 000000000000..5ef5dd17278e --- /dev/null +++ b/azurerm/internal/services/netapp/netapp_account_resource_test.go @@ -0,0 +1,199 @@ +package netapp_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/netapp/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type NetAppAccountResource struct { +} + +func TestAccNetAppAccount(t *testing.T) { + // NOTE: this is a combined test rather than separate split out tests since + // Azure allows only one active directory can be joined to a single subscription at a time for NetApp Account. + // The CI system runs all tests in parallel, so the tests need to be changed to run one at a time. + testCases := map[string]map[string]func(t *testing.T){ + "Resource": { + "basic": testAccNetAppAccount_basic, + "requiresImport": testAccNetAppAccount_requiresImport, + "complete": testAccNetAppAccount_complete, + "update": testAccNetAppAccount_update, + }, + "DataSource": { + "basic": testAccDataSourceNetAppAccount_basic, + }, + } + + for group, m := range testCases { + for name, tc := range m { + t.Run(group, func(t *testing.T) { + t.Run(name, func(t *testing.T) { + tc(t) + }) + }) + } + } +} + +func testAccNetAppAccount_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_netapp_account", "test") + r := NetAppAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicConfig(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func testAccNetAppAccount_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_netapp_account", "test") + r := NetAppAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicConfig(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImportConfig(data), + ExpectError: acceptance.RequiresImportError("azurerm_netapp_account"), + }, + }) +} + +func testAccNetAppAccount_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_netapp_account", "test") + r := NetAppAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.completeConfig(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("active_directory.#").HasValue("1"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.FoO").HasValue("BaR"), + ), + }, + data.ImportStep("active_directory"), + }) +} + +func testAccNetAppAccount_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_netapp_account", "test") + r := NetAppAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicConfig(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("active_directory.#").HasValue("0"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), + }, + { + Config: r.completeConfig(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("active_directory.#").HasValue("1"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.FoO").HasValue("BaR"), + ), + }, + data.ImportStep("active_directory"), + }) +} + +func (t NetAppAccountResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.AccountID(state.ID) + if err != nil { + return 
nil, err + } + + resp, err := clients.NetApp.AccountClient.Get(ctx, id.ResourceGroup, id.NetAppAccountName) + if err != nil { + return nil, fmt.Errorf("reading Netapp Account (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (NetAppAccountResource) basicConfig(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-netapp-%d" + location = "%s" +} + +resource "azurerm_netapp_account" "test" { + name = "acctest-NetAppAccount-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r NetAppAccountResource) requiresImportConfig(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_netapp_account" "import" { + name = azurerm_netapp_account.test.name + location = azurerm_netapp_account.test.location + resource_group_name = azurerm_netapp_account.test.resource_group_name +} +`, r.basicConfig(data)) +} + +func (NetAppAccountResource) completeConfig(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-netapp-%d" + location = "%s" +} + +resource "azurerm_netapp_account" "test" { + name = "acctest-NetAppAccount-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + active_directory { + username = "aduser" + password = "aduserpwd" + smb_server_name = "SMBSERVER" + dns_servers = ["1.2.3.4"] + domain = "westcentralus.com" + organizational_unit = "OU=FirstLevel" + } + + tags = { + "FoO" = "BaR" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/netapp/netapp_pool_data_source.go b/azurerm/internal/services/netapp/netapp_pool_data_source.go index 11b59e2e4e22..a6e41fec6146 100644 --- a/azurerm/internal/services/netapp/netapp_pool_data_source.go +++ b/azurerm/internal/services/netapp/netapp_pool_data_source.go @@ -11,9 +11,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmNetAppPool() *schema.Resource { +func dataSourceNetAppPool() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmNetAppPoolRead, + Read: dataSourceNetAppPoolRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -49,7 +49,7 @@ func dataSourceArmNetAppPool() *schema.Resource { } } -func dataSourceArmNetAppPoolRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceNetAppPoolRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).NetApp.PoolClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/netapp/netapp_pool_data_source_test.go b/azurerm/internal/services/netapp/netapp_pool_data_source_test.go new file mode 100644 index 000000000000..8db55a978678 --- /dev/null +++ b/azurerm/internal/services/netapp/netapp_pool_data_source_test.go @@ -0,0 +1,43 @@ +package netapp_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type 
NetAppPoolDataSource struct { +} + +func TestAccDataSourceNetAppPool_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_netapp_pool", "test") + r := NetAppPoolDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("resource_group_name").Exists(), + check.That(data.ResourceName).Key("name").Exists(), + check.That(data.ResourceName).Key("account_name").Exists(), + check.That(data.ResourceName).Key("service_level").Exists(), + check.That(data.ResourceName).Key("size_in_tb").Exists(), + ), + }, + }) +} + +func (NetAppPoolDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_netapp_pool" "test" { + resource_group_name = azurerm_netapp_pool.test.resource_group_name + account_name = azurerm_netapp_pool.test.account_name + name = azurerm_netapp_pool.test.name +} +`, NetAppPoolResource{}.basic(data)) +} diff --git a/azurerm/internal/services/netapp/netapp_pool_resource.go b/azurerm/internal/services/netapp/netapp_pool_resource.go index e8a9821fc1c7..110b09d4f507 100644 --- a/azurerm/internal/services/netapp/netapp_pool_resource.go +++ b/azurerm/internal/services/netapp/netapp_pool_resource.go @@ -21,12 +21,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmNetAppPool() *schema.Resource { +func resourceNetAppPool() *schema.Resource { return &schema.Resource{ - Create: resourceArmNetAppPoolCreateUpdate, - Read: resourceArmNetAppPoolRead, - Update: resourceArmNetAppPoolCreateUpdate, - Delete: resourceArmNetAppPoolDelete, + Create: resourceNetAppPoolCreateUpdate, + Read: resourceNetAppPoolRead, + Update: resourceNetAppPoolCreateUpdate, + Delete: resourceNetAppPoolDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -35,7 +35,7 @@ func resourceArmNetAppPool() *schema.Resource { Delete: schema.DefaultTimeout(30 * time.Minute), }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.NetAppPoolID(id) + _, err := parse.CapacityPoolID(id) return err }), @@ -79,7 +79,7 @@ func resourceArmNetAppPool() *schema.Resource { } } -func resourceArmNetAppPoolCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceNetAppPoolCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).NetApp.PoolClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -132,20 +132,20 @@ func resourceArmNetAppPoolCreateUpdate(d *schema.ResourceData, meta interface{}) } d.SetId(*resp.ID) - return resourceArmNetAppPoolRead(d, meta) + return resourceNetAppPoolRead(d, meta) } -func resourceArmNetAppPoolRead(d *schema.ResourceData, meta interface{}) error { +func resourceNetAppPoolRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).NetApp.PoolClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.NetAppPoolID(d.Id()) + id, err := parse.CapacityPoolID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.Name) + resp, err := client.Get(ctx, id.ResourceGroup, id.NetAppAccountName, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { log.Printf("[INFO] NetApp Pools %q does not exist - removing from state", d.Id()) @@ -157,7 +157,7 @@ func resourceArmNetAppPoolRead(d 
*schema.ResourceData, meta interface{}) error { d.Set("name", id.Name) d.Set("resource_group_name", id.ResourceGroup) - d.Set("account_name", id.AccountName) + d.Set("account_name", id.NetAppAccountName) if location := resp.Location; location != nil { d.Set("location", azure.NormalizeLocation(*location)) } @@ -176,17 +176,17 @@ func resourceArmNetAppPoolRead(d *schema.ResourceData, meta interface{}) error { return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmNetAppPoolDelete(d *schema.ResourceData, meta interface{}) error { +func resourceNetAppPoolDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).NetApp.PoolClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.NetAppPoolID(d.Id()) + id, err := parse.CapacityPoolID(d.Id()) if err != nil { return err } - _, err = client.Delete(ctx, id.ResourceGroup, id.AccountName, id.Name) + _, err = client.Delete(ctx, id.ResourceGroup, id.NetAppAccountName, id.Name) if err != nil { return fmt.Errorf("Error deleting NetApp Pool %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } @@ -203,7 +203,7 @@ func resourceArmNetAppPoolDelete(d *schema.ResourceData, meta interface{}) error MinTimeout: 10 * time.Second, Pending: []string{"200", "202"}, Target: []string{"204", "404"}, - Refresh: netappPoolDeleteStateRefreshFunc(ctx, client, id.ResourceGroup, id.AccountName, id.Name), + Refresh: netappPoolDeleteStateRefreshFunc(ctx, client, id.ResourceGroup, id.NetAppAccountName, id.Name), Timeout: d.Timeout(schema.TimeoutDelete), } diff --git a/azurerm/internal/services/netapp/netapp_pool_resource_test.go b/azurerm/internal/services/netapp/netapp_pool_resource_test.go new file mode 100644 index 000000000000..10c9d7ccaebe --- /dev/null +++ b/azurerm/internal/services/netapp/netapp_pool_resource_test.go @@ -0,0 +1,187 @@ +package netapp_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/netapp/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type NetAppPoolResource struct { +} + +func TestAccNetAppPool_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_netapp_pool", "test") + r := NetAppPoolResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccNetAppPool_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_netapp_pool", "test") + r := NetAppPoolResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_netapp_pool"), + }, + }) +} + +func TestAccNetAppPool_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_netapp_pool", "test") + r := NetAppPoolResource{} + + data.ResourceTest(t, r, 
[]resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("service_level").HasValue("Standard"), + check.That(data.ResourceName).Key("size_in_tb").HasValue("15"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.FoO").HasValue("BaR"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccNetAppPool_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_netapp_pool", "test") + r := NetAppPoolResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("service_level").HasValue("Standard"), + check.That(data.ResourceName).Key("size_in_tb").HasValue("4"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), + }, + data.ImportStep(), + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("service_level").HasValue("Standard"), + check.That(data.ResourceName).Key("size_in_tb").HasValue("15"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.FoO").HasValue("BaR"), + ), + }, + data.ImportStep(), + }) +} + +func (t NetAppPoolResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.CapacityPoolID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.NetApp.PoolClient.Get(ctx, id.ResourceGroup, id.NetAppAccountName, id.Name) + if err != nil { + return nil, fmt.Errorf("reading Netapp Pool (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (NetAppPoolResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-netapp-%d" + location = "%s" +} + +resource "azurerm_netapp_account" "test" { + name = "acctest-NetAppAccount-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_netapp_pool" "test" { + name = "acctest-NetAppPool-%d" + account_name = azurerm_netapp_account.test.name + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + service_level = "Standard" + size_in_tb = 4 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r NetAppPoolResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s +resource "azurerm_netapp_pool" "import" { + name = azurerm_netapp_pool.test.name + location = azurerm_netapp_pool.test.location + resource_group_name = azurerm_netapp_pool.test.resource_group_name + account_name = azurerm_netapp_pool.test.account_name + service_level = azurerm_netapp_pool.test.service_level + size_in_tb = azurerm_netapp_pool.test.size_in_tb +} +`, r.basic(data)) +} + +func (NetAppPoolResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-netapp-%d" + location = "%s" +} + +resource "azurerm_netapp_account" "test" { + name = "acctest-NetAppAccount-%d" + location = azurerm_resource_group.test.location + 
resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_netapp_pool" "test" { + name = "acctest-NetAppPool-%d" + account_name = azurerm_netapp_account.test.name + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + service_level = "Standard" + size_in_tb = 15 + + tags = { + "FoO" = "BaR" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/netapp/netapp_snapshot_data_source.go b/azurerm/internal/services/netapp/netapp_snapshot_data_source.go index 42e8e258572f..2f131f3403a6 100644 --- a/azurerm/internal/services/netapp/netapp_snapshot_data_source.go +++ b/azurerm/internal/services/netapp/netapp_snapshot_data_source.go @@ -11,9 +11,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmNetAppSnapshot() *schema.Resource { +func dataSourceNetAppSnapshot() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmNetAppSnapshotRead, + Read: dataSourceNetAppSnapshotRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -51,7 +51,7 @@ func dataSourceArmNetAppSnapshot() *schema.Resource { } } -func dataSourceArmNetAppSnapshotRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceNetAppSnapshotRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).NetApp.SnapshotClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/netapp/netapp_snapshot_data_source_test.go b/azurerm/internal/services/netapp/netapp_snapshot_data_source_test.go new file mode 100644 index 000000000000..86f215f05ab0 --- /dev/null +++ b/azurerm/internal/services/netapp/netapp_snapshot_data_source_test.go @@ -0,0 +1,41 @@ +package netapp_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type NetAppSnapshotDataSource struct { +} + +func TestAccDataSourceNetAppSnapshot_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_netapp_snapshot", "test") + r := NetAppSnapshotDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("name").Exists(), + ), + }, + }) +} + +func (NetAppSnapshotDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_netapp_snapshot" "test" { + resource_group_name = azurerm_netapp_snapshot.test.resource_group_name + account_name = azurerm_netapp_snapshot.test.account_name + pool_name = azurerm_netapp_snapshot.test.pool_name + volume_name = azurerm_netapp_snapshot.test.volume_name + name = azurerm_netapp_snapshot.test.name +} +`, NetAppSnapshotResource{}.basic(data)) +} diff --git a/azurerm/internal/services/netapp/netapp_snapshot_resource.go b/azurerm/internal/services/netapp/netapp_snapshot_resource.go index 3d3090b78c23..e4f48525d0b1 100644 --- a/azurerm/internal/services/netapp/netapp_snapshot_resource.go +++ b/azurerm/internal/services/netapp/netapp_snapshot_resource.go @@ -20,12 +20,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmNetAppSnapshot() *schema.Resource { +func 
resourceNetAppSnapshot() *schema.Resource { return &schema.Resource{ - Create: resourceArmNetAppSnapshotCreate, - Read: resourceArmNetAppSnapshotRead, - Update: resourceArmNetAppSnapshotUpdate, - Delete: resourceArmNetAppSnapshotDelete, + Create: resourceNetAppSnapshotCreate, + Read: resourceNetAppSnapshotRead, + Update: resourceNetAppSnapshotUpdate, + Delete: resourceNetAppSnapshotDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -34,7 +34,7 @@ func resourceArmNetAppSnapshot() *schema.Resource { Delete: schema.DefaultTimeout(30 * time.Minute), }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.NetAppSnapshotID(id) + _, err := parse.SnapshotID(id) return err }), @@ -76,7 +76,7 @@ func resourceArmNetAppSnapshot() *schema.Resource { } } -func resourceArmNetAppSnapshotCreate(d *schema.ResourceData, meta interface{}) error { +func resourceNetAppSnapshotCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).NetApp.SnapshotClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -123,20 +123,20 @@ func resourceArmNetAppSnapshotCreate(d *schema.ResourceData, meta interface{}) e } d.SetId(*resp.ID) - return resourceArmNetAppSnapshotRead(d, meta) + return resourceNetAppSnapshotRead(d, meta) } -func resourceArmNetAppSnapshotRead(d *schema.ResourceData, meta interface{}) error { +func resourceNetAppSnapshotRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).NetApp.SnapshotClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.NetAppSnapshotID(d.Id()) + id, err := parse.SnapshotID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.PoolName, id.VolumeName, id.Name) + resp, err := client.Get(ctx, id.ResourceGroup, id.NetAppAccountName, id.CapacityPoolName, id.VolumeName, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { log.Printf("[INFO] NetApp Snapshots %q does not exist - removing from state", d.Id()) @@ -148,8 +148,8 @@ func resourceArmNetAppSnapshotRead(d *schema.ResourceData, meta interface{}) err d.Set("name", id.Name) d.Set("resource_group_name", id.ResourceGroup) - d.Set("account_name", id.AccountName) - d.Set("pool_name", id.PoolName) + d.Set("account_name", id.NetAppAccountName) + d.Set("pool_name", id.CapacityPoolName) d.Set("volume_name", id.VolumeName) if location := resp.Location; location != nil { d.Set("location", azure.NormalizeLocation(*location)) @@ -158,12 +158,12 @@ func resourceArmNetAppSnapshotRead(d *schema.ResourceData, meta interface{}) err return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmNetAppSnapshotUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceNetAppSnapshotUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).NetApp.SnapshotClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.NetAppSnapshotID(d.Id()) + id, err := parse.SnapshotID(d.Id()) if err != nil { return err } @@ -172,11 +172,11 @@ func resourceArmNetAppSnapshotUpdate(d *schema.ResourceData, meta interface{}) e Tags: tags.Expand(d.Get("tags").(map[string]interface{})), } - if _, err = client.Update(ctx, parameters, id.ResourceGroup, id.AccountName, id.PoolName, id.VolumeName, id.Name); err != nil { + if _, err = client.Update(ctx, parameters, id.ResourceGroup, 
id.NetAppAccountName, id.CapacityPoolName, id.VolumeName, id.Name); err != nil { return fmt.Errorf("Error updating NetApp Snapshot %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } - resp, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.PoolName, id.VolumeName, id.Name) + resp, err := client.Get(ctx, id.ResourceGroup, id.NetAppAccountName, id.CapacityPoolName, id.VolumeName, id.Name) if err != nil { return fmt.Errorf("Error retrieving NetApp Snapshot %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } @@ -184,20 +184,20 @@ func resourceArmNetAppSnapshotUpdate(d *schema.ResourceData, meta interface{}) e return fmt.Errorf("Cannot read NetApp Snapshot %q (Resource Group %q) ID", id.Name, id.ResourceGroup) } - return resourceArmNetAppSnapshotRead(d, meta) + return resourceNetAppSnapshotRead(d, meta) } -func resourceArmNetAppSnapshotDelete(d *schema.ResourceData, meta interface{}) error { +func resourceNetAppSnapshotDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).NetApp.SnapshotClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.NetAppSnapshotID(d.Id()) + id, err := parse.SnapshotID(d.Id()) if err != nil { return err } - if _, err = client.Delete(ctx, id.ResourceGroup, id.AccountName, id.PoolName, id.VolumeName, id.Name); err != nil { + if _, err = client.Delete(ctx, id.ResourceGroup, id.NetAppAccountName, id.CapacityPoolName, id.VolumeName, id.Name); err != nil { return fmt.Errorf("Error deleting NetApp Snapshot %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } @@ -213,7 +213,7 @@ func resourceArmNetAppSnapshotDelete(d *schema.ResourceData, meta interface{}) e MinTimeout: 10 * time.Second, Pending: []string{"200", "202"}, Target: []string{"204", "404"}, - Refresh: netappSnapshotDeleteStateRefreshFunc(ctx, client, id.ResourceGroup, id.AccountName, id.PoolName, id.VolumeName, id.Name), + Refresh: netappSnapshotDeleteStateRefreshFunc(ctx, client, id.ResourceGroup, id.NetAppAccountName, id.CapacityPoolName, id.VolumeName, id.Name), Timeout: d.Timeout(schema.TimeoutDelete), } diff --git a/azurerm/internal/services/netapp/netapp_snapshot_resource_test.go b/azurerm/internal/services/netapp/netapp_snapshot_resource_test.go new file mode 100644 index 000000000000..7fb2aa73fd61 --- /dev/null +++ b/azurerm/internal/services/netapp/netapp_snapshot_resource_test.go @@ -0,0 +1,302 @@ +package netapp_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/netapp/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type NetAppSnapshotResource struct { +} + +func TestAccNetAppSnapshot_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_netapp_snapshot", "test") + r := NetAppSnapshotResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccNetAppSnapshot_requiresImport(t 
*testing.T) { + data := acceptance.BuildTestData(t, "azurerm_netapp_snapshot", "test") + r := NetAppSnapshotResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_netapp_snapshot"), + }, + }) +} + +func TestAccNetAppSnapshot_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_netapp_snapshot", "test") + r := NetAppSnapshotResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.FoO").HasValue("BaR"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccNetAppSnapshot_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_netapp_snapshot", "test") + r := NetAppSnapshotResource{} + oldVolumeName := fmt.Sprintf("acctest-NetAppVolume-%d", data.RandomInteger) + newVolumeName := fmt.Sprintf("acctest-updated-NetAppVolume-%d", data.RandomInteger) + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("volume_name").HasValue(oldVolumeName), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.FoO").HasValue("BaR"), + ), + }, + data.ImportStep(), + { + Config: r.updateTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("volume_name").HasValue(oldVolumeName), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.FoO").HasValue("BaZ"), + ), + }, + data.ImportStep(), + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("volume_name").HasValue(newVolumeName), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), + }, + data.ImportStep(), + }) +} + +func (t NetAppSnapshotResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.SnapshotID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.NetApp.SnapshotClient.Get(ctx, id.ResourceGroup, id.NetAppAccountName, id.CapacityPoolName, id.VolumeName, id.Name) + if err != nil { + return nil, fmt.Errorf("reading Netapp Snapshot (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (r NetAppSnapshotResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_netapp_snapshot" "test" { + name = "acctest-NetAppSnapshot-%d" + account_name = azurerm_netapp_account.test.name + pool_name = azurerm_netapp_pool.test.name + volume_name = azurerm_netapp_volume.test.name + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} +`, r.template(data), data.RandomInteger) +} + +func (r NetAppSnapshotResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_netapp_snapshot" "import" { + name = azurerm_netapp_snapshot.test.name + location = azurerm_netapp_snapshot.test.location + 
resource_group_name = azurerm_netapp_snapshot.test.resource_group_name + account_name = azurerm_netapp_snapshot.test.account_name + pool_name = azurerm_netapp_snapshot.test.pool_name + volume_name = azurerm_netapp_snapshot.test.volume_name +} +`, r.basic(data)) +} + +func (r NetAppSnapshotResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_netapp_snapshot" "test" { + name = "acctest-NetAppSnapshot-%d" + account_name = azurerm_netapp_account.test.name + pool_name = azurerm_netapp_pool.test.name + volume_name = azurerm_netapp_volume.test.name + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + tags = { + "FoO" = "BaR" + } +} +`, r.template(data), data.RandomInteger) +} + +func (r NetAppSnapshotResource) updateTags(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_netapp_snapshot" "test" { + name = "acctest-NetAppSnapshot-%d" + account_name = azurerm_netapp_account.test.name + pool_name = azurerm_netapp_pool.test.name + volume_name = azurerm_netapp_volume.test.name + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + tags = { + "FoO" = "BaZ" + } +} +`, r.template(data), data.RandomInteger) +} + +func (r NetAppSnapshotResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_network" "update" { + name = "acctest-updated-VirtualNetwork-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + address_space = ["10.0.0.0/16"] +} + +resource "azurerm_subnet" "update" { + name = "acctest-updated-Subnet-%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.update.name + address_prefix = "10.0.2.0/24" + + delegation { + name = "netapp" + + service_delegation { + name = "Microsoft.Netapp/volumes" + actions = ["Microsoft.Network/networkinterfaces/*", "Microsoft.Network/virtualNetworks/subnets/join/action"] + } + } +} + +resource "azurerm_netapp_volume" "update" { + name = "acctest-updated-NetAppVolume-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + account_name = azurerm_netapp_account.test.name + pool_name = azurerm_netapp_pool.test.name + volume_path = "my-updated-unique-file-path-%d" + service_level = "Premium" + subnet_id = azurerm_subnet.update.id + storage_quota_in_gb = 100 +} + +resource "azurerm_netapp_snapshot" "test" { + name = "acctest-NetAppSnapshot-%d" + account_name = azurerm_netapp_account.test.name + pool_name = azurerm_netapp_pool.test.name + volume_name = azurerm_netapp_volume.update.name + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} +`, r.template(data), data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (NetAppSnapshotResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-netapp-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctest-VirtualNetwork-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + address_space = ["10.0.0.0/16"] +} + +resource "azurerm_subnet" "test" { + name = "acctest-Subnet-%d" + resource_group_name = 
azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" + + delegation { + name = "netapp" + + service_delegation { + name = "Microsoft.Netapp/volumes" + actions = ["Microsoft.Network/networkinterfaces/*", "Microsoft.Network/virtualNetworks/subnets/join/action"] + } + } +} + +resource "azurerm_netapp_account" "test" { + name = "acctest-NetAppAccount-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_netapp_pool" "test" { + name = "acctest-NetAppPool-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + account_name = azurerm_netapp_account.test.name + service_level = "Premium" + size_in_tb = 4 +} + +resource "azurerm_netapp_volume" "test" { + name = "acctest-NetAppVolume-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + account_name = azurerm_netapp_account.test.name + pool_name = azurerm_netapp_pool.test.name + volume_path = "my-unique-file-path-%d" + service_level = "Premium" + subnet_id = azurerm_subnet.test.id + storage_quota_in_gb = 100 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/netapp/netapp_volume_data_source.go b/azurerm/internal/services/netapp/netapp_volume_data_source.go index 0af279cb5cfe..b2666aac4a0c 100644 --- a/azurerm/internal/services/netapp/netapp_volume_data_source.go +++ b/azurerm/internal/services/netapp/netapp_volume_data_source.go @@ -11,9 +11,9 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmNetAppVolume() *schema.Resource { +func dataSourceNetAppVolume() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmNetAppVolumeRead, + Read: dataSourceNetAppVolumeRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -79,7 +79,7 @@ func dataSourceArmNetAppVolume() *schema.Resource { } } -func dataSourceArmNetAppVolumeRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceNetAppVolumeRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).NetApp.VolumeClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -124,7 +124,7 @@ func dataSourceArmNetAppVolumeRead(d *schema.ResourceData, meta interface{}) err if props.UsageThreshold != nil { d.Set("storage_quota_in_gb", *props.UsageThreshold/1073741824) } - if err := d.Set("mount_ip_addresses", flattenArmNetAppVolumeMountIPAddresses(props.MountTargets)); err != nil { + if err := d.Set("mount_ip_addresses", flattenNetAppVolumeMountIPAddresses(props.MountTargets)); err != nil { return fmt.Errorf("setting `mount_ip_addresses`: %+v", err) } } diff --git a/azurerm/internal/services/netapp/netapp_volume_data_source_test.go b/azurerm/internal/services/netapp/netapp_volume_data_source_test.go new file mode 100644 index 000000000000..298b4b6b344e --- /dev/null +++ b/azurerm/internal/services/netapp/netapp_volume_data_source_test.go @@ -0,0 +1,45 @@ +package netapp_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type NetAppVolumeDataSource struct { +} + +func TestAccDataSourceNetAppVolume_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_netapp_volume", "test") + r := NetAppVolumeDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("volume_path").Exists(), + check.That(data.ResourceName).Key("service_level").Exists(), + check.That(data.ResourceName).Key("subnet_id").Exists(), + check.That(data.ResourceName).Key("storage_quota_in_gb").Exists(), + check.That(data.ResourceName).Key("protocols.0").Exists(), + check.That(data.ResourceName).Key("mount_ip_addresses.#").HasValue("1"), + ), + }, + }) +} + +func (NetAppVolumeDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_netapp_volume" "test" { + resource_group_name = azurerm_netapp_volume.test.resource_group_name + account_name = azurerm_netapp_volume.test.account_name + pool_name = azurerm_netapp_volume.test.pool_name + name = azurerm_netapp_volume.test.name +} +`, NetAppVolumeResource{}.basic(data)) +} diff --git a/azurerm/internal/services/netapp/netapp_volume_resource.go b/azurerm/internal/services/netapp/netapp_volume_resource.go index 387e5369571d..9d8117187980 100644 --- a/azurerm/internal/services/netapp/netapp_volume_resource.go +++ b/azurerm/internal/services/netapp/netapp_volume_resource.go @@ -23,12 +23,12 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmNetAppVolume() *schema.Resource { +func resourceNetAppVolume() *schema.Resource { return &schema.Resource{ - Create: resourceArmNetAppVolumeCreateUpdate, - Read: resourceArmNetAppVolumeRead, - Update: resourceArmNetAppVolumeCreateUpdate, - Delete: resourceArmNetAppVolumeDelete, + Create: resourceNetAppVolumeCreateUpdate, + Read: resourceNetAppVolumeRead, + Update: resourceNetAppVolumeCreateUpdate, + Delete: resourceNetAppVolumeDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -37,7 +37,7 @@ func resourceArmNetAppVolume() *schema.Resource { Delete: schema.DefaultTimeout(60 * time.Minute), }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.NetAppVolumeID(id) + _, err := parse.VolumeID(id) return err }), @@ -98,12 +98,14 @@ func resourceArmNetAppVolume() *schema.Resource { Optional: true, Computed: true, MaxItems: 2, - Elem: &schema.Schema{Type: schema.TypeString, + Elem: &schema.Schema{ + Type: schema.TypeString, ValidateFunc: validation.StringInSlice([]string{ "NFSv3", "NFSv4.1", "CIFS", - }, false)}, + }, false), + }, }, "storage_quota_in_gb": { @@ -139,12 +141,14 @@ func resourceArmNetAppVolume() *schema.Resource { Computed: true, MaxItems: 1, MinItems: 1, - Elem: &schema.Schema{Type: schema.TypeString, + Elem: &schema.Schema{ + Type: schema.TypeString, ValidateFunc: validation.StringInSlice([]string{ "NFSv3", "NFSv4.1", "CIFS", - }, false)}, + }, false), + }, }, "cifs_enabled": { @@ -194,7 +198,7 @@ func resourceArmNetAppVolume() *schema.Resource { } } -func resourceArmNetAppVolumeCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceNetAppVolumeCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).NetApp.VolumeClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer 
cancel() @@ -236,7 +240,7 @@ func resourceArmNetAppVolumeCreateUpdate(d *schema.ResourceData, meta interface{ SubnetID: utils.String(subnetId), ProtocolTypes: utils.ExpandStringSlice(protocols), UsageThreshold: utils.Int64(storageQuotaInGB), - ExportPolicy: expandArmNetAppVolumeExportPolicyRule(exportPolicyRule), + ExportPolicy: expandNetAppVolumeExportPolicyRule(exportPolicyRule), }, Tags: tags.Expand(d.Get("tags").(map[string]interface{})), } @@ -258,20 +262,20 @@ func resourceArmNetAppVolumeCreateUpdate(d *schema.ResourceData, meta interface{ } d.SetId(*resp.ID) - return resourceArmNetAppVolumeRead(d, meta) + return resourceNetAppVolumeRead(d, meta) } -func resourceArmNetAppVolumeRead(d *schema.ResourceData, meta interface{}) error { +func resourceNetAppVolumeRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).NetApp.VolumeClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.NetAppVolumeID(d.Id()) + id, err := parse.VolumeID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.AccountName, id.PoolName, id.Name) + resp, err := client.Get(ctx, id.ResourceGroup, id.NetAppAccountName, id.CapacityPoolName, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { log.Printf("[INFO] NetApp Volumes %q does not exist - removing from state", d.Id()) @@ -283,8 +287,8 @@ func resourceArmNetAppVolumeRead(d *schema.ResourceData, meta interface{}) error d.Set("name", id.Name) d.Set("resource_group_name", id.ResourceGroup) - d.Set("account_name", id.AccountName) - d.Set("pool_name", id.PoolName) + d.Set("account_name", id.NetAppAccountName) + d.Set("pool_name", id.CapacityPoolName) if location := resp.Location; location != nil { d.Set("location", azure.NormalizeLocation(*location)) } @@ -296,10 +300,10 @@ func resourceArmNetAppVolumeRead(d *schema.ResourceData, meta interface{}) error if props.UsageThreshold != nil { d.Set("storage_quota_in_gb", *props.UsageThreshold/1073741824) } - if err := d.Set("export_policy_rule", flattenArmNetAppVolumeExportPolicyRule(props.ExportPolicy)); err != nil { + if err := d.Set("export_policy_rule", flattenNetAppVolumeExportPolicyRule(props.ExportPolicy)); err != nil { return fmt.Errorf("Error setting `export_policy_rule`: %+v", err) } - if err := d.Set("mount_ip_addresses", flattenArmNetAppVolumeMountIPAddresses(props.MountTargets)); err != nil { + if err := d.Set("mount_ip_addresses", flattenNetAppVolumeMountIPAddresses(props.MountTargets)); err != nil { return fmt.Errorf("setting `mount_ip_addresses`: %+v", err) } } @@ -307,17 +311,17 @@ func resourceArmNetAppVolumeRead(d *schema.ResourceData, meta interface{}) error return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmNetAppVolumeDelete(d *schema.ResourceData, meta interface{}) error { +func resourceNetAppVolumeDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).NetApp.VolumeClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.NetAppVolumeID(d.Id()) + id, err := parse.VolumeID(d.Id()) if err != nil { return err } - if _, err = client.Delete(ctx, id.ResourceGroup, id.AccountName, id.PoolName, id.Name); err != nil { + if _, err = client.Delete(ctx, id.ResourceGroup, id.NetAppAccountName, id.CapacityPoolName, id.Name); err != nil { return fmt.Errorf("Error deleting NetApp Volume %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } @@ -333,7 +337,7 @@ func 
resourceArmNetAppVolumeDelete(d *schema.ResourceData, meta interface{}) err MinTimeout: 10 * time.Second, Pending: []string{"200", "202"}, Target: []string{"204", "404"}, - Refresh: netappVolumeDeleteStateRefreshFunc(ctx, client, id.ResourceGroup, id.AccountName, id.PoolName, id.Name), + Refresh: netappVolumeDeleteStateRefreshFunc(ctx, client, id.ResourceGroup, id.NetAppAccountName, id.CapacityPoolName, id.Name), Timeout: d.Timeout(schema.TimeoutDelete), } @@ -357,7 +361,7 @@ func netappVolumeDeleteStateRefreshFunc(ctx context.Context, client *netapp.Volu } } -func expandArmNetAppVolumeExportPolicyRule(input []interface{}) *netapp.VolumePropertiesExportPolicy { +func expandNetAppVolumeExportPolicyRule(input []interface{}) *netapp.VolumePropertiesExportPolicy { results := make([]netapp.ExportPolicyRule, 0) for _, item := range input { if item != nil { @@ -414,7 +418,7 @@ func expandArmNetAppVolumeExportPolicyRule(input []interface{}) *netapp.VolumePr } } -func flattenArmNetAppVolumeExportPolicyRule(input *netapp.VolumePropertiesExportPolicy) []interface{} { +func flattenNetAppVolumeExportPolicyRule(input *netapp.VolumePropertiesExportPolicy) []interface{} { results := make([]interface{}, 0) if input == nil || input.Rules == nil { return results @@ -478,7 +482,7 @@ func flattenArmNetAppVolumeExportPolicyRule(input *netapp.VolumePropertiesExport return results } -func flattenArmNetAppVolumeMountIPAddresses(input *[]netapp.MountTargetProperties) []interface{} { +func flattenNetAppVolumeMountIPAddresses(input *[]netapp.MountTargetProperties) []interface{} { results := make([]interface{}, 0) if input == nil { return results diff --git a/azurerm/internal/services/netapp/netapp_volume_resource_test.go b/azurerm/internal/services/netapp/netapp_volume_resource_test.go new file mode 100644 index 000000000000..cb69a6f53fd7 --- /dev/null +++ b/azurerm/internal/services/netapp/netapp_volume_resource_test.go @@ -0,0 +1,440 @@ +package netapp_test + +import ( + "context" + "fmt" + "os" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/netapp/parse" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type NetAppVolumeResource struct { +} + +func TestAccNetAppVolume_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_netapp_volume", "test") + r := NetAppVolumeResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("protocols.2676449260").HasValue("NFSv3"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccNetAppVolume_nfsv41(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_netapp_volume", "test") + r := NetAppVolumeResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.nfsv41(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("protocols.3098200649").HasValue("NFSv4.1"), + ), + }, + data.ImportStep(), + }) +} + +func 
TestAccNetAppVolume_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_netapp_volume", "test") + r := NetAppVolumeResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_netapp_volume"), + }, + }) +} + +func TestAccNetAppVolume_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_netapp_volume", "test") + r := NetAppVolumeResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("service_level").HasValue("Standard"), + check.That(data.ResourceName).Key("storage_quota_in_gb").HasValue("101"), + check.That(data.ResourceName).Key("export_policy_rule.#").HasValue("3"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.FoO").HasValue("BaR"), + check.That(data.ResourceName).Key("mount_ip_addresses.#").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccNetAppVolume_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_netapp_volume", "test") + r := NetAppVolumeResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("storage_quota_in_gb").HasValue("100"), + check.That(data.ResourceName).Key("export_policy_rule.#").HasValue("0"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), + }, + data.ImportStep(), + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("storage_quota_in_gb").HasValue("101"), + check.That(data.ResourceName).Key("export_policy_rule.#").HasValue("3"), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.FoO").HasValue("BaR"), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("storage_quota_in_gb").HasValue("100"), + check.That(data.ResourceName).Key("export_policy_rule.#").HasValue("0"), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccNetAppVolume_updateSubnet(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_netapp_volume", "test") + r := NetAppVolumeResource{} + resourceGroupName := fmt.Sprintf("acctestRG-netapp-%d", data.RandomInteger) + oldVNetName := fmt.Sprintf("acctest-VirtualNetwork-%d", data.RandomInteger) + oldSubnetName := fmt.Sprintf("acctest-Subnet-%d", data.RandomInteger) + newVNetName := fmt.Sprintf("acctest-updated-VirtualNetwork-%d", data.RandomInteger) + newSubnetName := fmt.Sprintf("acctest-updated-Subnet-%d", data.RandomInteger) + uriTemplate := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/virtualNetworks/%s/subnets/%s" + + subscriptionID := os.Getenv("ARM_SUBSCRIPTION_ID") + oldSubnetId := fmt.Sprintf(uriTemplate, subscriptionID, resourceGroupName, oldVNetName, oldSubnetName) + newSubnetId := fmt.Sprintf(uriTemplate, subscriptionID, resourceGroupName, newVNetName, newSubnetName) + + 
data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("subnet_id").HasValue(oldSubnetId), + ), + }, + data.ImportStep(), + { + Config: r.updateSubnet(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("subnet_id").HasValue(newSubnetId), + ), + }, + data.ImportStep(), + }) +} + +func TestAccNetAppVolume_updateExportPolicyRule(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_netapp_volume", "test") + r := NetAppVolumeResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("export_policy_rule.#").HasValue("3"), + ), + }, + data.ImportStep(), + { + Config: r.updateExportPolicyRule(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("export_policy_rule.#").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func (t NetAppVolumeResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.VolumeID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.NetApp.VolumeClient.Get(ctx, id.ResourceGroup, id.NetAppAccountName, id.CapacityPoolName, id.Name) + if err != nil { + return nil, fmt.Errorf("reading Netapp Volume (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (NetAppVolumeResource) basic(data acceptance.TestData) string { + template := NetAppVolumeResource{}.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_netapp_volume" "test" { + name = "acctest-NetAppVolume-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + account_name = azurerm_netapp_account.test.name + pool_name = azurerm_netapp_pool.test.name + volume_path = "my-unique-file-path-%d" + service_level = "Standard" + subnet_id = azurerm_subnet.test.id + storage_quota_in_gb = 100 +} +`, template, data.RandomInteger, data.RandomInteger) +} + +func (NetAppVolumeResource) nfsv41(data acceptance.TestData) string { + template := NetAppVolumeResource{}.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_netapp_volume" "test" { + name = "acctest-NetAppVolume-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + account_name = azurerm_netapp_account.test.name + pool_name = azurerm_netapp_pool.test.name + volume_path = "my-unique-file-path-%d" + service_level = "Standard" + subnet_id = azurerm_subnet.test.id + protocols = ["NFSv4.1"] + storage_quota_in_gb = 100 + + export_policy_rule { + rule_index = 1 + allowed_clients = ["1.2.3.0/24"] + protocols_enabled = ["NFSv4.1"] + unix_read_only = false + unix_read_write = true + } +} +`, template, data.RandomInteger, data.RandomInteger) +} + +func (r NetAppVolumeResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_netapp_volume" "import" { + name = azurerm_netapp_volume.test.name + location = azurerm_netapp_volume.test.location + resource_group_name = azurerm_netapp_volume.test.resource_group_name + account_name = azurerm_netapp_volume.test.account_name + pool_name = azurerm_netapp_volume.test.pool_name 
+ volume_path = azurerm_netapp_volume.test.volume_path + service_level = azurerm_netapp_volume.test.service_level + subnet_id = azurerm_netapp_volume.test.subnet_id + storage_quota_in_gb = azurerm_netapp_volume.test.storage_quota_in_gb +} +`, r.basic(data)) +} + +func (r NetAppVolumeResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_netapp_volume" "test" { + name = "acctest-NetAppVolume-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + account_name = azurerm_netapp_account.test.name + pool_name = azurerm_netapp_pool.test.name + service_level = "Standard" + volume_path = "my-unique-file-path-%d" + subnet_id = azurerm_subnet.test.id + protocols = ["NFSv3"] + storage_quota_in_gb = 101 + + export_policy_rule { + rule_index = 1 + allowed_clients = ["1.2.3.0/24"] + protocols_enabled = ["NFSv3"] + unix_read_only = false + unix_read_write = true + } + + export_policy_rule { + rule_index = 2 + allowed_clients = ["1.2.5.0"] + protocols_enabled = ["NFSv3"] + unix_read_only = true + unix_read_write = false + } + + export_policy_rule { + rule_index = 3 + allowed_clients = ["1.2.6.0/24"] + cifs_enabled = false + nfsv3_enabled = true + nfsv4_enabled = false + unix_read_only = true + unix_read_write = false + } + + tags = { + "FoO" = "BaR" + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger) +} + +func (r NetAppVolumeResource) updateSubnet(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_network" "updated" { + name = "acctest-updated-VirtualNetwork-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + address_space = ["10.1.0.0/16"] +} + +resource "azurerm_subnet" "updated" { + name = "acctest-updated-Subnet-%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.updated.name + address_prefix = "10.1.3.0/24" + + delegation { + name = "testdelegation2" + + service_delegation { + name = "Microsoft.Netapp/volumes" + actions = ["Microsoft.Network/networkinterfaces/*", "Microsoft.Network/virtualNetworks/subnets/join/action"] + } + } +} + +resource "azurerm_netapp_volume" "test" { + name = "acctest-updated-NetAppVolume-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + account_name = azurerm_netapp_account.test.name + pool_name = azurerm_netapp_pool.test.name + volume_path = "my-updated-unique-file-path-%d" + service_level = "Standard" + subnet_id = azurerm_subnet.updated.id + protocols = ["NFSv3"] + storage_quota_in_gb = 100 +} +`, r.template(data), data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r NetAppVolumeResource) updateExportPolicyRule(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_netapp_volume" "test" { + name = "acctest-NetAppVolume-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + account_name = azurerm_netapp_account.test.name + pool_name = azurerm_netapp_pool.test.name + service_level = "Standard" + volume_path = "my-unique-file-path-%d" + subnet_id = azurerm_subnet.test.id + protocols = ["NFSv3"] + storage_quota_in_gb = 101 + + export_policy_rule { + rule_index = 1 + allowed_clients = ["1.2.4.0/24", "1.3.4.0"] + protocols_enabled = ["NFSv3"] + unix_read_only = false + unix_read_write = true + } + + tags = { + "FoO" 
= "BaR" + } +} +`, r.template(data), data.RandomInteger, data.RandomInteger) +} + +func (NetAppVolumeResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-netapp-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctest-VirtualNetwork-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + address_space = ["10.6.0.0/16"] +} + +resource "azurerm_subnet" "test" { + name = "acctest-Subnet-%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.6.2.0/24" + + delegation { + name = "testdelegation" + + service_delegation { + name = "Microsoft.Netapp/volumes" + actions = ["Microsoft.Network/networkinterfaces/*", "Microsoft.Network/virtualNetworks/subnets/join/action"] + } + } +} + +resource "azurerm_netapp_account" "test" { + name = "acctest-NetAppAccount-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_netapp_pool" "test" { + name = "acctest-NetAppPool-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + account_name = azurerm_netapp_account.test.name + service_level = "Standard" + size_in_tb = 4 +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/netapp/parse/account.go b/azurerm/internal/services/netapp/parse/account.go new file mode 100644 index 000000000000..cf4656c9fbc3 --- /dev/null +++ b/azurerm/internal/services/netapp/parse/account.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type AccountId struct { + SubscriptionId string + ResourceGroup string + NetAppAccountName string +} + +func NewAccountID(subscriptionId, resourceGroup, netAppAccountName string) AccountId { + return AccountId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + NetAppAccountName: netAppAccountName, + } +} + +func (id AccountId) String() string { + segments := []string{ + fmt.Sprintf("Net App Account Name %q", id.NetAppAccountName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Account", segmentsStr) +} + +func (id AccountId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.NetApp/netAppAccounts/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.NetAppAccountName) +} + +// AccountID parses a Account ID into an AccountId struct +func AccountID(input string) (*AccountId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := AccountId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.NetAppAccountName, err = id.PopSegment("netAppAccounts"); err != nil { + 
return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/netapp/parse/account_test.go b/azurerm/internal/services/netapp/parse/account_test.go new file mode 100644 index 000000000000..95d2fdf3dbf8 --- /dev/null +++ b/azurerm/internal/services/netapp/parse/account_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = AccountId{} + +func TestAccountIDFormatter(t *testing.T) { + actual := NewAccountID("12345678-1234-9876-4563-123456789012", "resGroup1", "account1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestAccountID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *AccountId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing NetAppAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/", + Error: true, + }, + + { + // missing value for NetAppAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1", + Expected: &AccountId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + NetAppAccountName: "account1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETAPP/NETAPPACCOUNTS/ACCOUNT1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := AccountID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.NetAppAccountName != v.Expected.NetAppAccountName { + t.Fatalf("Expected %q but got %q for NetAppAccountName", v.Expected.NetAppAccountName, actual.NetAppAccountName) + } + } +} diff --git a/azurerm/internal/services/netapp/parse/capacity_pool.go b/azurerm/internal/services/netapp/parse/capacity_pool.go new file mode 100644 index 000000000000..693ce837f9e2 --- /dev/null +++ 
b/azurerm/internal/services/netapp/parse/capacity_pool.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type CapacityPoolId struct { + SubscriptionId string + ResourceGroup string + NetAppAccountName string + Name string +} + +func NewCapacityPoolID(subscriptionId, resourceGroup, netAppAccountName, name string) CapacityPoolId { + return CapacityPoolId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + NetAppAccountName: netAppAccountName, + Name: name, + } +} + +func (id CapacityPoolId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Net App Account Name %q", id.NetAppAccountName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Capacity Pool", segmentsStr) +} + +func (id CapacityPoolId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.NetApp/netAppAccounts/%s/capacityPools/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.NetAppAccountName, id.Name) +} + +// CapacityPoolID parses a CapacityPool ID into an CapacityPoolId struct +func CapacityPoolID(input string) (*CapacityPoolId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := CapacityPoolId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.NetAppAccountName, err = id.PopSegment("netAppAccounts"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("capacityPools"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/netapp/parse/capacity_pool_test.go b/azurerm/internal/services/netapp/parse/capacity_pool_test.go new file mode 100644 index 000000000000..10f436cf871b --- /dev/null +++ b/azurerm/internal/services/netapp/parse/capacity_pool_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = CapacityPoolId{} + +func TestCapacityPoolIDFormatter(t *testing.T) { + actual := NewCapacityPoolID("12345678-1234-9876-4563-123456789012", "resGroup1", "account1", "pool1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestCapacityPoolID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *CapacityPoolId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing NetAppAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/", + Error: true, + }, + + { + // missing value for NetAppAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1", + Expected: &CapacityPoolId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + NetAppAccountName: "account1", + Name: "pool1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETAPP/NETAPPACCOUNTS/ACCOUNT1/CAPACITYPOOLS/POOL1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := CapacityPoolID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.NetAppAccountName != v.Expected.NetAppAccountName { + t.Fatalf("Expected %q but got %q for NetAppAccountName", v.Expected.NetAppAccountName, actual.NetAppAccountName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/netapp/parse/netapp_account.go b/azurerm/internal/services/netapp/parse/netapp_account.go deleted file mode 100644 index 96b2f1e90217..000000000000 --- a/azurerm/internal/services/netapp/parse/netapp_account.go +++ /dev/null @@ -1,33 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type NetAppAccountId struct { - ResourceGroup string - Name string -} - -func NetAppAccountID(input string) (*NetAppAccountId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse NetApp Account ID %q: %+v", input, err) - } - - service := NetAppAccountId{ - ResourceGroup: id.ResourceGroup, - } - - if service.Name, err = id.PopSegment("netAppAccounts"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &service, nil -} diff --git a/azurerm/internal/services/netapp/parse/netapp_account_test.go 
b/azurerm/internal/services/netapp/parse/netapp_account_test.go deleted file mode 100644 index 11195171297d..000000000000 --- a/azurerm/internal/services/netapp/parse/netapp_account_test.go +++ /dev/null @@ -1,73 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestNetAppAccountId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *NetAppAccountId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing NetApp Account Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/", - Expected: nil, - }, - { - Name: "NetApp Account ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1", - Expected: &NetAppAccountId{ - Name: "account1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.NetApp/NETAPPACCOUNTS/account1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := NetAppAccountID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/netapp/parse/netapp_pool.go b/azurerm/internal/services/netapp/parse/netapp_pool.go deleted file mode 100644 index 23c2fcff7ad0..000000000000 --- a/azurerm/internal/services/netapp/parse/netapp_pool.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type NetAppPoolId struct { - ResourceGroup string - AccountName string - Name string -} - -func NetAppPoolID(input string) (*NetAppPoolId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse NetApp Pool ID %q: %+v", input, err) - } - - service := NetAppPoolId{ - ResourceGroup: id.ResourceGroup, - } - - if service.AccountName, err = id.PopSegment("netAppAccounts"); err != nil { - return nil, err - } - - if service.Name, err = id.PopSegment("capacityPools"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &service, nil -} diff --git a/azurerm/internal/services/netapp/parse/netapp_pool_test.go b/azurerm/internal/services/netapp/parse/netapp_pool_test.go deleted file mode 100644 index 8e4b0131ed92..000000000000 --- a/azurerm/internal/services/netapp/parse/netapp_pool_test.go +++ /dev/null @@ -1,88 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestNetAppPoolId(t *testing.T) { - testData := []struct { - Name string - Input string - 
Expected *NetAppPoolId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing NetApp Account Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/", - Expected: nil, - }, - { - Name: "NetApp Account ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1", - Expected: nil, - }, - { - Name: "Missing NetApp Pool Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/", - Expected: nil, - }, - { - Name: "NetApp Pool ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1", - Expected: &NetAppPoolId{ - Name: "pool1", - AccountName: "account1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/CAPACITYPOOLS/pool1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := NetAppPoolID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.AccountName != v.Expected.AccountName { - t.Fatalf("Expected %q but got %q for Account Name", v.Expected.AccountName, actual.AccountName) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/netapp/parse/netapp_snapshot.go b/azurerm/internal/services/netapp/parse/netapp_snapshot.go deleted file mode 100644 index 5e0b78a4aaad..000000000000 --- a/azurerm/internal/services/netapp/parse/netapp_snapshot.go +++ /dev/null @@ -1,48 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type NetAppSnapshotId struct { - ResourceGroup string - AccountName string - PoolName string - VolumeName string - Name string -} - -func NetAppSnapshotID(input string) (*NetAppSnapshotId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse NetApp Snapshot ID %q: %+v", input, err) - } - - service := NetAppSnapshotId{ - ResourceGroup: id.ResourceGroup, - } - - if service.AccountName, err = id.PopSegment("netAppAccounts"); err != nil { - return nil, err - } - - if service.PoolName, err = id.PopSegment("capacityPools"); err != nil { - return nil, err - } - - if service.VolumeName, err = id.PopSegment("volumes"); err != nil { - return nil, err - } - - if service.Name, err = id.PopSegment("snapshots"); err != nil { - return nil, err - } - - if err := 
id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &service, nil -} diff --git a/azurerm/internal/services/netapp/parse/netapp_snapshot_test.go b/azurerm/internal/services/netapp/parse/netapp_snapshot_test.go deleted file mode 100644 index c0e061a78e8b..000000000000 --- a/azurerm/internal/services/netapp/parse/netapp_snapshot_test.go +++ /dev/null @@ -1,118 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestNetAppSnapshotId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *NetAppSnapshotId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing NetApp Account Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/", - Expected: nil, - }, - { - Name: "NetApp Account ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1", - Expected: nil, - }, - { - Name: "Missing NetApp Pool Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/", - Expected: nil, - }, - { - Name: "NetApp Pool ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1", - Expected: nil, - }, - { - Name: "Missing NetApp Volume Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1/volumes/", - Expected: nil, - }, - { - Name: "NetApp Volume ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1/volumes/volume1", - Expected: nil, - }, - { - Name: "Missing NetApp Snapshot Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1/volumes/volume1/snapshots/", - Expected: nil, - }, - { - Name: "NetApp Snapshot ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1/volumes/volume1/snapshots/snapshot1", - Expected: &NetAppSnapshotId{ - Name: "snapshot1", - VolumeName: "volume1", - PoolName: "pool1", - AccountName: "account1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1/volumes/volume1/SNAPSHOTS/snapshot1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := NetAppSnapshotID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", 
v.Expected.Name, actual.Name) - } - - if actual.AccountName != v.Expected.AccountName { - t.Fatalf("Expected %q but got %q for Account Name", v.Expected.AccountName, actual.AccountName) - } - - if actual.PoolName != v.Expected.PoolName { - t.Fatalf("Expected %q but got %q for Pool Name", v.Expected.PoolName, actual.PoolName) - } - - if actual.VolumeName != v.Expected.VolumeName { - t.Fatalf("Expected %q but got %q for Volume Name", v.Expected.VolumeName, actual.VolumeName) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/netapp/parse/netapp_volume.go b/azurerm/internal/services/netapp/parse/netapp_volume.go deleted file mode 100644 index cd2d55cf06ae..000000000000 --- a/azurerm/internal/services/netapp/parse/netapp_volume.go +++ /dev/null @@ -1,43 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type NetAppVolumeId struct { - ResourceGroup string - AccountName string - PoolName string - Name string -} - -func NetAppVolumeID(input string) (*NetAppVolumeId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse NetApp Volume ID %q: %+v", input, err) - } - - service := NetAppVolumeId{ - ResourceGroup: id.ResourceGroup, - } - - if service.AccountName, err = id.PopSegment("netAppAccounts"); err != nil { - return nil, err - } - - if service.PoolName, err = id.PopSegment("capacityPools"); err != nil { - return nil, err - } - - if service.Name, err = id.PopSegment("volumes"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &service, nil -} diff --git a/azurerm/internal/services/netapp/parse/netapp_volume_test.go b/azurerm/internal/services/netapp/parse/netapp_volume_test.go deleted file mode 100644 index c893056cb9f9..000000000000 --- a/azurerm/internal/services/netapp/parse/netapp_volume_test.go +++ /dev/null @@ -1,103 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestNetAppVolumeId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *NetAppVolumeId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing NetApp Account Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/", - Expected: nil, - }, - { - Name: "NetApp Account ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1", - Expected: nil, - }, - { - Name: "Missing NetApp Pool Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/", - Expected: nil, - }, - { - Name: "NetApp Pool ID", - Input: 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1", - Expected: nil, - }, - { - Name: "Missing NetApp Volume Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1/volumes/", - Expected: nil, - }, - { - Name: "NetApp Volume ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1/volumes/volume1", - Expected: &NetAppVolumeId{ - Name: "volume1", - PoolName: "pool1", - AccountName: "account1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1/VOLUMES/volume1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := NetAppVolumeID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.AccountName != v.Expected.AccountName { - t.Fatalf("Expected %q but got %q for Account Name", v.Expected.AccountName, actual.AccountName) - } - - if actual.PoolName != v.Expected.PoolName { - t.Fatalf("Expected %q but got %q for Pool Name", v.Expected.PoolName, actual.PoolName) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/netapp/parse/snapshot.go b/azurerm/internal/services/netapp/parse/snapshot.go new file mode 100644 index 000000000000..05e04250f878 --- /dev/null +++ b/azurerm/internal/services/netapp/parse/snapshot.go @@ -0,0 +1,87 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type SnapshotId struct { + SubscriptionId string + ResourceGroup string + NetAppAccountName string + CapacityPoolName string + VolumeName string + Name string +} + +func NewSnapshotID(subscriptionId, resourceGroup, netAppAccountName, capacityPoolName, volumeName, name string) SnapshotId { + return SnapshotId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + NetAppAccountName: netAppAccountName, + CapacityPoolName: capacityPoolName, + VolumeName: volumeName, + Name: name, + } +} + +func (id SnapshotId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Volume Name %q", id.VolumeName), + fmt.Sprintf("Capacity Pool Name %q", id.CapacityPoolName), + fmt.Sprintf("Net App Account Name %q", id.NetAppAccountName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Snapshot", segmentsStr) +} + +func (id SnapshotId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.NetApp/netAppAccounts/%s/capacityPools/%s/volumes/%s/snapshots/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.NetAppAccountName, id.CapacityPoolName, id.VolumeName, id.Name) +} + 
+// SnapshotID parses a Snapshot ID into an SnapshotId struct +func SnapshotID(input string) (*SnapshotId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := SnapshotId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.NetAppAccountName, err = id.PopSegment("netAppAccounts"); err != nil { + return nil, err + } + if resourceId.CapacityPoolName, err = id.PopSegment("capacityPools"); err != nil { + return nil, err + } + if resourceId.VolumeName, err = id.PopSegment("volumes"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("snapshots"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/netapp/parse/snapshot_test.go b/azurerm/internal/services/netapp/parse/snapshot_test.go new file mode 100644 index 000000000000..c34d71c344f6 --- /dev/null +++ b/azurerm/internal/services/netapp/parse/snapshot_test.go @@ -0,0 +1,160 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = SnapshotId{} + +func TestSnapshotIDFormatter(t *testing.T) { + actual := NewSnapshotID("12345678-1234-9876-4563-123456789012", "resGroup1", "account1", "pool1", "volume1", "snapshot1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1/volumes/volume1/snapshots/snapshot1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestSnapshotID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *SnapshotId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing NetAppAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/", + Error: true, + }, + + { + // missing value for NetAppAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/", + Error: true, + }, + + { + // missing CapacityPoolName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/", + Error: true, + }, + + { + // missing value for CapacityPoolName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/", + Error: true, + }, + + { + // missing VolumeName + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1/", + Error: true, + }, + + { + // missing value for VolumeName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1/volumes/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1/volumes/volume1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1/volumes/volume1/snapshots/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1/volumes/volume1/snapshots/snapshot1", + Expected: &SnapshotId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + NetAppAccountName: "account1", + CapacityPoolName: "pool1", + VolumeName: "volume1", + Name: "snapshot1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETAPP/NETAPPACCOUNTS/ACCOUNT1/CAPACITYPOOLS/POOL1/VOLUMES/VOLUME1/SNAPSHOTS/SNAPSHOT1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := SnapshotID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.NetAppAccountName != v.Expected.NetAppAccountName { + t.Fatalf("Expected %q but got %q for NetAppAccountName", v.Expected.NetAppAccountName, actual.NetAppAccountName) + } + if actual.CapacityPoolName != v.Expected.CapacityPoolName { + t.Fatalf("Expected %q but got %q for CapacityPoolName", v.Expected.CapacityPoolName, actual.CapacityPoolName) + } + if actual.VolumeName != v.Expected.VolumeName { + t.Fatalf("Expected %q but got %q for VolumeName", v.Expected.VolumeName, actual.VolumeName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/netapp/parse/volume.go b/azurerm/internal/services/netapp/parse/volume.go new file mode 100644 index 000000000000..4d7b91a4ac86 --- /dev/null +++ b/azurerm/internal/services/netapp/parse/volume.go @@ -0,0 +1,81 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type VolumeId struct { + SubscriptionId string + ResourceGroup string + NetAppAccountName string + CapacityPoolName string + Name string +} + +func NewVolumeID(subscriptionId, resourceGroup, netAppAccountName, capacityPoolName, name string) VolumeId { + return 
VolumeId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + NetAppAccountName: netAppAccountName, + CapacityPoolName: capacityPoolName, + Name: name, + } +} + +func (id VolumeId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Capacity Pool Name %q", id.CapacityPoolName), + fmt.Sprintf("Net App Account Name %q", id.NetAppAccountName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Volume", segmentsStr) +} + +func (id VolumeId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.NetApp/netAppAccounts/%s/capacityPools/%s/volumes/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.NetAppAccountName, id.CapacityPoolName, id.Name) +} + +// VolumeID parses a Volume ID into an VolumeId struct +func VolumeID(input string) (*VolumeId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := VolumeId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.NetAppAccountName, err = id.PopSegment("netAppAccounts"); err != nil { + return nil, err + } + if resourceId.CapacityPoolName, err = id.PopSegment("capacityPools"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("volumes"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/netapp/parse/volume_test.go b/azurerm/internal/services/netapp/parse/volume_test.go new file mode 100644 index 000000000000..2b658f1a2255 --- /dev/null +++ b/azurerm/internal/services/netapp/parse/volume_test.go @@ -0,0 +1,144 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = VolumeId{} + +func TestVolumeIDFormatter(t *testing.T) { + actual := NewVolumeID("12345678-1234-9876-4563-123456789012", "resGroup1", "account1", "pool1", "volume1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1/volumes/volume1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestVolumeID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *VolumeId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing NetAppAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/", + Error: true, + }, + + { + // missing value 
for NetAppAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/", + Error: true, + }, + + { + // missing CapacityPoolName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/", + Error: true, + }, + + { + // missing value for CapacityPoolName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1/volumes/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1/volumes/volume1", + Expected: &VolumeId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + NetAppAccountName: "account1", + CapacityPoolName: "pool1", + Name: "volume1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETAPP/NETAPPACCOUNTS/ACCOUNT1/CAPACITYPOOLS/POOL1/VOLUMES/VOLUME1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := VolumeID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.NetAppAccountName != v.Expected.NetAppAccountName { + t.Fatalf("Expected %q but got %q for NetAppAccountName", v.Expected.NetAppAccountName, actual.NetAppAccountName) + } + if actual.CapacityPoolName != v.Expected.CapacityPoolName { + t.Fatalf("Expected %q but got %q for CapacityPoolName", v.Expected.CapacityPoolName, actual.CapacityPoolName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/netapp/registration.go b/azurerm/internal/services/netapp/registration.go index 981ce6c84912..f36d4110b7e6 100644 --- a/azurerm/internal/services/netapp/registration.go +++ b/azurerm/internal/services/netapp/registration.go @@ -21,17 +21,19 @@ func (r Registration) WebsiteCategories() []string { // SupportedDataSources returns the supported Data Sources supported by this Service func (r Registration) SupportedDataSources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_netapp_account": dataSourceArmNetAppAccount(), - "azurerm_netapp_pool": dataSourceArmNetAppPool(), - "azurerm_netapp_volume": dataSourceArmNetAppVolume(), - "azurerm_netapp_snapshot": dataSourceArmNetAppSnapshot()} + "azurerm_netapp_account": 
dataSourceNetAppAccount(), + "azurerm_netapp_pool": dataSourceNetAppPool(), + "azurerm_netapp_volume": dataSourceNetAppVolume(), + "azurerm_netapp_snapshot": dataSourceNetAppSnapshot(), + } } // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_netapp_account": resourceArmNetAppAccount(), - "azurerm_netapp_pool": resourceArmNetAppPool(), - "azurerm_netapp_volume": resourceArmNetAppVolume(), - "azurerm_netapp_snapshot": resourceArmNetAppSnapshot()} + "azurerm_netapp_account": resourceNetAppAccount(), + "azurerm_netapp_pool": resourceNetAppPool(), + "azurerm_netapp_volume": resourceNetAppVolume(), + "azurerm_netapp_snapshot": resourceNetAppSnapshot(), + } } diff --git a/azurerm/internal/services/netapp/resourceids.go b/azurerm/internal/services/netapp/resourceids.go new file mode 100644 index 000000000000..03ee7d2760df --- /dev/null +++ b/azurerm/internal/services/netapp/resourceids.go @@ -0,0 +1,6 @@ +package netapp + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Account -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=CapacityPool -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Snapshot -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1/volumes/volume1/snapshots/snapshot1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Volume -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1/volumes/volume1 diff --git a/azurerm/internal/services/netapp/tests/netapp_account_data_source_test.go b/azurerm/internal/services/netapp/tests/netapp_account_data_source_test.go deleted file mode 100644 index ff9ebdd91a9d..000000000000 --- a/azurerm/internal/services/netapp/tests/netapp_account_data_source_test.go +++ /dev/null @@ -1,39 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func testAccDataSourceAzureRMNetAppAccount_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_netapp_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceNetAppAccount_basicConfig(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "resource_group_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "name"), - ), - }, - }, - }) -} - -func testAccDataSourceNetAppAccount_basicConfig(data acceptance.TestData) string { - config := testAccAzureRMNetAppAccount_basicConfig(data) - return fmt.Sprintf(` -%s - -data "azurerm_netapp_account" "test" { - resource_group_name = azurerm_netapp_account.test.resource_group_name - name = azurerm_netapp_account.test.name -} -`, config) -} diff --git 
a/azurerm/internal/services/netapp/tests/netapp_account_resource_test.go b/azurerm/internal/services/netapp/tests/netapp_account_resource_test.go deleted file mode 100644 index c1aa2798a20a..000000000000 --- a/azurerm/internal/services/netapp/tests/netapp_account_resource_test.go +++ /dev/null @@ -1,242 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMNetAppAccount(t *testing.T) { - // NOTE: this is a combined test rather than separate split out tests since - // Azure allows only one active directory can be joined to a single subscription at a time for NetApp Account. - // The CI system runs all tests in parallel, so the tests need to be changed to run one at a time. - testCases := map[string]map[string]func(t *testing.T){ - "Resource": { - "basic": testAccAzureRMNetAppAccount_basic, - "requiresImport": testAccAzureRMNetAppAccount_requiresImport, - "complete": testAccAzureRMNetAppAccount_complete, - "update": testAccAzureRMNetAppAccount_update, - }, - "DataSource": { - "basic": testAccDataSourceAzureRMNetAppAccount_basic, - }, - } - - for group, m := range testCases { - for name, tc := range m { - t.Run(group, func(t *testing.T) { - t.Run(name, func(t *testing.T) { - tc(t) - }) - }) - } - } -} - -func testAccAzureRMNetAppAccount_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_netapp_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNetAppAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMNetAppAccount_basicConfig(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNetAppAccountExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testAccAzureRMNetAppAccount_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_netapp_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNetAppAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMNetAppAccount_basicConfig(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNetAppAccountExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMNetAppAccount_requiresImportConfig(data), - ExpectError: acceptance.RequiresImportError("azurerm_netapp_account"), - }, - }, - }) -} - -func testAccAzureRMNetAppAccount_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_netapp_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNetAppAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMNetAppAccount_completeConfig(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNetAppAccountExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "active_directory.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - 
resource.TestCheckResourceAttr(data.ResourceName, "tags.FoO", "BaR"), - ), - }, - data.ImportStep("active_directory"), - }, - }) -} - -func testAccAzureRMNetAppAccount_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_netapp_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNetAppAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMNetAppAccount_basicConfig(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNetAppAccountExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "active_directory.#", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - { - Config: testAccAzureRMNetAppAccount_completeConfig(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNetAppAccountExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "active_directory.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.FoO", "BaR"), - ), - }, - data.ImportStep("active_directory"), - }, - }) -} - -func testCheckAzureRMNetAppAccountExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).NetApp.AccountClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("NetApp Account not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - if resp, err := client.Get(ctx, resourceGroup, name); err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: NetApp Account %q (Resource Group %q) does not exist", name, resourceGroup) - } - return fmt.Errorf("Bad: Get on netapp.AccountClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMNetAppAccountDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).NetApp.AccountClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_netapp_account" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - if resp, err := client.Get(ctx, resourceGroup, name); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Get on netapp.AccountClient: %+v", err) - } - } - - return nil - } - - return nil -} - -func testAccAzureRMNetAppAccount_basicConfig(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-netapp-%d" - location = "%s" -} - -resource "azurerm_netapp_account" "test" { - name = "acctest-NetAppAccount-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMNetAppAccount_requiresImportConfig(data acceptance.TestData) string { - return fmt.Sprintf(` -%s -resource "azurerm_netapp_account" "import" { - name = azurerm_netapp_account.test.name - location = azurerm_netapp_account.test.location - resource_group_name = 
azurerm_netapp_account.test.resource_group_name -} -`, testAccAzureRMNetAppAccount_basicConfig(data)) -} - -func testAccAzureRMNetAppAccount_completeConfig(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-netapp-%d" - location = "%s" -} - -resource "azurerm_netapp_account" "test" { - name = "acctest-NetAppAccount-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - active_directory { - username = "aduser" - password = "aduserpwd" - smb_server_name = "SMBSERVER" - dns_servers = ["1.2.3.4"] - domain = "westcentralus.com" - organizational_unit = "OU=FirstLevel" - } - - tags = { - "FoO" = "BaR" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/netapp/tests/netapp_pool_data_source_test.go b/azurerm/internal/services/netapp/tests/netapp_pool_data_source_test.go deleted file mode 100644 index 09794a84d812..000000000000 --- a/azurerm/internal/services/netapp/tests/netapp_pool_data_source_test.go +++ /dev/null @@ -1,43 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMNetAppPool_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_netapp_pool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceNetAppPool_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "resource_group_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "account_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "service_level"), - resource.TestCheckResourceAttrSet(data.ResourceName, "size_in_tb"), - ), - }, - }, - }) -} - -func testAccDataSourceNetAppPool_basic(data acceptance.TestData) string { - config := testAccAzureRMNetAppPool_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_netapp_pool" "test" { - resource_group_name = azurerm_netapp_pool.test.resource_group_name - account_name = azurerm_netapp_pool.test.account_name - name = azurerm_netapp_pool.test.name -} -`, config) -} diff --git a/azurerm/internal/services/netapp/tests/netapp_pool_resource_test.go b/azurerm/internal/services/netapp/tests/netapp_pool_resource_test.go deleted file mode 100644 index 43172f840756..000000000000 --- a/azurerm/internal/services/netapp/tests/netapp_pool_resource_test.go +++ /dev/null @@ -1,233 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMNetAppPool_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_netapp_pool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: 
testCheckAzureRMNetAppPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMNetAppPool_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNetAppPoolExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMNetAppPool_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_netapp_pool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNetAppPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMNetAppPool_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNetAppPoolExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMNetAppPool_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_netapp_pool"), - }, - }, - }) -} - -func TestAccAzureRMNetAppPool_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_netapp_pool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNetAppPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMNetAppPool_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNetAppPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "service_level", "Standard"), - resource.TestCheckResourceAttr(data.ResourceName, "size_in_tb", "15"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.FoO", "BaR"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMNetAppPool_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_netapp_pool", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNetAppPoolDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMNetAppPool_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNetAppPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "service_level", "Standard"), - resource.TestCheckResourceAttr(data.ResourceName, "size_in_tb", "4"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMNetAppPool_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNetAppPoolExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "service_level", "Standard"), - resource.TestCheckResourceAttr(data.ResourceName, "size_in_tb", "15"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.FoO", "BaR"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMNetAppPoolExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).NetApp.PoolClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("NetApp Pool not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - accountName := rs.Primary.Attributes["account_name"] - resourceGroup := 
rs.Primary.Attributes["resource_group_name"] - - if resp, err := client.Get(ctx, resourceGroup, accountName, name); err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: NetApp Pool %q (Resource Group %q) does not exist", name, resourceGroup) - } - return fmt.Errorf("Bad: Get on netapp.PoolClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMNetAppPoolDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).NetApp.PoolClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_netapp_pool" { - continue - } - - name := rs.Primary.Attributes["name"] - accountName := rs.Primary.Attributes["account_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - if resp, err := client.Get(ctx, resourceGroup, accountName, name); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Get on netapp.PoolClient: %+v", err) - } - } - - return nil - } - - return nil -} - -func testAccAzureRMNetAppPool_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-netapp-%d" - location = "%s" -} - -resource "azurerm_netapp_account" "test" { - name = "acctest-NetAppAccount-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_netapp_pool" "test" { - name = "acctest-NetAppPool-%d" - account_name = azurerm_netapp_account.test.name - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - service_level = "Standard" - size_in_tb = 4 -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMNetAppPool_requiresImport(data acceptance.TestData) string { - return fmt.Sprintf(` -%s -resource "azurerm_netapp_pool" "import" { - name = azurerm_netapp_pool.test.name - location = azurerm_netapp_pool.test.location - resource_group_name = azurerm_netapp_pool.test.resource_group_name - account_name = azurerm_netapp_pool.test.account_name - service_level = azurerm_netapp_pool.test.service_level - size_in_tb = azurerm_netapp_pool.test.size_in_tb -} -`, testAccAzureRMNetAppPool_basic(data)) -} - -func testAccAzureRMNetAppPool_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-netapp-%d" - location = "%s" -} - -resource "azurerm_netapp_account" "test" { - name = "acctest-NetAppAccount-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_netapp_pool" "test" { - name = "acctest-NetAppPool-%d" - account_name = azurerm_netapp_account.test.name - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - service_level = "Standard" - size_in_tb = 15 - - tags = { - "FoO" = "BaR" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/netapp/tests/netapp_snapshot_data_source_test.go b/azurerm/internal/services/netapp/tests/netapp_snapshot_data_source_test.go deleted file mode 100644 index f5ea73ff628b..000000000000 --- a/azurerm/internal/services/netapp/tests/netapp_snapshot_data_source_test.go 
+++ /dev/null @@ -1,41 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMNetAppSnapshot_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_netapp_snapshot", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceNetAppSnapshot_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "name"), - ), - }, - }, - }) -} - -func testAccDataSourceNetAppSnapshot_basic(data acceptance.TestData) string { - config := testAccAzureRMNetAppSnapshot_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_netapp_snapshot" "test" { - resource_group_name = azurerm_netapp_snapshot.test.resource_group_name - account_name = azurerm_netapp_snapshot.test.account_name - pool_name = azurerm_netapp_snapshot.test.pool_name - volume_name = azurerm_netapp_snapshot.test.volume_name - name = azurerm_netapp_snapshot.test.name -} -`, config) -} diff --git a/azurerm/internal/services/netapp/tests/netapp_snapshot_resource_test.go b/azurerm/internal/services/netapp/tests/netapp_snapshot_resource_test.go deleted file mode 100644 index ea620e3a5e3f..000000000000 --- a/azurerm/internal/services/netapp/tests/netapp_snapshot_resource_test.go +++ /dev/null @@ -1,356 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMNetAppSnapshot_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_netapp_snapshot", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNetAppSnapshotDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMNetAppSnapshot_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNetAppSnapshotExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMNetAppSnapshot_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_netapp_snapshot", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNetAppSnapshotDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMNetAppSnapshot_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNetAppSnapshotExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMNetAppSnapshot_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_netapp_snapshot"), - }, - }, - }) -} - -func TestAccAzureRMNetAppSnapshot_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_netapp_snapshot", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: 
testCheckAzureRMNetAppSnapshotDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMNetAppSnapshot_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNetAppSnapshotExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.FoO", "BaR"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMNetAppSnapshot_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_netapp_snapshot", "test") - oldVolumeName := fmt.Sprintf("acctest-NetAppVolume-%d", data.RandomInteger) - newVolumeName := fmt.Sprintf("acctest-updated-NetAppVolume-%d", data.RandomInteger) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNetAppSnapshotDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMNetAppSnapshot_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNetAppSnapshotExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "volume_name", oldVolumeName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.FoO", "BaR"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMNetAppSnapshot_updateTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNetAppSnapshotExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "volume_name", oldVolumeName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.FoO", "BaZ"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMNetAppSnapshot_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNetAppSnapshotExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "volume_name", newVolumeName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMNetAppSnapshotExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).NetApp.SnapshotClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("NetApp Snapshot not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - accountName := rs.Primary.Attributes["account_name"] - poolName := rs.Primary.Attributes["pool_name"] - volumeName := rs.Primary.Attributes["volume_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - if resp, err := client.Get(ctx, resourceGroup, accountName, poolName, volumeName, name); err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: NetApp Snapshot %q (Resource Group %q) does not exist", name, resourceGroup) - } - return fmt.Errorf("Bad: Get on netapp.SnapshotClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMNetAppSnapshotDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).NetApp.SnapshotClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_netapp_snapshot" { - continue - } - - name := rs.Primary.Attributes["name"] - accountName := 
rs.Primary.Attributes["account_name"] - poolName := rs.Primary.Attributes["pool_name"] - volumeName := rs.Primary.Attributes["volume_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - if resp, err := client.Get(ctx, resourceGroup, accountName, poolName, volumeName, name); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Get on netapp.SnapshotClient: %+v", err) - } - } - - return nil - } - - return nil -} - -func testAccAzureRMNetAppSnapshot_basic(data acceptance.TestData) string { - template := testAccAzureRMNetAppSnapshot_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_netapp_snapshot" "test" { - name = "acctest-NetAppSnapshot-%d" - account_name = azurerm_netapp_account.test.name - pool_name = azurerm_netapp_pool.test.name - volume_name = azurerm_netapp_volume.test.name - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} -`, template, data.RandomInteger) -} - -func testAccAzureRMNetAppSnapshot_requiresImport(data acceptance.TestData) string { - return fmt.Sprintf(` -%s - -resource "azurerm_netapp_snapshot" "import" { - name = azurerm_netapp_snapshot.test.name - location = azurerm_netapp_snapshot.test.location - resource_group_name = azurerm_netapp_snapshot.test.resource_group_name - account_name = azurerm_netapp_snapshot.test.account_name - pool_name = azurerm_netapp_snapshot.test.pool_name - volume_name = azurerm_netapp_snapshot.test.volume_name -} -`, testAccAzureRMNetAppSnapshot_basic(data)) -} - -func testAccAzureRMNetAppSnapshot_complete(data acceptance.TestData) string { - template := testAccAzureRMNetAppSnapshot_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_netapp_snapshot" "test" { - name = "acctest-NetAppSnapshot-%d" - account_name = azurerm_netapp_account.test.name - pool_name = azurerm_netapp_pool.test.name - volume_name = azurerm_netapp_volume.test.name - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - tags = { - "FoO" = "BaR" - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMNetAppSnapshot_updateTags(data acceptance.TestData) string { - template := testAccAzureRMNetAppSnapshot_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_netapp_snapshot" "test" { - name = "acctest-NetAppSnapshot-%d" - account_name = azurerm_netapp_account.test.name - pool_name = azurerm_netapp_pool.test.name - volume_name = azurerm_netapp_volume.test.name - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - tags = { - "FoO" = "BaZ" - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMNetAppSnapshot_update(data acceptance.TestData) string { - template := testAccAzureRMNetAppSnapshot_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_virtual_network" "update" { - name = "acctest-updated-VirtualNetwork-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - address_space = ["10.0.0.0/16"] -} - -resource "azurerm_subnet" "update" { - name = "acctest-updated-Subnet-%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.update.name - address_prefix = "10.0.2.0/24" - - delegation { - name = "netapp" - - service_delegation { - name = "Microsoft.Netapp/volumes" - actions = ["Microsoft.Network/networkinterfaces/*", "Microsoft.Network/virtualNetworks/subnets/join/action"] - } - } -} - 
-resource "azurerm_netapp_volume" "update" { - name = "acctest-updated-NetAppVolume-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - account_name = azurerm_netapp_account.test.name - pool_name = azurerm_netapp_pool.test.name - volume_path = "my-updated-unique-file-path-%d" - service_level = "Premium" - subnet_id = azurerm_subnet.update.id - storage_quota_in_gb = 100 -} - -resource "azurerm_netapp_snapshot" "test" { - name = "acctest-NetAppSnapshot-%d" - account_name = azurerm_netapp_account.test.name - pool_name = azurerm_netapp_pool.test.name - volume_name = azurerm_netapp_volume.update.name - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} -`, template, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMNetAppSnapshot_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-netapp-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctest-VirtualNetwork-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - address_space = ["10.0.0.0/16"] -} - -resource "azurerm_subnet" "test" { - name = "acctest-Subnet-%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.2.0/24" - - delegation { - name = "netapp" - - service_delegation { - name = "Microsoft.Netapp/volumes" - actions = ["Microsoft.Network/networkinterfaces/*", "Microsoft.Network/virtualNetworks/subnets/join/action"] - } - } -} - -resource "azurerm_netapp_account" "test" { - name = "acctest-NetAppAccount-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_netapp_pool" "test" { - name = "acctest-NetAppPool-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - account_name = azurerm_netapp_account.test.name - service_level = "Premium" - size_in_tb = 4 -} - -resource "azurerm_netapp_volume" "test" { - name = "acctest-NetAppVolume-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - account_name = azurerm_netapp_account.test.name - pool_name = azurerm_netapp_pool.test.name - volume_path = "my-unique-file-path-%d" - service_level = "Premium" - subnet_id = azurerm_subnet.test.id - storage_quota_in_gb = 100 -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/netapp/tests/netapp_volume_data_source_test.go b/azurerm/internal/services/netapp/tests/netapp_volume_data_source_test.go deleted file mode 100644 index 760d5481bf9b..000000000000 --- a/azurerm/internal/services/netapp/tests/netapp_volume_data_source_test.go +++ /dev/null @@ -1,45 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMNetAppVolume_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_netapp_volume", "test") - - 
resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceNetAppVolume_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "volume_path"), - resource.TestCheckResourceAttrSet(data.ResourceName, "service_level"), - resource.TestCheckResourceAttrSet(data.ResourceName, "subnet_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "storage_quota_in_gb"), - resource.TestCheckResourceAttrSet(data.ResourceName, "protocols.0"), - resource.TestCheckResourceAttr(data.ResourceName, "mount_ip_addresses.#", "1"), - ), - }, - }, - }) -} - -func testAccDataSourceNetAppVolume_basic(data acceptance.TestData) string { - config := testAccAzureRMNetAppVolume_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_netapp_volume" "test" { - resource_group_name = azurerm_netapp_volume.test.resource_group_name - account_name = azurerm_netapp_volume.test.account_name - pool_name = azurerm_netapp_volume.test.pool_name - name = azurerm_netapp_volume.test.name -} -`, config) -} diff --git a/azurerm/internal/services/netapp/tests/netapp_volume_resource_test.go b/azurerm/internal/services/netapp/tests/netapp_volume_resource_test.go deleted file mode 100644 index 29aabed189ea..000000000000 --- a/azurerm/internal/services/netapp/tests/netapp_volume_resource_test.go +++ /dev/null @@ -1,502 +0,0 @@ -package tests - -import ( - "fmt" - "os" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMNetAppVolume_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_netapp_volume", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNetAppVolumeDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMNetAppVolume_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNetAppVolumeExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "protocols.2676449260", "NFSv3"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMNetAppVolume_nfsv41(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_netapp_volume", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNetAppVolumeDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMNetAppVolume_nfsv41(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNetAppVolumeExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "protocols.3098200649", "NFSv4.1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMNetAppVolume_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_netapp_volume", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNetAppVolumeDestroy, - Steps: []resource.TestStep{ - { - 
Config: testAccAzureRMNetAppVolume_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNetAppVolumeExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMNetAppVolume_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_netapp_volume"), - }, - }, - }) -} - -func TestAccAzureRMNetAppVolume_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_netapp_volume", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNetAppVolumeDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMNetAppVolume_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNetAppVolumeExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "service_level", "Standard"), - resource.TestCheckResourceAttr(data.ResourceName, "storage_quota_in_gb", "101"), - resource.TestCheckResourceAttr(data.ResourceName, "export_policy_rule.#", "3"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.FoO", "BaR"), - resource.TestCheckResourceAttr(data.ResourceName, "mount_ip_addresses.#", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMNetAppVolume_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_netapp_volume", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNetAppVolumeDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMNetAppVolume_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNetAppVolumeExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "storage_quota_in_gb", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "export_policy_rule.#", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMNetAppVolume_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNetAppVolumeExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "storage_quota_in_gb", "101"), - resource.TestCheckResourceAttr(data.ResourceName, "export_policy_rule.#", "3"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.FoO", "BaR"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMNetAppVolume_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNetAppVolumeExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "storage_quota_in_gb", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "export_policy_rule.#", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMNetAppVolume_updateSubnet(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_netapp_volume", "test") - resourceGroupName := fmt.Sprintf("acctestRG-netapp-%d", data.RandomInteger) - oldVNetName := fmt.Sprintf("acctest-VirtualNetwork-%d", data.RandomInteger) - oldSubnetName := fmt.Sprintf("acctest-Subnet-%d", data.RandomInteger) - newVNetName := fmt.Sprintf("acctest-updated-VirtualNetwork-%d", data.RandomInteger) - newSubnetName := fmt.Sprintf("acctest-updated-Subnet-%d", data.RandomInteger) - uriTemplate 
:= "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/virtualNetworks/%s/subnets/%s" - - subscriptionID := os.Getenv("ARM_SUBSCRIPTION_ID") - oldSubnetId := fmt.Sprintf(uriTemplate, subscriptionID, resourceGroupName, oldVNetName, oldSubnetName) - newSubnetId := fmt.Sprintf(uriTemplate, subscriptionID, resourceGroupName, newVNetName, newSubnetName) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNetAppVolumeDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMNetAppVolume_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNetAppVolumeExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "subnet_id", oldSubnetId), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMNetAppVolume_updateSubnet(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNetAppVolumeExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "subnet_id", newSubnetId), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMNetAppVolume_updateExportPolicyRule(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_netapp_volume", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNetAppVolumeDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMNetAppVolume_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNetAppVolumeExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "export_policy_rule.#", "3"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMNetAppVolume_updateExportPolicyRule(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNetAppVolumeExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "export_policy_rule.#", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMNetAppVolumeExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).NetApp.VolumeClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("NetApp Volume not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - accountName := rs.Primary.Attributes["account_name"] - poolName := rs.Primary.Attributes["pool_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - if resp, err := client.Get(ctx, resourceGroup, accountName, poolName, name); err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: NetApp Volume %q (Resource Group %q) does not exist", name, resourceGroup) - } - return fmt.Errorf("Bad: Get on netapp.VolumeClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMNetAppVolumeDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).NetApp.VolumeClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_netapp_volume" { - continue - } - - name := rs.Primary.Attributes["name"] - accountName := rs.Primary.Attributes["account_name"] - poolName := rs.Primary.Attributes["pool_name"] - resourceGroup := 
rs.Primary.Attributes["resource_group_name"] - - if resp, err := client.Get(ctx, resourceGroup, accountName, poolName, name); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Get on netapp.VolumeClient: %+v", err) - } - } - - return nil - } - - return nil -} - -func testAccAzureRMNetAppVolume_basic(data acceptance.TestData) string { - template := testAccAzureRMNetAppVolume_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_netapp_volume" "test" { - name = "acctest-NetAppVolume-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - account_name = azurerm_netapp_account.test.name - pool_name = azurerm_netapp_pool.test.name - volume_path = "my-unique-file-path-%d" - service_level = "Standard" - subnet_id = azurerm_subnet.test.id - storage_quota_in_gb = 100 -} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMNetAppVolume_nfsv41(data acceptance.TestData) string { - template := testAccAzureRMNetAppVolume_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_netapp_volume" "test" { - name = "acctest-NetAppVolume-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - account_name = azurerm_netapp_account.test.name - pool_name = azurerm_netapp_pool.test.name - volume_path = "my-unique-file-path-%d" - service_level = "Standard" - subnet_id = azurerm_subnet.test.id - protocols = ["NFSv4.1"] - storage_quota_in_gb = 100 - - export_policy_rule { - rule_index = 1 - allowed_clients = ["1.2.3.0/24"] - protocols_enabled = ["NFSv4.1"] - unix_read_only = false - unix_read_write = true - } -} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMNetAppVolume_requiresImport(data acceptance.TestData) string { - return fmt.Sprintf(` -%s - -resource "azurerm_netapp_volume" "import" { - name = azurerm_netapp_volume.test.name - location = azurerm_netapp_volume.test.location - resource_group_name = azurerm_netapp_volume.test.resource_group_name - account_name = azurerm_netapp_volume.test.account_name - pool_name = azurerm_netapp_volume.test.pool_name - volume_path = azurerm_netapp_volume.test.volume_path - service_level = azurerm_netapp_volume.test.service_level - subnet_id = azurerm_netapp_volume.test.subnet_id - storage_quota_in_gb = azurerm_netapp_volume.test.storage_quota_in_gb -} -`, testAccAzureRMNetAppVolume_basic(data)) -} - -func testAccAzureRMNetAppVolume_complete(data acceptance.TestData) string { - template := testAccAzureRMNetAppVolume_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_netapp_volume" "test" { - name = "acctest-NetAppVolume-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - account_name = azurerm_netapp_account.test.name - pool_name = azurerm_netapp_pool.test.name - service_level = "Standard" - volume_path = "my-unique-file-path-%d" - subnet_id = azurerm_subnet.test.id - protocols = ["NFSv3"] - storage_quota_in_gb = 101 - - export_policy_rule { - rule_index = 1 - allowed_clients = ["1.2.3.0/24"] - protocols_enabled = ["NFSv3"] - unix_read_only = false - unix_read_write = true - } - - export_policy_rule { - rule_index = 2 - allowed_clients = ["1.2.5.0"] - protocols_enabled = ["NFSv3"] - unix_read_only = true - unix_read_write = false - } - - export_policy_rule { - rule_index = 3 - allowed_clients = ["1.2.6.0/24"] - cifs_enabled = false - nfsv3_enabled = true - nfsv4_enabled = false - 
unix_read_only = true - unix_read_write = false - } - - tags = { - "FoO" = "BaR" - } -} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMNetAppVolume_updateSubnet(data acceptance.TestData) string { - template := testAccAzureRMNetAppVolume_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_virtual_network" "updated" { - name = "acctest-updated-VirtualNetwork-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - address_space = ["10.1.0.0/16"] -} - -resource "azurerm_subnet" "updated" { - name = "acctest-updated-Subnet-%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.updated.name - address_prefix = "10.1.3.0/24" - - delegation { - name = "testdelegation2" - - service_delegation { - name = "Microsoft.Netapp/volumes" - actions = ["Microsoft.Network/networkinterfaces/*", "Microsoft.Network/virtualNetworks/subnets/join/action"] - } - } -} - -resource "azurerm_netapp_volume" "test" { - name = "acctest-updated-NetAppVolume-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - account_name = azurerm_netapp_account.test.name - pool_name = azurerm_netapp_pool.test.name - volume_path = "my-updated-unique-file-path-%d" - service_level = "Standard" - subnet_id = azurerm_subnet.updated.id - protocols = ["NFSv3"] - storage_quota_in_gb = 100 -} -`, template, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMNetAppVolume_updateExportPolicyRule(data acceptance.TestData) string { - template := testAccAzureRMNetAppVolume_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_netapp_volume" "test" { - name = "acctest-NetAppVolume-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - account_name = azurerm_netapp_account.test.name - pool_name = azurerm_netapp_pool.test.name - service_level = "Standard" - volume_path = "my-unique-file-path-%d" - subnet_id = azurerm_subnet.test.id - protocols = ["NFSv3"] - storage_quota_in_gb = 101 - - export_policy_rule { - rule_index = 1 - allowed_clients = ["1.2.4.0/24", "1.3.4.0"] - protocols_enabled = ["NFSv3"] - unix_read_only = false - unix_read_write = true - } - - tags = { - "FoO" = "BaR" - } -} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMNetAppVolume_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-netapp-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctest-VirtualNetwork-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - address_space = ["10.6.0.0/16"] -} - -resource "azurerm_subnet" "test" { - name = "acctest-Subnet-%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.6.2.0/24" - - delegation { - name = "testdelegation" - - service_delegation { - name = "Microsoft.Netapp/volumes" - actions = ["Microsoft.Network/networkinterfaces/*", "Microsoft.Network/virtualNetworks/subnets/join/action"] - } - } -} - -resource "azurerm_netapp_account" "test" { - name = "acctest-NetAppAccount-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - 
-resource "azurerm_netapp_pool" "test" { - name = "acctest-NetAppPool-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - account_name = azurerm_netapp_account.test.name - service_level = "Standard" - size_in_tb = 4 -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/netapp/validate/account_id.go b/azurerm/internal/services/netapp/validate/account_id.go new file mode 100644 index 000000000000..7acfe248b4c0 --- /dev/null +++ b/azurerm/internal/services/netapp/validate/account_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/netapp/parse" +) + +func AccountID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.AccountID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/netapp/validate/account_id_test.go b/azurerm/internal/services/netapp/validate/account_id_test.go new file mode 100644 index 000000000000..04818b2b4332 --- /dev/null +++ b/azurerm/internal/services/netapp/validate/account_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestAccountID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing NetAppAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/", + Valid: false, + }, + + { + // missing value for NetAppAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETAPP/NETAPPACCOUNTS/ACCOUNT1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := AccountID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/netapp/validate/capacity_pool_id.go b/azurerm/internal/services/netapp/validate/capacity_pool_id.go new file mode 100644 index 000000000000..50715ef394af --- /dev/null +++ b/azurerm/internal/services/netapp/validate/capacity_pool_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 
'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/netapp/parse" +) + +func CapacityPoolID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.CapacityPoolID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/netapp/validate/capacity_pool_id_test.go b/azurerm/internal/services/netapp/validate/capacity_pool_id_test.go new file mode 100644 index 000000000000..4a79bf964c36 --- /dev/null +++ b/azurerm/internal/services/netapp/validate/capacity_pool_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestCapacityPoolID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing NetAppAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/", + Valid: false, + }, + + { + // missing value for NetAppAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETAPP/NETAPPACCOUNTS/ACCOUNT1/CAPACITYPOOLS/POOL1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := CapacityPoolID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/netapp/validate/snapshot_id.go b/azurerm/internal/services/netapp/validate/snapshot_id.go new file mode 100644 index 000000000000..aa62ff4ff398 --- /dev/null +++ b/azurerm/internal/services/netapp/validate/snapshot_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/netapp/parse" +) + +func SnapshotID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if 
!ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.SnapshotID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/netapp/validate/snapshot_id_test.go b/azurerm/internal/services/netapp/validate/snapshot_id_test.go new file mode 100644 index 000000000000..7a42e62fece0 --- /dev/null +++ b/azurerm/internal/services/netapp/validate/snapshot_id_test.go @@ -0,0 +1,112 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestSnapshotID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing NetAppAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/", + Valid: false, + }, + + { + // missing value for NetAppAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/", + Valid: false, + }, + + { + // missing CapacityPoolName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/", + Valid: false, + }, + + { + // missing value for CapacityPoolName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/", + Valid: false, + }, + + { + // missing VolumeName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1/", + Valid: false, + }, + + { + // missing value for VolumeName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1/volumes/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1/volumes/volume1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1/volumes/volume1/snapshots/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1/volumes/volume1/snapshots/snapshot1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETAPP/NETAPPACCOUNTS/ACCOUNT1/CAPACITYPOOLS/POOL1/VOLUMES/VOLUME1/SNAPSHOTS/SNAPSHOT1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := SnapshotID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + 
t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/netapp/validate/volume_id.go b/azurerm/internal/services/netapp/validate/volume_id.go new file mode 100644 index 000000000000..a0fa484ce056 --- /dev/null +++ b/azurerm/internal/services/netapp/validate/volume_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/netapp/parse" +) + +func VolumeID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.VolumeID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/netapp/validate/volume_id_test.go b/azurerm/internal/services/netapp/validate/volume_id_test.go new file mode 100644 index 000000000000..5e799b218ce0 --- /dev/null +++ b/azurerm/internal/services/netapp/validate/volume_id_test.go @@ -0,0 +1,100 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestVolumeID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing NetAppAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/", + Valid: false, + }, + + { + // missing value for NetAppAccountName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/", + Valid: false, + }, + + { + // missing CapacityPoolName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/", + Valid: false, + }, + + { + // missing value for CapacityPoolName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1/volumes/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NetApp/netAppAccounts/account1/capacityPools/pool1/volumes/volume1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETAPP/NETAPPACCOUNTS/ACCOUNT1/CAPACITYPOOLS/POOL1/VOLUMES/VOLUME1", + Valid: false, + }, + } + for _, tc := range cases { + 
t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := VolumeID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/network/application_gateway_resource.go b/azurerm/internal/services/network/application_gateway_resource.go index c9471651f13a..7d3321b5a754 100644 --- a/azurerm/internal/services/network/application_gateway_resource.go +++ b/azurerm/internal/services/network/application_gateway_resource.go @@ -1201,7 +1201,7 @@ func resourceArmApplicationGateway() *schema.Resource { "file_upload_limit_mb": { Type: schema.TypeInt, Optional: true, - ValidateFunc: validation.IntBetween(1, 500), + ValidateFunc: validation.IntBetween(1, 750), Default: 100, }, "request_body_check": { @@ -1442,6 +1442,13 @@ func resourceArmApplicationGatewayCreateUpdate(d *schema.ResourceData, meta inte gateway.ApplicationGatewayPropertiesFormat.WebApplicationFirewallConfiguration = expandApplicationGatewayWafConfig(d) } + appGWSkuTier := d.Get("sku.0.tier").(string) + wafFileUploadLimit := d.Get("waf_configuration.0.file_upload_limit_mb").(int) + + if appGWSkuTier != string(network.WAFV2) && wafFileUploadLimit > 500 { + return fmt.Errorf("Only SKU `%s` allows `file_upload_limit_mb` to exceed 500MB", network.WAFV2) + } + if v, ok := d.GetOk("firewall_policy_id"); ok { id := v.(string) gateway.ApplicationGatewayPropertiesFormat.FirewallPolicy = &network.SubResource{ diff --git a/azurerm/internal/services/network/client/client.go b/azurerm/internal/services/network/client/client.go index d7012c3406e6..6e53c978b3d3 100644 --- a/azurerm/internal/services/network/client/client.go +++ b/azurerm/internal/services/network/client/client.go @@ -1,58 +1,60 @@ package client import ( - networkLegacy "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-03-01/network" "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/common" ) type Client struct { - ApplicationGatewaysClient *network.ApplicationGatewaysClient - ApplicationSecurityGroupsClient *network.ApplicationSecurityGroupsClient - AzureFirewallsClient *network.AzureFirewallsClient - BastionHostsClient *network.BastionHostsClient - ConnectionMonitorsClient *network.ConnectionMonitorsClient - DDOSProtectionPlansClient *network.DdosProtectionPlansClient - ExpressRouteAuthsClient *network.ExpressRouteCircuitAuthorizationsClient - ExpressRouteCircuitsClient *network.ExpressRouteCircuitsClient - ExpressRouteGatewaysClient *network.ExpressRouteGatewaysClient - ExpressRoutePeeringsClient *network.ExpressRouteCircuitPeeringsClient - FirewallPolicyClient *network.FirewallPoliciesClient - HubVirtualNetworkConnectionClient *network.HubVirtualNetworkConnectionsClient - InterfacesClient *network.InterfacesClient - IPGroupsClient *network.IPGroupsClient - LoadBalancersClient *networkLegacy.LoadBalancersClient - LoadBalancerLoadBalancingRulesClient *networkLegacy.LoadBalancerLoadBalancingRulesClient - LocalNetworkGatewaysClient *network.LocalNetworkGatewaysClient - PointToSiteVpnGatewaysClient *network.P2sVpnGatewaysClient - ProfileClient *network.ProfilesClient - PacketCapturesClient *network.PacketCapturesClient - PrivateEndpointClient *network.PrivateEndpointsClient - PublicIPsClient *network.PublicIPAddressesClient - PublicIPPrefixesClient *network.PublicIPPrefixesClient - RoutesClient *network.RoutesClient - RouteFiltersClient 
*network.RouteFiltersClient - RouteTablesClient *network.RouteTablesClient - SecurityGroupClient *network.SecurityGroupsClient - SecurityRuleClient *network.SecurityRulesClient - ServiceTagsClient *network.ServiceTagsClient - SubnetsClient *network.SubnetsClient - NatGatewayClient *network.NatGatewaysClient - VnetGatewayConnectionsClient *network.VirtualNetworkGatewayConnectionsClient - VnetGatewayClient *network.VirtualNetworkGatewaysClient - VnetClient *network.VirtualNetworksClient - VnetPeeringsClient *network.VirtualNetworkPeeringsClient - VirtualWanClient *network.VirtualWansClient - VirtualHubClient *network.VirtualHubsClient - VpnGatewaysClient *network.VpnGatewaysClient - VpnServerConfigurationsClient *network.VpnServerConfigurationsClient - VpnSitesClient *network.VpnSitesClient - WatcherClient *network.WatchersClient - WebApplicationFirewallPoliciesClient *network.WebApplicationFirewallPoliciesClient - PrivateDnsZoneGroupClient *network.PrivateDNSZoneGroupsClient - PrivateLinkServiceClient *network.PrivateLinkServicesClient - ServiceAssociationLinkClient *network.ServiceAssociationLinksClient - ResourceNavigationLinkClient *network.ResourceNavigationLinksClient + ApplicationGatewaysClient *network.ApplicationGatewaysClient + ApplicationSecurityGroupsClient *network.ApplicationSecurityGroupsClient + BastionHostsClient *network.BastionHostsClient + ConnectionMonitorsClient *network.ConnectionMonitorsClient + DDOSProtectionPlansClient *network.DdosProtectionPlansClient + ExpressRouteAuthsClient *network.ExpressRouteCircuitAuthorizationsClient + ExpressRouteCircuitsClient *network.ExpressRouteCircuitsClient + ExpressRouteGatewaysClient *network.ExpressRouteGatewaysClient + ExpressRoutePeeringsClient *network.ExpressRouteCircuitPeeringsClient + HubRouteTableClient *network.HubRouteTablesClient + HubVirtualNetworkConnectionClient *network.HubVirtualNetworkConnectionsClient + InterfacesClient *network.InterfacesClient + IPGroupsClient *network.IPGroupsClient + LocalNetworkGatewaysClient *network.LocalNetworkGatewaysClient + PointToSiteVpnGatewaysClient *network.P2sVpnGatewaysClient + ProfileClient *network.ProfilesClient + PacketCapturesClient *network.PacketCapturesClient + PrivateEndpointClient *network.PrivateEndpointsClient + PublicIPsClient *network.PublicIPAddressesClient + PublicIPPrefixesClient *network.PublicIPPrefixesClient + RoutesClient *network.RoutesClient + RouteFiltersClient *network.RouteFiltersClient + RouteTablesClient *network.RouteTablesClient + SecurityGroupClient *network.SecurityGroupsClient + SecurityPartnerProviderClient *network.SecurityPartnerProvidersClient + SecurityRuleClient *network.SecurityRulesClient + ServiceEndpointPoliciesClient *network.ServiceEndpointPoliciesClient + ServiceEndpointPolicyDefinitionsClient *network.ServiceEndpointPolicyDefinitionsClient + ServiceTagsClient *network.ServiceTagsClient + SubnetsClient *network.SubnetsClient + NatGatewayClient *network.NatGatewaysClient + VirtualHubBgpConnectionClient *network.VirtualHubBgpConnectionClient + VirtualHubIPClient *network.VirtualHubIPConfigurationClient + VnetGatewayConnectionsClient *network.VirtualNetworkGatewayConnectionsClient + VnetGatewayClient *network.VirtualNetworkGatewaysClient + VnetClient *network.VirtualNetworksClient + VnetPeeringsClient *network.VirtualNetworkPeeringsClient + VirtualWanClient *network.VirtualWansClient + VirtualHubClient *network.VirtualHubsClient + VpnConnectionsClient *network.VpnConnectionsClient + VpnGatewaysClient *network.VpnGatewaysClient + 
VpnServerConfigurationsClient *network.VpnServerConfigurationsClient + VpnSitesClient *network.VpnSitesClient + WatcherClient *network.WatchersClient + WebApplicationFirewallPoliciesClient *network.WebApplicationFirewallPoliciesClient + PrivateDnsZoneGroupClient *network.PrivateDNSZoneGroupsClient + PrivateLinkServiceClient *network.PrivateLinkServicesClient + ServiceAssociationLinkClient *network.ServiceAssociationLinksClient + ResourceNavigationLinkClient *network.ResourceNavigationLinksClient } func NewClient(o *common.ClientOptions) *Client { @@ -62,9 +64,6 @@ func NewClient(o *common.ClientOptions) *Client { ApplicationSecurityGroupsClient := network.NewApplicationSecurityGroupsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) o.ConfigureClient(&ApplicationSecurityGroupsClient.Client, o.ResourceManagerAuthorizer) - AzureFirewallsClient := network.NewAzureFirewallsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) - o.ConfigureClient(&AzureFirewallsClient.Client, o.ResourceManagerAuthorizer) - BastionHostsClient := network.NewBastionHostsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) o.ConfigureClient(&BastionHostsClient.Client, o.ResourceManagerAuthorizer) @@ -86,8 +85,8 @@ func NewClient(o *common.ClientOptions) *Client { ExpressRoutePeeringsClient := network.NewExpressRouteCircuitPeeringsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) o.ConfigureClient(&ExpressRoutePeeringsClient.Client, o.ResourceManagerAuthorizer) - FirewallPolicyClient := network.NewFirewallPoliciesClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) - o.ConfigureClient(&FirewallPolicyClient.Client, o.ResourceManagerAuthorizer) + HubRouteTableClient := network.NewHubRouteTablesClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + o.ConfigureClient(&HubRouteTableClient.Client, o.ResourceManagerAuthorizer) HubVirtualNetworkConnectionClient := network.NewHubVirtualNetworkConnectionsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) o.ConfigureClient(&HubVirtualNetworkConnectionClient.Client, o.ResourceManagerAuthorizer) @@ -98,12 +97,6 @@ func NewClient(o *common.ClientOptions) *Client { IpGroupsClient := network.NewIPGroupsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) o.ConfigureClient(&IpGroupsClient.Client, o.ResourceManagerAuthorizer) - LoadBalancersClient := networkLegacy.NewLoadBalancersClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) - o.ConfigureClient(&LoadBalancersClient.Client, o.ResourceManagerAuthorizer) - - LoadBalancerLoadBalancingRulesClient := networkLegacy.NewLoadBalancerLoadBalancingRulesClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) - o.ConfigureClient(&LoadBalancerLoadBalancingRulesClient.Client, o.ResourceManagerAuthorizer) - LocalNetworkGatewaysClient := network.NewLocalNetworkGatewaysClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) o.ConfigureClient(&LocalNetworkGatewaysClient.Client, o.ResourceManagerAuthorizer) @@ -152,15 +145,30 @@ func NewClient(o *common.ClientOptions) *Client { SecurityGroupClient := network.NewSecurityGroupsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) o.ConfigureClient(&SecurityGroupClient.Client, o.ResourceManagerAuthorizer) + SecurityPartnerProviderClient := network.NewSecurityPartnerProvidersClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + o.ConfigureClient(&SecurityPartnerProviderClient.Client, o.ResourceManagerAuthorizer) + SecurityRuleClient := 
network.NewSecurityRulesClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) o.ConfigureClient(&SecurityRuleClient.Client, o.ResourceManagerAuthorizer) + ServiceEndpointPoliciesClient := network.NewServiceEndpointPoliciesClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + o.ConfigureClient(&ServiceEndpointPoliciesClient.Client, o.ResourceManagerAuthorizer) + + ServiceEndpointPolicyDefinitionsClient := network.NewServiceEndpointPolicyDefinitionsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + o.ConfigureClient(&ServiceEndpointPolicyDefinitionsClient.Client, o.ResourceManagerAuthorizer) + ServiceTagsClient := network.NewServiceTagsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) o.ConfigureClient(&ServiceTagsClient.Client, o.ResourceManagerAuthorizer) SubnetsClient := network.NewSubnetsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) o.ConfigureClient(&SubnetsClient.Client, o.ResourceManagerAuthorizer) + VirtualHubBgpConnectionClient := network.NewVirtualHubBgpConnectionClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + o.ConfigureClient(&VirtualHubBgpConnectionClient.Client, o.ResourceManagerAuthorizer) + + VirtualHubIPClient := network.NewVirtualHubIPConfigurationClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + o.ConfigureClient(&VirtualHubIPClient.Client, o.ResourceManagerAuthorizer) + VnetGatewayClient := network.NewVirtualNetworkGatewaysClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) o.ConfigureClient(&VnetGatewayClient.Client, o.ResourceManagerAuthorizer) @@ -179,6 +187,9 @@ func NewClient(o *common.ClientOptions) *Client { vpnGatewaysClient := network.NewVpnGatewaysClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) o.ConfigureClient(&vpnGatewaysClient.Client, o.ResourceManagerAuthorizer) + vpnConnectionsClient := network.NewVpnConnectionsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + o.ConfigureClient(&vpnConnectionsClient.Client, o.ResourceManagerAuthorizer) + vpnSitesClient := network.NewVpnSitesClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) o.ConfigureClient(&vpnSitesClient.Client, o.ResourceManagerAuthorizer) @@ -195,51 +206,54 @@ func NewClient(o *common.ClientOptions) *Client { o.ConfigureClient(&ResourceNavigationLinkClient.Client, o.ResourceManagerAuthorizer) return &Client{ - ApplicationGatewaysClient: &ApplicationGatewaysClient, - ApplicationSecurityGroupsClient: &ApplicationSecurityGroupsClient, - AzureFirewallsClient: &AzureFirewallsClient, - BastionHostsClient: &BastionHostsClient, - ConnectionMonitorsClient: &ConnectionMonitorsClient, - DDOSProtectionPlansClient: &DDOSProtectionPlansClient, - ExpressRouteAuthsClient: &ExpressRouteAuthsClient, - ExpressRouteCircuitsClient: &ExpressRouteCircuitsClient, - ExpressRouteGatewaysClient: &ExpressRouteGatewaysClient, - ExpressRoutePeeringsClient: &ExpressRoutePeeringsClient, - FirewallPolicyClient: &FirewallPolicyClient, - HubVirtualNetworkConnectionClient: &HubVirtualNetworkConnectionClient, - InterfacesClient: &InterfacesClient, - IPGroupsClient: &IpGroupsClient, - LoadBalancersClient: &LoadBalancersClient, - LoadBalancerLoadBalancingRulesClient: &LoadBalancerLoadBalancingRulesClient, - LocalNetworkGatewaysClient: &LocalNetworkGatewaysClient, - PointToSiteVpnGatewaysClient: &pointToSiteVpnGatewaysClient, - ProfileClient: &ProfileClient, - PacketCapturesClient: &PacketCapturesClient, - PrivateEndpointClient: &PrivateEndpointClient, - PublicIPsClient: &PublicIPsClient, - 
PublicIPPrefixesClient: &PublicIPPrefixesClient, - RoutesClient: &RoutesClient, - RouteFiltersClient: &RouteFiltersClient, - RouteTablesClient: &RouteTablesClient, - SecurityGroupClient: &SecurityGroupClient, - SecurityRuleClient: &SecurityRuleClient, - ServiceTagsClient: &ServiceTagsClient, - SubnetsClient: &SubnetsClient, - NatGatewayClient: &NatGatewayClient, - VnetGatewayConnectionsClient: &VnetGatewayConnectionsClient, - VnetGatewayClient: &VnetGatewayClient, - VnetClient: &VnetClient, - VnetPeeringsClient: &VnetPeeringsClient, - VirtualWanClient: &VirtualWanClient, - VirtualHubClient: &VirtualHubClient, - VpnGatewaysClient: &vpnGatewaysClient, - VpnServerConfigurationsClient: &vpnServerConfigurationsClient, - VpnSitesClient: &vpnSitesClient, - WatcherClient: &WatcherClient, - WebApplicationFirewallPoliciesClient: &WebApplicationFirewallPoliciesClient, - PrivateDnsZoneGroupClient: &PrivateDnsZoneGroupClient, - PrivateLinkServiceClient: &PrivateLinkServiceClient, - ServiceAssociationLinkClient: &ServiceAssociationLinkClient, - ResourceNavigationLinkClient: &ResourceNavigationLinkClient, + ApplicationGatewaysClient: &ApplicationGatewaysClient, + ApplicationSecurityGroupsClient: &ApplicationSecurityGroupsClient, + BastionHostsClient: &BastionHostsClient, + ConnectionMonitorsClient: &ConnectionMonitorsClient, + DDOSProtectionPlansClient: &DDOSProtectionPlansClient, + ExpressRouteAuthsClient: &ExpressRouteAuthsClient, + ExpressRouteCircuitsClient: &ExpressRouteCircuitsClient, + ExpressRouteGatewaysClient: &ExpressRouteGatewaysClient, + ExpressRoutePeeringsClient: &ExpressRoutePeeringsClient, + HubRouteTableClient: &HubRouteTableClient, + HubVirtualNetworkConnectionClient: &HubVirtualNetworkConnectionClient, + InterfacesClient: &InterfacesClient, + IPGroupsClient: &IpGroupsClient, + LocalNetworkGatewaysClient: &LocalNetworkGatewaysClient, + PointToSiteVpnGatewaysClient: &pointToSiteVpnGatewaysClient, + ProfileClient: &ProfileClient, + PacketCapturesClient: &PacketCapturesClient, + PrivateEndpointClient: &PrivateEndpointClient, + PublicIPsClient: &PublicIPsClient, + PublicIPPrefixesClient: &PublicIPPrefixesClient, + RoutesClient: &RoutesClient, + RouteFiltersClient: &RouteFiltersClient, + RouteTablesClient: &RouteTablesClient, + SecurityGroupClient: &SecurityGroupClient, + SecurityPartnerProviderClient: &SecurityPartnerProviderClient, + SecurityRuleClient: &SecurityRuleClient, + ServiceEndpointPoliciesClient: &ServiceEndpointPoliciesClient, + ServiceEndpointPolicyDefinitionsClient: &ServiceEndpointPolicyDefinitionsClient, + ServiceTagsClient: &ServiceTagsClient, + SubnetsClient: &SubnetsClient, + NatGatewayClient: &NatGatewayClient, + VirtualHubBgpConnectionClient: &VirtualHubBgpConnectionClient, + VirtualHubIPClient: &VirtualHubIPClient, + VnetGatewayConnectionsClient: &VnetGatewayConnectionsClient, + VnetGatewayClient: &VnetGatewayClient, + VnetClient: &VnetClient, + VnetPeeringsClient: &VnetPeeringsClient, + VirtualWanClient: &VirtualWanClient, + VirtualHubClient: &VirtualHubClient, + VpnConnectionsClient: &vpnConnectionsClient, + VpnGatewaysClient: &vpnGatewaysClient, + VpnServerConfigurationsClient: &vpnServerConfigurationsClient, + VpnSitesClient: &vpnSitesClient, + WatcherClient: &WatcherClient, + WebApplicationFirewallPoliciesClient: &WebApplicationFirewallPoliciesClient, + PrivateDnsZoneGroupClient: &PrivateDnsZoneGroupClient, + PrivateLinkServiceClient: &PrivateLinkServiceClient, + ServiceAssociationLinkClient: &ServiceAssociationLinkClient, + ResourceNavigationLinkClient: 
&ResourceNavigationLinkClient, } } diff --git a/azurerm/internal/services/network/express_route_circuit_peering_resource.go b/azurerm/internal/services/network/express_route_circuit_peering_resource.go index f35e349a6f15..2c74893cabe2 100644 --- a/azurerm/internal/services/network/express_route_circuit_peering_resource.go +++ b/azurerm/internal/services/network/express_route_circuit_peering_resource.go @@ -14,6 +14,7 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) @@ -94,11 +95,13 @@ func resourceArmExpressRouteCircuitPeering() *schema.Resource { Required: true, Elem: &schema.Schema{Type: schema.TypeString}, }, + "customer_asn": { Type: schema.TypeInt, Optional: true, Default: 0, }, + "routing_registry_name": { Type: schema.TypeString, Optional: true, @@ -108,6 +111,63 @@ func resourceArmExpressRouteCircuitPeering() *schema.Resource { }, }, + "ipv6": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "microsoft_peering": { + Type: schema.TypeList, + Required: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "advertised_public_prefixes": { + Type: schema.TypeList, + MinItems: 1, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.IsCIDR, + }, + }, + + "customer_asn": { + Type: schema.TypeInt, + Optional: true, + Default: 0, + }, + + "routing_registry_name": { + Type: schema.TypeString, + Optional: true, + Default: "NONE", + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + }, + }, + + "primary_peer_address_prefix": { + Type: schema.TypeString, + Required: true, + }, + + "secondary_peer_address_prefix": { + Type: schema.TypeString, + Required: true, + }, + + "route_filter_id": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: azure.ValidateResourceID, + }, + }, + }, + }, + "azure_asn": { Type: schema.TypeInt, Computed: true, @@ -193,8 +253,22 @@ func resourceArmExpressRouteCircuitPeeringCreateUpdate(d *schema.ResourceData, m ID: utils.String(route_filter_id), } } - } else if route_filter_id != "" { - return fmt.Errorf("`route_filter_id` may only be specified when `peering_type` is set to `MicrosoftPeering`") + + ipv6Peering := d.Get("ipv6").([]interface{}) + ipv6PeeringConfig, err := expandExpressRouteCircuitIpv6PeeringConfig(ipv6Peering) + if err != nil { + return err + } + parameters.ExpressRouteCircuitPeeringPropertiesFormat.Ipv6PeeringConfig = ipv6PeeringConfig + } else { + if route_filter_id != "" { + return fmt.Errorf("`route_filter_id` may only be specified when `peering_type` is set to `MicrosoftPeering`") + } + + ipv6Peering := d.Get("ipv6").([]interface{}) + if len(ipv6Peering) != 0 { + return fmt.Errorf("`ipv6` may only be specified when `peering_type` is set to `MicrosoftPeering`") + } } future, err := client.CreateOrUpdate(ctx, resourceGroup, circuitName, peeringType, parameters) @@ -259,7 +333,10 @@ func resourceArmExpressRouteCircuitPeeringRead(d *schema.ResourceData, meta inte config := 
flattenExpressRouteCircuitPeeringMicrosoftConfig(props.MicrosoftPeeringConfig) if err := d.Set("microsoft_peering_config", config); err != nil { - return fmt.Errorf("Error setting `microsoft_peering_config`: %+v", err) + return fmt.Errorf("setting `microsoft_peering_config`: %+v", err) + } + if err := d.Set("ipv6", flattenExpressRouteCircuitIpv6PeeringConfig(props.Ipv6PeeringConfig)); err != nil { + return fmt.Errorf("setting `ipv6`: %+v", err) } } @@ -302,6 +379,9 @@ func resourceArmExpressRouteCircuitPeeringDelete(d *schema.ResourceData, meta in } func expandExpressRouteCircuitPeeringMicrosoftConfig(input []interface{}) *network.ExpressRouteCircuitPeeringConfig { + if len(input) == 0 { + return nil + } peering := input[0].(map[string]interface{}) prefixes := make([]string, 0) @@ -320,6 +400,29 @@ func expandExpressRouteCircuitPeeringMicrosoftConfig(input []interface{}) *netwo } } +func expandExpressRouteCircuitIpv6PeeringConfig(input []interface{}) (*network.Ipv6ExpressRouteCircuitPeeringConfig, error) { + if len(input) == 0 { + return nil, nil + } + + v := input[0].(map[string]interface{}) + peeringConfig := network.Ipv6ExpressRouteCircuitPeeringConfig{ + PrimaryPeerAddressPrefix: utils.String(v["primary_peer_address_prefix"].(string)), + SecondaryPeerAddressPrefix: utils.String(v["secondary_peer_address_prefix"].(string)), + MicrosoftPeeringConfig: expandExpressRouteCircuitPeeringMicrosoftConfig(v["microsoft_peering"].([]interface{})), + } + routeFilterId := v["route_filter_id"].(string) + if routeFilterId != "" { + if _, err := parse.RouteFilterID(routeFilterId); err != nil { + return nil, err + } + peeringConfig.RouteFilter = &network.SubResource{ + ID: utils.String(routeFilterId), + } + } + return &peeringConfig, nil +} + func flattenExpressRouteCircuitPeeringMicrosoftConfig(input *network.ExpressRouteCircuitPeeringConfig) interface{} { if input == nil { return []interface{}{} @@ -340,3 +443,30 @@ func flattenExpressRouteCircuitPeeringMicrosoftConfig(input *network.ExpressRout return []interface{}{config} } + +func flattenExpressRouteCircuitIpv6PeeringConfig(input *network.Ipv6ExpressRouteCircuitPeeringConfig) []interface{} { + if input == nil { + return make([]interface{}, 0) + } + + var primaryPeerAddressPrefix string + if input.PrimaryPeerAddressPrefix != nil { + primaryPeerAddressPrefix = *input.PrimaryPeerAddressPrefix + } + var secondaryPeerAddressPrefix string + if input.SecondaryPeerAddressPrefix != nil { + secondaryPeerAddressPrefix = *input.SecondaryPeerAddressPrefix + } + routeFilterId := "" + if input.RouteFilter != nil && input.RouteFilter.ID != nil { + routeFilterId = *input.RouteFilter.ID + } + return []interface{}{ + map[string]interface{}{ + "microsoft_peering": flattenExpressRouteCircuitPeeringMicrosoftConfig(input.MicrosoftPeeringConfig), + "primary_peer_address_prefix": primaryPeerAddressPrefix, + "secondary_peer_address_prefix": secondaryPeerAddressPrefix, + "route_filter_id": routeFilterId, + }, + } +} diff --git a/azurerm/internal/services/network/firewall_data_source.go b/azurerm/internal/services/network/firewall_data_source.go deleted file mode 100644 index 660ed9ea6eb6..000000000000 --- a/azurerm/internal/services/network/firewall_data_source.go +++ /dev/null @@ -1,100 +0,0 @@ -package network - -import ( - "fmt" - "time" - - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func dataSourceArmFirewall() *schema.Resource { - return &schema.Resource{ - Read: dataSourceArmFirewallRead, - - Timeouts: &schema.ResourceTimeout{ - Read: schema.DefaultTimeout(5 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ValidateFunc: ValidateAzureFirewallName, - }, - - "location": azure.SchemaLocationForDataSource(), - - "resource_group_name": azure.SchemaResourceGroupNameForDataSource(), - - "ip_configuration": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Computed: true, - }, - "subnet_id": { - Type: schema.TypeString, - Computed: true, - }, - "internal_public_ip_address_id": { - Type: schema.TypeString, - Computed: true, - }, - "public_ip_address_id": { - Type: schema.TypeString, - Computed: true, - }, - "private_ip_address": { - Type: schema.TypeString, - Computed: true, - }, - }, - }, - }, - - "tags": tags.SchemaDataSource(), - }, - } -} - -func dataSourceArmFirewallRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.AzureFirewallsClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := d.Get("name").(string) - resourceGroup := d.Get("resource_group_name").(string) - - read, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if utils.ResponseWasNotFound(read.Response) { - return fmt.Errorf("Firewall %q was not found in Resource Group %q", name, resourceGroup) - } - - return fmt.Errorf("Error making Read request on Azure Firewall %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - d.SetId(*read.ID) - d.Set("name", read.Name) - d.Set("resource_group_name", resourceGroup) - - if location := read.Location; location != nil { - d.Set("location", azure.NormalizeLocation(*location)) - } - - if props := read.AzureFirewallPropertiesFormat; props != nil { - if err := d.Set("ip_configuration", flattenArmFirewallIPConfigurations(props.IPConfigurations)); err != nil { - return fmt.Errorf("Error setting `ip_configuration`: %+v", err) - } - } - - return tags.FlattenAndSet(d, read.Tags) -} diff --git a/azurerm/internal/services/network/firewall_resource.go b/azurerm/internal/services/network/firewall_resource.go deleted file mode 100644 index 071afc794c4a..000000000000 --- a/azurerm/internal/services/network/firewall_resource.go +++ /dev/null @@ -1,526 +0,0 @@ -package network - -import ( - "fmt" - "log" - "regexp" - "time" - - "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -var azureFirewallResourceName = "azurerm_firewall" - -func resourceArmFirewall() *schema.Resource { - return &schema.Resource{ - Create: resourceArmFirewallCreateUpdate, - Read: resourceArmFirewallRead, - Update: resourceArmFirewallCreateUpdate, - Delete: resourceArmFirewallDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(90 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(90 * time.Minute), - Delete: schema.DefaultTimeout(90 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: ValidateAzureFirewallName, - }, - - "location": azure.SchemaLocation(), - - "resource_group_name": azure.SchemaResourceGroupName(), - - "ip_configuration": { - Type: schema.TypeList, - Required: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - "subnet_id": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - ValidateFunc: validateAzureFirewallSubnetName, - }, - "public_ip_address_id": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validate.PublicIPAddressID, - }, - "private_ip_address": { - Type: schema.TypeString, - Computed: true, - }, - }, - }, - }, - - "management_ip_configuration": { - Type: schema.TypeList, - Optional: true, - ForceNew: true, - MaxItems: 1, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - "subnet_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validateAzureFirewallManagementSubnetName, - }, - "public_ip_address_id": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validate.PublicIPAddressID, - }, - "private_ip_address": { - Type: schema.TypeString, - Computed: true, - }, - }, - }, - }, - - "threat_intel_mode": { - Type: schema.TypeString, - Optional: true, - Default: string(network.AzureFirewallThreatIntelModeAlert), - ValidateFunc: validation.StringInSlice([]string{ - string(network.AzureFirewallThreatIntelModeOff), - string(network.AzureFirewallThreatIntelModeAlert), - string(network.AzureFirewallThreatIntelModeDeny), - }, false), - }, - - "zones": azure.SchemaZones(), - - "tags": tags.Schema(), - }, - } -} - -func resourceArmFirewallCreateUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.AzureFirewallsClient - ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - - log.Printf("[INFO] preparing arguments for AzureRM Azure Firewall creation") - - name := d.Get("name").(string) - resourceGroup := d.Get("resource_group_name").(string) - - if d.IsNewResource() { - existing, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing Firewall %q (Resource Group %q): %s", name, resourceGroup, err) - } - } - - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_firewall", *existing.ID) - } - } - - if err := 
validateFirewallConfigurationSettings(d); err != nil { - return fmt.Errorf("Error validating Firewall %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - location := azure.NormalizeLocation(d.Get("location").(string)) - t := d.Get("tags").(map[string]interface{}) - i := d.Get("ip_configuration").([]interface{}) - ipConfigs, subnetToLock, vnetToLock, err := expandArmFirewallIPConfigurations(i) - if err != nil { - return fmt.Errorf("Error building list of Azure Firewall IP Configurations: %+v", err) - } - zones := azure.ExpandZones(d.Get("zones").([]interface{})) - - parameters := network.AzureFirewall{ - Location: &location, - Tags: tags.Expand(t), - AzureFirewallPropertiesFormat: &network.AzureFirewallPropertiesFormat{ - IPConfigurations: ipConfigs, - ThreatIntelMode: network.AzureFirewallThreatIntelMode(d.Get("threat_intel_mode").(string)), - }, - Zones: zones, - } - m := d.Get("management_ip_configuration").([]interface{}) - if len(m) == 1 { - mgmtIPConfig, mgmtSubnetName, mgmtVirtualNetworkName, err := expandArmFirewallIPConfigurations(m) - if err != nil { - return fmt.Errorf("Error parsing Azure Firewall Management IP Configurations: %+v", err) - } - - if !utils.SliceContainsValue(*subnetToLock, (*mgmtSubnetName)[0]) { - *subnetToLock = append(*subnetToLock, (*mgmtSubnetName)[0]) - } - - if !utils.SliceContainsValue(*vnetToLock, (*mgmtVirtualNetworkName)[0]) { - *vnetToLock = append(*vnetToLock, (*mgmtVirtualNetworkName)[0]) - } - if *mgmtIPConfig != nil { - parameters.ManagementIPConfiguration = &(*mgmtIPConfig)[0] - } - } - - locks.ByName(name, azureFirewallResourceName) - defer locks.UnlockByName(name, azureFirewallResourceName) - - locks.MultipleByName(vnetToLock, VirtualNetworkResourceName) - defer locks.UnlockMultipleByName(vnetToLock, VirtualNetworkResourceName) - - locks.MultipleByName(subnetToLock, SubnetResourceName) - defer locks.UnlockMultipleByName(subnetToLock, SubnetResourceName) - - if !d.IsNewResource() { - exists, err2 := client.Get(ctx, resourceGroup, name) - if err2 != nil { - if utils.ResponseWasNotFound(exists.Response) { - return fmt.Errorf("Error retrieving existing Firewall %q (Resource Group %q): firewall not found in resource group", name, resourceGroup) - } - return fmt.Errorf("Error retrieving existing Firewall %q (Resource Group %q): %s", name, resourceGroup, err2) - } - if exists.AzureFirewallPropertiesFormat == nil { - return fmt.Errorf("Error retrieving existing rules (Firewall %q / Resource Group %q): `props` was nil", name, resourceGroup) - } - props := *exists.AzureFirewallPropertiesFormat - parameters.AzureFirewallPropertiesFormat.ApplicationRuleCollections = props.ApplicationRuleCollections - parameters.AzureFirewallPropertiesFormat.NetworkRuleCollections = props.NetworkRuleCollections - parameters.AzureFirewallPropertiesFormat.NatRuleCollections = props.NatRuleCollections - } - - future, err := client.CreateOrUpdate(ctx, resourceGroup, name, parameters) - if err != nil { - return fmt.Errorf("Error creating/updating Azure Firewall %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for creation/update of Azure Firewall %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - read, err := client.Get(ctx, resourceGroup, name) - if err != nil { - return fmt.Errorf("Error retrieving Azure Firewall %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - if read.ID == nil { - return fmt.Errorf("Cannot read Azure 
Firewall %q (Resource Group %q) ID", name, resourceGroup) - } - - d.SetId(*read.ID) - - return resourceArmFirewallRead(d, meta) -} - -func resourceArmFirewallRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.AzureFirewallsClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - resourceGroup := id.ResourceGroup - name := id.Path["azureFirewalls"] - - read, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if utils.ResponseWasNotFound(read.Response) { - log.Printf("[DEBUG] Firewall %q was not found in Resource Group %q - removing from state!", name, resourceGroup) - d.SetId("") - return nil - } - - return fmt.Errorf("Error making Read request on Azure Firewall %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - d.Set("name", read.Name) - d.Set("resource_group_name", resourceGroup) - if location := read.Location; location != nil { - d.Set("location", azure.NormalizeLocation(*location)) - } - - if props := read.AzureFirewallPropertiesFormat; props != nil { - if err := d.Set("ip_configuration", flattenArmFirewallIPConfigurations(props.IPConfigurations)); err != nil { - return fmt.Errorf("Error setting `ip_configuration`: %+v", err) - } - managementIPConfigs := make([]interface{}, 0) - if props.ManagementIPConfiguration != nil { - managementIPConfigs = flattenArmFirewallIPConfigurations(&[]network.AzureFirewallIPConfiguration{ - *props.ManagementIPConfiguration, - }) - } - if err := d.Set("management_ip_configuration", managementIPConfigs); err != nil { - return fmt.Errorf("Error setting `management_ip_configuration`: %+v", err) - } - d.Set("threat_intel_mode", string(props.ThreatIntelMode)) - } - - if err := d.Set("zones", azure.FlattenZones(read.Zones)); err != nil { - return fmt.Errorf("Error setting `zones`: %+v", err) - } - - return tags.FlattenAndSet(d, read.Tags) -} - -func resourceArmFirewallDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.AzureFirewallsClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - resourceGroup := id.ResourceGroup - name := id.Path["azureFirewalls"] - - read, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if utils.ResponseWasNotFound(read.Response) { - // deleted outside of TF - log.Printf("[DEBUG] Firewall %q was not found in Resource Group %q - assuming removed!", name, resourceGroup) - return nil - } - - return fmt.Errorf("Error retrieving Firewall %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - subnetNamesToLock := make([]string, 0) - virtualNetworkNamesToLock := make([]string, 0) - if props := read.AzureFirewallPropertiesFormat; props != nil { - if configs := props.IPConfigurations; configs != nil { - for _, config := range *configs { - if config.Subnet == nil || config.Subnet.ID == nil { - continue - } - - parsedSubnetID, err2 := azure.ParseAzureResourceID(*config.Subnet.ID) - if err2 != nil { - return err2 - } - subnetName := parsedSubnetID.Path["subnets"] - - if !utils.SliceContainsValue(subnetNamesToLock, subnetName) { - subnetNamesToLock = append(subnetNamesToLock, subnetName) - } - - virtualNetworkName := parsedSubnetID.Path["virtualNetworks"] - if !utils.SliceContainsValue(virtualNetworkNamesToLock, virtualNetworkName) { - 
virtualNetworkNamesToLock = append(virtualNetworkNamesToLock, virtualNetworkName) - } - } - } - - if mconfig := props.ManagementIPConfiguration; mconfig != nil { - if mconfig.Subnet != nil && mconfig.Subnet.ID != nil { - parsedSubnetID, err2 := azure.ParseAzureResourceID(*mconfig.Subnet.ID) - if err2 != nil { - return err2 - } - subnetName := parsedSubnetID.Path["subnets"] - - if !utils.SliceContainsValue(subnetNamesToLock, subnetName) { - subnetNamesToLock = append(subnetNamesToLock, subnetName) - } - - virtualNetworkName := parsedSubnetID.Path["virtualNetworks"] - if !utils.SliceContainsValue(virtualNetworkNamesToLock, virtualNetworkName) { - virtualNetworkNamesToLock = append(virtualNetworkNamesToLock, virtualNetworkName) - } - } - } - } - - locks.ByName(name, azureFirewallResourceName) - defer locks.UnlockByName(name, azureFirewallResourceName) - - locks.MultipleByName(&virtualNetworkNamesToLock, VirtualNetworkResourceName) - defer locks.UnlockMultipleByName(&virtualNetworkNamesToLock, VirtualNetworkResourceName) - - locks.MultipleByName(&subnetNamesToLock, SubnetResourceName) - defer locks.UnlockMultipleByName(&subnetNamesToLock, SubnetResourceName) - - future, err := client.Delete(ctx, resourceGroup, name) - if err != nil { - return fmt.Errorf("Error deleting Azure Firewall %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for the deletion of Azure Firewall %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - return err -} - -func expandArmFirewallIPConfigurations(configs []interface{}) (*[]network.AzureFirewallIPConfiguration, *[]string, *[]string, error) { - ipConfigs := make([]network.AzureFirewallIPConfiguration, 0) - subnetNamesToLock := make([]string, 0) - virtualNetworkNamesToLock := make([]string, 0) - - for _, configRaw := range configs { - data := configRaw.(map[string]interface{}) - name := data["name"].(string) - subnetId := data["subnet_id"].(string) - pubID := data["public_ip_address_id"].(string) - - ipConfig := network.AzureFirewallIPConfiguration{ - Name: utils.String(name), - AzureFirewallIPConfigurationPropertiesFormat: &network.AzureFirewallIPConfigurationPropertiesFormat{ - PublicIPAddress: &network.SubResource{ - ID: utils.String(pubID), - }, - }, - } - - if subnetId != "" { - subnetID, err := azure.ParseAzureResourceID(subnetId) - if err != nil { - return nil, nil, nil, err - } - - subnetName := subnetID.Path["subnets"] - virtualNetworkName := subnetID.Path["virtualNetworks"] - - if !utils.SliceContainsValue(subnetNamesToLock, subnetName) { - subnetNamesToLock = append(subnetNamesToLock, subnetName) - } - - if !utils.SliceContainsValue(virtualNetworkNamesToLock, virtualNetworkName) { - virtualNetworkNamesToLock = append(virtualNetworkNamesToLock, virtualNetworkName) - } - - ipConfig.AzureFirewallIPConfigurationPropertiesFormat.Subnet = &network.SubResource{ - ID: utils.String(subnetId), - } - } - ipConfigs = append(ipConfigs, ipConfig) - } - return &ipConfigs, &subnetNamesToLock, &virtualNetworkNamesToLock, nil -} - -func flattenArmFirewallIPConfigurations(input *[]network.AzureFirewallIPConfiguration) []interface{} { - result := make([]interface{}, 0) - if input == nil { - return result - } - - for _, v := range *input { - afIPConfig := make(map[string]interface{}) - props := v.AzureFirewallIPConfigurationPropertiesFormat - if props == nil { - continue - } - - if name := v.Name; name != nil { - afIPConfig["name"] = *name - } - - if 
subnet := props.Subnet; subnet != nil { - if id := subnet.ID; id != nil { - afIPConfig["subnet_id"] = *id - } - } - - if ipAddress := props.PrivateIPAddress; ipAddress != nil { - afIPConfig["private_ip_address"] = *ipAddress - } - - if pip := props.PublicIPAddress; pip != nil { - if id := pip.ID; id != nil { - afIPConfig["public_ip_address_id"] = *id - } - } - result = append(result, afIPConfig) - } - - return result -} - -func ValidateAzureFirewallName(v interface{}, k string) (warnings []string, errors []error) { - value := v.(string) - - // From the Portal: - // The name must begin with a letter or number, end with a letter, number or underscore, and may contain only letters, numbers, underscores, periods, or hyphens. - if matched := regexp.MustCompile(`^[0-9a-zA-Z]([0-9a-zA-Z._-]{0,}[0-9a-zA-Z_])?$`).Match([]byte(value)); !matched { - errors = append(errors, fmt.Errorf("%q must begin with a letter or number, end with a letter, number or underscore, and may contain only letters, numbers, underscores, periods, or hyphens.", k)) - } - - return warnings, errors -} - -func validateAzureFirewallSubnetName(v interface{}, k string) (warnings []string, errors []error) { - parsed, err := azure.ParseAzureResourceID(v.(string)) - if err != nil { - errors = append(errors, fmt.Errorf("Error parsing Azure Resource ID %q", v.(string))) - return warnings, errors - } - subnetName := parsed.Path["subnets"] - if subnetName != "AzureFirewallSubnet" { - errors = append(errors, fmt.Errorf("The name of the Subnet for %q must be exactly 'AzureFirewallSubnet' to be used for the Azure Firewall resource", k)) - } - - return warnings, errors -} - -func validateAzureFirewallManagementSubnetName(v interface{}, k string) (warnings []string, errors []error) { - parsed, err := azure.ParseAzureResourceID(v.(string)) - if err != nil { - errors = append(errors, fmt.Errorf("Error parsing Azure Resource ID %q", v.(string))) - return warnings, errors - } - subnetName := parsed.Path["subnets"] - if subnetName != "AzureFirewallManagementSubnet" { - errors = append(errors, fmt.Errorf("The name of the management subnet for %q must be exactly 'AzureFirewallManagementSubnet' to be used for the Azure Firewall resource", k)) - } - - return warnings, errors -} - -func validateFirewallConfigurationSettings(d *schema.ResourceData) error { - configs := d.Get("ip_configuration").([]interface{}) - subnetNumber := 0 - - for _, configRaw := range configs { - data := configRaw.(map[string]interface{}) - if subnet, exist := data["subnet_id"].(string); exist && subnet != "" { - subnetNumber++ - } - } - - if subnetNumber != 1 { - return fmt.Errorf(`The "ip_configuration" is invalid, %d "subnet_id" have been set, one "subnet_id" should be set among all "ip_configuration" blocks`, subnetNumber) - } - - return nil -} diff --git a/azurerm/internal/services/network/ip_group.go b/azurerm/internal/services/network/ip_group.go deleted file mode 100644 index 70019da79f46..000000000000 --- a/azurerm/internal/services/network/ip_group.go +++ /dev/null @@ -1,33 +0,0 @@ -package network - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type IpGroupResourceID struct { - ResourceGroup string - Name string -} - -func ParseIpGroupID(input string) (*IpGroupResourceID, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse IP Group ID %q: %+v", input, err) - } - - ipGroup := IpGroupResourceID{ - ResourceGroup: id.ResourceGroup, - } - 
- if ipGroup.Name, err = id.PopSegment("ipGroups"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &ipGroup, nil -} diff --git a/azurerm/internal/services/network/ip_group_resource.go b/azurerm/internal/services/network/ip_group_resource.go index 15bbf478110e..6e999c556f73 100644 --- a/azurerm/internal/services/network/ip_group_resource.go +++ b/azurerm/internal/services/network/ip_group_resource.go @@ -7,6 +7,7 @@ import ( "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" @@ -25,7 +26,7 @@ func resourceArmIpGroup() *schema.Resource { Delete: resourceArmIpGroupDelete, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := ParseIpGroupID(id) + _, err := parse.IpGroupID(id) return err }), @@ -123,7 +124,7 @@ func resourceArmIpGroupRead(d *schema.ResourceData, meta interface{}) error { ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := ParseIpGroupID(d.Id()) + id, err := parse.IpGroupID(d.Id()) if err != nil { return err } @@ -160,7 +161,7 @@ func resourceArmIpGroupDelete(d *schema.ResourceData, meta interface{}) error { ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := ParseIpGroupID(d.Id()) + id, err := parse.IpGroupID(d.Id()) if err != nil { return err } diff --git a/azurerm/internal/services/network/lb_backend_address_pool_data_source.go b/azurerm/internal/services/network/lb_backend_address_pool_data_source.go deleted file mode 100644 index dea705420ef1..000000000000 --- a/azurerm/internal/services/network/lb_backend_address_pool_data_source.go +++ /dev/null @@ -1,93 +0,0 @@ -package network - -import ( - "fmt" - "time" - - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" -) - -func dataSourceArmLoadBalancerBackendAddressPool() *schema.Resource { - return &schema.Resource{ - Read: dataSourceArmLoadBalancerBackendAddressPoolRead, - Timeouts: &schema.ResourceTimeout{ - Read: schema.DefaultTimeout(5 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.NoZeroValues, - }, - - "loadbalancer_id": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validate.LoadBalancerID, - }, - - "backend_ip_configurations": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "id": { - Type: schema.TypeString, - Computed: true, - }, - }, - }, - }, - }, - } -} - -func dataSourceArmLoadBalancerBackendAddressPoolRead(d *schema.ResourceData, meta interface{}) error { - client := 
meta.(*clients.Client).Network.LoadBalancersClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := d.Get("name").(string) - loadBalancerId, err := parse.LoadBalancerID(d.Get("loadbalancer_id").(string)) - if err != nil { - return err - } - - loadBalancer, exists, err := retrieveLoadBalancerById(ctx, client, *loadBalancerId) - if err != nil { - return fmt.Errorf("retrieving Load Balancer by ID: %+v", err) - } - if !exists { - return fmt.Errorf("Load Balancer %q (Resource Group %q) was not found", loadBalancerId.Name, loadBalancerId.ResourceGroup) - } - - bap, _, exists := FindLoadBalancerBackEndAddressPoolByName(loadBalancer, name) - if !exists { - return fmt.Errorf("Backend Address Pool %q was not found in Load Balancer %q (Resource Group %q)", name, loadBalancerId.Name, loadBalancerId.ResourceGroup) - } - - d.SetId(*bap.ID) - - backendIPConfigurations := make([]interface{}, 0) - if props := bap.BackendAddressPoolPropertiesFormat; props != nil { - if beipConfigs := props.BackendIPConfigurations; beipConfigs != nil { - for _, config := range *beipConfigs { - ipConfig := make(map[string]interface{}) - if id := config.ID; id != nil { - ipConfig["id"] = *id - backendIPConfigurations = append(backendIPConfigurations, ipConfig) - } - } - } - } - - d.Set("backend_ip_configurations", backendIPConfigurations) - - return nil -} diff --git a/azurerm/internal/services/network/lb_backend_address_pool_resource.go b/azurerm/internal/services/network/lb_backend_address_pool_resource.go deleted file mode 100644 index b312c8592333..000000000000 --- a/azurerm/internal/services/network/lb_backend_address_pool_resource.go +++ /dev/null @@ -1,266 +0,0 @@ -package network - -import ( - "fmt" - "log" - "time" - - "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-03-01/network" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmLoadBalancerBackendAddressPool() *schema.Resource { - return &schema.Resource{ - Create: resourceArmLoadBalancerBackendAddressPoolCreate, - Read: resourceArmLoadBalancerBackendAddressPoolRead, - Delete: resourceArmLoadBalancerBackendAddressPoolDelete, - - Importer: loadBalancerSubResourceImporter(func(input string) (*parse.LoadBalancerId, error) { - id, err := parse.LoadBalancerBackendAddressPoolID(input) - if err != nil { - return nil, err - } - - lbId := parse.NewLoadBalancerID(id.ResourceGroup, id.LoadBalancerName) - return &lbId, nil - }), - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - 
Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - - "resource_group_name": azure.SchemaResourceGroupName(), - - "loadbalancer_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validate.LoadBalancerID, - }, - - "backend_ip_configurations": { - Type: schema.TypeSet, - Computed: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - ValidateFunc: validation.StringIsNotEmpty, - }, - Set: schema.HashString, - }, - - "load_balancing_rules": { - Type: schema.TypeSet, - Computed: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - ValidateFunc: validation.StringIsNotEmpty, - }, - Set: schema.HashString, - }, - }, - } -} - -func resourceArmLoadBalancerBackendAddressPoolCreate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.LoadBalancersClient - subscriptionId := meta.(*clients.Client).Account.SubscriptionId - ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := d.Get("name").(string) - loadBalancerId, err := parse.LoadBalancerID(d.Get("loadbalancer_id").(string)) - if err != nil { - return fmt.Errorf("parsing Load Balancer Name and Group: %+v", err) - } - - loadBalancerID := loadBalancerId.ID(subscriptionId) - locks.ByID(loadBalancerID) - defer locks.UnlockByID(loadBalancerID) - - loadBalancer, exists, err := retrieveLoadBalancerById(ctx, client, *loadBalancerId) - if err != nil { - return fmt.Errorf("Error Getting Load Balancer By ID: %+v", err) - } - if !exists { - d.SetId("") - log.Printf("[INFO] Load Balancer %q not found. Removing from state", name) - return nil - } - - backendAddressPools := append(*loadBalancer.LoadBalancerPropertiesFormat.BackendAddressPools, network.BackendAddressPool{ - Name: utils.String(name), - }) - existingPool, existingPoolIndex, exists := FindLoadBalancerBackEndAddressPoolByName(loadBalancer, name) - if exists { - if name == *existingPool.Name { - if d.IsNewResource() { - return tf.ImportAsExistsError("azurerm_lb_backend_address_pool", *existingPool.ID) - } - - // this pool is being updated/reapplied remove old copy from the slice - backendAddressPools = append(backendAddressPools[:existingPoolIndex], backendAddressPools[existingPoolIndex+1:]...) 
- } - } - - loadBalancer.LoadBalancerPropertiesFormat.BackendAddressPools = &backendAddressPools - - future, err := client.CreateOrUpdate(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, *loadBalancer) - if err != nil { - return fmt.Errorf("Error Creating/Updating Load Balancer %q (Resource Group %q): %+v", loadBalancerId.Name, loadBalancerId.ResourceGroup, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error Creating/Updating Load Balancer %q (Resource Group %q): %+v", loadBalancerId.Name, loadBalancerId.ResourceGroup, err) - } - - read, err := client.Get(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, "") - if err != nil { - return fmt.Errorf("retrieving Load Balancer %q (Resource Group %q): %+v", loadBalancerId.Name, loadBalancerId.ResourceGroup, err) - } - if read.ID == nil { - return fmt.Errorf("reading ID for Load Balancer %q (Resource Group %q)", loadBalancerId.Name, loadBalancerId.ResourceGroup) - } - - var poolId string - for _, BackendAddressPool := range *read.LoadBalancerPropertiesFormat.BackendAddressPools { - if *BackendAddressPool.Name == name { - poolId = *BackendAddressPool.ID - } - } - - if poolId == "" { - return fmt.Errorf("Cannot find created Load Balancer Backend Address Pool ID %q", poolId) - } - - d.SetId(poolId) - - return resourceArmLoadBalancerBackendAddressPoolRead(d, meta) -} - -func resourceArmLoadBalancerBackendAddressPoolRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.LoadBalancersClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.LoadBalancerBackendAddressPoolID(d.Id()) - if err != nil { - return err - } - - loadBalancerId := parse.NewLoadBalancerID(id.ResourceGroup, id.LoadBalancerName) - loadBalancer, exists, err := retrieveLoadBalancerById(ctx, client, loadBalancerId) - if err != nil { - return fmt.Errorf("retrieving Load Balancer by ID: %+v", err) - } - if !exists { - d.SetId("") - log.Printf("[INFO] Load Balancer %q not found. Removing from state", id.LoadBalancerName) - return nil - } - - config, _, exists := FindLoadBalancerBackEndAddressPoolByName(loadBalancer, id.Name) - if !exists { - log.Printf("[INFO] Load Balancer Backend Address Pool %q not found. 
Removing from state", id.Name) - d.SetId("") - return nil - } - - d.Set("name", config.Name) - d.Set("resource_group_name", id.ResourceGroup) - - var backendIpConfigurations []string - var loadBalancingRules []string - - if props := config.BackendAddressPoolPropertiesFormat; props != nil { - if configs := props.BackendIPConfigurations; configs != nil { - for _, backendConfig := range *configs { - backendIpConfigurations = append(backendIpConfigurations, *backendConfig.ID) - } - } - - if rules := props.LoadBalancingRules; rules != nil { - for _, rule := range *rules { - loadBalancingRules = append(loadBalancingRules, *rule.ID) - } - } - } - - d.Set("backend_ip_configurations", backendIpConfigurations) - d.Set("load_balancing_rules", loadBalancingRules) - - return nil -} - -func resourceArmLoadBalancerBackendAddressPoolDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.LoadBalancersClient - subscriptionId := meta.(*clients.Client).Account.SubscriptionId - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.LoadBalancerBackendAddressPoolID(d.Id()) - if err != nil { - return err - } - - loadBalancerId := parse.NewLoadBalancerID(id.ResourceGroup, id.LoadBalancerName) - loadBalancerID := loadBalancerId.ID(subscriptionId) - locks.ByID(loadBalancerID) - defer locks.UnlockByID(loadBalancerID) - - loadBalancer, exists, err := retrieveLoadBalancerById(ctx, client, loadBalancerId) - if err != nil { - return fmt.Errorf("Error retrieving Load Balancer by ID: %+v", err) - } - if !exists { - d.SetId("") - return nil - } - - _, index, exists := FindLoadBalancerBackEndAddressPoolByName(loadBalancer, d.Get("name").(string)) - if !exists { - return nil - } - - oldBackEndPools := *loadBalancer.LoadBalancerPropertiesFormat.BackendAddressPools - newBackEndPools := append(oldBackEndPools[:index], oldBackEndPools[index+1:]...) 
- loadBalancer.LoadBalancerPropertiesFormat.BackendAddressPools = &newBackEndPools - - future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.LoadBalancerName, *loadBalancer) - if err != nil { - return fmt.Errorf("Error Creating/Updating LoadBalancer: %+v", err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for the completion for the LoadBalancer: %+v", err) - } - - read, err := client.Get(ctx, id.ResourceGroup, id.LoadBalancerName, "") - if err != nil { - return fmt.Errorf("Error retrieving the Load Balancer %q (Resource Group %q): %+v", id.LoadBalancerName, id.ResourceGroup, err) - } - if read.ID == nil { - return fmt.Errorf("Cannot read Load Balancer %q (resource group %q) ID", id.LoadBalancerName, id.ResourceGroup) - } - - return nil -} diff --git a/azurerm/internal/services/network/lb_data_source.go b/azurerm/internal/services/network/lb_data_source.go deleted file mode 100644 index f0ad3c3214a6..000000000000 --- a/azurerm/internal/services/network/lb_data_source.go +++ /dev/null @@ -1,204 +0,0 @@ -package network - -import ( - "fmt" - "time" - - "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-03-01/network" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func dataSourceArmLoadBalancer() *schema.Resource { - return &schema.Resource{ - Read: dataSourceArmLoadBalancerRead, - - Timeouts: &schema.ResourceTimeout{ - Read: schema.DefaultTimeout(5 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.NoZeroValues, - }, - - "resource_group_name": azure.SchemaResourceGroupNameForDataSource(), - - "location": azure.SchemaLocationForDataSource(), - - "sku": { - Type: schema.TypeString, - Computed: true, - }, - - "frontend_ip_configuration": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Computed: true, - }, - - "subnet_id": { - Type: schema.TypeString, - Computed: true, - }, - - "private_ip_address": { - Type: schema.TypeString, - Computed: true, - }, - - "private_ip_address_version": { - Type: schema.TypeString, - Computed: true, - }, - - "public_ip_address_id": { - Type: schema.TypeString, - Computed: true, - }, - - "private_ip_address_allocation": { - Type: schema.TypeString, - Computed: true, - }, - - "zones": azure.SchemaZonesComputed(), - - "id": { - Type: schema.TypeString, - Computed: true, - }, - }, - }, - }, - - "private_ip_address": { - Type: schema.TypeString, - Computed: true, - }, - - "private_ip_addresses": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - }, - - "tags": tags.SchemaDataSource(), - }, - } -} - -func dataSourceArmLoadBalancerRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.LoadBalancersClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := 
d.Get("name").(string) - resourceGroup := d.Get("resource_group_name").(string) - - resp, err := client.Get(ctx, resourceGroup, name, "") - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Load Balancer %q was not found in Resource Group %q", name, resourceGroup) - } - - return fmt.Errorf("Error retrieving Load Balancer %s: %s", name, err) - } - - d.SetId(*resp.ID) - - if location := resp.Location; location != nil { - d.Set("location", azure.NormalizeLocation(*location)) - } - - if sku := resp.Sku; sku != nil { - d.Set("sku", string(sku.Name)) - } - - if props := resp.LoadBalancerPropertiesFormat; props != nil { - if feipConfigs := props.FrontendIPConfigurations; feipConfigs != nil { - if err := d.Set("frontend_ip_configuration", flattenLoadBalancerDataSourceFrontendIpConfiguration(feipConfigs)); err != nil { - return fmt.Errorf("Error flattening `frontend_ip_configuration`: %+v", err) - } - - privateIpAddress := "" - privateIpAddresses := make([]string, 0) - for _, config := range *feipConfigs { - if feipProps := config.FrontendIPConfigurationPropertiesFormat; feipProps != nil { - if ip := feipProps.PrivateIPAddress; ip != nil { - if privateIpAddress == "" { - privateIpAddress = *feipProps.PrivateIPAddress - } - - privateIpAddresses = append(privateIpAddresses, *feipProps.PrivateIPAddress) - } - } - } - - d.Set("private_ip_address", privateIpAddress) - d.Set("private_ip_addresses", privateIpAddresses) - } - } - - return tags.FlattenAndSet(d, resp.Tags) -} - -func flattenLoadBalancerDataSourceFrontendIpConfiguration(ipConfigs *[]network.FrontendIPConfiguration) []interface{} { - result := make([]interface{}, 0) - if ipConfigs == nil { - return result - } - - for _, config := range *ipConfigs { - ipConfig := make(map[string]interface{}) - if config.Name != nil { - ipConfig["name"] = *config.Name - } - - if config.ID != nil { - ipConfig["id"] = *config.ID - } - - zones := make([]string, 0) - if zs := config.Zones; zs != nil { - zones = *zs - } - ipConfig["zones"] = zones - - if props := config.FrontendIPConfigurationPropertiesFormat; props != nil { - ipConfig["private_ip_address_allocation"] = props.PrivateIPAllocationMethod - - if subnet := props.Subnet; subnet != nil && subnet.ID != nil { - ipConfig["subnet_id"] = *subnet.ID - } - - if pip := props.PrivateIPAddress; pip != nil { - ipConfig["private_ip_address"] = *pip - } - - if props.PrivateIPAddressVersion != "" { - ipConfig["private_ip_address_version"] = string(props.PrivateIPAddressVersion) - } - - if pip := props.PublicIPAddress; pip != nil && pip.ID != nil { - ipConfig["public_ip_address_id"] = *pip.ID - } - } - - result = append(result, ipConfig) - } - return result -} diff --git a/azurerm/internal/services/network/lb_nat_pool_resource.go b/azurerm/internal/services/network/lb_nat_pool_resource.go deleted file mode 100644 index 30d1b60cdbdd..000000000000 --- a/azurerm/internal/services/network/lb_nat_pool_resource.go +++ /dev/null @@ -1,336 +0,0 @@ -package network - -import ( - "fmt" - "log" - "time" - - "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-03-01/network" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" - networkValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/state" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmLoadBalancerNatPool() *schema.Resource { - return &schema.Resource{ - Create: resourceArmLoadBalancerNatPoolCreateUpdate, - Read: resourceArmLoadBalancerNatPoolRead, - Update: resourceArmLoadBalancerNatPoolCreateUpdate, - Delete: resourceArmLoadBalancerNatPoolDelete, - - Importer: loadBalancerSubResourceImporter(func(input string) (*parse.LoadBalancerId, error) { - id, err := parse.LoadBalancerInboundNATPoolID(input) - if err != nil { - return nil, err - } - - lbId := parse.NewLoadBalancerID(id.ResourceGroup, id.LoadBalancerName) - return &lbId, nil - }), - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - - "resource_group_name": azure.SchemaResourceGroupName(), - - "loadbalancer_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: networkValidate.LoadBalancerID, - }, - - "protocol": { - Type: schema.TypeString, - Required: true, - StateFunc: state.IgnoreCase, - DiffSuppressFunc: suppress.CaseDifference, - ValidateFunc: validation.StringInSlice([]string{ - string(network.TransportProtocolAll), - string(network.TransportProtocolTCP), - string(network.TransportProtocolUDP), - }, true), - }, - - "frontend_port_start": { - Type: schema.TypeInt, - Required: true, - ValidateFunc: validate.PortNumber, - }, - - "frontend_port_end": { - Type: schema.TypeInt, - Required: true, - ValidateFunc: validate.PortNumber, - }, - - "backend_port": { - Type: schema.TypeInt, - Required: true, - ValidateFunc: validate.PortNumber, - }, - - "frontend_ip_configuration_name": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - - "frontend_ip_configuration_id": { - Type: schema.TypeString, - Computed: true, - }, - }, - } -} - -func resourceArmLoadBalancerNatPoolCreateUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.LoadBalancersClient - subscriptionId := meta.(*clients.Client).Account.SubscriptionId - ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := d.Get("name").(string) - loadBalancerId, err := parse.LoadBalancerID(d.Get("loadbalancer_id").(string)) - if err != nil { - return fmt.Errorf("parsing Load Balancer Name and Group: %+v", err) - } - - loadBalancerID := loadBalancerId.ID(subscriptionId) - locks.ByID(loadBalancerID) - defer locks.UnlockByID(loadBalancerID) - - loadBalancer, exists, err := retrieveLoadBalancerById(ctx, client, 
*loadBalancerId) - if err != nil { - return fmt.Errorf("retrieving Load Balancer By ID: %+v", err) - } - if !exists { - d.SetId("") - log.Printf("[INFO] Load Balancer %q not found. Removing from state", loadBalancerId.Name) - return nil - } - - newNatPool, err := expandAzureRmLoadBalancerNatPool(d, loadBalancer) - if err != nil { - return fmt.Errorf("expanding NAT Pool: %+v", err) - } - - natPools := append(*loadBalancer.LoadBalancerPropertiesFormat.InboundNatPools, *newNatPool) - - existingNatPool, existingNatPoolIndex, exists := FindLoadBalancerNatPoolByName(loadBalancer, name) - if exists { - if name == *existingNatPool.Name { - if d.IsNewResource() { - return tf.ImportAsExistsError("azurerm_lb_nat_pool", *existingNatPool.ID) - } - - // this probe is being updated/reapplied remove old copy from the slice - natPools = append(natPools[:existingNatPoolIndex], natPools[existingNatPoolIndex+1:]...) - } - } - - loadBalancer.LoadBalancerPropertiesFormat.InboundNatPools = &natPools - - future, err := client.CreateOrUpdate(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, *loadBalancer) - if err != nil { - return fmt.Errorf("Error Creating/Updating Load Balancer %q (Resource Group %q): %+v", loadBalancerId.Name, loadBalancerId.ResourceGroup, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for the completion of Load Balancer %q (Resource Group %q): %+v", loadBalancerId.Name, loadBalancerId.ResourceGroup, err) - } - - read, err := client.Get(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, "") - if err != nil { - return fmt.Errorf("Error retrieving Load Balancer %q (Resource Group %q): %+v", loadBalancerId.Name, loadBalancerId.ResourceGroup, err) - } - if read.ID == nil { - return fmt.Errorf("Cannot read Load Balancer %q (Resource Group %q) ID", loadBalancerId.Name, loadBalancerId.ResourceGroup) - } - - var natPoolId string - for _, InboundNatPool := range *read.LoadBalancerPropertiesFormat.InboundNatPools { - if *InboundNatPool.Name == name { - natPoolId = *InboundNatPool.ID - } - } - - if natPoolId == "" { - return fmt.Errorf("Cannot find created Load Balancer NAT Pool ID %q", natPoolId) - } - - d.SetId(natPoolId) - - return resourceArmLoadBalancerNatPoolRead(d, meta) -} - -func resourceArmLoadBalancerNatPoolRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.LoadBalancersClient - subscriptionId := meta.(*clients.Client).Account.SubscriptionId - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.LoadBalancerInboundNATPoolID(d.Id()) - if err != nil { - return err - } - - loadBalancerId := parse.NewLoadBalancerID(id.ResourceGroup, id.LoadBalancerName) - loadBalancer, exists, err := retrieveLoadBalancerById(ctx, client, loadBalancerId) - if err != nil { - return fmt.Errorf("Error retrieving Load Balancer by ID: %+v", err) - } - if !exists { - d.SetId("") - log.Printf("[INFO] Load Balancer %q not found. Removing from state", id.LoadBalancerName) - return nil - } - - config, _, exists := FindLoadBalancerNatPoolByName(loadBalancer, id.Name) - if !exists { - d.SetId("") - log.Printf("[INFO] Load Balancer Nat Pool %q not found. 
Removing from state", id.Name) - return nil - } - - d.Set("name", config.Name) - d.Set("resource_group_name", id.ResourceGroup) - - if props := config.InboundNatPoolPropertiesFormat; props != nil { - backendPort := 0 - if props.BackendPort != nil { - backendPort = int(*props.BackendPort) - } - d.Set("backend_port", backendPort) - - frontendIPConfigName := "" - frontendIPConfigID := "" - if props.FrontendIPConfiguration != nil && props.FrontendIPConfiguration.ID != nil { - feid, err := parse.LoadBalancerFrontendIPConfigurationID(*props.FrontendIPConfiguration.ID) - if err != nil { - return err - } - - frontendIPConfigName = feid.Name - frontendIPConfigID = feid.ID(subscriptionId) - } - d.Set("frontend_ip_configuration_id", frontendIPConfigID) - d.Set("frontend_ip_configuration_name", frontendIPConfigName) - - frontendPortRangeEnd := 0 - if props.FrontendPortRangeEnd != nil { - frontendPortRangeEnd = int(*props.FrontendPortRangeEnd) - } - d.Set("frontend_port_end", frontendPortRangeEnd) - - frontendPortRangeStart := 0 - if props.FrontendPortRangeStart != nil { - frontendPortRangeStart = int(*props.FrontendPortRangeStart) - } - d.Set("frontend_port_start", frontendPortRangeStart) - d.Set("protocol", string(props.Protocol)) - } - - return nil -} - -func resourceArmLoadBalancerNatPoolDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.LoadBalancersClient - subscriptionId := meta.(*clients.Client).Account.SubscriptionId - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.LoadBalancerInboundNATPoolID(d.Id()) - if err != nil { - return err - } - - loadBalancerId := parse.NewLoadBalancerID(id.ResourceGroup, id.LoadBalancerName) - loadBalancerID := loadBalancerId.ID(subscriptionId) - locks.ByID(loadBalancerID) - defer locks.UnlockByID(loadBalancerID) - - loadBalancer, exists, err := retrieveLoadBalancerById(ctx, client, loadBalancerId) - if err != nil { - return fmt.Errorf("Error retrieving Load Balancer by ID: %+v", err) - } - if !exists { - d.SetId("") - return nil - } - - _, index, exists := FindLoadBalancerNatPoolByName(loadBalancer, id.Name) - if !exists { - return nil - } - - oldNatPools := *loadBalancer.LoadBalancerPropertiesFormat.InboundNatPools - newNatPools := append(oldNatPools[:index], oldNatPools[index+1:]...) 
- loadBalancer.LoadBalancerPropertiesFormat.InboundNatPools = &newNatPools - - future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.LoadBalancerName, *loadBalancer) - if err != nil { - return fmt.Errorf("Error creating/updating Load Balancer %q (Resource Group %q): %+v", id.LoadBalancerName, id.ResourceGroup, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for completion of the Load Balancer %q (Resource Group %q): %+v", id.LoadBalancerName, id.ResourceGroup, err) - } - - read, err := client.Get(ctx, id.ResourceGroup, id.LoadBalancerName, "") - if err != nil { - return fmt.Errorf("Error retrieving Load Balancer: %+v", err) - } - if read.ID == nil { - return fmt.Errorf("Cannot read Load Balancer %q (Resource Group %q) ID", id.LoadBalancerName, id.ResourceGroup) - } - - return nil -} - -func expandAzureRmLoadBalancerNatPool(d *schema.ResourceData, lb *network.LoadBalancer) (*network.InboundNatPool, error) { - properties := network.InboundNatPoolPropertiesFormat{ - Protocol: network.TransportProtocol(d.Get("protocol").(string)), - FrontendPortRangeStart: utils.Int32(int32(d.Get("frontend_port_start").(int))), - FrontendPortRangeEnd: utils.Int32(int32(d.Get("frontend_port_end").(int))), - BackendPort: utils.Int32(int32(d.Get("backend_port").(int))), - } - - if v := d.Get("frontend_ip_configuration_name").(string); v != "" { - rule, exists := FindLoadBalancerFrontEndIpConfigurationByName(lb, v) - if !exists { - return nil, fmt.Errorf("[ERROR] Cannot find FrontEnd IP Configuration with the name %s", v) - } - - properties.FrontendIPConfiguration = &network.SubResource{ - ID: rule.ID, - } - } - - return &network.InboundNatPool{ - Name: utils.String(d.Get("name").(string)), - InboundNatPoolPropertiesFormat: &properties, - }, nil -} diff --git a/azurerm/internal/services/network/lb_nat_rule_resource.go b/azurerm/internal/services/network/lb_nat_rule_resource.go deleted file mode 100644 index 257743cc9996..000000000000 --- a/azurerm/internal/services/network/lb_nat_rule_resource.go +++ /dev/null @@ -1,371 +0,0 @@ -package network - -import ( - "fmt" - "log" - "time" - - "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-03-01/network" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" - networkValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/state" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmLoadBalancerNatRule() *schema.Resource { - return &schema.Resource{ - Create: resourceArmLoadBalancerNatRuleCreateUpdate, - Read: resourceArmLoadBalancerNatRuleRead, - 
Update: resourceArmLoadBalancerNatRuleCreateUpdate, - Delete: resourceArmLoadBalancerNatRuleDelete, - - Importer: loadBalancerSubResourceImporter(func(input string) (*parse.LoadBalancerId, error) { - id, err := parse.LoadBalancerInboundNATRuleID(input) - if err != nil { - return nil, err - } - - lbId := parse.NewLoadBalancerID(id.ResourceGroup, id.LoadBalancerName) - return &lbId, nil - }), - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - - "resource_group_name": azure.SchemaResourceGroupName(), - - "loadbalancer_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: networkValidate.LoadBalancerID, - }, - - "protocol": { - Type: schema.TypeString, - Required: true, - StateFunc: state.IgnoreCase, - DiffSuppressFunc: suppress.CaseDifference, - ValidateFunc: validation.StringInSlice([]string{ - string(network.TransportProtocolAll), - string(network.TransportProtocolTCP), - string(network.TransportProtocolUDP), - }, true), - }, - - "frontend_port": { - Type: schema.TypeInt, - Required: true, - ValidateFunc: validate.PortNumber, - }, - - "backend_port": { - Type: schema.TypeInt, - Required: true, - ValidateFunc: validate.PortNumber, - }, - - "frontend_ip_configuration_name": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - - "enable_floating_ip": { - Type: schema.TypeBool, - Optional: true, - Computed: true, - }, - - "enable_tcp_reset": { - Type: schema.TypeBool, - Optional: true, - }, - - "idle_timeout_in_minutes": { - Type: schema.TypeInt, - Optional: true, - Computed: true, - ValidateFunc: validation.IntBetween(4, 30), - }, - - "frontend_ip_configuration_id": { - Type: schema.TypeString, - Computed: true, - }, - - "backend_ip_configuration_id": { - Type: schema.TypeString, - Computed: true, - }, - }, - } -} - -func resourceArmLoadBalancerNatRuleCreateUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.LoadBalancersClient - subscriptionId := meta.(*clients.Client).Account.SubscriptionId - ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := d.Get("name").(string) - loadBalancerId, err := parse.LoadBalancerID(d.Get("loadbalancer_id").(string)) - if err != nil { - return fmt.Errorf("retrieving Load Balancer Name and Group: %+v", err) - } - loadBalancerIdRaw := loadBalancerId.ID(subscriptionId) - locks.ByID(loadBalancerIdRaw) - defer locks.UnlockByID(loadBalancerIdRaw) - - loadBalancer, exists, err := retrieveLoadBalancerById(ctx, client, *loadBalancerId) - if err != nil { - return fmt.Errorf("Error Getting Load Balancer By ID: %+v", err) - } - if !exists { - d.SetId("") - log.Printf("[INFO] Load Balancer %q not found. 
Removing from state", name) - return nil - } - - newNatRule, err := expandAzureRmLoadBalancerNatRule(d, loadBalancer, *loadBalancerId, subscriptionId) - if err != nil { - return fmt.Errorf("Error Expanding NAT Rule: %+v", err) - } - - natRules := append(*loadBalancer.LoadBalancerPropertiesFormat.InboundNatRules, *newNatRule) - - existingNatRule, existingNatRuleIndex, exists := FindLoadBalancerNatRuleByName(loadBalancer, name) - if exists { - if name == *existingNatRule.Name { - if d.IsNewResource() { - return tf.ImportAsExistsError("azurerm_lb_nat_rule", *existingNatRule.ID) - } - - // this nat rule is being updated/reapplied remove old copy from the slice - natRules = append(natRules[:existingNatRuleIndex], natRules[existingNatRuleIndex+1:]...) - } - } - - loadBalancer.LoadBalancerPropertiesFormat.InboundNatRules = &natRules - - future, err := client.CreateOrUpdate(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, *loadBalancer) - if err != nil { - return fmt.Errorf("Error Creating / Updating Load Balancer %q (Resource Group %q): %+v", loadBalancerId.Name, loadBalancerId.ResourceGroup, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for completion of Load Balancer %q (Resource Group %q): %+v", loadBalancerId.Name, loadBalancerId.ResourceGroup, err) - } - - read, err := client.Get(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, "") - if err != nil { - return fmt.Errorf("Error retrieving Load Balancer %q (Resource Group %q): %+v", loadBalancerId.Name, loadBalancerId.ResourceGroup, err) - } - - if read.ID == nil { - return fmt.Errorf("Cannot read Load Balancer %q (Resource Group %q) ID", loadBalancerId.Name, loadBalancerId.ResourceGroup) - } - - var natRuleId string - for _, InboundNatRule := range *read.LoadBalancerPropertiesFormat.InboundNatRules { - if *InboundNatRule.Name == name { - natRuleId = *InboundNatRule.ID - } - } - - if natRuleId != "" { - d.SetId(natRuleId) - } else { - return fmt.Errorf("Cannot find created Load Balancer NAT Rule ID %q", natRuleId) - } - - return resourceArmLoadBalancerNatRuleRead(d, meta) -} - -func resourceArmLoadBalancerNatRuleRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.LoadBalancersClient - subscriptionId := meta.(*clients.Client).Account.SubscriptionId - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.LoadBalancerInboundNATRuleID(d.Id()) - if err != nil { - return err - } - - loadBalancerId := parse.NewLoadBalancerID(id.ResourceGroup, id.LoadBalancerName) - loadBalancer, exists, err := retrieveLoadBalancerById(ctx, client, loadBalancerId) - if err != nil { - return fmt.Errorf("Error Getting Load Balancer By ID: %+v", err) - } - if !exists { - d.SetId("") - log.Printf("[INFO] Load Balancer %q not found. Removing from state", id.LoadBalancerName) - return nil - } - - config, _, exists := FindLoadBalancerNatRuleByName(loadBalancer, id.Name) - if !exists { - d.SetId("") - log.Printf("[INFO] Load Balancer Nat Rule %q not found. 
Removing from state", id.Name) - return nil - } - - d.Set("name", config.Name) - d.Set("resource_group_name", id.ResourceGroup) - - if props := config.InboundNatRulePropertiesFormat; props != nil { - backendIPConfigId := "" - if props.BackendIPConfiguration != nil && props.BackendIPConfiguration.ID != nil { - backendIPConfigId = *props.BackendIPConfiguration.ID - } - d.Set("backend_ip_configuration_id", backendIPConfigId) - - backendPort := 0 - if props.BackendPort != nil { - backendPort = int(*props.BackendPort) - } - d.Set("backend_port", backendPort) - d.Set("enable_floating_ip", props.EnableFloatingIP) - d.Set("enable_tcp_reset", props.EnableTCPReset) - - frontendIPConfigName := "" - frontendIPConfigID := "" - if props.FrontendIPConfiguration != nil && props.FrontendIPConfiguration.ID != nil { - feid, err := parse.LoadBalancerFrontendIPConfigurationID(*props.FrontendIPConfiguration.ID) - if err != nil { - return err - } - - frontendIPConfigName = feid.Name - frontendIPConfigID = feid.ID(subscriptionId) - } - d.Set("frontend_ip_configuration_name", frontendIPConfigName) - d.Set("frontend_ip_configuration_id", frontendIPConfigID) - - frontendPort := 0 - if props.FrontendPort != nil { - frontendPort = int(*props.FrontendPort) - } - d.Set("frontend_port", frontendPort) - - idleTimeoutInMinutes := 0 - if props.IdleTimeoutInMinutes != nil { - idleTimeoutInMinutes = int(*props.IdleTimeoutInMinutes) - } - d.Set("idle_timeout_in_minutes", idleTimeoutInMinutes) - d.Set("protocol", string(props.Protocol)) - } - - return nil -} - -func resourceArmLoadBalancerNatRuleDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.LoadBalancersClient - subscriptionId := meta.(*clients.Client).Account.SubscriptionId - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.LoadBalancerInboundNATRuleID(d.Id()) - if err != nil { - return err - } - - loadBalancerId := parse.NewLoadBalancerID(id.ResourceGroup, id.LoadBalancerName) - loadBalancerID := loadBalancerId.ID(subscriptionId) - locks.ByID(loadBalancerID) - defer locks.UnlockByID(loadBalancerID) - - loadBalancer, exists, err := retrieveLoadBalancerById(ctx, client, loadBalancerId) - if err != nil { - return fmt.Errorf("Error Getting Load Balancer By ID: %+v", err) - } - if !exists { - d.SetId("") - return nil - } - - _, index, exists := FindLoadBalancerNatRuleByName(loadBalancer, id.Name) - if !exists { - return nil - } - - oldNatRules := *loadBalancer.LoadBalancerPropertiesFormat.InboundNatRules - newNatRules := append(oldNatRules[:index], oldNatRules[index+1:]...) 
- loadBalancer.LoadBalancerPropertiesFormat.InboundNatRules = &newNatRules - - future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.LoadBalancerName, *loadBalancer) - if err != nil { - return fmt.Errorf("Error Creating/Updating Load Balancer %q (Resource Group %q) %+v", id.LoadBalancerName, id.ResourceGroup, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for the completion of Load Balancer updates for %q (Resource Group %q) %+v", id.LoadBalancerName, id.ResourceGroup, err) - } - - read, err := client.Get(ctx, id.ResourceGroup, id.LoadBalancerName, "") - if err != nil { - return fmt.Errorf("Error retrieving Load Balancer %q (Resource Group %q): %+v", id.LoadBalancerName, id.ResourceGroup, err) - } - if read.ID == nil { - return fmt.Errorf("Cannot read Load Balancer %q (resource group %q) ID", id.LoadBalancerName, id.ResourceGroup) - } - - return nil -} - -func expandAzureRmLoadBalancerNatRule(d *schema.ResourceData, lb *network.LoadBalancer, loadBalancerId parse.LoadBalancerId, subscriptionId string) (*network.InboundNatRule, error) { - properties := network.InboundNatRulePropertiesFormat{ - Protocol: network.TransportProtocol(d.Get("protocol").(string)), - FrontendPort: utils.Int32(int32(d.Get("frontend_port").(int))), - BackendPort: utils.Int32(int32(d.Get("backend_port").(int))), - EnableTCPReset: utils.Bool(d.Get("enable_tcp_reset").(bool)), - } - - if v, ok := d.GetOk("enable_floating_ip"); ok { - properties.EnableFloatingIP = utils.Bool(v.(bool)) - } - - if v, ok := d.GetOk("idle_timeout_in_minutes"); ok { - properties.IdleTimeoutInMinutes = utils.Int32(int32(v.(int))) - } - - if v := d.Get("frontend_ip_configuration_name").(string); v != "" { - if _, exists := FindLoadBalancerFrontEndIpConfigurationByName(lb, v); !exists { - return nil, fmt.Errorf("[ERROR] Cannot find FrontEnd IP Configuration with the name %s", v) - } - - id := parse.NewLoadBalancerFrontendIPConfigurationId(loadBalancerId, v).ID(subscriptionId) - properties.FrontendIPConfiguration = &network.SubResource{ - ID: utils.String(id), - } - } - - natRule := network.InboundNatRule{ - Name: utils.String(d.Get("name").(string)), - InboundNatRulePropertiesFormat: &properties, - } - - return &natRule, nil -} diff --git a/azurerm/internal/services/network/lb_outbound_rule_resource.go b/azurerm/internal/services/network/lb_outbound_rule_resource.go deleted file mode 100644 index 09fe6ba5b706..000000000000 --- a/azurerm/internal/services/network/lb_outbound_rule_resource.go +++ /dev/null @@ -1,383 +0,0 @@ -package network - -import ( - "fmt" - "log" - "time" - - "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-03-01/network" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmLoadBalancerOutboundRule() *schema.Resource { - return &schema.Resource{ - Create: resourceArmLoadBalancerOutboundRuleCreateUpdate, - Read: resourceArmLoadBalancerOutboundRuleRead, - Update: resourceArmLoadBalancerOutboundRuleCreateUpdate, - Delete: resourceArmLoadBalancerOutboundRuleDelete, - - Importer: loadBalancerSubResourceImporter(func(input string) (*parse.LoadBalancerId, error) { - id, err := parse.LoadBalancerOutboundRuleID(input) - if err != nil { - return nil, err - } - - lbId := parse.NewLoadBalancerID(id.ResourceGroup, id.LoadBalancerName) - return &lbId, nil - }), - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - - "resource_group_name": azure.SchemaResourceGroupName(), - - "loadbalancer_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validate.LoadBalancerID, - }, - - "frontend_ip_configuration": { - Type: schema.TypeList, - Optional: true, - MinItems: 1, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - - "id": { - Type: schema.TypeString, - Computed: true, - }, - }, - }, - }, - - "backend_address_pool_id": { - Type: schema.TypeString, - Required: true, - }, - - "protocol": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringInSlice([]string{ - string(network.TransportProtocolAll), - string(network.TransportProtocolTCP), - string(network.TransportProtocolUDP), - }, false), - }, - - "enable_tcp_reset": { - Type: schema.TypeBool, - Optional: true, - Default: false, - }, - - "allocated_outbound_ports": { - Type: schema.TypeInt, - Optional: true, - Default: 1024, - }, - - "idle_timeout_in_minutes": { - Type: schema.TypeInt, - Optional: true, - Default: 4, - }, - }, - } -} - -func resourceArmLoadBalancerOutboundRuleCreateUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.LoadBalancersClient - subscriptionId := meta.(*clients.Client).Account.SubscriptionId - ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := d.Get("name").(string) - loadBalancerId, err := parse.LoadBalancerID(d.Get("loadbalancer_id").(string)) - if err != nil { - return err - } - loadBalancerIDRaw := loadBalancerId.ID(subscriptionId) - locks.ByID(loadBalancerIDRaw) - defer locks.UnlockByID(loadBalancerIDRaw) - - loadBalancer, exists, err := retrieveLoadBalancerById(ctx, client, *loadBalancerId) - if err != nil { - return fmt.Errorf("Error Getting Load Balancer By ID: %+v", err) - } - if !exists { - d.SetId("") - log.Printf("[INFO] Load Balancer %q not found. 
Removing from state", name) - return nil - } - - newOutboundRule, err := expandAzureRmLoadBalancerOutboundRule(d, loadBalancer) - if err != nil { - return fmt.Errorf("expanding Load Balancer Rule: %+v", err) - } - - outboundRules := make([]network.OutboundRule, 0) - - if loadBalancer.LoadBalancerPropertiesFormat.OutboundRules != nil { - outboundRules = *loadBalancer.LoadBalancerPropertiesFormat.OutboundRules - } - - existingOutboundRule, existingOutboundRuleIndex, exists := FindLoadBalancerOutboundRuleByName(loadBalancer, name) - if exists { - if name == *existingOutboundRule.Name { - if d.IsNewResource() { - return tf.ImportAsExistsError("azurerm_lb_outbound_rule", *existingOutboundRule.ID) - } - - // this outbound rule is being updated/reapplied remove old copy from the slice - outboundRules = append(outboundRules[:existingOutboundRuleIndex], outboundRules[existingOutboundRuleIndex+1:]...) - } - } - - outboundRules = append(outboundRules, *newOutboundRule) - - loadBalancer.LoadBalancerPropertiesFormat.OutboundRules = &outboundRules - - future, err := client.CreateOrUpdate(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, *loadBalancer) - if err != nil { - return fmt.Errorf("Error Creating/Updating LoadBalancer: %+v", err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for completion for Load Balancer updates: %+v", err) - } - - read, err := client.Get(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, "") - if err != nil { - return fmt.Errorf("Error Getting LoadBalancer: %+v", err) - } - - if read.ID == nil { - return fmt.Errorf("Cannot read Load Balancer %s (resource group %s) ID", loadBalancerId.Name, loadBalancerId.ResourceGroup) - } - - var outboundRuleId string - for _, OutboundRule := range *read.LoadBalancerPropertiesFormat.OutboundRules { - if *OutboundRule.Name == name { - outboundRuleId = *OutboundRule.ID - } - } - - if outboundRuleId == "" { - return fmt.Errorf("Cannot find created Load Balancer Outbound Rule ID %q", outboundRuleId) - } - - d.SetId(outboundRuleId) - - return resourceArmLoadBalancerOutboundRuleRead(d, meta) -} - -func resourceArmLoadBalancerOutboundRuleRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.LoadBalancersClient - subscriptionId := meta.(*clients.Client).Account.SubscriptionId - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.LoadBalancerOutboundRuleID(d.Id()) - if err != nil { - return err - } - - loadBalancerId := parse.NewLoadBalancerID(id.ResourceGroup, id.LoadBalancerName) - loadBalancer, exists, err := retrieveLoadBalancerById(ctx, client, loadBalancerId) - if err != nil { - return fmt.Errorf("Error Getting Load Balancer By ID: %+v", err) - } - if !exists { - d.SetId("") - log.Printf("[INFO] Load Balancer %q not found. Removing from state", id.LoadBalancerName) - return nil - } - - config, _, exists := FindLoadBalancerOutboundRuleByName(loadBalancer, id.Name) - if !exists { - d.SetId("") - log.Printf("[INFO] Load Balancer Outbound Rule %q not found. 
Removing from state", id.Name) - return nil - } - - d.Set("name", config.Name) - d.Set("resource_group_name", id.ResourceGroup) - - if props := config.OutboundRulePropertiesFormat; props != nil { - allocatedOutboundPorts := 0 - if props.AllocatedOutboundPorts != nil { - allocatedOutboundPorts = int(*props.AllocatedOutboundPorts) - } - d.Set("allocated_outbound_ports", allocatedOutboundPorts) - - backendAddressPoolId := "" - if props.BackendAddressPool != nil && props.BackendAddressPool.ID != nil { - bapid, err := parse.LoadBalancerBackendAddressPoolID(*props.BackendAddressPool.ID) - if err != nil { - return err - } - - backendAddressPoolId = bapid.ID(subscriptionId) - } - d.Set("backend_address_pool_id", backendAddressPoolId) - d.Set("enable_tcp_reset", props.EnableTCPReset) - - frontendIpConfigurations := make([]interface{}, 0) - for _, feConfig := range *props.FrontendIPConfigurations { - if feConfig.ID == nil { - continue - } - feid, err := parse.LoadBalancerFrontendIPConfigurationID(*feConfig.ID) - if err != nil { - return err - } - - frontendIpConfigurations = append(frontendIpConfigurations, map[string]interface{}{ - "id": feid.ID(subscriptionId), - "name": feid.Name, - }) - } - d.Set("frontend_ip_configuration", frontendIpConfigurations) - - idleTimeoutInMinutes := 0 - if props.IdleTimeoutInMinutes != nil { - idleTimeoutInMinutes = int(*props.IdleTimeoutInMinutes) - } - d.Set("idle_timeout_in_minutes", idleTimeoutInMinutes) - d.Set("protocol", string(props.Protocol)) - } - - return nil -} - -func resourceArmLoadBalancerOutboundRuleDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.LoadBalancersClient - subscriptionId := meta.(*clients.Client).Account.SubscriptionId - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.LoadBalancerOutboundRuleID(d.Id()) - if err != nil { - return err - } - - loadBalancerId := parse.NewLoadBalancerID(id.ResourceGroup, id.LoadBalancerName) - loadBalancerID := loadBalancerId.ID(subscriptionId) - locks.ByID(loadBalancerID) - defer locks.UnlockByID(loadBalancerID) - - loadBalancer, exists, err := retrieveLoadBalancerById(ctx, client, loadBalancerId) - if err != nil { - return fmt.Errorf("retrieving Load Balancer By ID: %+v", err) - } - if !exists { - d.SetId("") - return nil - } - - _, index, exists := FindLoadBalancerOutboundRuleByName(loadBalancer, id.Name) - if !exists { - return nil - } - - oldOutboundRules := *loadBalancer.LoadBalancerPropertiesFormat.OutboundRules - newOutboundRules := append(oldOutboundRules[:index], oldOutboundRules[index+1:]...) 
- loadBalancer.LoadBalancerPropertiesFormat.OutboundRules = &newOutboundRules - - future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.LoadBalancerName, *loadBalancer) - if err != nil { - return fmt.Errorf("Creating/Updating Load Balancer %q (Resource Group %q): %+v", id.LoadBalancerName, id.ResourceGroup, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for completion of Load Balancer %q (Resource Group %q): %+v", id.LoadBalancerName, id.ResourceGroup, err) - } - - read, err := client.Get(ctx, id.ResourceGroup, id.LoadBalancerName, "") - if err != nil { - return fmt.Errorf("Error Getting LoadBalancer: %+v", err) - } - if read.ID == nil { - return fmt.Errorf("Cannot read ID of Load Balancer %q (resource group %s)", id.LoadBalancerName, id.ResourceGroup) - } - - return nil -} - -func expandAzureRmLoadBalancerOutboundRule(d *schema.ResourceData, lb *network.LoadBalancer) (*network.OutboundRule, error) { - properties := network.OutboundRulePropertiesFormat{ - Protocol: network.LoadBalancerOutboundRuleProtocol(d.Get("protocol").(string)), - } - - feConfigs := d.Get("frontend_ip_configuration").([]interface{}) - feConfigSubResources := make([]network.SubResource, 0) - - for _, raw := range feConfigs { - v := raw.(map[string]interface{}) - rule, exists := FindLoadBalancerFrontEndIpConfigurationByName(lb, v["name"].(string)) - if !exists { - return nil, fmt.Errorf("[ERROR] Cannot find FrontEnd IP Configuration with the name %s", v["name"]) - } - - feConfigSubResource := network.SubResource{ - ID: rule.ID, - } - - feConfigSubResources = append(feConfigSubResources, feConfigSubResource) - } - - properties.FrontendIPConfigurations = &feConfigSubResources - - if v := d.Get("backend_address_pool_id").(string); v != "" { - properties.BackendAddressPool = &network.SubResource{ - ID: &v, - } - } - - if v, ok := d.GetOk("idle_timeout_in_minutes"); ok { - properties.IdleTimeoutInMinutes = utils.Int32(int32(v.(int))) - } - - if v, ok := d.GetOk("enable_tcp_reset"); ok { - properties.EnableTCPReset = utils.Bool(v.(bool)) - } - - if v, ok := d.GetOk("allocated_outbound_ports"); ok { - properties.AllocatedOutboundPorts = utils.Int32(int32(v.(int))) - } - - return &network.OutboundRule{ - Name: utils.String(d.Get("name").(string)), - OutboundRulePropertiesFormat: &properties, - }, nil -} diff --git a/azurerm/internal/services/network/lb_probe_resource.go b/azurerm/internal/services/network/lb_probe_resource.go deleted file mode 100644 index b2fbb4046aab..000000000000 --- a/azurerm/internal/services/network/lb_probe_resource.go +++ /dev/null @@ -1,330 +0,0 @@ -package network - -import ( - "fmt" - "log" - "time" - - "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-03-01/network" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" - networkValidate 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmLoadBalancerProbe() *schema.Resource { - return &schema.Resource{ - Create: resourceArmLoadBalancerProbeCreateUpdate, - Read: resourceArmLoadBalancerProbeRead, - Update: resourceArmLoadBalancerProbeCreateUpdate, - Delete: resourceArmLoadBalancerProbeDelete, - - Importer: loadBalancerSubResourceImporter(func(input string) (*parse.LoadBalancerId, error) { - id, err := parse.LoadBalancerProbeID(input) - if err != nil { - return nil, err - } - - lbId := parse.NewLoadBalancerID(id.ResourceGroup, id.LoadBalancerName) - return &lbId, nil - }), - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - - "resource_group_name": azure.SchemaResourceGroupName(), - - "loadbalancer_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: networkValidate.LoadBalancerID, - }, - - "protocol": { - Type: schema.TypeString, - Computed: true, - Optional: true, - DiffSuppressFunc: suppress.CaseDifference, - ValidateFunc: validation.StringInSlice([]string{ - string(network.ProbeProtocolHTTP), - string(network.ProbeProtocolHTTPS), - string(network.ProbeProtocolTCP), - }, true), - }, - - "port": { - Type: schema.TypeInt, - Required: true, - ValidateFunc: validate.PortNumber, - }, - - "request_path": { - Type: schema.TypeString, - Optional: true, - }, - - "interval_in_seconds": { - Type: schema.TypeInt, - Optional: true, - Default: 15, - ValidateFunc: validation.IntAtLeast(5), - }, - - "number_of_probes": { - Type: schema.TypeInt, - Optional: true, - Default: 2, - }, - - "load_balancer_rules": { - Type: schema.TypeSet, - Computed: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - ValidateFunc: validation.StringIsNotEmpty, - }, - Set: schema.HashString, - }, - }, - } -} - -func resourceArmLoadBalancerProbeCreateUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.LoadBalancersClient - subscriptionId := meta.(*clients.Client).Account.SubscriptionId - ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := d.Get("name").(string) - loadBalancerId, err := parse.LoadBalancerID(d.Get("loadbalancer_id").(string)) - if err != nil { - return err - } - loadBalancerIDRaw := loadBalancerId.ID(subscriptionId) - locks.ByID(loadBalancerIDRaw) - defer locks.UnlockByID(loadBalancerIDRaw) - - loadBalancer, exists, err := retrieveLoadBalancerById(ctx, client, *loadBalancerId) - if err != nil { - return fmt.Errorf("Error Getting Load Balancer By ID: %+v", err) - } - if !exists { - d.SetId("") - log.Printf("[INFO] Load Balancer %q not found. 
Removing from state", name) - return nil - } - - newProbe := expandAzureRmLoadBalancerProbe(d) - probes := append(*loadBalancer.LoadBalancerPropertiesFormat.Probes, *newProbe) - - existingProbe, existingProbeIndex, exists := FindLoadBalancerProbeByName(loadBalancer, name) - if exists { - if name == *existingProbe.Name { - if d.IsNewResource() { - return tf.ImportAsExistsError("azurerm_lb_probe", *existingProbe.ID) - } - - // this probe is being updated/reapplied remove old copy from the slice - probes = append(probes[:existingProbeIndex], probes[existingProbeIndex+1:]...) - } - } - - loadBalancer.LoadBalancerPropertiesFormat.Probes = &probes - - future, err := client.CreateOrUpdate(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, *loadBalancer) - if err != nil { - return fmt.Errorf("Error Creating/Updating Load Balancer %q (Resource Group %q): %+v", loadBalancerId.Name, loadBalancerId.ResourceGroup, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for completion of Load Balancer %q (Resource Group %q): %+v", loadBalancerId.Name, loadBalancerId.ResourceGroup, err) - } - - read, err := client.Get(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, "") - if err != nil { - return fmt.Errorf("Error retrieving Load Balancer %q (Resource Group %q): %+v", loadBalancerId.Name, loadBalancerId.ResourceGroup, err) - } - if read.ID == nil { - return fmt.Errorf("Cannot read Load Balancer %q (resource group %q) ID", loadBalancerId.Name, loadBalancerId.ResourceGroup) - } - - var createdProbeId string - for _, Probe := range *read.LoadBalancerPropertiesFormat.Probes { - if *Probe.Name == name { - createdProbeId = *Probe.ID - } - } - - if createdProbeId == "" { - return fmt.Errorf("Cannot find created Load Balancer Probe ID %q", createdProbeId) - } - - d.SetId(createdProbeId) - - return resourceArmLoadBalancerProbeRead(d, meta) -} - -func resourceArmLoadBalancerProbeRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.LoadBalancersClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.LoadBalancerProbeID(d.Id()) - if err != nil { - return err - } - - loadBalancerId := parse.NewLoadBalancerID(id.ResourceGroup, id.LoadBalancerName) - loadBalancer, exists, err := retrieveLoadBalancerById(ctx, client, loadBalancerId) - if err != nil { - return fmt.Errorf("Error Getting Load Balancer By ID: %+v", err) - } - if !exists { - d.SetId("") - log.Printf("[INFO] Load Balancer %q not found. Removing from state", id.LoadBalancerName) - return nil - } - - config, _, exists := FindLoadBalancerProbeByName(loadBalancer, id.Name) - if !exists { - d.SetId("") - log.Printf("[INFO] Load Balancer Probe %q not found. 
Removing from state", id.Name) - return nil - } - - d.Set("name", config.Name) - d.Set("resource_group_name", id.ResourceGroup) - - if props := config.ProbePropertiesFormat; props != nil { - intervalInSeconds := 0 - if props.IntervalInSeconds != nil { - intervalInSeconds = int(*props.IntervalInSeconds) - } - d.Set("interval_in_seconds", intervalInSeconds) - - numberOfProbes := 0 - if props.NumberOfProbes != nil { - numberOfProbes = int(*props.NumberOfProbes) - } - d.Set("number_of_probes", numberOfProbes) - - port := 0 - if props.Port != nil { - port = int(*props.Port) - } - d.Set("port", port) - d.Set("protocol", string(props.Protocol)) - d.Set("request_path", props.RequestPath) - - // TODO: parse/make these consistent - var loadBalancerRules []string - if rules := props.LoadBalancingRules; rules != nil { - for _, ruleConfig := range *rules { - if id := ruleConfig.ID; id != nil { - loadBalancerRules = append(loadBalancerRules, *id) - } - } - } - if err := d.Set("load_balancer_rules", loadBalancerRules); err != nil { - return fmt.Errorf("Error setting `load_balancer_rules` (Load Balancer Probe %q): %+v", id.Name, err) - } - } - - return nil -} - -func resourceArmLoadBalancerProbeDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.LoadBalancersClient - subscriptionId := meta.(*clients.Client).Account.SubscriptionId - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.LoadBalancerProbeID(d.Id()) - if err != nil { - return err - } - - loadBalancerId := parse.NewLoadBalancerID(id.ResourceGroup, id.LoadBalancerName) - loadBalancerID := loadBalancerId.ID(subscriptionId) - locks.ByID(loadBalancerID) - defer locks.UnlockByID(loadBalancerID) - - loadBalancer, exists, err := retrieveLoadBalancerById(ctx, client, loadBalancerId) - if err != nil { - return fmt.Errorf("Error Getting Load Balancer By ID: %+v", err) - } - if !exists { - d.SetId("") - return nil - } - - _, index, exists := FindLoadBalancerProbeByName(loadBalancer, id.Name) - if !exists { - return nil - } - - oldProbes := *loadBalancer.LoadBalancerPropertiesFormat.Probes - newProbes := append(oldProbes[:index], oldProbes[index+1:]...) 
- loadBalancer.LoadBalancerPropertiesFormat.Probes = &newProbes - - future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.LoadBalancerName, *loadBalancer) - if err != nil { - return fmt.Errorf("Error Creating/Updating Load Balancer %q (Resource Group %q): %+v", id.LoadBalancerName, id.ResourceGroup, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for completion of Load Balancer %q (Resource Group %q): %+v", id.LoadBalancerName, id.ResourceGroup, err) - } - - read, err := client.Get(ctx, id.ResourceGroup, id.LoadBalancerName, "") - if err != nil { - return fmt.Errorf("Error Getting LoadBalancer: %+v", err) - } - if read.ID == nil { - return fmt.Errorf("Cannot read Load Balancer %s (resource group %s) ID", id.LoadBalancerName, id.ResourceGroup) - } - - return nil -} - -func expandAzureRmLoadBalancerProbe(d *schema.ResourceData) *network.Probe { - properties := network.ProbePropertiesFormat{ - NumberOfProbes: utils.Int32(int32(d.Get("number_of_probes").(int))), - IntervalInSeconds: utils.Int32(int32(d.Get("interval_in_seconds").(int))), - Port: utils.Int32(int32(d.Get("port").(int))), - } - - if v, ok := d.GetOk("protocol"); ok { - properties.Protocol = network.ProbeProtocol(v.(string)) - } - - if v, ok := d.GetOk("request_path"); ok { - properties.RequestPath = utils.String(v.(string)) - } - - return &network.Probe{ - Name: utils.String(d.Get("name").(string)), - ProbePropertiesFormat: &properties, - } -} diff --git a/azurerm/internal/services/network/lb_resource.go b/azurerm/internal/services/network/lb_resource.go deleted file mode 100644 index 7eeee5cc898a..000000000000 --- a/azurerm/internal/services/network/lb_resource.go +++ /dev/null @@ -1,457 +0,0 @@ -package network - -import ( - "fmt" - "log" - "time" - - "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-03-01/network" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" - azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/state" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmLoadBalancer() *schema.Resource { - return &schema.Resource{ - Create: resourceArmLoadBalancerCreateUpdate, - Read: resourceArmLoadBalancerRead, - Update: resourceArmLoadBalancerCreateUpdate, - Delete: resourceArmLoadBalancerDelete, - - Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.LoadBalancerID(id) - return err - }), - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Schema: 
map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - }, - - "location": azure.SchemaLocation(), - - "resource_group_name": azure.SchemaResourceGroupName(), - - "sku": { - Type: schema.TypeString, - Optional: true, - Default: string(network.LoadBalancerSkuNameBasic), - ForceNew: true, - ValidateFunc: validation.StringInSlice([]string{ - string(network.LoadBalancerSkuNameBasic), - string(network.LoadBalancerSkuNameStandard), - }, true), - DiffSuppressFunc: suppress.CaseDifference, - }, - - "frontend_ip_configuration": { - Type: schema.TypeList, - Optional: true, - MinItems: 1, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - - "subnet_id": { - Type: schema.TypeString, - Optional: true, - Computed: true, - ValidateFunc: azure.ValidateResourceIDOrEmpty, - }, - - "private_ip_address": { - Type: schema.TypeString, - Optional: true, - Computed: true, - ValidateFunc: validation.Any( - validation.IsIPAddress, - validation.StringIsEmpty, - ), - }, - - "private_ip_address_version": { - Type: schema.TypeString, - Optional: true, - Default: string(network.IPv4), - ValidateFunc: validation.StringInSlice([]string{ - string(network.IPv4), - string(network.IPv6), - }, false), - }, - - "public_ip_address_id": { - Type: schema.TypeString, - Optional: true, - Computed: true, - ValidateFunc: azure.ValidateResourceIDOrEmpty, - }, - - "public_ip_prefix_id": { - Type: schema.TypeString, - Optional: true, - Computed: true, - ValidateFunc: azure.ValidateResourceIDOrEmpty, - }, - - "private_ip_address_allocation": { - Type: schema.TypeString, - Optional: true, - Computed: true, - ValidateFunc: validation.StringInSlice([]string{ - string(network.Dynamic), - string(network.Static), - }, true), - StateFunc: state.IgnoreCase, - DiffSuppressFunc: suppress.CaseDifference, - }, - - "load_balancer_rules": { - Type: schema.TypeSet, - Computed: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - ValidateFunc: validation.StringIsNotEmpty, - }, - Set: schema.HashString, - }, - - "inbound_nat_rules": { - Type: schema.TypeSet, - Computed: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - ValidateFunc: validation.StringIsNotEmpty, - }, - Set: schema.HashString, - }, - - "outbound_rules": { - Type: schema.TypeSet, - Computed: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - ValidateFunc: validation.StringIsNotEmpty, - }, - Set: schema.HashString, - }, - - "zones": azure.SchemaSingleZone(), - - "id": { - Type: schema.TypeString, - Computed: true, - }, - }, - }, - }, - - "private_ip_address": { - Type: schema.TypeString, - Computed: true, - }, - "private_ip_addresses": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - }, - - "tags": tags.Schema(), - }, - } -} - -func resourceArmLoadBalancerCreateUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.LoadBalancersClient - ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - - log.Printf("[INFO] preparing arguments for Azure ARM Load Balancer creation.") - - name := d.Get("name").(string) - resGroup := d.Get("resource_group_name").(string) - - if d.IsNewResource() { - existing, err := client.Get(ctx, resGroup, name, "") - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for presence of existing Load 
Balancer %q (Resource Group %q): %s", name, resGroup, err) - } - } - - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_lb", *existing.ID) - } - } - - location := azure.NormalizeLocation(d.Get("location").(string)) - sku := network.LoadBalancerSku{ - Name: network.LoadBalancerSkuName(d.Get("sku").(string)), - } - t := d.Get("tags").(map[string]interface{}) - expandedTags := tags.Expand(t) - - properties := network.LoadBalancerPropertiesFormat{} - - if _, ok := d.GetOk("frontend_ip_configuration"); ok { - properties.FrontendIPConfigurations = expandAzureRmLoadBalancerFrontendIpConfigurations(d) - } - - loadBalancer := network.LoadBalancer{ - Name: utils.String(name), - Location: utils.String(location), - Tags: expandedTags, - Sku: &sku, - LoadBalancerPropertiesFormat: &properties, - } - - future, err := client.CreateOrUpdate(ctx, resGroup, name, loadBalancer) - if err != nil { - return fmt.Errorf("Error Creating/Updating Load Balancer %q (Resource Group %q): %+v", name, resGroup, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error Creating/Updating Load Balancer %q (Resource Group %q): %+v", name, resGroup, err) - } - - read, err := client.Get(ctx, resGroup, name, "") - if err != nil { - return fmt.Errorf("Error Retrieving Load Balancer %q (Resource Group %q): %+v", name, resGroup, err) - } - if read.ID == nil { - return fmt.Errorf("Cannot read Load Balancer %q (resource group %q) ID", name, resGroup) - } - - d.SetId(*read.ID) - - return resourceArmLoadBalancerRead(d, meta) -} - -func resourceArmLoadBalancerRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.LoadBalancersClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.LoadBalancerID(d.Id()) - if err != nil { - return err - } - - loadBalancer, exists, err := retrieveLoadBalancerById(ctx, client, *id) - if err != nil { - return fmt.Errorf("Error retrieving Load Balancer by ID %q: %+v", d.Id(), err) - } - if !exists { - d.SetId("") - log.Printf("[INFO] Load Balancer %q not found. 
Removing from state", d.Id()) - return nil - } - - d.Set("name", loadBalancer.Name) - d.Set("resource_group_name", id.ResourceGroup) - if location := loadBalancer.Location; location != nil { - d.Set("location", azure.NormalizeLocation(*location)) - } - - if sku := loadBalancer.Sku; sku != nil { - d.Set("sku", string(sku.Name)) - } - - if props := loadBalancer.LoadBalancerPropertiesFormat; props != nil { - if feipConfigs := props.FrontendIPConfigurations; feipConfigs != nil { - if err := d.Set("frontend_ip_configuration", flattenLoadBalancerFrontendIpConfiguration(feipConfigs)); err != nil { - return fmt.Errorf("Error flattening `frontend_ip_configuration`: %+v", err) - } - - privateIpAddress := "" - privateIpAddresses := make([]string, 0) - for _, config := range *feipConfigs { - if feipProps := config.FrontendIPConfigurationPropertiesFormat; feipProps != nil { - if ip := feipProps.PrivateIPAddress; ip != nil { - if privateIpAddress == "" { - privateIpAddress = *feipProps.PrivateIPAddress - } - - privateIpAddresses = append(privateIpAddresses, *feipProps.PrivateIPAddress) - } - } - } - - d.Set("private_ip_address", privateIpAddress) - d.Set("private_ip_addresses", privateIpAddresses) - } - } - - return tags.FlattenAndSet(d, loadBalancer.Tags) -} - -func resourceArmLoadBalancerDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.LoadBalancersClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.LoadBalancerID(d.Id()) - if err != nil { - return err - } - - future, err := client.Delete(ctx, id.ResourceGroup, id.Name) - if err != nil { - return fmt.Errorf("deleting Load Balancer %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("waiting for deletion of Load Balancer %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) - } - - return nil -} - -func expandAzureRmLoadBalancerFrontendIpConfigurations(d *schema.ResourceData) *[]network.FrontendIPConfiguration { - configs := d.Get("frontend_ip_configuration").([]interface{}) - frontEndConfigs := make([]network.FrontendIPConfiguration, 0, len(configs)) - - for _, configRaw := range configs { - data := configRaw.(map[string]interface{}) - - privateIpAllocationMethod := data["private_ip_address_allocation"].(string) - properties := network.FrontendIPConfigurationPropertiesFormat{ - PrivateIPAllocationMethod: network.IPAllocationMethod(privateIpAllocationMethod), - } - - if v := data["private_ip_address"].(string); v != "" { - properties.PrivateIPAddress = &v - } - - properties.PrivateIPAddressVersion = network.IPVersion(data["private_ip_address_version"].(string)) - - if v := data["public_ip_address_id"].(string); v != "" { - properties.PublicIPAddress = &network.PublicIPAddress{ - ID: &v, - } - } - - if v := data["public_ip_prefix_id"].(string); v != "" { - properties.PublicIPPrefix = &network.SubResource{ - ID: &v, - } - } - - if v := data["subnet_id"].(string); v != "" { - properties.Subnet = &network.Subnet{ - ID: &v, - } - } - - name := data["name"].(string) - zones := azure.ExpandZones(data["zones"].([]interface{})) - frontEndConfig := network.FrontendIPConfiguration{ - Name: &name, - FrontendIPConfigurationPropertiesFormat: &properties, - Zones: zones, - } - - frontEndConfigs = append(frontEndConfigs, frontEndConfig) - } - - return &frontEndConfigs -} - -func flattenLoadBalancerFrontendIpConfiguration(ipConfigs 
*[]network.FrontendIPConfiguration) []interface{} { - result := make([]interface{}, 0) - if ipConfigs == nil { - return result - } - - for _, config := range *ipConfigs { - ipConfig := make(map[string]interface{}) - - if config.Name != nil { - ipConfig["name"] = *config.Name - } - - if config.ID != nil { - ipConfig["id"] = *config.ID - } - - zones := make([]string, 0) - if zs := config.Zones; zs != nil { - zones = *zs - } - ipConfig["zones"] = zones - - if props := config.FrontendIPConfigurationPropertiesFormat; props != nil { - ipConfig["private_ip_address_allocation"] = string(props.PrivateIPAllocationMethod) - - if subnet := props.Subnet; subnet != nil { - ipConfig["subnet_id"] = *subnet.ID - } - - if pip := props.PrivateIPAddress; pip != nil { - ipConfig["private_ip_address"] = *pip - } - - if props.PrivateIPAddressVersion != "" { - ipConfig["private_ip_address_version"] = string(props.PrivateIPAddressVersion) - } - - if pip := props.PublicIPAddress; pip != nil { - ipConfig["public_ip_address_id"] = *pip.ID - } - - if pip := props.PublicIPPrefix; pip != nil { - ipConfig["public_ip_prefix_id"] = *pip.ID - } - - loadBalancingRules := make([]interface{}, 0) - if rules := props.LoadBalancingRules; rules != nil { - for _, rule := range *rules { - loadBalancingRules = append(loadBalancingRules, *rule.ID) - } - } - ipConfig["load_balancer_rules"] = schema.NewSet(schema.HashString, loadBalancingRules) - - inboundNatRules := make([]interface{}, 0) - if rules := props.InboundNatRules; rules != nil { - for _, rule := range *rules { - inboundNatRules = append(inboundNatRules, *rule.ID) - } - } - ipConfig["inbound_nat_rules"] = schema.NewSet(schema.HashString, inboundNatRules) - - outboundRules := make([]interface{}, 0) - if rules := props.OutboundRules; rules != nil { - for _, rule := range *rules { - outboundRules = append(outboundRules, *rule.ID) - } - } - ipConfig["outbound_rules"] = schema.NewSet(schema.HashString, outboundRules) - } - - result = append(result, ipConfig) - } - return result -} diff --git a/azurerm/internal/services/network/lb_rule_data_source.go b/azurerm/internal/services/network/lb_rule_data_source.go deleted file mode 100644 index 74cba7686fcc..000000000000 --- a/azurerm/internal/services/network/lb_rule_data_source.go +++ /dev/null @@ -1,177 +0,0 @@ -package network - -import ( - "fmt" - "time" - - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" - networkValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func dataSourceArmLoadBalancerRule() *schema.Resource { - return &schema.Resource{ - Read: dataSourceArmLoadBalancerRuleRead, - - Timeouts: &schema.ResourceTimeout{ - Read: schema.DefaultTimeout(5 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ValidateFunc: ValidateArmLoadBalancerRuleName, - }, - - "resource_group_name": azure.SchemaResourceGroupNameForDataSource(), - - "loadbalancer_id": { - Type: schema.TypeString, - Required: true, - ValidateFunc: networkValidate.LoadBalancerID, - }, - - 
"frontend_ip_configuration_name": { - Type: schema.TypeString, - Computed: true, - }, - - "protocol": { - Type: schema.TypeString, - Computed: true, - }, - - "frontend_port": { - Type: schema.TypeInt, - Computed: true, - }, - - "backend_port": { - Type: schema.TypeInt, - Computed: true, - }, - - "backend_address_pool_id": { - Type: schema.TypeString, - Computed: true, - }, - - "probe_id": { - Type: schema.TypeString, - Computed: true, - }, - - "enable_floating_ip": { - Type: schema.TypeBool, - Computed: true, - }, - - "enable_tcp_reset": { - Type: schema.TypeBool, - Computed: true, - }, - - "disable_outbound_snat": { - Type: schema.TypeBool, - Computed: true, - }, - - "idle_timeout_in_minutes": { - Type: schema.TypeInt, - Computed: true, - }, - - "load_distribution": { - Type: schema.TypeString, - Computed: true, - }, - }, - } -} - -func dataSourceArmLoadBalancerRuleRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.LoadBalancersClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := d.Get("name").(string) - resourceGroup := d.Get("resource_group_name").(string) - loadBalancerID, err := parse.LoadBalancerID(d.Get("loadbalancer_id").(string)) - if err != nil { - return err - } - - loadBalancer, exists, err := retrieveLoadBalancerById(ctx, client, *loadBalancerID) - if err != nil { - return fmt.Errorf("retrieving Load Balancer by ID: %+v", err) - } - if !exists { - return fmt.Errorf("Load Balancer %q (Resource Group %q) was not found", loadBalancerID.Name, loadBalancerID.ResourceGroup) - } - - lbRuleClient := meta.(*clients.Client).Network.LoadBalancerLoadBalancingRulesClient - ctx, cancel = timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - resp, err := lbRuleClient.Get(ctx, resourceGroup, *loadBalancer.Name, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Load Balancer Rule %q was not found in Load Balancer %q (Resource Group: %q)", name, *loadBalancer.Name, resourceGroup) - } - - return fmt.Errorf("retrieving Load Balancer %s: %s", name, err) - } - - d.SetId(*resp.ID) - - if props := resp.LoadBalancingRulePropertiesFormat; props != nil { - frontendIPConfigurationName, err := parse.LoadBalancerFrontendIPConfigurationID(*props.FrontendIPConfiguration.ID) - if err != nil { - return err - } - - d.Set("frontend_ip_configuration_name", frontendIPConfigurationName.Name) - d.Set("protocol", props.Protocol) - d.Set("frontend_port", props.FrontendPort) - d.Set("backend_port", props.BackendPort) - - if props.BackendAddressPool != nil { - if err := d.Set("backend_address_pool_id", props.BackendAddressPool.ID); err != nil { - return fmt.Errorf("setting `backend_address_pool_id`: %+v", err) - } - } - - if props.Probe != nil { - if err := d.Set("probe_id", props.Probe.ID); err != nil { - return fmt.Errorf("setting `probe_id`: %+v", err) - } - } - - if err := d.Set("enable_floating_ip", props.EnableFloatingIP); err != nil { - return fmt.Errorf("setting `enable_floating_ip`: %+v", err) - } - - if err := d.Set("enable_tcp_reset", props.EnableTCPReset); err != nil { - return fmt.Errorf("setting `enable_tcp_reset`: %+v", err) - } - - if err := d.Set("disable_outbound_snat", props.DisableOutboundSnat); err != nil { - return fmt.Errorf("setting `disable_outbound_snat`: %+v", err) - } - - if err := d.Set("idle_timeout_in_minutes", props.IdleTimeoutInMinutes); err != nil { - return fmt.Errorf("setting `idle_timeout_in_minutes`: %+v", err) - } - 
- if err := d.Set("load_distribution", props.LoadDistribution); err != nil { - return fmt.Errorf("setting `load_distribution`: %+v", err) - } - } - - return nil -} diff --git a/azurerm/internal/services/network/lb_rule_resource.go b/azurerm/internal/services/network/lb_rule_resource.go deleted file mode 100644 index 01e1679cb395..000000000000 --- a/azurerm/internal/services/network/lb_rule_resource.go +++ /dev/null @@ -1,446 +0,0 @@ -package network - -import ( - "fmt" - "log" - "regexp" - "time" - - "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-03-01/network" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" - networkValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmLoadBalancerRule() *schema.Resource { - return &schema.Resource{ - Create: resourceArmLoadBalancerRuleCreateUpdate, - Read: resourceArmLoadBalancerRuleRead, - Update: resourceArmLoadBalancerRuleCreateUpdate, - Delete: resourceArmLoadBalancerRuleDelete, - - Importer: loadBalancerSubResourceImporter(func(input string) (*parse.LoadBalancerId, error) { - id, err := parse.LoadBalancerRuleID(input) - if err != nil { - return nil, err - } - - lbId := parse.NewLoadBalancerID(id.ResourceGroup, id.LoadBalancerName) - return &lbId, nil - }), - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: ValidateArmLoadBalancerRuleName, - }, - - "resource_group_name": azure.SchemaResourceGroupName(), - - "loadbalancer_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: networkValidate.LoadBalancerID, - }, - - "frontend_ip_configuration_name": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - - "frontend_ip_configuration_id": { - Type: schema.TypeString, - Computed: true, - }, - - "backend_address_pool_id": { - Type: schema.TypeString, - Optional: true, - Computed: true, - }, - - "protocol": { - Type: schema.TypeString, - Required: true, - DiffSuppressFunc: suppress.CaseDifference, - ValidateFunc: validation.StringInSlice([]string{ - string(network.TransportProtocolAll), - string(network.TransportProtocolTCP), - string(network.TransportProtocolUDP), - }, true), - }, - - "frontend_port": { - Type: schema.TypeInt, - Required: true, - ValidateFunc: validate.PortNumberOrZero, - }, - - "backend_port": { - Type: 
schema.TypeInt, - Required: true, - ValidateFunc: validate.PortNumberOrZero, - }, - - "probe_id": { - Type: schema.TypeString, - Optional: true, - Computed: true, - }, - - "enable_floating_ip": { - Type: schema.TypeBool, - Optional: true, - Default: false, - }, - - "enable_tcp_reset": { - Type: schema.TypeBool, - Optional: true, - }, - - "disable_outbound_snat": { - Type: schema.TypeBool, - Optional: true, - Default: false, - }, - - "idle_timeout_in_minutes": { - Type: schema.TypeInt, - Optional: true, - Computed: true, - ValidateFunc: validation.IntBetween(4, 30), - }, - - "load_distribution": { - Type: schema.TypeString, - Optional: true, - Computed: true, - }, - }, - } -} - -func resourceArmLoadBalancerRuleCreateUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.LoadBalancersClient - subscriptionId := meta.(*clients.Client).Account.SubscriptionId - ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := d.Get("name").(string) - loadBalancerId, err := parse.LoadBalancerID(d.Get("loadbalancer_id").(string)) - if err != nil { - return err - } - - loadBalancerID := loadBalancerId.ID(subscriptionId) - locks.ByID(loadBalancerID) - defer locks.UnlockByID(loadBalancerID) - - loadBalancer, exists, err := retrieveLoadBalancerById(ctx, client, *loadBalancerId) - if err != nil { - return fmt.Errorf("Error Getting Load Balancer By ID: %+v", err) - } - if !exists { - d.SetId("") - log.Printf("[INFO] Load Balancer %q not found. Removing from state", name) - return nil - } - - newLbRule, err := expandAzureRmLoadBalancerRule(d, loadBalancer) - if err != nil { - return fmt.Errorf("Error Expanding Load Balancer Rule: %+v", err) - } - - lbRules := append(*loadBalancer.LoadBalancerPropertiesFormat.LoadBalancingRules, *newLbRule) - - existingRule, existingRuleIndex, exists := FindLoadBalancerRuleByName(loadBalancer, name) - if exists { - if name == *existingRule.Name { - if d.IsNewResource() { - return tf.ImportAsExistsError("azurerm_lb_rule", *existingRule.ID) - } - - // this rule is being updated/reapplied remove old copy from the slice - lbRules = append(lbRules[:existingRuleIndex], lbRules[existingRuleIndex+1:]...) 
- } - } - - loadBalancer.LoadBalancerPropertiesFormat.LoadBalancingRules = &lbRules - - future, err := client.CreateOrUpdate(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, *loadBalancer) - if err != nil { - return fmt.Errorf("Error Creating/Updating LoadBalancer: %+v", err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for completion for Load Balancer updates: %+v", err) - } - - read, err := client.Get(ctx, loadBalancerId.ResourceGroup, loadBalancerId.Name, "") - if err != nil { - return fmt.Errorf("Error Getting LoadBalancer: %+v", err) - } - if read.ID == nil { - return fmt.Errorf("Cannot read Load Balancer %s (resource group %s) ID", loadBalancerId.Name, loadBalancerId.ResourceGroup) - } - - var ruleId string - for _, LoadBalancingRule := range *read.LoadBalancerPropertiesFormat.LoadBalancingRules { - if *LoadBalancingRule.Name == name { - ruleId = *LoadBalancingRule.ID - } - } - - if ruleId == "" { - return fmt.Errorf("Cannot find created Load Balancer Rule ID %q", ruleId) - } - - d.SetId(ruleId) - - return resourceArmLoadBalancerRuleRead(d, meta) -} - -func resourceArmLoadBalancerRuleRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.LoadBalancersClient - subscriptionId := meta.(*clients.Client).Account.SubscriptionId - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.LoadBalancerRuleID(d.Id()) - if err != nil { - return err - } - - loadBalancerId := parse.NewLoadBalancerID(id.ResourceGroup, id.LoadBalancerName) - loadBalancer, exists, err := retrieveLoadBalancerById(ctx, client, loadBalancerId) - if err != nil { - return fmt.Errorf("Error Getting Load Balancer By ID: %+v", err) - } - if !exists { - d.SetId("") - log.Printf("[INFO] Load Balancer %q not found. Removing from state", id.LoadBalancerName) - return nil - } - - config, _, exists := FindLoadBalancerRuleByName(loadBalancer, id.Name) - if !exists { - d.SetId("") - log.Printf("[INFO] Load Balancer Rule %q not found. 
Removing from state", id.Name) - return nil - } - - d.Set("name", config.Name) - d.Set("resource_group_name", id.ResourceGroup) - - if props := config.LoadBalancingRulePropertiesFormat; props != nil { - d.Set("disable_outbound_snat", props.DisableOutboundSnat) - d.Set("enable_floating_ip", props.EnableFloatingIP) - d.Set("enable_tcp_reset", props.EnableTCPReset) - d.Set("protocol", string(props.Protocol)) - - backendPort := 0 - if props.BackendPort != nil { - backendPort = int(*props.BackendPort) - } - d.Set("backend_port", backendPort) - - backendAddressPoolId := "" - if props.BackendAddressPool != nil && props.BackendAddressPool.ID != nil { - backendAddressPoolId = *props.BackendAddressPool.ID - } - d.Set("backend_address_pool_id", backendAddressPoolId) - - frontendIPConfigName := "" - frontendIPConfigID := "" - if props.FrontendIPConfiguration != nil && props.FrontendIPConfiguration.ID != nil { - feid, err := parse.LoadBalancerFrontendIPConfigurationID(*props.FrontendIPConfiguration.ID) - if err != nil { - return err - } - - frontendIPConfigName = feid.Name - frontendIPConfigID = feid.ID(subscriptionId) - } - d.Set("frontend_ip_configuration_name", frontendIPConfigName) - d.Set("frontend_ip_configuration_id", frontendIPConfigID) - - frontendPort := 0 - if props.FrontendPort != nil { - frontendPort = int(*props.FrontendPort) - } - d.Set("frontend_port", frontendPort) - - idleTimeoutInMinutes := 0 - if props.IdleTimeoutInMinutes != nil { - idleTimeoutInMinutes = int(*props.IdleTimeoutInMinutes) - } - d.Set("idle_timeout_in_minutes", idleTimeoutInMinutes) - - loadDistribution := "" - if props.LoadDistribution != "" { - loadDistribution = string(props.LoadDistribution) - } - d.Set("load_distribution", loadDistribution) - - probeId := "" - if props.Probe != nil && props.Probe.ID != nil { - probeId = *props.Probe.ID - } - d.Set("probe_id", probeId) - } - - return nil -} - -func resourceArmLoadBalancerRuleDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.LoadBalancersClient - subscriptionId := meta.(*clients.Client).Account.SubscriptionId - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := parse.LoadBalancerRuleID(d.Id()) - if err != nil { - return err - } - - loadBalancerId := parse.NewLoadBalancerID(id.ResourceGroup, id.LoadBalancerName) - loadBalancerIDRaw := loadBalancerId.ID(subscriptionId) - locks.ByID(loadBalancerIDRaw) - defer locks.UnlockByID(loadBalancerIDRaw) - - loadBalancer, exists, err := retrieveLoadBalancerById(ctx, client, loadBalancerId) - if err != nil { - return fmt.Errorf("Error Getting Load Balancer By ID: %+v", err) - } - if !exists { - d.SetId("") - return nil - } - - _, index, exists := FindLoadBalancerRuleByName(loadBalancer, d.Get("name").(string)) - if !exists { - return nil - } - - oldLbRules := *loadBalancer.LoadBalancerPropertiesFormat.LoadBalancingRules - newLbRules := append(oldLbRules[:index], oldLbRules[index+1:]...) 
- loadBalancer.LoadBalancerPropertiesFormat.LoadBalancingRules = &newLbRules - - future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.LoadBalancerName, *loadBalancer) - if err != nil { - return fmt.Errorf("Creating/Updating Load Balancer %q (Resource Group %q): %+v", id.LoadBalancerName, id.ResourceGroup, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("waiting for completion of Load Balancer %q (Resource Group %q): %+v", id.LoadBalancerName, id.ResourceGroup, err) - } - - read, err := client.Get(ctx, id.ResourceGroup, id.LoadBalancerName, "") - if err != nil { - return fmt.Errorf("Error Getting LoadBalancer: %+v", err) - } - if read.ID == nil { - return fmt.Errorf("Cannot read ID of Load Balancer %q (resource group %s)", id.LoadBalancerName, id.ResourceGroup) - } - - return nil -} - -func expandAzureRmLoadBalancerRule(d *schema.ResourceData, lb *network.LoadBalancer) (*network.LoadBalancingRule, error) { - properties := network.LoadBalancingRulePropertiesFormat{ - Protocol: network.TransportProtocol(d.Get("protocol").(string)), - FrontendPort: utils.Int32(int32(d.Get("frontend_port").(int))), - BackendPort: utils.Int32(int32(d.Get("backend_port").(int))), - EnableFloatingIP: utils.Bool(d.Get("enable_floating_ip").(bool)), - EnableTCPReset: utils.Bool(d.Get("enable_tcp_reset").(bool)), - DisableOutboundSnat: utils.Bool(d.Get("disable_outbound_snat").(bool)), - } - - if v, ok := d.GetOk("idle_timeout_in_minutes"); ok { - properties.IdleTimeoutInMinutes = utils.Int32(int32(v.(int))) - } - - if v := d.Get("load_distribution").(string); v != "" { - properties.LoadDistribution = network.LoadDistribution(v) - } - - // TODO: ensure these ID's are consistent - if v := d.Get("frontend_ip_configuration_name").(string); v != "" { - rule, exists := FindLoadBalancerFrontEndIpConfigurationByName(lb, v) - if !exists { - return nil, fmt.Errorf("[ERROR] Cannot find FrontEnd IP Configuration with the name %s", v) - } - - properties.FrontendIPConfiguration = &network.SubResource{ - ID: rule.ID, - } - } - - if v := d.Get("backend_address_pool_id").(string); v != "" { - properties.BackendAddressPool = &network.SubResource{ - ID: &v, - } - } - - if v := d.Get("probe_id").(string); v != "" { - properties.Probe = &network.SubResource{ - ID: &v, - } - } - - return &network.LoadBalancingRule{ - Name: utils.String(d.Get("name").(string)), - LoadBalancingRulePropertiesFormat: &properties, - }, nil -} - -func ValidateArmLoadBalancerRuleName(v interface{}, k string) (warnings []string, errors []error) { - value := v.(string) - if !regexp.MustCompile(`^[a-zA-Z_0-9.-]+$`).MatchString(value) { - errors = append(errors, fmt.Errorf( - "only word characters, numbers, underscores, periods, and hyphens allowed in %q: %q", - k, value)) - } - - if len(value) > 80 { - errors = append(errors, fmt.Errorf( - "%q cannot be longer than 80 characters: %q", k, value)) - } - - if len(value) == 0 { - errors = append(errors, fmt.Errorf( - "%q cannot be an empty string: %q", k, value)) - } - if !regexp.MustCompile(`[a-zA-Z0-9_]$`).MatchString(value) { - errors = append(errors, fmt.Errorf( - "%q must end with a word character, number, or underscore: %q", k, value)) - } - - if !regexp.MustCompile(`^[a-zA-Z0-9]`).MatchString(value) { - errors = append(errors, fmt.Errorf( - "%q must start with a word character or number: %q", k, value)) - } - - return warnings, errors -} diff --git a/azurerm/internal/services/network/local_network_gateway_resource.go 
b/azurerm/internal/services/network/local_network_gateway_resource.go index 3ed5684f0b5d..ddca4b27968f 100644 --- a/azurerm/internal/services/network/local_network_gateway_resource.go +++ b/azurerm/internal/services/network/local_network_gateway_resource.go @@ -44,8 +44,15 @@ func resourceArmLocalNetworkGateway() *schema.Resource { "resource_group_name": azure.SchemaResourceGroupName(), "gateway_address": { - Type: schema.TypeString, - Required: true, + Type: schema.TypeString, + Optional: true, + ExactlyOneOf: []string{"gateway_address", "gateway_fqdn"}, + }, + + "gateway_fqdn": { + Type: schema.TypeString, + Optional: true, + ExactlyOneOf: []string{"gateway_address", "gateway_fqdn"}, }, "address_space": { @@ -108,8 +115,6 @@ func resourceArmLocalNetworkGatewayCreateUpdate(d *schema.ResourceData, meta int } location := azure.NormalizeLocation(d.Get("location").(string)) - ipAddress := d.Get("gateway_address").(string) - t := d.Get("tags").(map[string]interface{}) gateway := network.LocalNetworkGateway{ @@ -117,12 +122,19 @@ func resourceArmLocalNetworkGatewayCreateUpdate(d *schema.ResourceData, meta int Location: &location, LocalNetworkGatewayPropertiesFormat: &network.LocalNetworkGatewayPropertiesFormat{ LocalNetworkAddressSpace: &network.AddressSpace{}, - GatewayIPAddress: &ipAddress, BgpSettings: expandLocalNetworkGatewayBGPSettings(d), }, Tags: tags.Expand(t), } + ipAddress := d.Get("gateway_address").(string) + fqdn := d.Get("gateway_fqdn").(string) + if ipAddress != "" { + gateway.LocalNetworkGatewayPropertiesFormat.GatewayIPAddress = &ipAddress + } else { + gateway.LocalNetworkGatewayPropertiesFormat.Fqdn = &fqdn + } + // There is a bug in the provider where the address space ordering doesn't change as expected. // In the UI we have to remove the current list of addresses in the address space and re-add them in the new order and we'll copy that here. 
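// Editor's sketch (not part of the patch): the comment above describes the remove-and-re-add
// workaround for `address_space` reordering, but the hunk context below only shows the opening
// of that conditional. One plausible shape of the workaround is sketched here, assuming the
// `client`, `ctx`, `name` and `resGroup` variables from the surrounding CreateUpdate function
// and the helpers already used in this hunk; treat it as illustrative only, not the provider's
// exact implementation.
if !d.IsNewResource() && d.HasChange("address_space") {
	// Push an intermediate update with an empty address space so the API drops the old ordering...
	gateway.LocalNetworkGatewayPropertiesFormat.LocalNetworkAddressSpace = &network.AddressSpace{
		AddressPrefixes: &[]string{},
	}
	future, err := client.CreateOrUpdate(ctx, resGroup, name, gateway)
	if err != nil {
		return fmt.Errorf("removing existing address_space on Local Network Gateway %q: %+v", name, err)
	}
	if err = future.WaitForCompletionRef(ctx, client.Client); err != nil {
		return fmt.Errorf("waiting for removal of address_space on Local Network Gateway %q: %+v", name, err)
	}
}
// ...then the final CreateOrUpdate re-adds the prefixes in their new order.
gateway.LocalNetworkGatewayPropertiesFormat.LocalNetworkAddressSpace.AddressPrefixes = utils.ExpandStringSlice(d.Get("address_space").([]interface{}))
// End of editor's sketch; the original hunk context resumes below.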
if !d.IsNewResource() && d.HasChange("address_space") { @@ -187,6 +199,7 @@ func resourceArmLocalNetworkGatewayRead(d *schema.ResourceData, meta interface{} if props := resp.LocalNetworkGatewayPropertiesFormat; props != nil { d.Set("gateway_address", props.GatewayIPAddress) + d.Set("gateway_fqdn", props.Fqdn) if lnas := props.LocalNetworkAddressSpace; lnas != nil { d.Set("address_space", lnas.AddressPrefixes) diff --git a/azurerm/internal/services/network/nat_gateway_public_ip_association_resource.go b/azurerm/internal/services/network/nat_gateway_public_ip_association_resource.go index 37621e95fd62..6e328626c984 100644 --- a/azurerm/internal/services/network/nat_gateway_public_ip_association_resource.go +++ b/azurerm/internal/services/network/nat_gateway_public_ip_association_resource.go @@ -47,7 +47,7 @@ func resourceArmNATGatewayPublicIpAssociation() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.PublicIPAddressID, + ValidateFunc: validate.PublicIpAddressID, }, }, } diff --git a/azurerm/internal/services/network/network_connection_monitor_resource.go b/azurerm/internal/services/network/network_connection_monitor_resource.go index 2aae8d483fd6..be64d4b6c27c 100644 --- a/azurerm/internal/services/network/network_connection_monitor_resource.go +++ b/azurerm/internal/services/network/network_connection_monitor_resource.go @@ -12,6 +12,10 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + computeValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/compute/validate" + logAnalyticsValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" + networkValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" @@ -43,78 +47,390 @@ func resourceArmNetworkConnectionMonitor() *schema.Resource { ValidateFunc: validation.StringIsNotEmpty, }, - "resource_group_name": azure.SchemaResourceGroupName(), - - "network_watcher_name": { + "network_watcher_id": { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, + ValidateFunc: networkValidate.NetworkWatcherID, }, "location": azure.SchemaLocation(), "auto_start": { - Type: schema.TypeBool, - Optional: true, - ForceNew: true, - Default: true, + Type: schema.TypeBool, + Optional: true, + Computed: true, + Deprecated: "The field belongs to the v1 network connection monitor, which is now deprecated in favour of v2 by Azure. Please check the document (https://www.terraform.io/docs/providers/azurerm/r/network_connection_monitor.html) for the v2 properties.", }, "interval_in_seconds": { Type: schema.TypeInt, Optional: true, - Default: 60, + Computed: true, ValidateFunc: validation.IntAtLeast(30), + Deprecated: "The field belongs to the v1 network connection monitor, which is now deprecated in favour of v2 by Azure. 
Please check the document (https://www.terraform.io/docs/providers/azurerm/r/network_connection_monitor.html) for the v2 properties.", }, "source": { - Type: schema.TypeList, - Required: true, - MaxItems: 1, + Type: schema.TypeList, + Optional: true, + Computed: true, + MaxItems: 1, + Deprecated: "The field belongs to the v1 network connection monitor, which is now deprecated in favour of v2 by Azure. Please check the document (https://www.terraform.io/docs/providers/azurerm/r/network_connection_monitor.html) for the v2 properties.", Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "virtual_machine_id": { Type: schema.TypeString, - Required: true, + Optional: true, + Computed: true, ValidateFunc: azure.ValidateResourceID, + Deprecated: "The field belongs to the v1 network connection monitor, which is now deprecated in favour of v2 by Azure. Please check the document (https://www.terraform.io/docs/providers/azurerm/r/network_connection_monitor.html) for the v2 properties.", }, + "port": { Type: schema.TypeInt, Optional: true, - Default: 0, + Computed: true, ValidateFunc: validate.PortNumberOrZero, + Deprecated: "The field belongs to the v1 network connection monitor, which is now deprecated in favour of v2 by Azure. Please check the document (https://www.terraform.io/docs/providers/azurerm/r/network_connection_monitor.html) for the v2 properties.", }, }, }, }, "destination": { - Type: schema.TypeList, - Required: true, - MaxItems: 1, + Type: schema.TypeList, + Optional: true, + Computed: true, + MaxItems: 1, + Deprecated: "The field belongs to the v1 network connection monitor, which is now deprecated in favour of v2 by Azure. Please check the document (https://www.terraform.io/docs/providers/azurerm/r/network_connection_monitor.html) for the v2 properties.", Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "virtual_machine_id": { Type: schema.TypeString, Optional: true, + Computed: true, ValidateFunc: azure.ValidateResourceID, ConflictsWith: []string{"destination.0.address"}, + Deprecated: "The field belongs to the v1 network connection monitor, which is now deprecated in favour of v2 by Azure. Please check the document (https://www.terraform.io/docs/providers/azurerm/r/network_connection_monitor.html) for the v2 properties.", }, + "address": { Type: schema.TypeString, Optional: true, + Computed: true, ConflictsWith: []string{"destination.0.virtual_machine_id"}, + Deprecated: "The field belongs to the v1 network connection monitor, which is now deprecated in favour of v2 by Azure. Please check the document (https://www.terraform.io/docs/providers/azurerm/r/network_connection_monitor.html) for the v2 properties.", }, + "port": { Type: schema.TypeInt, - Required: true, + Optional: true, + Computed: true, ValidateFunc: validate.PortNumber, + Deprecated: "The field belongs to the v1 network connection monitor, which is now deprecated in favour of v2 by Azure. 
Please check the document (https://www.terraform.io/docs/providers/azurerm/r/network_connection_monitor.html) for the v2 properties.", + }, + }, + }, + }, + + "endpoint": { + Type: schema.TypeSet, + Required: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "address": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.Any( + validation.IsIPv4Address, + networkValidate.NetworkConnectionMonitorEndpointAddress, + ), + }, + + "filter": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "item": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "address": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: azure.ValidateResourceID, + }, + + "type": { + Type: schema.TypeString, + Optional: true, + Default: string(network.AgentAddress), + ValidateFunc: validation.StringInSlice([]string{ + string(network.AgentAddress), + }, false), + }, + }, + }, + }, + + "type": { + Type: schema.TypeString, + Optional: true, + Default: string(network.Include), + ValidateFunc: validation.StringInSlice([]string{ + string(network.Include), + }, false), + }, + }, + }, + }, + + "virtual_machine_id": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: computeValidate.VirtualMachineID, }, }, }, }, + "test_configuration": { + Type: schema.TypeSet, + Required: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "protocol": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{ + string(network.ConnectionMonitorTestConfigurationProtocolTCP), + string(network.ConnectionMonitorTestConfigurationProtocolHTTP), + string(network.ConnectionMonitorTestConfigurationProtocolIcmp), + }, false), + }, + + "http_configuration": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "method": { + Type: schema.TypeString, + Optional: true, + Default: string(network.Get), + ValidateFunc: validation.StringInSlice([]string{ + string(network.Get), + string(network.Post), + }, false), + }, + + "path": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: networkValidate.NetworkConnectionMonitorHttpPath, + }, + + "port": { + Type: schema.TypeInt, + Optional: true, + ValidateFunc: validate.PortNumber, + }, + + "prefer_https": { + Type: schema.TypeBool, + Optional: true, + Default: false, + }, + + "request_header": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "value": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + }, + }, + + "valid_status_code_ranges": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: networkValidate.NetworkConnectionMonitorValidStatusCodeRanges, + }, + }, + }, + }, + }, + + "icmp_configuration": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "trace_route_enabled": { + Type: schema.TypeBool, + Optional: true, + Default: 
true, + }, + }, + }, + }, + + "preferred_ip_version": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{ + string(network.PreferredIPVersionIPv4), + string(network.PreferredIPVersionIPv6), + }, false), + }, + + "success_threshold": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "checks_failed_percent": { + Type: schema.TypeInt, + Optional: true, + ValidateFunc: validation.IntBetween(0, 100), + }, + + "round_trip_time_ms": { + Type: schema.TypeFloat, + Optional: true, + ValidateFunc: validation.FloatAtLeast(0), + }, + }, + }, + }, + + "tcp_configuration": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "port": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validate.PortNumber, + }, + + "trace_route_enabled": { + Type: schema.TypeBool, + Optional: true, + Default: true, + }, + }, + }, + }, + + "test_frequency_in_seconds": { + Type: schema.TypeInt, + Optional: true, + Default: 60, + ValidateFunc: validation.IntBetween(30, 1800), + }, + }, + }, + }, + + "test_group": { + Type: schema.TypeSet, + Required: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "destination_endpoints": { + Type: schema.TypeSet, + Required: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + + "source_endpoints": { + Type: schema.TypeSet, + Required: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + + "test_configuration_names": { + Type: schema.TypeSet, + Required: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + + "enabled": { + Type: schema.TypeBool, + Optional: true, + Default: true, + }, + }, + }, + }, + + // API accepts any value including empty string. 
+ "notes": { + Type: schema.TypeString, + Optional: true, + }, + + "output_workspace_resource_ids": { + Type: schema.TypeSet, + Optional: true, + Computed: true, + ConfigMode: schema.SchemaConfigModeAttr, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: logAnalyticsValidate.LogAnalyticsWorkspaceID, + }, + }, + "tags": tags.Schema(), }, } @@ -126,22 +442,19 @@ func resourceArmNetworkConnectionMonitorCreateUpdate(d *schema.ResourceData, met defer cancel() name := d.Get("name").(string) - watcherName := d.Get("network_watcher_name").(string) - resourceGroup := d.Get("resource_group_name").(string) location := azure.NormalizeLocation(d.Get("location").(string)) - autoStart := d.Get("auto_start").(bool) - intervalInSeconds := int32(d.Get("interval_in_seconds").(int)) - dest, err := expandArmNetworkConnectionMonitorDestination(d) + watcherId := d.Get("network_watcher_id").(string) + id, err := parse.NetworkWatcherID(watcherId) if err != nil { return err } if d.IsNewResource() { - existing, err := client.Get(ctx, resourceGroup, watcherName, name) + existing, err := client.Get(ctx, id.ResourceGroup, id.Name, name) if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing Connection Monitor %q (Watcher %q / Resource Group %q): %s", name, watcherName, resourceGroup, err) + return fmt.Errorf("Error checking for presence of existing Connection Monitor %q (Watcher %q / Resource Group %q): %s", name, id.Name, id.ResourceGroup, err) } } @@ -150,34 +463,36 @@ func resourceArmNetworkConnectionMonitorCreateUpdate(d *schema.ResourceData, met } } - t := d.Get("tags").(map[string]interface{}) - properties := network.ConnectionMonitor{ Location: utils.String(location), - Tags: tags.Expand(t), + Tags: tags.Expand(d.Get("tags").(map[string]interface{})), ConnectionMonitorParameters: &network.ConnectionMonitorParameters{ - Source: expandArmNetworkConnectionMonitorSource(d), - Destination: dest, - AutoStart: utils.Bool(autoStart), - MonitoringIntervalInSeconds: utils.Int32(intervalInSeconds), + Endpoints: expandArmNetworkConnectionMonitorEndpoint(d.Get("endpoint").(*schema.Set).List()), + Outputs: expandArmNetworkConnectionMonitorOutput(d.Get("output_workspace_resource_ids").(*schema.Set).List()), + TestConfigurations: expandArmNetworkConnectionMonitorTestConfiguration(d.Get("test_configuration").(*schema.Set).List()), + TestGroups: expandArmNetworkConnectionMonitorTestGroup(d.Get("test_group").(*schema.Set).List()), }, } - future, err := client.CreateOrUpdate(ctx, resourceGroup, watcherName, name, properties) + if notes, ok := d.GetOk("notes"); ok { + properties.Notes = utils.String(notes.(string)) + } + + future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.Name, name, properties) if err != nil { - return fmt.Errorf("Error creating Connection Monitor %q (Watcher %q / Resource Group %q): %+v", name, watcherName, resourceGroup, err) + return fmt.Errorf("Error creating Connection Monitor %q (Watcher %q / Resource Group %q): %+v", name, id.Name, id.ResourceGroup, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for completion of Connection Monitor %q (Watcher %q / Resource Group %q): %+v", name, watcherName, resourceGroup, err) + return fmt.Errorf("Error waiting for completion of Connection Monitor %q (Watcher %q / Resource Group %q): %+v", name, id.Name, id.ResourceGroup, err) } - resp, err := client.Get(ctx, resourceGroup, watcherName, name) + resp, err := 
client.Get(ctx, id.ResourceGroup, id.Name, name) if err != nil { - return fmt.Errorf("Error retrieving Connection Monitor %q (Watcher %q / Resource Group %q): %+v", name, watcherName, resourceGroup, err) + return fmt.Errorf("Error retrieving Connection Monitor %q (Watcher %q / Resource Group %q): %+v", name, id.Name, id.ResourceGroup, err) } if resp.ID == nil { - return fmt.Errorf("Cannot read Connection Monitor %q (Watcher %q / Resource Group %q) ID", name, watcherName, resourceGroup) + return fmt.Errorf("Cannot read Connection Monitor %q (Watcher %q / Resource Group %q) ID", name, id.Name, id.ResourceGroup) } d.SetId(*resp.ID) @@ -190,42 +505,50 @@ func resourceArmNetworkConnectionMonitorRead(d *schema.ResourceData, meta interf ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.ConnectionMonitorID(d.Id()) if err != nil { return err } - resourceGroup := id.ResourceGroup - watcherName := id.Path["networkWatchers"] - name := id.Path["NetworkConnectionMonitors"] - resp, err := client.Get(ctx, resourceGroup, watcherName, name) + resp, err := client.Get(ctx, id.ResourceGroup, id.NetworkWatcherName, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("Error reading Connection Monitor %q (Watcher %q / Resource Group %q) %+v", name, watcherName, resourceGroup, err) + return fmt.Errorf("Error reading Connection Monitor %q (Watcher %q / Resource Group %q) %+v", id.Name, id.NetworkWatcherName, id.ResourceGroup, err) + } + + if resp.ConnectionMonitorType == network.SingleSourceDestination { + return fmt.Errorf("the resource created via API version 2019-06-01 or before (a.k.a v1) isn't compatible to this version of provider. 
Please migrate to v2 resource.") } - d.Set("name", name) - d.Set("network_watcher_name", watcherName) - d.Set("resource_group_name", resourceGroup) + d.Set("name", id.Name) + + networkWatcherId := parse.NewNetworkWatcherID(id.SubscriptionId, id.ResourceGroup, id.NetworkWatcherName) + d.Set("network_watcher_id", networkWatcherId.ID()) + if location := resp.Location; location != nil { d.Set("location", azure.NormalizeLocation(*location)) } if props := resp.ConnectionMonitorResultProperties; props != nil { - d.Set("auto_start", props.AutoStart) - d.Set("interval_in_seconds", props.MonitoringIntervalInSeconds) + d.Set("notes", props.Notes) + + if err := d.Set("endpoint", flattenArmNetworkConnectionMonitorEndpoint(props.Endpoints)); err != nil { + return fmt.Errorf("setting `endpoint`: %+v", err) + } + + if err := d.Set("output_workspace_resource_ids", flattenArmNetworkConnectionMonitorOutput(props.Outputs)); err != nil { + return fmt.Errorf("setting `output`: %+v", err) + } - source := flattenArmNetworkConnectionMonitorSource(props.Source) - if err := d.Set("source", source); err != nil { - return fmt.Errorf("Error setting `source`: %+v", err) + if err := d.Set("test_configuration", flattenArmNetworkConnectionMonitorTestConfiguration(props.TestConfigurations)); err != nil { + return fmt.Errorf("setting `test_configuration`: %+v", err) } - dest := flattenArmNetworkConnectionMonitorDestination(props.Destination) - if err := d.Set("destination", dest); err != nil { - return fmt.Errorf("Error setting `destination`: %+v", err) + if err := d.Set("test_group", flattenArmNetworkConnectionMonitorTestGroup(props.TestGroups)); err != nil { + return fmt.Errorf("setting `test_group`: %+v", err) } } @@ -237,102 +560,532 @@ func resourceArmNetworkConnectionMonitorDelete(d *schema.ResourceData, meta inte ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.ConnectionMonitorID(d.Id()) if err != nil { return err } - resourceGroup := id.ResourceGroup - watcherName := id.Path["networkWatchers"] - name := id.Path["NetworkConnectionMonitors"] - future, err := client.Delete(ctx, resourceGroup, watcherName, name) + future, err := client.Delete(ctx, id.ResourceGroup, id.NetworkWatcherName, id.Name) if err != nil { if !response.WasNotFound(future.Response()) { - return fmt.Errorf("Error deleting Connection Monitor %q (Watcher %q / Resource Group %q): %+v", name, watcherName, resourceGroup, err) + return fmt.Errorf("Error deleting Connection Monitor %q (Watcher %q / Resource Group %q): %+v", id.Name, id.NetworkWatcherName, id.ResourceGroup, err) } } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for the deletion of Connection Monitor %q (Watcher %q / Resource Group %q): %+v", name, watcherName, resourceGroup, err) + return fmt.Errorf("Error waiting for the deletion of Connection Monitor %q (Watcher %q / Resource Group %q): %+v", id.Name, id.NetworkWatcherName, id.ResourceGroup, err) } return nil } -func flattenArmNetworkConnectionMonitorSource(input *network.ConnectionMonitorSource) []interface{} { +func expandArmNetworkConnectionMonitorEndpoint(input []interface{}) *[]network.ConnectionMonitorEndpoint { + results := make([]network.ConnectionMonitorEndpoint, 0) + + for _, item := range input { + v := item.(map[string]interface{}) + + result := network.ConnectionMonitorEndpoint{ + Name: utils.String(v["name"].(string)), + Filter: 
expandArmNetworkConnectionMonitorEndpointFilter(v["filter"].([]interface{})), + } + + if address := v["address"]; address != "" { + result.Address = utils.String(address.(string)) + } + + if resourceId := v["virtual_machine_id"]; resourceId != "" { + result.ResourceID = utils.String(resourceId.(string)) + } + + results = append(results, result) + } + + return &results +} + +func expandArmNetworkConnectionMonitorEndpointFilter(input []interface{}) *network.ConnectionMonitorEndpointFilter { + if len(input) == 0 { + return nil + } + + v := input[0].(map[string]interface{}) + + return &network.ConnectionMonitorEndpointFilter{ + Type: network.ConnectionMonitorEndpointFilterType(v["type"].(string)), + Items: expandArmNetworkConnectionMonitorEndpointFilterItem(v["item"].(*schema.Set).List()), + } +} + +func expandArmNetworkConnectionMonitorEndpointFilterItem(input []interface{}) *[]network.ConnectionMonitorEndpointFilterItem { + if len(input) == 0 { + return nil + } + + results := make([]network.ConnectionMonitorEndpointFilterItem, 0) + + for _, item := range input { + v := item.(map[string]interface{}) + + result := network.ConnectionMonitorEndpointFilterItem{ + Type: network.ConnectionMonitorEndpointFilterItemType(v["type"].(string)), + } + + if address := v["address"]; address != "" { + result.Address = utils.String(address.(string)) + } + + results = append(results, result) + } + + return &results +} + +func expandArmNetworkConnectionMonitorTestConfiguration(input []interface{}) *[]network.ConnectionMonitorTestConfiguration { + results := make([]network.ConnectionMonitorTestConfiguration, 0) + + for _, item := range input { + v := item.(map[string]interface{}) + + result := network.ConnectionMonitorTestConfiguration{ + Name: utils.String(v["name"].(string)), + HTTPConfiguration: expandArmNetworkConnectionMonitorHTTPConfiguration(v["http_configuration"].([]interface{})), + IcmpConfiguration: expandArmNetworkConnectionMonitorIcmpConfiguration(v["icmp_configuration"].([]interface{})), + Protocol: network.ConnectionMonitorTestConfigurationProtocol(v["protocol"].(string)), + SuccessThreshold: expandArmNetworkConnectionMonitorSuccessThreshold(v["success_threshold"].([]interface{})), + TCPConfiguration: expandArmNetworkConnectionMonitorTCPConfiguration(v["tcp_configuration"].([]interface{})), + TestFrequencySec: utils.Int32(int32(v["test_frequency_in_seconds"].(int))), + } + + if preferredIPVersion := v["preferred_ip_version"]; preferredIPVersion != "" { + result.PreferredIPVersion = network.PreferredIPVersion(preferredIPVersion.(string)) + } + + results = append(results, result) + } + + return &results +} + +func expandArmNetworkConnectionMonitorHTTPConfiguration(input []interface{}) *network.ConnectionMonitorHTTPConfiguration { + if len(input) == 0 { + return nil + } + + v := input[0].(map[string]interface{}) + + props := &network.ConnectionMonitorHTTPConfiguration{ + Method: network.HTTPConfigurationMethod(v["method"].(string)), + PreferHTTPS: utils.Bool(v["prefer_https"].(bool)), + RequestHeaders: expandArmNetworkConnectionMonitorHTTPHeader(v["request_header"].(*schema.Set).List()), + } + + if path := v["path"]; path != "" { + props.Path = utils.String(path.(string)) + } + + if port := v["port"]; port != 0 { + props.Port = utils.Int32(int32(port.(int))) + } + + if ranges := v["valid_status_code_ranges"].(*schema.Set).List(); len(ranges) != 0 { + props.ValidStatusCodeRanges = utils.ExpandStringSlice(ranges) + } + + return props +} + +func expandArmNetworkConnectionMonitorTCPConfiguration(input 
[]interface{}) *network.ConnectionMonitorTCPConfiguration { + if len(input) == 0 { + return nil + } + + v := input[0].(map[string]interface{}) + + return &network.ConnectionMonitorTCPConfiguration{ + Port: utils.Int32(int32(v["port"].(int))), + DisableTraceRoute: utils.Bool(!v["trace_route_enabled"].(bool)), + } +} + +func expandArmNetworkConnectionMonitorIcmpConfiguration(input []interface{}) *network.ConnectionMonitorIcmpConfiguration { + if len(input) == 0 { + return nil + } + + v := input[0].(map[string]interface{}) + + return &network.ConnectionMonitorIcmpConfiguration{ + DisableTraceRoute: utils.Bool(!v["trace_route_enabled"].(bool)), + } +} + +func expandArmNetworkConnectionMonitorSuccessThreshold(input []interface{}) *network.ConnectionMonitorSuccessThreshold { + if len(input) == 0 { + return nil + } + + v := input[0].(map[string]interface{}) + + return &network.ConnectionMonitorSuccessThreshold{ + ChecksFailedPercent: utils.Int32(int32(v["checks_failed_percent"].(int))), + RoundTripTimeMs: utils.Float(v["round_trip_time_ms"].(float64)), + } +} + +func expandArmNetworkConnectionMonitorHTTPHeader(input []interface{}) *[]network.HTTPHeader { + if len(input) == 0 { + return nil + } + + results := make([]network.HTTPHeader, 0) + + for _, item := range input { + v := item.(map[string]interface{}) + + result := network.HTTPHeader{ + Name: utils.String(v["name"].(string)), + Value: utils.String(v["value"].(string)), + } + + results = append(results, result) + } + + return &results +} + +func expandArmNetworkConnectionMonitorTestGroup(input []interface{}) *[]network.ConnectionMonitorTestGroup { + results := make([]network.ConnectionMonitorTestGroup, 0) + + for _, item := range input { + v := item.(map[string]interface{}) + + result := network.ConnectionMonitorTestGroup{ + Name: utils.String(v["name"].(string)), + Destinations: utils.ExpandStringSlice(v["destination_endpoints"].(*schema.Set).List()), + Disable: utils.Bool(!v["enabled"].(bool)), + Sources: utils.ExpandStringSlice(v["source_endpoints"].(*schema.Set).List()), + TestConfigurations: utils.ExpandStringSlice(v["test_configuration_names"].(*schema.Set).List()), + } + + results = append(results, result) + } + + return &results +} + +func expandArmNetworkConnectionMonitorOutput(input []interface{}) *[]network.ConnectionMonitorOutput { + results := make([]network.ConnectionMonitorOutput, 0) + + for _, item := range input { + result := network.ConnectionMonitorOutput{ + Type: network.Workspace, + WorkspaceSettings: &network.ConnectionMonitorWorkspaceSettings{ + WorkspaceResourceID: utils.String(item.(string)), + }, + } + + results = append(results, result) + } + + return &results +} + +func flattenArmNetworkConnectionMonitorEndpoint(input *[]network.ConnectionMonitorEndpoint) []interface{} { + results := make([]interface{}, 0) if input == nil { - return []interface{}{} + return results + } + + for _, item := range *input { + var name string + if item.Name != nil { + name = *item.Name + } + + var address string + if item.Address != nil { + address = *item.Address + } + + var resourceId string + if item.ResourceID != nil { + resourceId = *item.ResourceID + } + + v := map[string]interface{}{ + "name": name, + "address": address, + "filter": flattenArmNetworkConnectionMonitorEndpointFilter(item.Filter), + "virtual_machine_id": resourceId, + } + + results = append(results, v) + } + return results +} + +func flattenArmNetworkConnectionMonitorEndpointFilter(input *network.ConnectionMonitorEndpointFilter) []interface{} { + if input == nil { + 
return make([]interface{}, 0) + } + + var t network.ConnectionMonitorEndpointFilterType + if input.Type != "" { + t = input.Type + } + return []interface{}{ + map[string]interface{}{ + "item": flattenArmNetworkConnectionMonitorEndpointFilterItem(input.Items), + "type": t, + }, + } +} + +func flattenArmNetworkConnectionMonitorEndpointFilterItem(input *[]network.ConnectionMonitorEndpointFilterItem) []interface{} { + results := make([]interface{}, 0) + if input == nil { + return results + } + + for _, item := range *input { + var address string + if item.Address != nil { + address = *item.Address + } + + var t network.ConnectionMonitorEndpointFilterItemType + if item.Type != "" { + t = item.Type + } + + v := map[string]interface{}{ + "address": address, + "type": t, + } + + results = append(results, v) } - output := make(map[string]interface{}) + return results +} - if resourceID := input.ResourceID; resourceID != nil { - output["virtual_machine_id"] = *resourceID +func flattenArmNetworkConnectionMonitorTestConfiguration(input *[]network.ConnectionMonitorTestConfiguration) []interface{} { + results := make([]interface{}, 0) + if input == nil { + return results } - if port := input.Port; port != nil { - output["port"] = *port + + for _, item := range *input { + var name string + if item.Name != nil { + name = *item.Name + } + + var protocol network.ConnectionMonitorTestConfigurationProtocol + if item.Protocol != "" { + protocol = item.Protocol + } + + var preferredIpVersion network.PreferredIPVersion + if item.PreferredIPVersion != "" { + preferredIpVersion = item.PreferredIPVersion + } + + var testFrequencySec int32 + if item.TestFrequencySec != nil { + testFrequencySec = *item.TestFrequencySec + } + + v := map[string]interface{}{ + "name": name, + "protocol": protocol, + "http_configuration": flattenArmNetworkConnectionMonitorHTTPConfiguration(item.HTTPConfiguration), + "icmp_configuration": flattenArmNetworkConnectionMonitorIcmpConfiguration(item.IcmpConfiguration), + "preferred_ip_version": preferredIpVersion, + "success_threshold": flattenArmNetworkConnectionMonitorSuccessThreshold(item.SuccessThreshold), + "tcp_configuration": flattenArmNetworkConnectionMonitorTCPConfiguration(item.TCPConfiguration), + "test_frequency_in_seconds": testFrequencySec, + } + + results = append(results, v) } - return []interface{}{output} + return results } -func expandArmNetworkConnectionMonitorSource(d *schema.ResourceData) *network.ConnectionMonitorSource { - sources := d.Get("source").([]interface{}) - source := sources[0].(map[string]interface{}) +func flattenArmNetworkConnectionMonitorHTTPConfiguration(input *network.ConnectionMonitorHTTPConfiguration) []interface{} { + if input == nil { + return make([]interface{}, 0) + } + + var method network.HTTPConfigurationMethod + if input.Method != "" { + method = input.Method + } + + var p string + if input.Path != nil { + p = *input.Path + } + + var port int32 + if input.Port != nil { + port = *input.Port + } + + var preferHttps bool + if input.PreferHTTPS != nil { + preferHttps = *input.PreferHTTPS + } - monitorSource := network.ConnectionMonitorSource{} - if v := source["virtual_machine_id"]; v != "" { - monitorSource.ResourceID = utils.String(v.(string)) + return []interface{}{ + map[string]interface{}{ + "method": method, + "path": p, + "port": port, + "prefer_https": preferHttps, + "request_header": flattenArmNetworkConnectionMonitorHTTPHeader(input.RequestHeaders), + "valid_status_code_ranges": utils.FlattenStringSlice(input.ValidStatusCodeRanges), + }, 
} - if v := source["port"]; v != "" { - monitorSource.Port = utils.Int32(int32(v.(int))) +} + +func flattenArmNetworkConnectionMonitorIcmpConfiguration(input *network.ConnectionMonitorIcmpConfiguration) []interface{} { + if input == nil { + return make([]interface{}, 0) + } + + var enableTraceRoute bool + if input.DisableTraceRoute != nil { + enableTraceRoute = !*input.DisableTraceRoute } - return &monitorSource + return []interface{}{ + map[string]interface{}{ + "trace_route_enabled": enableTraceRoute, + }, + } } -func flattenArmNetworkConnectionMonitorDestination(input *network.ConnectionMonitorDestination) []interface{} { +func flattenArmNetworkConnectionMonitorSuccessThreshold(input *network.ConnectionMonitorSuccessThreshold) []interface{} { if input == nil { - return []interface{}{} + return make([]interface{}, 0) + } + + var checksFailedPercent int32 + if input.ChecksFailedPercent != nil { + checksFailedPercent = *input.ChecksFailedPercent + } + + var roundTripTimeMs float64 + if input.RoundTripTimeMs != nil { + roundTripTimeMs = *input.RoundTripTimeMs } - output := make(map[string]interface{}) + return []interface{}{ + map[string]interface{}{ + "checks_failed_percent": checksFailedPercent, + "round_trip_time_ms": roundTripTimeMs, + }, + } +} - // When monitoring a VM, the address field will contain the current address - // of the VM. We only want to copy over the address field if the virtual - // machine field is not set to avoid unwanted diffs. - if resourceID := input.ResourceID; resourceID != nil { - output["virtual_machine_id"] = *resourceID - } else if address := input.Address; address != nil { - output["address"] = *address +func flattenArmNetworkConnectionMonitorTCPConfiguration(input *network.ConnectionMonitorTCPConfiguration) []interface{} { + if input == nil { + return make([]interface{}, 0) } - if port := input.Port; port != nil { - output["port"] = *port + var enableTraceRoute bool + if input.DisableTraceRoute != nil { + enableTraceRoute = !*input.DisableTraceRoute } - return []interface{}{output} + var port int32 + if input.Port != nil { + port = *input.Port + } + + return []interface{}{ + map[string]interface{}{ + "trace_route_enabled": enableTraceRoute, + "port": port, + }, + } } -func expandArmNetworkConnectionMonitorDestination(d *schema.ResourceData) (*network.ConnectionMonitorDestination, error) { - dests := d.Get("destination").([]interface{}) - dest := dests[0].(map[string]interface{}) +func flattenArmNetworkConnectionMonitorHTTPHeader(input *[]network.HTTPHeader) []interface{} { + results := make([]interface{}, 0) + if input == nil { + return results + } - monitorDest := network.ConnectionMonitorDestination{} + for _, item := range *input { + var name string + if item.Name != nil { + name = *item.Name + } + + var value string + if item.Value != nil { + value = *item.Value + } - if v := dest["virtual_machine_id"]; v != "" { - monitorDest.ResourceID = utils.String(v.(string)) + v := map[string]interface{}{ + "name": name, + "value": value, + } + + results = append(results, v) } - if v := dest["address"]; v != "" { - monitorDest.Address = utils.String(v.(string)) + + return results +} + +func flattenArmNetworkConnectionMonitorTestGroup(input *[]network.ConnectionMonitorTestGroup) []interface{} { + results := make([]interface{}, 0) + if input == nil { + return results + } + + for _, item := range *input { + var name string + if item.Name != nil { + name = *item.Name + } + + var disable bool + if item.Disable != nil { + disable = *item.Disable + } + + v := 
map[string]interface{}{ + "name": name, + "destination_endpoints": utils.FlattenStringSlice(item.Destinations), + "source_endpoints": utils.FlattenStringSlice(item.Sources), + "test_configuration_names": utils.FlattenStringSlice(item.TestConfigurations), + "enabled": !disable, + } + + results = append(results, v) } - if v := dest["port"]; v != "" { - monitorDest.Port = utils.Int32(int32(v.(int))) + return results +} + +func flattenArmNetworkConnectionMonitorOutput(input *[]network.ConnectionMonitorOutput) []interface{} { + results := make([]interface{}, 0) + if input == nil { + return results } - if monitorDest.ResourceID == nil && monitorDest.Address == nil { - return nil, fmt.Errorf("Error: either `destination.virtual_machine_id` or `destination.address` must be specified") + for _, item := range *input { + var workspaceResourceId string + if item.WorkspaceSettings != nil && item.WorkspaceSettings.WorkspaceResourceID != nil { + workspaceResourceId = *item.WorkspaceSettings.WorkspaceResourceID + } + + results = append(results, workspaceResourceId) } - return &monitorDest, nil + return results } diff --git a/azurerm/internal/services/network/network_interface.go b/azurerm/internal/services/network/network_interface.go index a915aebbb92b..742021741dac 100644 --- a/azurerm/internal/services/network/network_interface.go +++ b/azurerm/internal/services/network/network_interface.go @@ -19,7 +19,7 @@ func parseFieldsFromNetworkInterface(input network.InterfacePropertiesFormat) ne networkSecurityGroupId = *input.NetworkSecurityGroup.ID } - var mapToSlice = func(input map[string]struct{}) []string { + mapToSlice := func(input map[string]struct{}) []string { output := make([]string, 0) for id := range input { diff --git a/azurerm/internal/services/network/network_packet_capture_resource.go b/azurerm/internal/services/network/network_packet_capture_resource.go index 24ce519d7ce2..d0e22a6d98ca 100644 --- a/azurerm/internal/services/network/network_packet_capture_resource.go +++ b/azurerm/internal/services/network/network_packet_capture_resource.go @@ -218,24 +218,24 @@ func resourceArmNetworkPacketCaptureRead(d *schema.ResourceData, meta interface{ ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.NetworkPacketCaptureID(d.Id()) + id, err := parse.PacketCaptureID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.WatcherName, id.Name) + resp, err := client.Get(ctx, id.ResourceGroup, id.NetworkWatcherName, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[WARN] Packet Capture %q (Watcher %q / Resource Group %q) %qw not found - removing from state", id.Name, id.WatcherName, id.ResourceGroup, id) + log.Printf("[WARN] Packet Capture %q (Watcher %q / Resource Group %q) %qw not found - removing from state", id.Name, id.NetworkWatcherName, id.ResourceGroup, id) d.SetId("") return nil } - return fmt.Errorf("Error reading Packet Capture %q (Watcher %q / Resource Group %q) %+v", id.Name, id.WatcherName, id.ResourceGroup, err) + return fmt.Errorf("Error reading Packet Capture %q (Watcher %q / Resource Group %q) %+v", id.Name, id.NetworkWatcherName, id.ResourceGroup, err) } d.Set("name", id.Name) - d.Set("network_watcher_name", id.WatcherName) + d.Set("network_watcher_name", id.NetworkWatcherName) d.Set("resource_group_name", id.ResourceGroup) if props := resp.PacketCaptureResultProperties; props != nil { @@ -263,18 +263,18 @@ func resourceArmNetworkPacketCaptureDelete(d 
*schema.ResourceData, meta interfac ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.NetworkPacketCaptureID(d.Id()) + id, err := parse.PacketCaptureID(d.Id()) if err != nil { return err } - future, err := client.Delete(ctx, id.ResourceGroup, id.WatcherName, id.Name) + future, err := client.Delete(ctx, id.ResourceGroup, id.NetworkWatcherName, id.Name) if err != nil { if response.WasNotFound(future.Response()) { return nil } - return fmt.Errorf("Error deleting Packet Capture %q (Watcher %q / Resource Group %q): %+v", id.Name, id.WatcherName, id.ResourceGroup, err) + return fmt.Errorf("Error deleting Packet Capture %q (Watcher %q / Resource Group %q): %+v", id.Name, id.NetworkWatcherName, id.ResourceGroup, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { @@ -282,7 +282,7 @@ func resourceArmNetworkPacketCaptureDelete(d *schema.ResourceData, meta interfac return nil } - return fmt.Errorf("Error waiting for the deletion of Packet Capture %q (Watcher %q / Resource Group %q): %+v", id.Name, id.WatcherName, id.ResourceGroup, err) + return fmt.Errorf("Error waiting for the deletion of Packet Capture %q (Watcher %q / Resource Group %q): %+v", id.Name, id.NetworkWatcherName, id.ResourceGroup, err) } return nil diff --git a/azurerm/internal/services/network/network_security_rule_resource.go b/azurerm/internal/services/network/network_security_rule_resource.go index 8fabb7233a99..4a0076c64973 100644 --- a/azurerm/internal/services/network/network_security_rule_resource.go +++ b/azurerm/internal/services/network/network_security_rule_resource.go @@ -125,7 +125,7 @@ func resourceArmNetworkSecurityRule() *schema.Resource { // lintignore:S018 "source_application_security_group_ids": { Type: schema.TypeSet, - MaxItems: 1, + MaxItems: 10, Optional: true, Elem: &schema.Schema{Type: schema.TypeString}, Set: schema.HashString, @@ -134,7 +134,7 @@ func resourceArmNetworkSecurityRule() *schema.Resource { // lintignore:S018 "destination_application_security_group_ids": { Type: schema.TypeSet, - MaxItems: 1, + MaxItems: 10, Optional: true, Elem: &schema.Schema{Type: schema.TypeString}, Set: schema.HashString, diff --git a/azurerm/internal/services/network/network_watcher_resource.go b/azurerm/internal/services/network/network_watcher_resource.go index d652832eb797..fec85ba4ac46 100644 --- a/azurerm/internal/services/network/network_watcher_resource.go +++ b/azurerm/internal/services/network/network_watcher_resource.go @@ -10,6 +10,7 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" @@ -100,24 +101,22 @@ func resourceArmNetworkWatcherRead(d *schema.ResourceData, meta interface{}) err ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.NetworkWatcherID(d.Id()) if err != nil { return err } - resourceGroup := id.ResourceGroup - name := id.Path["networkWatchers"] - resp, err := client.Get(ctx, 
resourceGroup, name) + resp, err := client.Get(ctx, id.ResourceGroup, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("Error making Read request on Network Watcher %q (Resource Group %q): %+v", name, resourceGroup, err) + return fmt.Errorf("Error making Read request on Network Watcher %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } d.Set("name", resp.Name) - d.Set("resource_group_name", resourceGroup) + d.Set("resource_group_name", id.ResourceGroup) if location := resp.Location; location != nil { d.Set("location", azure.NormalizeLocation(*location)) } @@ -130,22 +129,20 @@ func resourceArmNetworkWatcherDelete(d *schema.ResourceData, meta interface{}) e ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.NetworkWatcherID(d.Id()) if err != nil { return err } - resourceGroup := id.ResourceGroup - name := id.Path["networkWatchers"] - future, err := client.Delete(ctx, resourceGroup, name) + future, err := client.Delete(ctx, id.ResourceGroup, id.Name) if err != nil { if !response.WasNotFound(future.Response()) { - return fmt.Errorf("Error deleting Network Watcher %q (Resource Group %q): %+v", name, resourceGroup, err) + return fmt.Errorf("Error deleting Network Watcher %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for the deletion of Network Watcher %q (Resource Group %q): %+v", name, resourceGroup, err) + return fmt.Errorf("Error waiting for the deletion of Network Watcher %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } return nil diff --git a/azurerm/internal/services/network/parse/bgp_connection.go b/azurerm/internal/services/network/parse/bgp_connection.go new file mode 100644 index 000000000000..ab2d44d28eae --- /dev/null +++ b/azurerm/internal/services/network/parse/bgp_connection.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type BgpConnectionId struct { + SubscriptionId string + ResourceGroup string + VirtualHubName string + Name string +} + +func NewBgpConnectionID(subscriptionId, resourceGroup, virtualHubName, name string) BgpConnectionId { + return BgpConnectionId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + VirtualHubName: virtualHubName, + Name: name, + } +} + +func (id BgpConnectionId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Virtual Hub Name %q", id.VirtualHubName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Bgp Connection", segmentsStr) +} + +func (id BgpConnectionId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/virtualHubs/%s/bgpConnections/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.VirtualHubName, id.Name) +} + +// BgpConnectionID parses a BgpConnection ID into an BgpConnectionId struct +func BgpConnectionID(input string) (*BgpConnectionId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := BgpConnectionId{ + SubscriptionId: id.SubscriptionID, + 
ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.VirtualHubName, err = id.PopSegment("virtualHubs"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("bgpConnections"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/network/parse/bgp_connection_test.go b/azurerm/internal/services/network/parse/bgp_connection_test.go new file mode 100644 index 000000000000..0ab480c70a6f --- /dev/null +++ b/azurerm/internal/services/network/parse/bgp_connection_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = BgpConnectionId{} + +func TestBgpConnectionIDFormatter(t *testing.T) { + actual := NewBgpConnectionID("12345678-1234-9876-4563-123456789012", "resGroup1", "virtualHub1", "connection1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1/bgpConnections/connection1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestBgpConnectionID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *BgpConnectionId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing VirtualHubName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for VirtualHubName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1/bgpConnections/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1/bgpConnections/connection1", + Expected: &BgpConnectionId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + VirtualHubName: "virtualHub1", + Name: "connection1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/VIRTUALHUBS/VIRTUALHUB1/BGPCONNECTIONS/CONNECTION1", + Error: true, + }, + } + + for _, v := 
range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := BgpConnectionID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.VirtualHubName != v.Expected.VirtualHubName { + t.Fatalf("Expected %q but got %q for VirtualHubName", v.Expected.VirtualHubName, actual.VirtualHubName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/network/parse/connection_monitor.go b/azurerm/internal/services/network/parse/connection_monitor.go new file mode 100644 index 000000000000..332e4d40e707 --- /dev/null +++ b/azurerm/internal/services/network/parse/connection_monitor.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ConnectionMonitorId struct { + SubscriptionId string + ResourceGroup string + NetworkWatcherName string + Name string +} + +func NewConnectionMonitorID(subscriptionId, resourceGroup, networkWatcherName, name string) ConnectionMonitorId { + return ConnectionMonitorId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + NetworkWatcherName: networkWatcherName, + Name: name, + } +} + +func (id ConnectionMonitorId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Network Watcher Name %q", id.NetworkWatcherName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Connection Monitor", segmentsStr) +} + +func (id ConnectionMonitorId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/networkWatchers/%s/connectionMonitors/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.NetworkWatcherName, id.Name) +} + +// ConnectionMonitorID parses a ConnectionMonitor ID into an ConnectionMonitorId struct +func ConnectionMonitorID(input string) (*ConnectionMonitorId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ConnectionMonitorId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.NetworkWatcherName, err = id.PopSegment("networkWatchers"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("connectionMonitors"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/network/parse/connection_monitor_test.go b/azurerm/internal/services/network/parse/connection_monitor_test.go new file mode 100644 index 
000000000000..cddeaf81573b --- /dev/null +++ b/azurerm/internal/services/network/parse/connection_monitor_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ConnectionMonitorId{} + +func TestConnectionMonitorIDFormatter(t *testing.T) { + actual := NewConnectionMonitorID("12345678-1234-9876-4563-123456789012", "resGroup1", "watcher1", "connectionMonitor1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/networkWatchers/watcher1/connectionMonitors/connectionMonitor1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestConnectionMonitorID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ConnectionMonitorId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing NetworkWatcherName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for NetworkWatcherName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/networkWatchers/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/networkWatchers/watcher1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/networkWatchers/watcher1/connectionMonitors/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/networkWatchers/watcher1/connectionMonitors/connectionMonitor1", + Expected: &ConnectionMonitorId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + NetworkWatcherName: "watcher1", + Name: "connectionMonitor1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/NETWORKWATCHERS/WATCHER1/CONNECTIONMONITORS/CONNECTIONMONITOR1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ConnectionMonitorID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.NetworkWatcherName != 
v.Expected.NetworkWatcherName { + t.Fatalf("Expected %q but got %q for NetworkWatcherName", v.Expected.NetworkWatcherName, actual.NetworkWatcherName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/network/parse/firewall_policy.go b/azurerm/internal/services/network/parse/firewall_policy.go deleted file mode 100644 index ec667c98cdeb..000000000000 --- a/azurerm/internal/services/network/parse/firewall_policy.go +++ /dev/null @@ -1,33 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type FirewallPolicyId struct { - ResourceGroup string - Name string -} - -func FirewallPolicyID(input string) (*FirewallPolicyId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("parsing Firewall Policy ID %q: %+v", input, err) - } - - policy := FirewallPolicyId{ - ResourceGroup: id.ResourceGroup, - } - - if policy.Name, err = id.PopSegment("firewallPolicies"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &policy, nil -} diff --git a/azurerm/internal/services/network/parse/firewall_policy_test.go b/azurerm/internal/services/network/parse/firewall_policy_test.go deleted file mode 100644 index 563ae88de2e6..000000000000 --- a/azurerm/internal/services/network/parse/firewall_policy_test.go +++ /dev/null @@ -1,82 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestFirewallPolicyID(t *testing.T) { - testData := []struct { - Name string - Input string - Error bool - Expect *FirewallPolicyId - }{ - { - Name: "Empty", - Input: "", - Error: true, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Error: true, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Error: true, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/", - Error: true, - }, - { - Name: "No Policy Name", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Network/firewallPolicies", - Error: true, - }, - { - Name: "Vulnerable segments", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Network/foo/bar/firewallPolicies/policy1", - Error: true, - }, - { - Name: "Missing starting slash", - Input: "subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Network/firewallPolicies/policy1", - Error: true, - }, - { - Name: "Correct Case", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Network/firewallPolicies/policy1", - Expect: &FirewallPolicyId{ - ResourceGroup: "group1", - Name: "policy1", - }, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := FirewallPolicyID(v.Input) - if err != nil { - if v.Error { - continue - } - - t.Fatalf("Expect a value but got an error: %s", err) - } - if v.Error { - t.Fatal("Expect an error but didn't get") - } - - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) - } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected 
%q but got %q for Name", v.Expect.Name, actual.Name) - } - } -} diff --git a/azurerm/internal/services/network/parse/hub_route_table.go b/azurerm/internal/services/network/parse/hub_route_table.go new file mode 100644 index 000000000000..d1e5228cf297 --- /dev/null +++ b/azurerm/internal/services/network/parse/hub_route_table.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type HubRouteTableId struct { + SubscriptionId string + ResourceGroup string + VirtualHubName string + Name string +} + +func NewHubRouteTableID(subscriptionId, resourceGroup, virtualHubName, name string) HubRouteTableId { + return HubRouteTableId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + VirtualHubName: virtualHubName, + Name: name, + } +} + +func (id HubRouteTableId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Virtual Hub Name %q", id.VirtualHubName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Hub Route Table", segmentsStr) +} + +func (id HubRouteTableId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/virtualHubs/%s/hubRouteTables/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.VirtualHubName, id.Name) +} + +// HubRouteTableID parses a HubRouteTable ID into an HubRouteTableId struct +func HubRouteTableID(input string) (*HubRouteTableId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := HubRouteTableId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.VirtualHubName, err = id.PopSegment("virtualHubs"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("hubRouteTables"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/network/parse/hub_route_table_test.go b/azurerm/internal/services/network/parse/hub_route_table_test.go new file mode 100644 index 000000000000..eca6a9d0e4d0 --- /dev/null +++ b/azurerm/internal/services/network/parse/hub_route_table_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = HubRouteTableId{} + +func TestHubRouteTableIDFormatter(t *testing.T) { + actual := NewHubRouteTableID("12345678-1234-9876-4563-123456789012", "resGroup1", "virtualHub1", "routeTable1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1/hubRouteTables/routeTable1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestHubRouteTableID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected 
*HubRouteTableId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing VirtualHubName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for VirtualHubName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1/hubRouteTables/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1/hubRouteTables/routeTable1", + Expected: &HubRouteTableId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + VirtualHubName: "virtualHub1", + Name: "routeTable1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/VIRTUALHUBS/VIRTUALHUB1/HUBROUTETABLES/ROUTETABLE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := HubRouteTableID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.VirtualHubName != v.Expected.VirtualHubName { + t.Fatalf("Expected %q but got %q for VirtualHubName", v.Expected.VirtualHubName, actual.VirtualHubName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/network/parse/hub_virtual_network_connection.go b/azurerm/internal/services/network/parse/hub_virtual_network_connection.go new file mode 100644 index 000000000000..392a02786af5 --- /dev/null +++ b/azurerm/internal/services/network/parse/hub_virtual_network_connection.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type HubVirtualNetworkConnectionId struct { + SubscriptionId string + ResourceGroup string + VirtualHubName string + Name string +} + +func NewHubVirtualNetworkConnectionID(subscriptionId, resourceGroup, virtualHubName, name string) 
HubVirtualNetworkConnectionId { + return HubVirtualNetworkConnectionId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + VirtualHubName: virtualHubName, + Name: name, + } +} + +func (id HubVirtualNetworkConnectionId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Virtual Hub Name %q", id.VirtualHubName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Hub Virtual Network Connection", segmentsStr) +} + +func (id HubVirtualNetworkConnectionId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/virtualHubs/%s/hubVirtualNetworkConnections/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.VirtualHubName, id.Name) +} + +// HubVirtualNetworkConnectionID parses a HubVirtualNetworkConnection ID into an HubVirtualNetworkConnectionId struct +func HubVirtualNetworkConnectionID(input string) (*HubVirtualNetworkConnectionId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := HubVirtualNetworkConnectionId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.VirtualHubName, err = id.PopSegment("virtualHubs"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("hubVirtualNetworkConnections"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/network/parse/hub_virtual_network_connection_test.go b/azurerm/internal/services/network/parse/hub_virtual_network_connection_test.go new file mode 100644 index 000000000000..87e8d6f19743 --- /dev/null +++ b/azurerm/internal/services/network/parse/hub_virtual_network_connection_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = HubVirtualNetworkConnectionId{} + +func TestHubVirtualNetworkConnectionIDFormatter(t *testing.T) { + actual := NewHubVirtualNetworkConnectionID("12345678-1234-9876-4563-123456789012", "resGroup1", "virtualHub1", "hubConnection1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1/hubVirtualNetworkConnections/hubConnection1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestHubVirtualNetworkConnectionID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *HubVirtualNetworkConnectionId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + 
Error: true, + }, + + { + // missing VirtualHubName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for VirtualHubName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1/hubVirtualNetworkConnections/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1/hubVirtualNetworkConnections/hubConnection1", + Expected: &HubVirtualNetworkConnectionId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + VirtualHubName: "virtualHub1", + Name: "hubConnection1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/VIRTUALHUBS/VIRTUALHUB1/HUBVIRTUALNETWORKCONNECTIONS/HUBCONNECTION1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := HubVirtualNetworkConnectionID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.VirtualHubName != v.Expected.VirtualHubName { + t.Fatalf("Expected %q but got %q for VirtualHubName", v.Expected.VirtualHubName, actual.VirtualHubName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/network/parse/ip_group.go b/azurerm/internal/services/network/parse/ip_group.go new file mode 100644 index 000000000000..8fdb6f1f9190 --- /dev/null +++ b/azurerm/internal/services/network/parse/ip_group.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type IpGroupId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewIpGroupID(subscriptionId, resourceGroup, name string) IpGroupId { + return IpGroupId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id IpGroupId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Ip Group", segmentsStr) +} + +func (id IpGroupId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/ipGroups/%s" + return fmt.Sprintf(fmtString, 
id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// IpGroupID parses a IpGroup ID into an IpGroupId struct +func IpGroupID(input string) (*IpGroupId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := IpGroupId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("ipGroups"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/network/parse/ip_group_test.go b/azurerm/internal/services/network/parse/ip_group_test.go new file mode 100644 index 000000000000..8e4144d78e8e --- /dev/null +++ b/azurerm/internal/services/network/parse/ip_group_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = IpGroupId{} + +func TestIpGroupIDFormatter(t *testing.T) { + actual := NewIpGroupID("12345678-1234-9876-4563-123456789012", "resGroup1", "group1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/ipGroups/group1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestIpGroupID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *IpGroupId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/ipGroups/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/ipGroups/group1", + Expected: &IpGroupId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "group1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/IPGROUPS/GROUP1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := IpGroupID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, 
actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/network/parse/load_balancer.go b/azurerm/internal/services/network/parse/load_balancer.go deleted file mode 100644 index 930c9b0f9aed..000000000000 --- a/azurerm/internal/services/network/parse/load_balancer.go +++ /dev/null @@ -1,44 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type LoadBalancerId struct { - ResourceGroup string - Name string -} - -func NewLoadBalancerID(resourceGroup, name string) LoadBalancerId { - return LoadBalancerId{ - Name: name, - ResourceGroup: resourceGroup, - } -} - -func (id LoadBalancerId) ID(subscriptionId string) string { - return fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/loadBalancers/%s", subscriptionId, id.ResourceGroup, id.Name) -} - -func LoadBalancerID(input string) (*LoadBalancerId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("parsing Load Balancer ID %q: %+v", input, err) - } - - loadBalancer := LoadBalancerId{ - ResourceGroup: id.ResourceGroup, - } - - if loadBalancer.Name, err = id.PopSegment("loadBalancers"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &loadBalancer, nil -} diff --git a/azurerm/internal/services/network/parse/load_balancer_backend_address_pool.go b/azurerm/internal/services/network/parse/load_balancer_backend_address_pool.go deleted file mode 100644 index ea19aa0b8de3..000000000000 --- a/azurerm/internal/services/network/parse/load_balancer_backend_address_pool.go +++ /dev/null @@ -1,51 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type LoadBalancerBackendAddressPoolId struct { - ResourceGroup string - LoadBalancerName string - Name string -} - -func (id LoadBalancerBackendAddressPoolId) ID(subscriptionId string) string { - baseId := NewLoadBalancerID(id.ResourceGroup, id.LoadBalancerName).ID(subscriptionId) - return fmt.Sprintf("%s/backendAddressPools/%s", baseId, id.Name) -} - -func NewLoadBalancerBackendAddressPoolId(loadBalancerId LoadBalancerId, name string) LoadBalancerBackendAddressPoolId { - return LoadBalancerBackendAddressPoolId{ - ResourceGroup: loadBalancerId.ResourceGroup, - LoadBalancerName: loadBalancerId.Name, - Name: name, - } -} - -func LoadBalancerBackendAddressPoolID(input string) (*LoadBalancerBackendAddressPoolId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("parsing Load Balancer Backend Address Pool ID %q: %+v", input, err) - } - - backendAddressPoolId := LoadBalancerBackendAddressPoolId{ - ResourceGroup: id.ResourceGroup, - } - - if backendAddressPoolId.LoadBalancerName, err = id.PopSegment("loadBalancers"); err != nil { - return nil, err - } - - if backendAddressPoolId.Name, err = id.PopSegment("backendAddressPools"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &backendAddressPoolId, nil -} diff --git 
a/azurerm/internal/services/network/parse/load_balancer_backend_address_pool_test.go b/azurerm/internal/services/network/parse/load_balancer_backend_address_pool_test.go deleted file mode 100644 index f3a1ec50fd10..000000000000 --- a/azurerm/internal/services/network/parse/load_balancer_backend_address_pool_test.go +++ /dev/null @@ -1,81 +0,0 @@ -package parse - -import ( - "testing" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" -) - -var _ resourceid.Formatter = LoadBalancerBackendAddressPoolId{} - -func TestLoadBalancerBackendAddressPoolIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - loadBalancerId := NewLoadBalancerID("group1", "lb1") - actual := NewLoadBalancerBackendAddressPoolId(loadBalancerId, "pool1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/loadBalancers/lb1/backendAddressPools/pool1" - if actual != expected { - t.Fatalf("Expected %q but got %q", expected, actual) - } -} - -func TestLoadBalancerBackendAddressPoolIDParser(t *testing.T) { - testData := []struct { - input string - expected *LoadBalancerBackendAddressPoolId - }{ - { - // load balancer id - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/loadBalancers/lb1", - expected: nil, - }, - { - // lower-case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/loadBalancers/lb1/backendaddresspools/pool1", - expected: nil, - }, - { - // camel case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/loadBalancers/lb1/backendAddressPools/pool1", - expected: &LoadBalancerBackendAddressPoolId{ - ResourceGroup: "group1", - LoadBalancerName: "lb1", - Name: "pool1", - }, - }, - { - // title case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/Loadbalancers/lb1/Backendaddresspools/pool1", - expected: nil, - }, - { - // pascal case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/LoadBalancers/lb1/BackendAddressPools/pool1", - expected: nil, - }, - } - for _, test := range testData { - t.Logf("Testing %q..", test.input) - actual, err := LoadBalancerBackendAddressPoolID(test.input) - if err != nil && test.expected == nil { - continue - } else { - if err == nil && test.expected == nil { - t.Fatalf("Expected an error but didn't get one") - } else if err != nil && test.expected != nil { - t.Fatalf("Expected no error but got: %+v", err) - } - } - - if actual.ResourceGroup != test.expected.ResourceGroup { - t.Fatalf("Expected ResourceGroup to be %q but was %q", test.expected.ResourceGroup, actual.ResourceGroup) - } - - if actual.LoadBalancerName != test.expected.LoadBalancerName { - t.Fatalf("Expected LoadBalancerName to be %q but was %q", test.expected.LoadBalancerName, actual.LoadBalancerName) - } - - if actual.Name != test.expected.Name { - t.Fatalf("Expected name to be %q but was %q", test.expected.Name, actual.Name) - } - } -} diff --git a/azurerm/internal/services/network/parse/load_balancer_frontend_ip_configuration.go b/azurerm/internal/services/network/parse/load_balancer_frontend_ip_configuration.go deleted file mode 100644 index 7432e17df50f..000000000000 --- a/azurerm/internal/services/network/parse/load_balancer_frontend_ip_configuration.go +++ 
/dev/null @@ -1,51 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type LoadBalancerFrontendIPConfigurationId struct { - ResourceGroup string - LoadBalancerName string - Name string -} - -func NewLoadBalancerFrontendIPConfigurationId(loadBalancer LoadBalancerId, name string) LoadBalancerFrontendIPConfigurationId { - return LoadBalancerFrontendIPConfigurationId{ - ResourceGroup: loadBalancer.ResourceGroup, - LoadBalancerName: loadBalancer.Name, - Name: name, - } -} - -func (id LoadBalancerFrontendIPConfigurationId) ID(subscriptionId string) string { - baseId := NewLoadBalancerID(id.ResourceGroup, id.LoadBalancerName).ID(subscriptionId) - return fmt.Sprintf("%s/frontendIPConfigurations/%s", baseId, id.Name) -} - -func LoadBalancerFrontendIPConfigurationID(input string) (*LoadBalancerFrontendIPConfigurationId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("parsing Load Balancer Frontend IP Configuration ID %q: %+v", input, err) - } - - frontendIPConfigurationId := LoadBalancerFrontendIPConfigurationId{ - ResourceGroup: id.ResourceGroup, - } - - if frontendIPConfigurationId.LoadBalancerName, err = id.PopSegment("loadBalancers"); err != nil { - return nil, err - } - - if frontendIPConfigurationId.Name, err = id.PopSegment("frontendIPConfigurations"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &frontendIPConfigurationId, nil -} diff --git a/azurerm/internal/services/network/parse/load_balancer_frontend_ip_configuration_test.go b/azurerm/internal/services/network/parse/load_balancer_frontend_ip_configuration_test.go deleted file mode 100644 index 5f9a9ca5fc18..000000000000 --- a/azurerm/internal/services/network/parse/load_balancer_frontend_ip_configuration_test.go +++ /dev/null @@ -1,81 +0,0 @@ -package parse - -import ( - "testing" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" -) - -var _ resourceid.Formatter = LoadBalancerFrontendIPConfigurationId{} - -func TestLoadBalancerFrontendIPConfigurationIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - loadBalancerId := NewLoadBalancerID("group1", "lb1") - actual := NewLoadBalancerFrontendIPConfigurationId(loadBalancerId, "config1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/loadBalancers/lb1/frontendIPConfigurations/config1" - if actual != expected { - t.Fatalf("Expected %q but got %q", expected, actual) - } -} - -func TestLoadBalancerFrontendIPConfigurationIDParser(t *testing.T) { - testData := []struct { - input string - expected *LoadBalancerFrontendIPConfigurationId - }{ - { - // load balancer id - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/loadBalancers/lb1", - expected: nil, - }, - { - // lower-case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/loadBalancers/lb1/frontendipconfigurations/config1", - expected: nil, - }, - { - // camel case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/loadBalancers/lb1/frontendIPConfigurations/config1", - expected: &LoadBalancerFrontendIPConfigurationId{ - ResourceGroup: "group1", - LoadBalancerName: "lb1", - Name: 
"config1", - }, - }, - { - // title case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/Loadbalancers/lb1/Frontendipconfigurations/config1", - expected: nil, - }, - { - // pascal case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/LoadBalancers/lb1/FrontendIPConfigurations/config1", - expected: nil, - }, - } - for _, test := range testData { - t.Logf("Testing %q..", test.input) - actual, err := LoadBalancerFrontendIPConfigurationID(test.input) - if err != nil && test.expected == nil { - continue - } else { - if err == nil && test.expected == nil { - t.Fatalf("Expected an error but didn't get one") - } else if err != nil && test.expected != nil { - t.Fatalf("Expected no error but got: %+v", err) - } - } - - if actual.ResourceGroup != test.expected.ResourceGroup { - t.Fatalf("Expected ResourceGroup to be %q but was %q", test.expected.ResourceGroup, actual.ResourceGroup) - } - - if actual.LoadBalancerName != test.expected.LoadBalancerName { - t.Fatalf("Expected LoadBalancerName to be %q but was %q", test.expected.LoadBalancerName, actual.LoadBalancerName) - } - - if actual.Name != test.expected.Name { - t.Fatalf("Expected name to be %q but was %q", test.expected.Name, actual.Name) - } - } -} diff --git a/azurerm/internal/services/network/parse/load_balancer_inbound_nat_pool.go b/azurerm/internal/services/network/parse/load_balancer_inbound_nat_pool.go deleted file mode 100644 index 993f454fb445..000000000000 --- a/azurerm/internal/services/network/parse/load_balancer_inbound_nat_pool.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type LoadBalancerInboundNATPoolId struct { - ResourceGroup string - LoadBalancerName string - Name string -} - -func LoadBalancerInboundNATPoolID(input string) (*LoadBalancerInboundNATPoolId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("parsing Load Balancer Inbound NAT Pool ID %q: %+v", input, err) - } - - natPoolId := LoadBalancerInboundNATPoolId{ - ResourceGroup: id.ResourceGroup, - } - - if natPoolId.LoadBalancerName, err = id.PopSegment("loadBalancers"); err != nil { - return nil, err - } - - if natPoolId.Name, err = id.PopSegment("inboundNatPools"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &natPoolId, nil -} diff --git a/azurerm/internal/services/network/parse/load_balancer_inbound_nat_pool_test.go b/azurerm/internal/services/network/parse/load_balancer_inbound_nat_pool_test.go deleted file mode 100644 index 7d10d8ac6a90..000000000000 --- a/azurerm/internal/services/network/parse/load_balancer_inbound_nat_pool_test.go +++ /dev/null @@ -1,65 +0,0 @@ -package parse - -import "testing" - -func TestLoadBalancerInboundNATPoolIDParser(t *testing.T) { - testData := []struct { - input string - expected *LoadBalancerInboundNATPoolId - }{ - { - // load balancer id - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/loadBalancers/lb1", - expected: nil, - }, - { - // lower-case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/loadBalancers/lb1/inboundnatpools/pool1", - expected: nil, - }, - { - // camel case - input: 
"/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/loadBalancers/lb1/inboundNatPools/pool1", - expected: &LoadBalancerInboundNATPoolId{ - ResourceGroup: "group1", - LoadBalancerName: "lb1", - Name: "pool1", - }, - }, - { - // title case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/Loadbalancers/lb1/Inboundnatpools/pool1", - expected: nil, - }, - { - // pascal case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/LoadBalancers/lb1/InboundNatPools/pool1", - expected: nil, - }, - } - for _, test := range testData { - t.Logf("Testing %q..", test.input) - actual, err := LoadBalancerInboundNATPoolID(test.input) - if err != nil && test.expected == nil { - continue - } else { - if err == nil && test.expected == nil { - t.Fatalf("Expected an error but didn't get one") - } else if err != nil && test.expected != nil { - t.Fatalf("Expected no error but got: %+v", err) - } - } - - if actual.ResourceGroup != test.expected.ResourceGroup { - t.Fatalf("Expected ResourceGroup to be %q but was %q", test.expected.ResourceGroup, actual.ResourceGroup) - } - - if actual.LoadBalancerName != test.expected.LoadBalancerName { - t.Fatalf("Expected LoadBalancerName to be %q but was %q", test.expected.LoadBalancerName, actual.LoadBalancerName) - } - - if actual.Name != test.expected.Name { - t.Fatalf("Expected name to be %q but was %q", test.expected.Name, actual.Name) - } - } -} diff --git a/azurerm/internal/services/network/parse/load_balancer_inbound_nat_rule.go b/azurerm/internal/services/network/parse/load_balancer_inbound_nat_rule.go deleted file mode 100644 index c725b76be651..000000000000 --- a/azurerm/internal/services/network/parse/load_balancer_inbound_nat_rule.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type LoadBalancerInboundNATRuleId struct { - ResourceGroup string - LoadBalancerName string - Name string -} - -func LoadBalancerInboundNATRuleID(input string) (*LoadBalancerInboundNATRuleId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("parsing Load Balancer Inbound NAT Rule ID %q: %+v", input, err) - } - - natRuleId := LoadBalancerInboundNATRuleId{ - ResourceGroup: id.ResourceGroup, - } - - if natRuleId.LoadBalancerName, err = id.PopSegment("loadBalancers"); err != nil { - return nil, err - } - - if natRuleId.Name, err = id.PopSegment("inboundNatRules"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &natRuleId, nil -} diff --git a/azurerm/internal/services/network/parse/load_balancer_inbound_nat_rule_test.go b/azurerm/internal/services/network/parse/load_balancer_inbound_nat_rule_test.go deleted file mode 100644 index 67464a34062c..000000000000 --- a/azurerm/internal/services/network/parse/load_balancer_inbound_nat_rule_test.go +++ /dev/null @@ -1,65 +0,0 @@ -package parse - -import "testing" - -func TestLoadBalancerInboundNATRuleIDParser(t *testing.T) { - testData := []struct { - input string - expected *LoadBalancerInboundNATRuleId - }{ - { - // load balancer id - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/loadBalancers/lb1", - expected: nil, - }, - { - // lower-case - input: 
"/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/loadBalancers/lb1/inboundnatrules/rule1", - expected: nil, - }, - { - // camel case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/loadBalancers/lb1/inboundNatRules/rule1", - expected: &LoadBalancerInboundNATRuleId{ - ResourceGroup: "group1", - LoadBalancerName: "lb1", - Name: "rule1", - }, - }, - { - // title case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/Loadbalancers/lb1/Inboundnatrules/rule1", - expected: nil, - }, - { - // pascal case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/LoadBalancers/lb1/InboundNatRules/rule1", - expected: nil, - }, - } - for _, test := range testData { - t.Logf("Testing %q..", test.input) - actual, err := LoadBalancerInboundNATRuleID(test.input) - if err != nil && test.expected == nil { - continue - } else { - if err == nil && test.expected == nil { - t.Fatalf("Expected an error but didn't get one") - } else if err != nil && test.expected != nil { - t.Fatalf("Expected no error but got: %+v", err) - } - } - - if actual.ResourceGroup != test.expected.ResourceGroup { - t.Fatalf("Expected ResourceGroup to be %q but was %q", test.expected.ResourceGroup, actual.ResourceGroup) - } - - if actual.LoadBalancerName != test.expected.LoadBalancerName { - t.Fatalf("Expected LoadBalancerName to be %q but was %q", test.expected.LoadBalancerName, actual.LoadBalancerName) - } - - if actual.Name != test.expected.Name { - t.Fatalf("Expected name to be %q but was %q", test.expected.Name, actual.Name) - } - } -} diff --git a/azurerm/internal/services/network/parse/load_balancer_outbound_rule.go b/azurerm/internal/services/network/parse/load_balancer_outbound_rule.go deleted file mode 100644 index 945557763c5a..000000000000 --- a/azurerm/internal/services/network/parse/load_balancer_outbound_rule.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type LoadBalancerOutboundRuleId struct { - ResourceGroup string - LoadBalancerName string - Name string -} - -func LoadBalancerOutboundRuleID(input string) (*LoadBalancerOutboundRuleId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("parsing Load Balancer Outbound Rule ID %q: %+v", input, err) - } - - outboundRuleId := LoadBalancerOutboundRuleId{ - ResourceGroup: id.ResourceGroup, - } - - if outboundRuleId.LoadBalancerName, err = id.PopSegment("loadBalancers"); err != nil { - return nil, err - } - - if outboundRuleId.Name, err = id.PopSegment("outboundRules"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &outboundRuleId, nil -} diff --git a/azurerm/internal/services/network/parse/load_balancer_outbound_rule_test.go b/azurerm/internal/services/network/parse/load_balancer_outbound_rule_test.go deleted file mode 100644 index 58baca3a4ab6..000000000000 --- a/azurerm/internal/services/network/parse/load_balancer_outbound_rule_test.go +++ /dev/null @@ -1,65 +0,0 @@ -package parse - -import "testing" - -func TestLoadBalancerOutboundRuleIDParser(t *testing.T) { - testData := []struct { - input string - expected *LoadBalancerOutboundRuleId - }{ - { - // load balancer id - input: 
"/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/loadBalancers/lb1", - expected: nil, - }, - { - // lower-case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/loadBalancers/lb1/outboundrules/rule1", - expected: nil, - }, - { - // camel case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/loadBalancers/lb1/outboundRules/rule1", - expected: &LoadBalancerOutboundRuleId{ - ResourceGroup: "group1", - LoadBalancerName: "lb1", - Name: "rule1", - }, - }, - { - // title case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/Loadbalancers/lb1/Outboundrules/rule1", - expected: nil, - }, - { - // pascal case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/LoadBalancers/lb1/OutboundRules/rule1", - expected: nil, - }, - } - for _, test := range testData { - t.Logf("Testing %q..", test.input) - actual, err := LoadBalancerOutboundRuleID(test.input) - if err != nil && test.expected == nil { - continue - } else { - if err == nil && test.expected == nil { - t.Fatalf("Expected an error but didn't get one") - } else if err != nil && test.expected != nil { - t.Fatalf("Expected no error but got: %+v", err) - } - } - - if actual.ResourceGroup != test.expected.ResourceGroup { - t.Fatalf("Expected ResourceGroup to be %q but was %q", test.expected.ResourceGroup, actual.ResourceGroup) - } - - if actual.LoadBalancerName != test.expected.LoadBalancerName { - t.Fatalf("Expected LoadBalancerName to be %q but was %q", test.expected.LoadBalancerName, actual.LoadBalancerName) - } - - if actual.Name != test.expected.Name { - t.Fatalf("Expected name to be %q but was %q", test.expected.Name, actual.Name) - } - } -} diff --git a/azurerm/internal/services/network/parse/load_balancer_probe.go b/azurerm/internal/services/network/parse/load_balancer_probe.go deleted file mode 100644 index d67c2b1996c2..000000000000 --- a/azurerm/internal/services/network/parse/load_balancer_probe.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type LoadBalancerProbeId struct { - ResourceGroup string - LoadBalancerName string - Name string -} - -func LoadBalancerProbeID(input string) (*LoadBalancerProbeId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("parsing Load Balancer Probe ID %q: %+v", input, err) - } - - probeId := LoadBalancerProbeId{ - ResourceGroup: id.ResourceGroup, - } - - if probeId.LoadBalancerName, err = id.PopSegment("loadBalancers"); err != nil { - return nil, err - } - - if probeId.Name, err = id.PopSegment("probes"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &probeId, nil -} diff --git a/azurerm/internal/services/network/parse/load_balancer_probe_test.go b/azurerm/internal/services/network/parse/load_balancer_probe_test.go deleted file mode 100644 index 9f162cb2d51d..000000000000 --- a/azurerm/internal/services/network/parse/load_balancer_probe_test.go +++ /dev/null @@ -1,55 +0,0 @@ -package parse - -import "testing" - -func TestLoadBalancerProbeIDParser(t *testing.T) { - testData := []struct { - input string - expected *LoadBalancerProbeId - }{ - { - // load balancer id - 
input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/loadBalancers/lb1", - expected: nil, - }, - { - // camel case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/loadBalancers/lb1/probes/probe1", - expected: &LoadBalancerProbeId{ - ResourceGroup: "group1", - LoadBalancerName: "lb1", - Name: "probe1", - }, - }, - { - // title case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/Loadbalancers/lb1/Probes/probe1", - expected: nil, - }, - } - for _, test := range testData { - t.Logf("Testing %q..", test.input) - actual, err := LoadBalancerProbeID(test.input) - if err != nil && test.expected == nil { - continue - } else { - if err == nil && test.expected == nil { - t.Fatalf("Expected an error but didn't get one") - } else if err != nil && test.expected != nil { - t.Fatalf("Expected no error but got: %+v", err) - } - } - - if actual.ResourceGroup != test.expected.ResourceGroup { - t.Fatalf("Expected ResourceGroup to be %q but was %q", test.expected.ResourceGroup, actual.ResourceGroup) - } - - if actual.LoadBalancerName != test.expected.LoadBalancerName { - t.Fatalf("Expected LoadBalancerName to be %q but was %q", test.expected.LoadBalancerName, actual.LoadBalancerName) - } - - if actual.Name != test.expected.Name { - t.Fatalf("Expected name to be %q but was %q", test.expected.Name, actual.Name) - } - } -} diff --git a/azurerm/internal/services/network/parse/load_balancer_rule.go b/azurerm/internal/services/network/parse/load_balancer_rule.go deleted file mode 100644 index 187c4870af8c..000000000000 --- a/azurerm/internal/services/network/parse/load_balancer_rule.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type LoadBalancerRuleId struct { - ResourceGroup string - LoadBalancerName string - Name string -} - -func LoadBalancerRuleID(input string) (*LoadBalancerRuleId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("parsing Load Balancer Rule ID %q: %+v", input, err) - } - - ruleId := LoadBalancerRuleId{ - ResourceGroup: id.ResourceGroup, - } - - if ruleId.LoadBalancerName, err = id.PopSegment("loadBalancers"); err != nil { - return nil, err - } - - if ruleId.Name, err = id.PopSegment("loadBalancingRules"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &ruleId, nil -} diff --git a/azurerm/internal/services/network/parse/load_balancer_rule_test.go b/azurerm/internal/services/network/parse/load_balancer_rule_test.go deleted file mode 100644 index f435b88c7f4b..000000000000 --- a/azurerm/internal/services/network/parse/load_balancer_rule_test.go +++ /dev/null @@ -1,65 +0,0 @@ -package parse - -import "testing" - -func TestLoadBalancerRuleIDParser(t *testing.T) { - testData := []struct { - input string - expected *LoadBalancerRuleId - }{ - { - // load balancer id - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/loadBalancers/lb1", - expected: nil, - }, - { - // lower-case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/loadBalancers/lb1/loadbalancingrules/rule1", - expected: nil, - }, - { - // camel case - input: 
"/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/loadBalancers/lb1/loadBalancingRules/rule1", - expected: &LoadBalancerRuleId{ - ResourceGroup: "group1", - LoadBalancerName: "lb1", - Name: "rule1", - }, - }, - { - // title case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/Loadbalancers/lb1/Loadbalancingrules/rule1", - expected: nil, - }, - { - // pascal case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/LoadBalancers/lb1/LoadBalancingRules/rule1", - expected: nil, - }, - } - for _, test := range testData { - t.Logf("Testing %q..", test.input) - actual, err := LoadBalancerRuleID(test.input) - if err != nil && test.expected == nil { - continue - } else { - if err == nil && test.expected == nil { - t.Fatalf("Expected an error but didn't get one") - } else if err != nil && test.expected != nil { - t.Fatalf("Expected no error but got: %+v", err) - } - } - - if actual.ResourceGroup != test.expected.ResourceGroup { - t.Fatalf("Expected ResourceGroup to be %q but was %q", test.expected.ResourceGroup, actual.ResourceGroup) - } - - if actual.LoadBalancerName != test.expected.LoadBalancerName { - t.Fatalf("Expected LoadBalancerName to be %q but was %q", test.expected.LoadBalancerName, actual.LoadBalancerName) - } - - if actual.Name != test.expected.Name { - t.Fatalf("Expected name to be %q but was %q", test.expected.Name, actual.Name) - } - } -} diff --git a/azurerm/internal/services/network/parse/load_balancer_test.go b/azurerm/internal/services/network/parse/load_balancer_test.go deleted file mode 100644 index ba2820aab493..000000000000 --- a/azurerm/internal/services/network/parse/load_balancer_test.go +++ /dev/null @@ -1,70 +0,0 @@ -package parse - -import ( - "testing" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" -) - -var _ resourceid.Formatter = LoadBalancerId{} - -func TestLoadBalancerIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - actual := NewLoadBalancerID("group1", "lb1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/loadBalancers/lb1" - if actual != expected { - t.Fatalf("Expected %q but got %q", expected, actual) - } -} - -func TestLoadBalancerIDParser(t *testing.T) { - testData := []struct { - input string - expected *LoadBalancerId - }{ - { - // lower case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/loadbalancers/lb1", - expected: nil, - }, - { - // camel case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/loadBalancers/lb1", - expected: &LoadBalancerId{ - ResourceGroup: "group1", - Name: "lb1", - }, - }, - { - // title case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/Loadbalancers/lb1", - expected: nil, - }, - { - // pascal case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/LoadBalancers/lb1", - expected: nil, - }, - } - for _, test := range testData { - t.Logf("Testing %q..", test.input) - actual, err := LoadBalancerID(test.input) - if err != nil && test.expected == nil { - continue - } else { - if err == nil && test.expected == nil { - t.Fatalf("Expected an 
error but didn't get one") - } else if err != nil && test.expected != nil { - t.Fatalf("Expected no error but got: %+v", err) - } - } - - if actual.ResourceGroup != test.expected.ResourceGroup { - t.Fatalf("Expected ResourceGroup to be %q but was %q", test.expected.ResourceGroup, actual.ResourceGroup) - } - - if actual.Name != test.expected.Name { - t.Fatalf("Expected name to be %q but was %q", test.expected.Name, actual.Name) - } - } -} diff --git a/azurerm/internal/services/network/parse/nat_gateway.go b/azurerm/internal/services/network/parse/nat_gateway.go index c730d001f34d..923e136cdd7e 100644 --- a/azurerm/internal/services/network/parse/nat_gateway.go +++ b/azurerm/internal/services/network/parse/nat_gateway.go @@ -1,25 +1,63 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( + "fmt" + "strings" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type NatGatewayId struct { - Name string - ResourceGroup string + SubscriptionId string + ResourceGroup string + Name string +} + +func NewNatGatewayID(subscriptionId, resourceGroup, name string) NatGatewayId { + return NatGatewayId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id NatGatewayId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Nat Gateway", segmentsStr) +} + +func (id NatGatewayId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/natGateways/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) } +// NatGatewayID parses a NatGateway ID into an NatGatewayId struct func NatGatewayID(input string) (*NatGatewayId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { return nil, err } - natGateway := NatGatewayId{ - ResourceGroup: id.ResourceGroup, + resourceId := NatGatewayId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if natGateway.Name, err = id.PopSegment("natGateways"); err != nil { + if resourceId.Name, err = id.PopSegment("natGateways"); err != nil { return nil, err } @@ -27,5 +65,5 @@ func NatGatewayID(input string) (*NatGatewayId, error) { return nil, err } - return &natGateway, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/network/parse/nat_gateway_public_ip_association.go b/azurerm/internal/services/network/parse/nat_gateway_public_ip_association.go index 5ea4f18fa583..802ad3cce017 100644 --- a/azurerm/internal/services/network/parse/nat_gateway_public_ip_association.go +++ b/azurerm/internal/services/network/parse/nat_gateway_public_ip_association.go @@ -23,7 +23,7 @@ func NatGatewayPublicIPAddressAssociationID(input string) (*NatGatewayPublicIPAd // whilst we need the Resource ID, we may as well validate it publicIPAddress := segments[1] - if _, err := PublicIPAddressID(publicIPAddress); err != nil { + if _, err := PublicIpAddressID(publicIPAddress); err != nil { return nil, fmt.Errorf("parsing Public IP Address ID %q: %+v", publicIPAddress, err) } diff --git a/azurerm/internal/services/network/parse/nat_gateway_test.go 
b/azurerm/internal/services/network/parse/nat_gateway_test.go index d93cb021cf27..aff93ee99a28 100644 --- a/azurerm/internal/services/network/parse/nat_gateway_test.go +++ b/azurerm/internal/services/network/parse/nat_gateway_test.go @@ -1,54 +1,91 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" ) +var _ resourceid.Formatter = NatGatewayId{} + +func TestNatGatewayIDFormatter(t *testing.T) { + actual := NewNatGatewayID("12345678-1234-9876-4563-123456789012", "resGroup1", "gateway1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/natGateways/gateway1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + func TestNatGatewayID(t *testing.T) { testData := []struct { - Name string - Input string - Error bool - Expect *NatGatewayId + Input string + Error bool + Expected *NatGatewayId }{ + { - Name: "Empty", + // empty Input: "", Error: true, }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + // missing SubscriptionId + Input: "/", Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups", + // missing value for SubscriptionId + Input: "/subscriptions/", Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1", + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", Error: true, }, + { - Name: "Missing Nat Gateway Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Network/natGateways", + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", Error: true, }, + { - Name: "Nat Gateway ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Network/natGateways/gateway1", - Error: false, - Expect: &NatGatewayId{ - Name: "gateway1", - ResourceGroup: "group1", + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/natGateways/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/natGateways/gateway1", + Expected: &NatGatewayId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "gateway1", }, }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/NATGATEWAYS/GATEWAY1", + Error: true, + }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := NatGatewayID(v.Input) if err != nil { @@ -56,15 +93,20 @@ func TestNatGatewayID(t *testing.T) { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) 
+ if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) } } } diff --git a/azurerm/internal/services/network/parse/network_interface.go b/azurerm/internal/services/network/parse/network_interface.go index 4f254635f8b6..e36bd497392b 100644 --- a/azurerm/internal/services/network/parse/network_interface.go +++ b/azurerm/internal/services/network/parse/network_interface.go @@ -1,27 +1,63 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type NetworkInterfaceId struct { - ResourceGroup string - Name string + SubscriptionId string + ResourceGroup string + Name string +} + +func NewNetworkInterfaceID(subscriptionId, resourceGroup, name string) NetworkInterfaceId { + return NetworkInterfaceId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id NetworkInterfaceId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Network Interface", segmentsStr) } +func (id NetworkInterfaceId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/networkInterfaces/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// NetworkInterfaceID parses a NetworkInterface ID into an NetworkInterfaceId struct func NetworkInterfaceID(input string) (*NetworkInterfaceId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Network Interface ID %q: %+v", input, err) + return nil, err + } + + resourceId := NetworkInterfaceId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - server := NetworkInterfaceId{ - ResourceGroup: id.ResourceGroup, + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if server.Name, err = id.PopSegment("networkInterfaces"); err != nil { + if resourceId.Name, err = id.PopSegment("networkInterfaces"); err != nil { return nil, err } @@ -29,5 +65,5 @@ func NetworkInterfaceID(input string) (*NetworkInterfaceId, error) { return nil, err } - return &server, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/network/parse/network_interface_test.go b/azurerm/internal/services/network/parse/network_interface_test.go index 8182e110c2e2..dbe0bbd07312 100644 --- a/azurerm/internal/services/network/parse/network_interface_test.go +++ b/azurerm/internal/services/network/parse/network_interface_test.go @@ -1,59 +1,91 @@ package parse +// NOTE: this file is generated via 'go:generate' - 
manual changes will be overwritten + import ( "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" ) +var _ resourceid.Formatter = NetworkInterfaceId{} + +func TestNetworkInterfaceIDFormatter(t *testing.T) { + actual := NewNetworkInterfaceID("12345678-1234-9876-4563-123456789012", "resGroup1", "networkInterface1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/networkInterfaces/networkInterface1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + func TestNetworkInterfaceID(t *testing.T) { testData := []struct { - Name string - Input string - Error bool - Expect *NetworkInterfaceId + Input string + Error bool + Expected *NetworkInterfaceId }{ + { - Name: "Empty", + // empty Input: "", Error: true, }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", Error: true, }, + { - Name: "Missing Network Interfaces Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/networkInterfaces/", + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/networkInterfaces/", Error: true, }, + { - Name: "Network Interface ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/networkInterfaces/interface1", - Error: false, - Expect: &NetworkInterfaceId{ - ResourceGroup: "resGroup1", - Name: "interface1", + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/networkInterfaces/networkInterface1", + Expected: &NetworkInterfaceId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "networkInterface1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/NetworkInterfaces/set1", + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/NETWORKINTERFACES/NETWORKINTERFACE1", Error: true, }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := NetworkInterfaceID(v.Input) if err != nil { @@ -61,15 +93,20 @@ func TestNetworkInterfaceID(t *testing.T) { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, 
actual.Name) + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) } } } diff --git a/azurerm/internal/services/network/parse/network_packet_capture.go b/azurerm/internal/services/network/parse/network_packet_capture.go deleted file mode 100644 index fdf0f26b3e25..000000000000 --- a/azurerm/internal/services/network/parse/network_packet_capture.go +++ /dev/null @@ -1,34 +0,0 @@ -package parse - -import "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - -type NetworkPacketCaptureId struct { - ResourceGroup string - WatcherName string - Name string -} - -func NetworkPacketCaptureID(input string) (*NetworkPacketCaptureId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, err - } - - packetCapture := NetworkPacketCaptureId{ - ResourceGroup: id.ResourceGroup, - } - - if packetCapture.WatcherName, err = id.PopSegment("networkWatchers"); err != nil { - return nil, err - } - - if packetCapture.Name, err = id.PopSegment("packetCaptures"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &packetCapture, nil -} diff --git a/azurerm/internal/services/network/parse/network_packet_capture_test.go b/azurerm/internal/services/network/parse/network_packet_capture_test.go deleted file mode 100644 index c9d5d8907cf3..000000000000 --- a/azurerm/internal/services/network/parse/network_packet_capture_test.go +++ /dev/null @@ -1,90 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestNetworkPacketCaptureID(t *testing.T) { - testData := []struct { - Name string - Input string - Error bool - Expect *NetworkPacketCaptureId - }{ - { - Name: "Empty", - Input: "", - Error: true, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Error: true, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Error: true, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Error: true, - }, - { - Name: "Missing Network Watcher Key", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Network/networkWatchers/", - Error: true, - }, - { - Name: "Missing Network Watcher Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Network/networkWatchers/watcher1", - Error: true, - }, - { - Name: "Missing Network Packet Capture Key", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Network/networkWatchers/watcher1/packetCaptures", - Error: true, - }, - { - Name: "Namespace Network Packet Capture Value", - Input: 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Network/networkWatchers/watcher1/packetCaptures/packetCapture1", - Error: false, - Expect: &NetworkPacketCaptureId{ - ResourceGroup: "group1", - WatcherName: "watcher1", - Name: "packetCapture1", - }, - }, - { - Name: "Wrong Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Network/networkWatchers/watcher1/NetworkPacketCaptures/packetCapture1", - Error: true, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := NetworkPacketCaptureID(v.Input) - if err != nil { - if v.Error { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) - } - - if actual.WatcherName != v.Expect.WatcherName { - t.Fatalf("Expected %q but got %q for Name", v.Expect.WatcherName, actual.WatcherName) - } - - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/network/parse/network_watcher.go b/azurerm/internal/services/network/parse/network_watcher.go new file mode 100644 index 000000000000..920c71842d94 --- /dev/null +++ b/azurerm/internal/services/network/parse/network_watcher.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type NetworkWatcherId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewNetworkWatcherID(subscriptionId, resourceGroup, name string) NetworkWatcherId { + return NetworkWatcherId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id NetworkWatcherId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Network Watcher", segmentsStr) +} + +func (id NetworkWatcherId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/networkWatchers/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// NetworkWatcherID parses a NetworkWatcher ID into an NetworkWatcherId struct +func NetworkWatcherID(input string) (*NetworkWatcherId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := NetworkWatcherId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("networkWatchers"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/network/parse/network_watcher_test.go b/azurerm/internal/services/network/parse/network_watcher_test.go new file mode 100644 index 000000000000..0297102f4c48 --- /dev/null +++ 
b/azurerm/internal/services/network/parse/network_watcher_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = NetworkWatcherId{} + +func TestNetworkWatcherIDFormatter(t *testing.T) { + actual := NewNetworkWatcherID("12345678-1234-9876-4563-123456789012", "resGroup1", "watcher1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/networkWatchers/watcher1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestNetworkWatcherID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *NetworkWatcherId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/networkWatchers/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/networkWatchers/watcher1", + Expected: &NetworkWatcherId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "watcher1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/NETWORKWATCHERS/WATCHER1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := NetworkWatcherID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/network/parse/packet_capture.go b/azurerm/internal/services/network/parse/packet_capture.go new file mode 100644 index 000000000000..7dfb43f38138 --- /dev/null +++ b/azurerm/internal/services/network/parse/packet_capture.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type PacketCaptureId struct { + SubscriptionId string + ResourceGroup string + 
NetworkWatcherName string + Name string +} + +func NewPacketCaptureID(subscriptionId, resourceGroup, networkWatcherName, name string) PacketCaptureId { + return PacketCaptureId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + NetworkWatcherName: networkWatcherName, + Name: name, + } +} + +func (id PacketCaptureId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Network Watcher Name %q", id.NetworkWatcherName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Packet Capture", segmentsStr) +} + +func (id PacketCaptureId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/networkWatchers/%s/packetCaptures/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.NetworkWatcherName, id.Name) +} + +// PacketCaptureID parses a PacketCapture ID into an PacketCaptureId struct +func PacketCaptureID(input string) (*PacketCaptureId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := PacketCaptureId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.NetworkWatcherName, err = id.PopSegment("networkWatchers"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("packetCaptures"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/network/parse/packet_capture_test.go b/azurerm/internal/services/network/parse/packet_capture_test.go new file mode 100644 index 000000000000..d6ac3c7f4555 --- /dev/null +++ b/azurerm/internal/services/network/parse/packet_capture_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = PacketCaptureId{} + +func TestPacketCaptureIDFormatter(t *testing.T) { + actual := NewPacketCaptureID("12345678-1234-9876-4563-123456789012", "resGroup1", "watcher1", "capture1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/networkWatchers/watcher1/packetCaptures/capture1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestPacketCaptureID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *PacketCaptureId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing NetworkWatcherName + Input: 
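// Child resources such as packet captures carry their parent's name in the ID,
// and the generated PacketCaptureId above exposes that parent segment as a
// typed field. A small parsing sketch follows; the calling code is
// illustrative only.
package sketch

import (
	"fmt"

	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse"
)

func packetCaptureExample() error {
	raw := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/networkWatchers/watcher1/packetCaptures/capture1"

	id, err := parse.PacketCaptureID(raw)
	if err != nil {
		return err
	}

	// Both the parent and the child are available without re-splitting the ID.
	fmt.Printf("network watcher: %s, packet capture: %s (resource group %s)\n",
		id.NetworkWatcherName, id.Name, id.ResourceGroup)
	return nil
}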
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for NetworkWatcherName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/networkWatchers/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/networkWatchers/watcher1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/networkWatchers/watcher1/packetCaptures/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/networkWatchers/watcher1/packetCaptures/capture1", + Expected: &PacketCaptureId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + NetworkWatcherName: "watcher1", + Name: "capture1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/NETWORKWATCHERS/WATCHER1/PACKETCAPTURES/CAPTURE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := PacketCaptureID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.NetworkWatcherName != v.Expected.NetworkWatcherName { + t.Fatalf("Expected %q but got %q for NetworkWatcherName", v.Expected.NetworkWatcherName, actual.NetworkWatcherName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/network/parse/private_dns_zone_config.go b/azurerm/internal/services/network/parse/private_dns_zone_config.go new file mode 100644 index 000000000000..9cf6b1413501 --- /dev/null +++ b/azurerm/internal/services/network/parse/private_dns_zone_config.go @@ -0,0 +1,81 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type PrivateDnsZoneConfigId struct { + SubscriptionId string + ResourceGroup string + PrivateEndpointName string + PrivateDnsZoneGroupName string + Name string +} + +func NewPrivateDnsZoneConfigID(subscriptionId, resourceGroup, privateEndpointName, privateDnsZoneGroupName, name string) PrivateDnsZoneConfigId { + return PrivateDnsZoneConfigId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + PrivateEndpointName: privateEndpointName, + PrivateDnsZoneGroupName: privateDnsZoneGroupName, + Name: name, + } +} + +func (id PrivateDnsZoneConfigId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Private Dns Zone Group Name %q", id.PrivateDnsZoneGroupName), + fmt.Sprintf("Private Endpoint Name %q", 
id.PrivateEndpointName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Private Dns Zone Config", segmentsStr) +} + +func (id PrivateDnsZoneConfigId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/privateEndpoints/%s/privateDnsZoneGroups/%s/privateDnsZoneConfigs/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.PrivateEndpointName, id.PrivateDnsZoneGroupName, id.Name) +} + +// PrivateDnsZoneConfigID parses a PrivateDnsZoneConfig ID into an PrivateDnsZoneConfigId struct +func PrivateDnsZoneConfigID(input string) (*PrivateDnsZoneConfigId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := PrivateDnsZoneConfigId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.PrivateEndpointName, err = id.PopSegment("privateEndpoints"); err != nil { + return nil, err + } + if resourceId.PrivateDnsZoneGroupName, err = id.PopSegment("privateDnsZoneGroups"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("privateDnsZoneConfigs"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/network/parse/private_dns_zone_config_test.go b/azurerm/internal/services/network/parse/private_dns_zone_config_test.go new file mode 100644 index 000000000000..c211ea1b9481 --- /dev/null +++ b/azurerm/internal/services/network/parse/private_dns_zone_config_test.go @@ -0,0 +1,144 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = PrivateDnsZoneConfigId{} + +func TestPrivateDnsZoneConfigIDFormatter(t *testing.T) { + actual := NewPrivateDnsZoneConfigID("12345678-1234-9876-4563-123456789012", "resGroup1", "endpoint1", "privateDnsZoneGroup1", "privateDnsZoneConfig1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/endpoint1/privateDnsZoneGroups/privateDnsZoneGroup1/privateDnsZoneConfigs/privateDnsZoneConfig1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestPrivateDnsZoneConfigID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *PrivateDnsZoneConfigId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing PrivateEndpointName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // 
missing value for PrivateEndpointName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/", + Error: true, + }, + + { + // missing PrivateDnsZoneGroupName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/endpoint1/", + Error: true, + }, + + { + // missing value for PrivateDnsZoneGroupName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/endpoint1/privateDnsZoneGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/endpoint1/privateDnsZoneGroups/privateDnsZoneGroup1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/endpoint1/privateDnsZoneGroups/privateDnsZoneGroup1/privateDnsZoneConfigs/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/endpoint1/privateDnsZoneGroups/privateDnsZoneGroup1/privateDnsZoneConfigs/privateDnsZoneConfig1", + Expected: &PrivateDnsZoneConfigId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + PrivateEndpointName: "endpoint1", + PrivateDnsZoneGroupName: "privateDnsZoneGroup1", + Name: "privateDnsZoneConfig1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/PRIVATEENDPOINTS/ENDPOINT1/PRIVATEDNSZONEGROUPS/PRIVATEDNSZONEGROUP1/PRIVATEDNSZONECONFIGS/PRIVATEDNSZONECONFIG1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := PrivateDnsZoneConfigID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.PrivateEndpointName != v.Expected.PrivateEndpointName { + t.Fatalf("Expected %q but got %q for PrivateEndpointName", v.Expected.PrivateEndpointName, actual.PrivateEndpointName) + } + if actual.PrivateDnsZoneGroupName != v.Expected.PrivateDnsZoneGroupName { + t.Fatalf("Expected %q but got %q for PrivateDnsZoneGroupName", v.Expected.PrivateDnsZoneGroupName, actual.PrivateDnsZoneGroupName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/network/parse/private_dns_zone_group.go b/azurerm/internal/services/network/parse/private_dns_zone_group.go new file mode 100644 index 000000000000..690c63f24efe --- /dev/null +++ b/azurerm/internal/services/network/parse/private_dns_zone_group.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type PrivateDnsZoneGroupId struct { + SubscriptionId string + ResourceGroup string + PrivateEndpointName string + Name string +} + +func NewPrivateDnsZoneGroupID(subscriptionId, resourceGroup, privateEndpointName, name string) PrivateDnsZoneGroupId { + return PrivateDnsZoneGroupId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + PrivateEndpointName: privateEndpointName, + Name: name, + } +} + +func (id PrivateDnsZoneGroupId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Private Endpoint Name %q", id.PrivateEndpointName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Private Dns Zone Group", segmentsStr) +} + +func (id PrivateDnsZoneGroupId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/privateEndpoints/%s/privateDnsZoneGroups/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.PrivateEndpointName, id.Name) +} + +// PrivateDnsZoneGroupID parses a PrivateDnsZoneGroup ID into an PrivateDnsZoneGroupId struct +func PrivateDnsZoneGroupID(input string) (*PrivateDnsZoneGroupId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := PrivateDnsZoneGroupId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.PrivateEndpointName, err = id.PopSegment("privateEndpoints"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("privateDnsZoneGroups"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/network/parse/private_dns_zone_group_test.go b/azurerm/internal/services/network/parse/private_dns_zone_group_test.go new file mode 100644 index 000000000000..07a72ed1fa2c --- /dev/null +++ b/azurerm/internal/services/network/parse/private_dns_zone_group_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = PrivateDnsZoneGroupId{} + +func TestPrivateDnsZoneGroupIDFormatter(t *testing.T) { + actual := NewPrivateDnsZoneGroupID("12345678-1234-9876-4563-123456789012", "resGroup1", "endpoint1", "privateDnsZoneGroup1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/endpoint1/privateDnsZoneGroups/privateDnsZoneGroup1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestPrivateDnsZoneGroupID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *PrivateDnsZoneGroupId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing PrivateEndpointName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for PrivateEndpointName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/endpoint1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/endpoint1/privateDnsZoneGroups/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/endpoint1/privateDnsZoneGroups/privateDnsZoneGroup1", + Expected: &PrivateDnsZoneGroupId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + PrivateEndpointName: "endpoint1", + Name: "privateDnsZoneGroup1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/PRIVATEENDPOINTS/ENDPOINT1/PRIVATEDNSZONEGROUPS/PRIVATEDNSZONEGROUP1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := PrivateDnsZoneGroupID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.PrivateEndpointName != v.Expected.PrivateEndpointName { + t.Fatalf("Expected %q but got %q for PrivateEndpointName", v.Expected.PrivateEndpointName, actual.PrivateEndpointName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/network/parse/private_endpoint.go b/azurerm/internal/services/network/parse/private_endpoint.go index 70650cb9a3de..72dc4e436b70 100644 --- a/azurerm/internal/services/network/parse/private_endpoint.go +++ b/azurerm/internal/services/network/parse/private_endpoint.go @@ -1,5 +1,7 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" "strings" @@ -7,111 +9,61 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) -type NameResourceGroup struct { - ResourceGroup string - Name string - ID string +type PrivateEndpointId struct { + SubscriptionId string + ResourceGroup string + Name string } -func PrivateDnsZoneGroupResourceID(input string) (*NameResourceGroup, error) { - if len(strings.TrimSpace(input)) == 0 { - return nil, fmt.Errorf("unable to parse Private DNS Zone Group ID %q: input is empty", 
input) - } - - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("unable to parse Private DNS Zone Group ID %q: %+v", input, err) +func NewPrivateEndpointID(subscriptionId, resourceGroup, name string) PrivateEndpointId { + return PrivateEndpointId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, } - - privateDnsZoneGroup := NameResourceGroup{ - ResourceGroup: id.ResourceGroup, - } - - if privateDnsZoneGroup.Name, err = id.PopSegment("privateDnsZoneGroups"); err != nil { - return nil, err - } - - if privateDnsZoneGroup.ID = input; err != nil { - return nil, err - } - - return &privateDnsZoneGroup, nil } -func PrivateDnsZoneResourceIDs(input []interface{}) (*[]NameResourceGroup, error) { - results := make([]NameResourceGroup, 0) - - for _, item := range input { - v := item.(string) - - if privateDnsZone, err := PrivateDnsZoneResourceID(v); err != nil { - return nil, fmt.Errorf("unable to parse Private DNS Zone ID %q: %+v", v, err) - } else { - results = append(results, *privateDnsZone) - } +func (id PrivateEndpointId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), } - - return &results, nil + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Private Endpoint", segmentsStr) } -func PrivateDnsZoneResourceID(input string) (*NameResourceGroup, error) { - if len(strings.TrimSpace(input)) == 0 { - return nil, fmt.Errorf("unable to parse Private DNS Zone ID %q: input is empty", input) - } +func (id PrivateEndpointId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/privateEndpoints/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} +// PrivateEndpointID parses a PrivateEndpoint ID into an PrivateEndpointId struct +func PrivateEndpointID(input string) (*PrivateEndpointId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("unable to parse Private DNS Zone ID %q: %+v", input, err) - } - - privateDnsZone := NameResourceGroup{ - ResourceGroup: id.ResourceGroup, - } - - if privateDnsZone.Name, err = id.PopSegment("privateDnsZones"); err != nil { return nil, err } - if privateDnsZone.ID = input; err != nil { - return nil, err + resourceId := PrivateEndpointId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, } - return &privateDnsZone, nil -} - -func PrivateEndpointResourceID(input string) (*NameResourceGroup, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("unable to parse Private Endpoint ID %q: %+v", input, err) + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - privateEndpoint := NameResourceGroup{ - ResourceGroup: id.ResourceGroup, + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if privateEndpoint.Name, err = id.PopSegment("privateEndpoints"); err != nil { + if resourceId.Name, err = id.PopSegment("privateEndpoints"); err != nil { return nil, err } - if privateEndpoint.ID = input; err != nil { + if err := id.ValidateNoEmptySegments(input); err != nil { return nil, err } - return &privateEndpoint, nil -} - -func ValidatePrivateDnsZoneResourceID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be 
string", k)) - return - } - - if id, err := azure.ParseAzureResourceID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a resource id: %v", k, err)) - } else if _, err = id.PopSegment("privateDnsZones"); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a private dns zone resource id: %v", k, err)) - } - - return warnings, errors + return &resourceId, nil } diff --git a/azurerm/internal/services/network/parse/private_endpoint_test.go b/azurerm/internal/services/network/parse/private_endpoint_test.go new file mode 100644 index 000000000000..1bb66c2f18bf --- /dev/null +++ b/azurerm/internal/services/network/parse/private_endpoint_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = PrivateEndpointId{} + +func TestPrivateEndpointIDFormatter(t *testing.T) { + actual := NewPrivateEndpointID("12345678-1234-9876-4563-123456789012", "resGroup1", "endpoint1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/endpoint1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestPrivateEndpointID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *PrivateEndpointId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/endpoint1", + Expected: &PrivateEndpointId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "endpoint1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/PRIVATEENDPOINTS/ENDPOINT1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := PrivateEndpointID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", 
v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/network/parse/public_ip_address.go b/azurerm/internal/services/network/parse/public_ip_address.go index 1ff5e2d3446d..46682650ee72 100644 --- a/azurerm/internal/services/network/parse/public_ip_address.go +++ b/azurerm/internal/services/network/parse/public_ip_address.go @@ -1,27 +1,63 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) -type PublicIPAddressId struct { - ResourceGroup string - Name string +type PublicIpAddressId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewPublicIpAddressID(subscriptionId, resourceGroup, name string) PublicIpAddressId { + return PublicIpAddressId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id PublicIpAddressId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Public Ip Address", segmentsStr) +} + +func (id PublicIpAddressId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/publicIPAddresses/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) } -func PublicIPAddressID(input string) (*PublicIPAddressId, error) { +// PublicIpAddressID parses a PublicIpAddress ID into an PublicIpAddressId struct +func PublicIpAddressID(input string) (*PublicIpAddressId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Public IP Address ID %q: %+v", input, err) + return nil, err + } + + resourceId := PublicIpAddressId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - ipAddress := PublicIPAddressId{ - ResourceGroup: id.ResourceGroup, + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if ipAddress.Name, err = id.PopSegment("publicIPAddresses"); err != nil { + if resourceId.Name, err = id.PopSegment("publicIPAddresses"); err != nil { return nil, err } @@ -29,5 +65,5 @@ func PublicIPAddressID(input string) (*PublicIPAddressId, error) { return nil, err } - return &ipAddress, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/network/parse/public_ip_address_test.go b/azurerm/internal/services/network/parse/public_ip_address_test.go index dd87cb2378a5..00094a659bf6 100644 --- a/azurerm/internal/services/network/parse/public_ip_address_test.go +++ b/azurerm/internal/services/network/parse/public_ip_address_test.go @@ -1,75 +1,112 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" ) -func TestPublicIPAddressID(t *testing.T) { +var _ resourceid.Formatter = PublicIpAddressId{} + +func TestPublicIpAddressIDFormatter(t *testing.T) { + actual := NewPublicIpAddressID("12345678-1234-9876-4563-123456789012", "resGroup1", "publicIpAddress1").ID() + expected := 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/publicIPAddresses/publicIpAddress1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestPublicIpAddressID(t *testing.T) { testData := []struct { - Name string - Input string - Error bool - Expect *PublicIPAddressId + Input string + Error bool + Expected *PublicIpAddressId }{ + { - Name: "Empty", + // empty Input: "", Error: true, }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", Error: true, }, + { - Name: "Missing Public IP Address Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/publicIPAddresses/", + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/publicIPAddresses/", Error: true, }, + { - Name: "Public IP Address ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/publicIPAddresses/address1", - Error: false, - Expect: &PublicIPAddressId{ - ResourceGroup: "resGroup1", - Name: "address1", + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/publicIPAddresses/publicIpAddress1", + Expected: &PublicIpAddressId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "publicIpAddress1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/PublicIPAddresses/set1", + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/PUBLICIPADDRESSES/PUBLICIPADDRESS1", Error: true, }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) - actual, err := PublicIPAddressID(v.Input) + actual, err := PublicIpAddressID(v.Input) if err != nil { if v.Error { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if 
actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) } } } diff --git a/azurerm/internal/services/network/parse/route_filter.go b/azurerm/internal/services/network/parse/route_filter.go new file mode 100644 index 000000000000..9c0c9d0ffc26 --- /dev/null +++ b/azurerm/internal/services/network/parse/route_filter.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type RouteFilterId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewRouteFilterID(subscriptionId, resourceGroup, name string) RouteFilterId { + return RouteFilterId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id RouteFilterId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Route Filter", segmentsStr) +} + +func (id RouteFilterId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/routeFilters/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// RouteFilterID parses a RouteFilter ID into an RouteFilterId struct +func RouteFilterID(input string) (*RouteFilterId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := RouteFilterId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("routeFilters"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/network/parse/route_filter_test.go b/azurerm/internal/services/network/parse/route_filter_test.go new file mode 100644 index 000000000000..9e16727542bf --- /dev/null +++ b/azurerm/internal/services/network/parse/route_filter_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = RouteFilterId{} + +func TestRouteFilterIDFormatter(t *testing.T) { + actual := NewRouteFilterID("12345678-1234-9876-4563-123456789012", "resGroup1", "filter1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/routeFilters/filter1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestRouteFilterID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *RouteFilterId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for 
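// Illustrative sketch, not part of the generated code above: a Go testable
// example showing how the generated NewRouteFilterID constructor, the ID()
// formatter and the RouteFilterID parser are expected to round-trip a
// canonical resource ID. Assumption: this lives in a _test.go file inside
// this parse package so it runs under `go test` via its Output comment.
package parse

import "fmt"

func ExampleRouteFilterID() {
	// Build the canonical ID from its components, then parse it back.
	id := NewRouteFilterID("12345678-1234-9876-4563-123456789012", "resGroup1", "filter1")

	parsed, err := RouteFilterID(id.ID())
	if err != nil {
		fmt.Println(err)
		return
	}

	fmt.Println(parsed.ResourceGroup)
	fmt.Println(parsed.Name)
	// Output:
	// resGroup1
	// filter1
}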
SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/routeFilters/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/routeFilters/filter1", + Expected: &RouteFilterId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "filter1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/ROUTEFILTERS/FILTER1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := RouteFilterID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/network/parse/security_partner_provider.go b/azurerm/internal/services/network/parse/security_partner_provider.go new file mode 100644 index 000000000000..fb58e2b85802 --- /dev/null +++ b/azurerm/internal/services/network/parse/security_partner_provider.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type SecurityPartnerProviderId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewSecurityPartnerProviderID(subscriptionId, resourceGroup, name string) SecurityPartnerProviderId { + return SecurityPartnerProviderId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id SecurityPartnerProviderId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Security Partner Provider", segmentsStr) +} + +func (id SecurityPartnerProviderId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/securityPartnerProviders/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// SecurityPartnerProviderID parses a SecurityPartnerProvider ID into an SecurityPartnerProviderId struct +func SecurityPartnerProviderID(input string) (*SecurityPartnerProviderId, error) { + id, err := 
azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := SecurityPartnerProviderId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("securityPartnerProviders"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/network/parse/security_partner_provider_test.go b/azurerm/internal/services/network/parse/security_partner_provider_test.go new file mode 100644 index 000000000000..ebdc43527009 --- /dev/null +++ b/azurerm/internal/services/network/parse/security_partner_provider_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = SecurityPartnerProviderId{} + +func TestSecurityPartnerProviderIDFormatter(t *testing.T) { + actual := NewSecurityPartnerProviderID("12345678-1234-9876-4563-123456789012", "resGroup1", "partnerProvider1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/securityPartnerProviders/partnerProvider1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestSecurityPartnerProviderID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *SecurityPartnerProviderId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/securityPartnerProviders/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/securityPartnerProviders/partnerProvider1", + Expected: &SecurityPartnerProviderId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "partnerProvider1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/SECURITYPARTNERPROVIDERS/PARTNERPROVIDER1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := SecurityPartnerProviderID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if 
actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/network/parse/subnet.go b/azurerm/internal/services/network/parse/subnet.go index a60ca6a00731..9bedb253ad5b 100644 --- a/azurerm/internal/services/network/parse/subnet.go +++ b/azurerm/internal/services/network/parse/subnet.go @@ -1,32 +1,69 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type SubnetId struct { + SubscriptionId string ResourceGroup string VirtualNetworkName string Name string } +func NewSubnetID(subscriptionId, resourceGroup, virtualNetworkName, name string) SubnetId { + return SubnetId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + VirtualNetworkName: virtualNetworkName, + Name: name, + } +} + +func (id SubnetId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Virtual Network Name %q", id.VirtualNetworkName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Subnet", segmentsStr) +} + +func (id SubnetId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/virtualNetworks/%s/subnets/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.VirtualNetworkName, id.Name) +} + +// SubnetID parses a Subnet ID into an SubnetId struct func SubnetID(input string) (*SubnetId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Subnet ID %q: %+v", input, err) + return nil, err } - subnet := SubnetId{ - ResourceGroup: id.ResourceGroup, + resourceId := SubnetId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, } - if subnet.VirtualNetworkName, err = id.PopSegment("virtualNetworks"); err != nil { - return nil, err + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - if subnet.Name, err = id.PopSegment("subnets"); err != nil { + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.VirtualNetworkName, err = id.PopSegment("virtualNetworks"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("subnets"); err != nil { return nil, err } @@ -34,5 +71,5 @@ func SubnetID(input string) (*SubnetId, error) { return nil, err } - return &subnet, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/network/parse/subnet_service_endpoint_storage_policy.go b/azurerm/internal/services/network/parse/subnet_service_endpoint_storage_policy.go new file mode 100644 index 000000000000..65f7c85e366e --- /dev/null +++ b/azurerm/internal/services/network/parse/subnet_service_endpoint_storage_policy.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + 
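// Illustrative sketch, not generated code: a testable example for the updated
// SubnetId type above, showing that NewSubnetID, ID() and SubnetID round-trip
// the virtual network and subnet segments. Assumption: placed in a _test.go
// file in this parse package.
package parse

import "fmt"

func ExampleSubnetID() {
	id := NewSubnetID("12345678-1234-9876-4563-123456789012", "resGroup1", "network1", "subnet1")

	parsed, err := SubnetID(id.ID())
	if err != nil {
		fmt.Println(err)
		return
	}

	fmt.Println(parsed.VirtualNetworkName)
	fmt.Println(parsed.Name)
	// Output:
	// network1
	// subnet1
}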
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type SubnetServiceEndpointStoragePolicyId struct { + SubscriptionId string + ResourceGroup string + ServiceEndpointPolicyName string +} + +func NewSubnetServiceEndpointStoragePolicyID(subscriptionId, resourceGroup, serviceEndpointPolicyName string) SubnetServiceEndpointStoragePolicyId { + return SubnetServiceEndpointStoragePolicyId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ServiceEndpointPolicyName: serviceEndpointPolicyName, + } +} + +func (id SubnetServiceEndpointStoragePolicyId) String() string { + segments := []string{ + fmt.Sprintf("Service Endpoint Policy Name %q", id.ServiceEndpointPolicyName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Subnet Service Endpoint Storage Policy", segmentsStr) +} + +func (id SubnetServiceEndpointStoragePolicyId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/serviceEndpointPolicies/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ServiceEndpointPolicyName) +} + +// SubnetServiceEndpointStoragePolicyID parses a SubnetServiceEndpointStoragePolicy ID into an SubnetServiceEndpointStoragePolicyId struct +func SubnetServiceEndpointStoragePolicyID(input string) (*SubnetServiceEndpointStoragePolicyId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := SubnetServiceEndpointStoragePolicyId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ServiceEndpointPolicyName, err = id.PopSegment("serviceEndpointPolicies"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/network/parse/subnet_service_endpoint_storage_policy_test.go b/azurerm/internal/services/network/parse/subnet_service_endpoint_storage_policy_test.go new file mode 100644 index 000000000000..fbb111544bc7 --- /dev/null +++ b/azurerm/internal/services/network/parse/subnet_service_endpoint_storage_policy_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = SubnetServiceEndpointStoragePolicyId{} + +func TestSubnetServiceEndpointStoragePolicyIDFormatter(t *testing.T) { + actual := NewSubnetServiceEndpointStoragePolicyID("12345678-1234-9876-4563-123456789012", "resGroup1", "policy1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/serviceEndpointPolicies/policy1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestSubnetServiceEndpointStoragePolicyID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *SubnetServiceEndpointStoragePolicyId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for 
SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ServiceEndpointPolicyName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for ServiceEndpointPolicyName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/serviceEndpointPolicies/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/serviceEndpointPolicies/policy1", + Expected: &SubnetServiceEndpointStoragePolicyId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + ServiceEndpointPolicyName: "policy1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/SERVICEENDPOINTPOLICIES/POLICY1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := SubnetServiceEndpointStoragePolicyID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ServiceEndpointPolicyName != v.Expected.ServiceEndpointPolicyName { + t.Fatalf("Expected %q but got %q for ServiceEndpointPolicyName", v.Expected.ServiceEndpointPolicyName, actual.ServiceEndpointPolicyName) + } + } +} diff --git a/azurerm/internal/services/network/parse/subnet_test.go b/azurerm/internal/services/network/parse/subnet_test.go index ef696cba59b8..da1df943e922 100644 --- a/azurerm/internal/services/network/parse/subnet_test.go +++ b/azurerm/internal/services/network/parse/subnet_test.go @@ -1,70 +1,104 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" ) +var _ resourceid.Formatter = SubnetId{} + +func TestSubnetIDFormatter(t *testing.T) { + actual := NewSubnetID("12345678-1234-9876-4563-123456789012", "resGroup1", "network1", "subnet1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualNetworks/network1/subnets/subnet1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + func TestSubnetID(t *testing.T) { testData := []struct { - Name string - Input string - Error bool - Expect *SubnetId + Input string + Error bool + Expected *SubnetId }{ + { - Name: "Empty", + // empty Input: "", Error: true, }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + // missing SubscriptionId + Input: "/", Error: true, }, + { - Name: "No Resource Groups Value", - 
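// Illustrative sketch, not generated code: the "upper-cased" cases in the
// generated tests expect an error, i.e. the parsers only accept the canonical
// casing of each segment. Assumption: placed in a _test.go file in this
// parse package.
package parse

import "fmt"

func ExampleSubnetServiceEndpointStoragePolicyID_caseSensitive() {
	// The same ID with upper-cased segments is rejected, mirroring the
	// "upper-cased" test case above.
	_, err := SubnetServiceEndpointStoragePolicyID("/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/SERVICEENDPOINTPOLICIES/POLICY1")
	fmt.Println(err != nil)
	// Output: true
}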
Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + // missing value for SubscriptionId + Input: "/subscriptions/", Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", Error: true, }, + { - Name: "Missing Virtual Networks Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/virtualNetworks/", + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", Error: true, }, + { - Name: "Missing Subnets Key", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/virtualNetworks/network1", + // missing VirtualNetworkName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", Error: true, }, + { - Name: "Missing Subnets Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/virtualNetworks/network1/subnets/", + // missing value for VirtualNetworkName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualNetworks/", Error: true, }, + { - Name: "Subnet ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/virtualNetworks/network1/subnets/subnet1", - Error: false, - Expect: &SubnetId{ + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualNetworks/network1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualNetworks/network1/subnets/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualNetworks/network1/subnets/subnet1", + Expected: &SubnetId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", ResourceGroup: "resGroup1", VirtualNetworkName: "network1", Name: "subnet1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/virtualNetworks/network1/Subnets/subnet1", + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/VIRTUALNETWORKS/NETWORK1/SUBNETS/SUBNET1", Error: true, }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := SubnetID(v.Input) if err != nil { @@ -72,19 +106,23 @@ func TestSubnetID(t *testing.T) { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.VirtualNetworkName != v.Expect.VirtualNetworkName { - t.Fatalf("Expected %q but got %q for Virtual Network Name", v.Expect.VirtualNetworkName, actual.VirtualNetworkName) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", 
v.Expected.SubscriptionId, actual.SubscriptionId) } - - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.VirtualNetworkName != v.Expected.VirtualNetworkName { + t.Fatalf("Expected %q but got %q for VirtualNetworkName", v.Expected.VirtualNetworkName, actual.VirtualNetworkName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) } } } diff --git a/azurerm/internal/services/network/parse/virtual_hub.go b/azurerm/internal/services/network/parse/virtual_hub.go index bfc7dc392742..b51ebc3195c4 100644 --- a/azurerm/internal/services/network/parse/virtual_hub.go +++ b/azurerm/internal/services/network/parse/virtual_hub.go @@ -1,37 +1,63 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type VirtualHubId struct { - ResourceGroup string - Name string + SubscriptionId string + ResourceGroup string + Name string } -func NewVirtualHubID(resourceGroup, name string) VirtualHubId { +func NewVirtualHubID(subscriptionId, resourceGroup, name string) VirtualHubId { return VirtualHubId{ - ResourceGroup: resourceGroup, - Name: name, + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, } } -func (id VirtualHubId) ID(subscriptionId string) string { - return fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/virtualHubs/%s", subscriptionId, id.ResourceGroup, id.Name) + +func (id VirtualHubId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Virtual Hub", segmentsStr) +} + +func (id VirtualHubId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/virtualHubs/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) } +// VirtualHubID parses a VirtualHub ID into an VirtualHubId struct func VirtualHubID(input string) (*VirtualHubId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Virtual Hub ID %q: %+v", input, err) + return nil, err + } + + resourceId := VirtualHubId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - virtualHub := VirtualHubId{ - ResourceGroup: id.ResourceGroup, + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if virtualHub.Name, err = id.PopSegment("virtualHubs"); err != nil { + if resourceId.Name, err = id.PopSegment("virtualHubs"); err != nil { return nil, err } @@ -39,5 +65,5 @@ func VirtualHubID(input string) (*VirtualHubId, error) { return nil, err } - return &virtualHub, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/network/parse/virtual_hub_connection.go b/azurerm/internal/services/network/parse/virtual_hub_connection.go deleted file mode 100644 index eef518b30347..000000000000 --- 
a/azurerm/internal/services/network/parse/virtual_hub_connection.go +++ /dev/null @@ -1,50 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type VirtualHubConnectionId struct { - ResourceGroup string - VirtualHubName string - Name string -} - -func NewVirtualHubConnectionID(id VirtualHubId, name string) VirtualHubConnectionId { - return VirtualHubConnectionId{ - ResourceGroup: id.ResourceGroup, - VirtualHubName: id.Name, - Name: name, - } -} - -func (id VirtualHubConnectionId) ID(subscriptionId string) string { - base := NewVirtualHubID(id.ResourceGroup, id.VirtualHubName).ID(subscriptionId) - return fmt.Sprintf("%s/hubVirtualNetworkConnections/%s", base, id.Name) -} - -func VirtualHubConnectionID(input string) (*VirtualHubConnectionId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Virtual Hub Connection ID %q: %+v", input, err) - } - - // /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/tom-dev99/providers/Microsoft.Network/virtualHubs/tom-devvh/hubVirtualNetworkConnections/first - connection := VirtualHubConnectionId{ - ResourceGroup: id.ResourceGroup, - VirtualHubName: id.Path["virtualHubs"], - Name: id.Path["hubVirtualNetworkConnections"], - } - - if connection.VirtualHubName == "" { - return nil, fmt.Errorf("ID was missing the `virtualHubs` element") - } - - if connection.Name == "" { - return nil, fmt.Errorf("ID was missing the `hubVirtualNetworkConnections` element") - } - - return &connection, nil -} diff --git a/azurerm/internal/services/network/parse/virtual_hub_connection_test.go b/azurerm/internal/services/network/parse/virtual_hub_connection_test.go deleted file mode 100644 index fb0da33d13df..000000000000 --- a/azurerm/internal/services/network/parse/virtual_hub_connection_test.go +++ /dev/null @@ -1,83 +0,0 @@ -package parse - -import ( - "testing" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" -) - -var _ resourceid.Formatter = VirtualHubConnectionId{} - -func TestVirtualHubConnectionIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - vhubid := NewVirtualHubID("group1", "vhub1") - actual := NewVirtualHubConnectionID(vhubid, "conn1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/virtualHubs/vhub1/hubVirtualNetworkConnections/conn1" - if actual != expected { - t.Fatalf("Expected %q but got %q", expected, actual) - } -} - -func TestVirtualHubConnectionID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *VirtualHubConnectionId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Virtual Hubs Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo", - Expected: nil, - }, - { - Name: "No Virtual Hubs Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/virtualHubs/", - Expected: nil, - }, - { - Name: "No Hub Network Connections Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/virtualHubs/example", - Expected: nil, - }, - { - Name: "No Virtual Hubs Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/virtualHubs/hubVirtualNetworkConnections/", - Expected: nil, - }, - { - Name: "Completed", - Input: 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/virtualHubs/example/hubVirtualNetworkConnections/connection1", - Expected: &VirtualHubConnectionId{ - Name: "connection1", - VirtualHubName: "example", - ResourceGroup: "foo", - }, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := VirtualHubConnectionID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/network/parse/virtual_hub_ip_configuration.go b/azurerm/internal/services/network/parse/virtual_hub_ip_configuration.go new file mode 100644 index 000000000000..09c164dc8172 --- /dev/null +++ b/azurerm/internal/services/network/parse/virtual_hub_ip_configuration.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type VirtualHubIpConfigurationId struct { + SubscriptionId string + ResourceGroup string + VirtualHubName string + IpConfigurationName string +} + +func NewVirtualHubIpConfigurationID(subscriptionId, resourceGroup, virtualHubName, ipConfigurationName string) VirtualHubIpConfigurationId { + return VirtualHubIpConfigurationId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + VirtualHubName: virtualHubName, + IpConfigurationName: ipConfigurationName, + } +} + +func (id VirtualHubIpConfigurationId) String() string { + segments := []string{ + fmt.Sprintf("Ip Configuration Name %q", id.IpConfigurationName), + fmt.Sprintf("Virtual Hub Name %q", id.VirtualHubName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Virtual Hub Ip Configuration", segmentsStr) +} + +func (id VirtualHubIpConfigurationId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/virtualHubs/%s/ipConfigurations/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.VirtualHubName, id.IpConfigurationName) +} + +// VirtualHubIpConfigurationID parses a VirtualHubIpConfiguration ID into an VirtualHubIpConfigurationId struct +func VirtualHubIpConfigurationID(input string) (*VirtualHubIpConfigurationId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := VirtualHubIpConfigurationId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.VirtualHubName, err = id.PopSegment("virtualHubs"); err != nil { + return nil, err + } + if resourceId.IpConfigurationName, err = id.PopSegment("ipConfigurations"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git 
a/azurerm/internal/services/network/parse/virtual_hub_ip_configuration_test.go b/azurerm/internal/services/network/parse/virtual_hub_ip_configuration_test.go new file mode 100644 index 000000000000..0bcc53c8e7e7 --- /dev/null +++ b/azurerm/internal/services/network/parse/virtual_hub_ip_configuration_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = VirtualHubIpConfigurationId{} + +func TestVirtualHubIpConfigurationIDFormatter(t *testing.T) { + actual := NewVirtualHubIpConfigurationID("12345678-1234-9876-4563-123456789012", "resGroup1", "virtualHub1", "ipConfiguration1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1/ipConfigurations/ipConfiguration1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestVirtualHubIpConfigurationID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *VirtualHubIpConfigurationId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing VirtualHubName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for VirtualHubName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/", + Error: true, + }, + + { + // missing IpConfigurationName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1/", + Error: true, + }, + + { + // missing value for IpConfigurationName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1/ipConfigurations/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1/ipConfigurations/ipConfiguration1", + Expected: &VirtualHubIpConfigurationId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + VirtualHubName: "virtualHub1", + IpConfigurationName: "ipConfiguration1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/VIRTUALHUBS/VIRTUALHUB1/IPCONFIGURATIONS/IPCONFIGURATION1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := VirtualHubIpConfigurationID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", 
v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.VirtualHubName != v.Expected.VirtualHubName { + t.Fatalf("Expected %q but got %q for VirtualHubName", v.Expected.VirtualHubName, actual.VirtualHubName) + } + if actual.IpConfigurationName != v.Expected.IpConfigurationName { + t.Fatalf("Expected %q but got %q for IpConfigurationName", v.Expected.IpConfigurationName, actual.IpConfigurationName) + } + } +} diff --git a/azurerm/internal/services/network/parse/virtual_hub_test.go b/azurerm/internal/services/network/parse/virtual_hub_test.go index 2fb097c3183c..158ec2461cd5 100644 --- a/azurerm/internal/services/network/parse/virtual_hub_test.go +++ b/azurerm/internal/services/network/parse/virtual_hub_test.go @@ -1,5 +1,7 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" @@ -9,10 +11,8 @@ import ( var _ resourceid.Formatter = VirtualHubId{} func TestVirtualHubIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - id := NewVirtualHubID("group1", "vhub1") - actual := id.ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/virtualHubs/vhub1" + actual := NewVirtualHubID("12345678-1234-9876-4563-123456789012", "resGroup1", "virtualHub1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1" if actual != expected { t.Fatalf("Expected %q but got %q", expected, actual) } @@ -20,53 +20,93 @@ func TestVirtualHubIDFormatter(t *testing.T) { func TestVirtualHubID(t *testing.T) { testData := []struct { - Name string Input string + Error bool Expected *VirtualHubId }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + { - Name: "Empty", - Input: "", - Expected: nil, + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, }, + { - Name: "No Virtual Hubs Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo", - Expected: nil, + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, }, + { - Name: "No Virtual Hubs Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/virtualHubs/", - Expected: nil, + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + { - Name: "Completed", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/virtualHubs/example", + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1", Expected: &VirtualHubId{ - Name: "example", - ResourceGroup: "foo", + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "virtualHub1", }, }, + + { + // 
upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/VIRTUALHUBS/VIRTUALHUB1", + Error: true, + }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := VirtualHubID(v.Input) if err != nil { - if v.Expected == nil { + if v.Error { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + if v.Error { + t.Fatal("Expect an error but didn't get one") } + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } if actual.ResourceGroup != v.Expected.ResourceGroup { t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } } } diff --git a/azurerm/internal/services/network/parse/virtual_network.go b/azurerm/internal/services/network/parse/virtual_network.go index b789e8aefb40..9ff476fea212 100644 --- a/azurerm/internal/services/network/parse/virtual_network.go +++ b/azurerm/internal/services/network/parse/virtual_network.go @@ -1,27 +1,63 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type VirtualNetworkId struct { - ResourceGroup string - Name string + SubscriptionId string + ResourceGroup string + Name string +} + +func NewVirtualNetworkID(subscriptionId, resourceGroup, name string) VirtualNetworkId { + return VirtualNetworkId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id VirtualNetworkId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Virtual Network", segmentsStr) } +func (id VirtualNetworkId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/virtualNetworks/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// VirtualNetworkID parses a VirtualNetwork ID into an VirtualNetworkId struct func VirtualNetworkID(input string) (*VirtualNetworkId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Virtual Network ID %q: %+v", input, err) + return nil, err + } + + resourceId := VirtualNetworkId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - vnet := VirtualNetworkId{ - ResourceGroup: id.ResourceGroup, + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if vnet.Name, err = id.PopSegment("virtualNetworks"); err != nil { + if resourceId.Name, err = id.PopSegment("virtualNetworks"); err != nil { return nil, err } @@ -29,5 +65,5 @@ func VirtualNetworkID(input string) (*VirtualNetworkId, error) { return nil, err } - return &vnet, nil + return &resourceId, nil } diff --git 
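// Illustrative sketch, not generated code: the VirtualHubId changes above move
// the subscription ID into the struct, so ID() no longer takes an argument
// (previously NewVirtualHubID(resourceGroup, name).ID(subscriptionId)).
// Assumption: placed in a _test.go file in this parse package.
package parse

import "fmt"

func ExampleVirtualHubId_ID() {
	id := NewVirtualHubID("12345678-1234-9876-4563-123456789012", "resGroup1", "virtualHub1")
	fmt.Println(id.ID())
	// Output: /subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1
}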
a/azurerm/internal/services/network/parse/virtual_network_test.go b/azurerm/internal/services/network/parse/virtual_network_test.go index b9eb6b35d357..0e4d31722f50 100644 --- a/azurerm/internal/services/network/parse/virtual_network_test.go +++ b/azurerm/internal/services/network/parse/virtual_network_test.go @@ -1,54 +1,91 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" ) +var _ resourceid.Formatter = VirtualNetworkId{} + +func TestVirtualNetworkIDFormatter(t *testing.T) { + actual := NewVirtualNetworkID("12345678-1234-9876-4563-123456789012", "resGroup1", "network1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualNetworks/network1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + func TestVirtualNetworkID(t *testing.T) { testData := []struct { - Name string - Input string - Error bool - Expect *VirtualNetworkId + Input string + Error bool + Expected *VirtualNetworkId }{ + { - Name: "Empty", + // empty Input: "", Error: true, }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + // missing SubscriptionId + Input: "/", Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + // missing value for SubscriptionId + Input: "/subscriptions/", Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", Error: true, }, + { - Name: "Virtual Network ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/virtualNetworks/network1", - Error: false, - Expect: &VirtualNetworkId{ - ResourceGroup: "resGroup1", - Name: "network1", + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualNetworks/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualNetworks/network1", + Expected: &VirtualNetworkId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "network1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Network/VirtualNetworks/network1", + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/VIRTUALNETWORKS/NETWORK1", Error: true, }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := VirtualNetworkID(v.Input) if err != nil { @@ -56,15 +93,20 @@ func TestVirtualNetworkID(t *testing.T) { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect 
a value but got an error: %s", err) } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) + if v.Error { + t.Fatal("Expect an error but didn't get one") } - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) } } } diff --git a/azurerm/internal/services/network/parse/virtual_wan.go b/azurerm/internal/services/network/parse/virtual_wan.go index fed766678a59..2618f2cf5fdf 100644 --- a/azurerm/internal/services/network/parse/virtual_wan.go +++ b/azurerm/internal/services/network/parse/virtual_wan.go @@ -1,39 +1,63 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type VirtualWanId struct { - ResourceGroup string - Name string + SubscriptionId string + ResourceGroup string + Name string } -func (id VirtualWanId) ID(subscriptionId string) string { - return fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/virtualWans/%s", - subscriptionId, id.ResourceGroup, id.Name) +func NewVirtualWanID(subscriptionId, resourceGroup, name string) VirtualWanId { + return VirtualWanId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } } -func NewVirtualWanID(resourceGroup, name string) VirtualWanId { - return VirtualWanId{ - ResourceGroup: resourceGroup, - Name: name, +func (id VirtualWanId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Virtual Wan", segmentsStr) +} + +func (id VirtualWanId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/virtualWans/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) } +// VirtualWanID parses a VirtualWan ID into an VirtualWanId struct func VirtualWanID(input string) (*VirtualWanId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("parsing Virtual Wan ID %q: %+v", input, err) + return nil, err + } + + resourceId := VirtualWanId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - vwanId := VirtualWanId{ - ResourceGroup: id.ResourceGroup, + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if vwanId.Name, err = id.PopSegment("virtualWans"); err != nil { + if resourceId.Name, err = id.PopSegment("virtualWans"); err != nil { return nil, err } @@ -41,5 +65,5 @@ func VirtualWanID(input string) (*VirtualWanId, error) { return nil, err } - return &vwanId, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/network/parse/virtual_wan_test.go 
b/azurerm/internal/services/network/parse/virtual_wan_test.go index e5d3af773074..c868e68780a1 100644 --- a/azurerm/internal/services/network/parse/virtual_wan_test.go +++ b/azurerm/internal/services/network/parse/virtual_wan_test.go @@ -1,5 +1,7 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" @@ -9,9 +11,8 @@ import ( var _ resourceid.Formatter = VirtualWanId{} func TestVirtualWanIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - actual := NewVirtualWanID("group1", "wan1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/virtualWans/wan1" + actual := NewVirtualWanID("12345678-1234-9876-4563-123456789012", "resGroup1", "virtualWan1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualWans/virtualWan1" if actual != expected { t.Fatalf("Expected %q but got %q", expected, actual) } @@ -19,53 +20,72 @@ func TestVirtualWanIDFormatter(t *testing.T) { func TestVirtualWanID(t *testing.T) { testData := []struct { - Name string - Input string - Error bool - Expect *VirtualWanId + Input string + Error bool + Expected *VirtualWanId }{ + { - Name: "Empty", + // empty Input: "", Error: true, }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/11111111-1111-1111-1111-111111111111", + // missing SubscriptionId + Input: "/", Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/11111111-1111-1111-1111-111111111111/resourceGroups/", + // missing value for SubscriptionId + Input: "/subscriptions/", Error: true, }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + { - Name: "Missing leading slash", - Input: "subscriptions/11111111-1111-1111-1111-111111111111/resourceGroups/group1", + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", Error: true, }, + { - Name: "Malformed segments", - Input: "/subscriptions/11111111-1111-1111-1111-111111111111/resourceGroups/group1/foo/bar", + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", Error: true, }, + { - Name: "Missing vwan segment", - Input: "subscriptions/11111111-1111-1111-1111-111111111111/resourceGroups/group1/providers/Microsoft.Network", + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualWans/", Error: true, }, + { - Name: "Correct", - Input: "/subscriptions/11111111-1111-1111-1111-111111111111/resourceGroups/group1/providers/Microsoft.Network/virtualWans/wan1", - Expect: &VirtualWanId{ - ResourceGroup: "group1", - Name: "wan1", + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualWans/virtualWan1", + Expected: &VirtualWanId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "virtualWan1", }, }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/VIRTUALWANS/VIRTUALWAN1", + Error: true, + }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := 
VirtualWanID(v.Input) if err != nil { @@ -76,15 +96,17 @@ func TestVirtualWanID(t *testing.T) { t.Fatalf("Expect a value but got an error: %s", err) } if v.Error { - t.Fatal("Expect an error but didn't get") + t.Fatal("Expect an error but didn't get one") } - if actual.ResourceGroup != v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) } } } diff --git a/azurerm/internal/services/network/parse/vpn_connection.go b/azurerm/internal/services/network/parse/vpn_connection.go new file mode 100644 index 000000000000..21641b9ef200 --- /dev/null +++ b/azurerm/internal/services/network/parse/vpn_connection.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type VpnConnectionId struct { + SubscriptionId string + ResourceGroup string + VpnGatewayName string + Name string +} + +func NewVpnConnectionID(subscriptionId, resourceGroup, vpnGatewayName, name string) VpnConnectionId { + return VpnConnectionId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + VpnGatewayName: vpnGatewayName, + Name: name, + } +} + +func (id VpnConnectionId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Vpn Gateway Name %q", id.VpnGatewayName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Vpn Connection", segmentsStr) +} + +func (id VpnConnectionId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/vpnGateways/%s/vpnConnections/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.VpnGatewayName, id.Name) +} + +// VpnConnectionID parses a VpnConnection ID into an VpnConnectionId struct +func VpnConnectionID(input string) (*VpnConnectionId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := VpnConnectionId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.VpnGatewayName, err = id.PopSegment("vpnGateways"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("vpnConnections"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/network/parse/vpn_connection_test.go b/azurerm/internal/services/network/parse/vpn_connection_test.go new file mode 100644 index 000000000000..cdb22e97cf44 --- /dev/null +++ 
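One behavioural detail of the generated parsers worth noting: the "upper-cased" inputs in the test tables that follow are expected to fail, presumably because the static segments (for example vpnGateways, vpnConnections) are matched case-sensitively rather than normalised. A short sketch of that behaviour, under the same module-internal assumption as above; the values are placeholders:

package main

import (
	"fmt"

	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse"
)

func main() {
	// the generated tests expect an error for an upper-cased resource ID
	_, err := parse.VpnConnectionID("/SUBSCRIPTIONS/00000000-0000-0000-0000-000000000000/RESOURCEGROUPS/EXAMPLE/PROVIDERS/MICROSOFT.NETWORK/VPNGATEWAYS/GW1/VPNCONNECTIONS/CONN1")
	fmt.Println(err != nil) // true
}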
b/azurerm/internal/services/network/parse/vpn_connection_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = VpnConnectionId{} + +func TestVpnConnectionIDFormatter(t *testing.T) { + actual := NewVpnConnectionID("12345678-1234-9876-4563-123456789012", "resGroup1", "vpnGateway1", "vpnConnection1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnGateways/vpnGateway1/vpnConnections/vpnConnection1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestVpnConnectionID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *VpnConnectionId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing VpnGatewayName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for VpnGatewayName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnGateways/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnGateways/vpnGateway1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnGateways/vpnGateway1/vpnConnections/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnGateways/vpnGateway1/vpnConnections/vpnConnection1", + Expected: &VpnConnectionId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + VpnGatewayName: "vpnGateway1", + Name: "vpnConnection1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/VPNGATEWAYS/VPNGATEWAY1/VPNCONNECTIONS/VPNCONNECTION1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := VpnConnectionID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.VpnGatewayName != v.Expected.VpnGatewayName { + t.Fatalf("Expected %q but got %q for VpnGatewayName", v.Expected.VpnGatewayName, actual.VpnGatewayName) + } + if 
actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/network/parse/vpn_gateway.go b/azurerm/internal/services/network/parse/vpn_gateway.go new file mode 100644 index 000000000000..3fbcffce3375 --- /dev/null +++ b/azurerm/internal/services/network/parse/vpn_gateway.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type VpnGatewayId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewVpnGatewayID(subscriptionId, resourceGroup, name string) VpnGatewayId { + return VpnGatewayId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id VpnGatewayId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Vpn Gateway", segmentsStr) +} + +func (id VpnGatewayId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/vpnGateways/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// VpnGatewayID parses a VpnGateway ID into an VpnGatewayId struct +func VpnGatewayID(input string) (*VpnGatewayId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := VpnGatewayId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("vpnGateways"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/network/parse/vpn_gateway_test.go b/azurerm/internal/services/network/parse/vpn_gateway_test.go new file mode 100644 index 000000000000..ec617ee99c6e --- /dev/null +++ b/azurerm/internal/services/network/parse/vpn_gateway_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = VpnGatewayId{} + +func TestVpnGatewayIDFormatter(t *testing.T) { + actual := NewVpnGatewayID("12345678-1234-9876-4563-123456789012", "resGroup1", "vpnGateway1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnGateways/vpnGateway1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestVpnGatewayID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *VpnGatewayId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + 
+ { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnGateways/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnGateways/vpnGateway1", + Expected: &VpnGatewayId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "vpnGateway1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/VPNGATEWAYS/VPNGATEWAY1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := VpnGatewayID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/network/parse/vpn_site.go b/azurerm/internal/services/network/parse/vpn_site.go index 19e6878540ef..7b0affd2452d 100644 --- a/azurerm/internal/services/network/parse/vpn_site.go +++ b/azurerm/internal/services/network/parse/vpn_site.go @@ -1,83 +1,63 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type VpnSiteId struct { - ResourceGroup string - Name string -} - -func (id VpnSiteId) ID(subscriptionId string) string { - return fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/vpnSites/%s", - subscriptionId, id.ResourceGroup, id.Name) + SubscriptionId string + ResourceGroup string + Name string } -func NewVpnSiteID(resourceGroup, name string) VpnSiteId { +func NewVpnSiteID(subscriptionId, resourceGroup, name string) VpnSiteId { return VpnSiteId{ - ResourceGroup: resourceGroup, - Name: name, + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, } } -func VpnSiteID(input string) (*VpnSiteId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("parsing Vpn Site ID %q: %+v", input, err) - } - - vpnSiteId := VpnSiteId{ - ResourceGroup: id.ResourceGroup, - } - - if vpnSiteId.Name, err = id.PopSegment("vpnSites"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err +func (id VpnSiteId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), } - - return &vpnSiteId, nil + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Vpn Site", 
segmentsStr) } -type VpnSiteLinkId struct { - ResourceGroup string - Site string - Name string +func (id VpnSiteId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/vpnSites/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) } -func (id VpnSiteLinkId) ID(subscriptionId string) string { - return fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/vpnSites/%s/vpnSiteLinks/%s", - subscriptionId, id.ResourceGroup, id.Site, id.Name) -} - -func NewVpnSiteLinkID(vpnSiteId VpnSiteId, name string) VpnSiteLinkId { - return VpnSiteLinkId{ - ResourceGroup: vpnSiteId.ResourceGroup, - Site: vpnSiteId.Name, - Name: name, - } -} - -func VpnSiteLinkID(input string) (*VpnSiteLinkId, error) { +// VpnSiteID parses a VpnSite ID into an VpnSiteId struct +func VpnSiteID(input string) (*VpnSiteId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("parsing Vpn Site Link ID %q: %+v", input, err) + return nil, err } - vpnSiteLinkId := VpnSiteLinkId{ - ResourceGroup: id.ResourceGroup, + resourceId := VpnSiteId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, } - if vpnSiteLinkId.Site, err = id.PopSegment("vpnSites"); err != nil { - return nil, err + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if vpnSiteLinkId.Name, err = id.PopSegment("vpnSiteLinks"); err != nil { + if resourceId.Name, err = id.PopSegment("vpnSites"); err != nil { return nil, err } @@ -85,5 +65,5 @@ func VpnSiteLinkID(input string) (*VpnSiteLinkId, error) { return nil, err } - return &vpnSiteLinkId, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/network/parse/vpn_site_link.go b/azurerm/internal/services/network/parse/vpn_site_link.go new file mode 100644 index 000000000000..3acd1b2faee8 --- /dev/null +++ b/azurerm/internal/services/network/parse/vpn_site_link.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type VpnSiteLinkId struct { + SubscriptionId string + ResourceGroup string + VpnSiteName string + Name string +} + +func NewVpnSiteLinkID(subscriptionId, resourceGroup, vpnSiteName, name string) VpnSiteLinkId { + return VpnSiteLinkId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + VpnSiteName: vpnSiteName, + Name: name, + } +} + +func (id VpnSiteLinkId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Vpn Site Name %q", id.VpnSiteName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Vpn Site Link", segmentsStr) +} + +func (id VpnSiteLinkId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/vpnSites/%s/vpnSiteLinks/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.VpnSiteName, id.Name) +} + +// VpnSiteLinkID parses a VpnSiteLink ID into an VpnSiteLinkId struct +func VpnSiteLinkID(input string) (*VpnSiteLinkId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := VpnSiteLinkId{ + SubscriptionId: 
id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.VpnSiteName, err = id.PopSegment("vpnSites"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("vpnSiteLinks"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/network/parse/vpn_site_link_test.go b/azurerm/internal/services/network/parse/vpn_site_link_test.go new file mode 100644 index 000000000000..3fe4f0c0aa26 --- /dev/null +++ b/azurerm/internal/services/network/parse/vpn_site_link_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = VpnSiteLinkId{} + +func TestVpnSiteLinkIDFormatter(t *testing.T) { + actual := NewVpnSiteLinkID("12345678-1234-9876-4563-123456789012", "resGroup1", "vpnSite1", "vpnSiteLink1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnSites/vpnSite1/vpnSiteLinks/vpnSiteLink1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestVpnSiteLinkID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *VpnSiteLinkId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing VpnSiteName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for VpnSiteName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnSites/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnSites/vpnSite1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnSites/vpnSite1/vpnSiteLinks/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnSites/vpnSite1/vpnSiteLinks/vpnSiteLink1", + Expected: &VpnSiteLinkId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + VpnSiteName: "vpnSite1", + Name: "vpnSiteLink1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/VPNSITES/VPNSITE1/VPNSITELINKS/VPNSITELINK1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) 
+ + actual, err := VpnSiteLinkID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.VpnSiteName != v.Expected.VpnSiteName { + t.Fatalf("Expected %q but got %q for VpnSiteName", v.Expected.VpnSiteName, actual.VpnSiteName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/network/parse/vpn_site_test.go b/azurerm/internal/services/network/parse/vpn_site_test.go index 1d24dcfba2e0..9e8e3c7bced1 100644 --- a/azurerm/internal/services/network/parse/vpn_site_test.go +++ b/azurerm/internal/services/network/parse/vpn_site_test.go @@ -1,5 +1,7 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" @@ -9,9 +11,8 @@ import ( var _ resourceid.Formatter = VpnSiteId{} func TestVpnSiteIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - actual := NewVpnSiteID("group1", "site1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/vpnSites/site1" + actual := NewVpnSiteID("12345678-1234-9876-4563-123456789012", "resGroup1", "vpnSite1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnSites/vpnSite1" if actual != expected { t.Fatalf("Expected %q but got %q", expected, actual) } @@ -19,144 +20,74 @@ func TestVpnSiteIDFormatter(t *testing.T) { func TestVpnSiteID(t *testing.T) { testData := []struct { - Name string - Input string - Error bool - Expect *VpnSiteId + Input string + Error bool + Expected *VpnSiteId }{ + { - Name: "Empty", + // empty Input: "", Error: true, }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/11111111-1111-1111-1111-111111111111", - Error: true, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/11111111-1111-1111-1111-111111111111/resourceGroups/", - Error: true, - }, - { - Name: "Missing leading slash", - Input: "subscriptions/11111111-1111-1111-1111-111111111111/resourceGroups/group1", - Error: true, - }, - { - Name: "Malformed segments", - Input: "/subscriptions/11111111-1111-1111-1111-111111111111/resourceGroups/group1/foo/bar", - Error: true, - }, - { - Name: "No vpn site segment", - Input: "/subscriptions/11111111-1111-1111-1111-111111111111/resourceGroups/group1/providers/Microsoft.Network", + // missing SubscriptionId + Input: "/", Error: true, }, - { - Name: "Correct", - Input: "/subscriptions/11111111-1111-1111-1111-111111111111/resourceGroups/group1/providers/Microsoft.Network/vpnSites/site1", - Expect: &VpnSiteId{ - ResourceGroup: "group1", - Name: "site1", - }, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - actual, err := VpnSiteID(v.Input) - if err != nil { - if v.Error { - continue - } - - t.Fatalf("Expect a value but got an error: %s", err) - } - if v.Error { - t.Fatal("Expect an error but didn't get") - } - - if actual.ResourceGroup != 
v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) - } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) - } - } -} - -var _ resourceid.Formatter = VpnSiteLinkId{} - -func TestVpnSiteLinkIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - actual := NewVpnSiteLinkID(NewVpnSiteID("group1", "site1"), "link1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Network/vpnSites/site1/vpnSiteLinks/link1" - if actual != expected { - t.Fatalf("Expected %q but got %q", expected, actual) - } -} - -func TestVpnSiteLinkID(t *testing.T) { - testData := []struct { - Name string - Input string - Error bool - Expect *VpnSiteLinkId - }{ { - Name: "Empty", - Input: "", + // missing value for SubscriptionId + Input: "/subscriptions/", Error: true, }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/11111111-1111-1111-1111-111111111111", + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/11111111-1111-1111-1111-111111111111/resourceGroups/", + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", Error: true, }, + { - Name: "Missing leading slash", - Input: "subscriptions/11111111-1111-1111-1111-111111111111/resourceGroups/group1", + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", Error: true, }, + { - Name: "Malformed segments", - Input: "/subscriptions/11111111-1111-1111-1111-111111111111/resourceGroups/group1/foo/bar", + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnSites/", Error: true, }, + { - Name: "No vpn site segment", - Input: "/subscriptions/11111111-1111-1111-1111-111111111111/resourceGroups/group1/providers/Microsoft.Network", - Error: true, + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnSites/vpnSite1", + Expected: &VpnSiteId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "vpnSite1", + }, }, + { - Name: "No link segment", - Input: "/subscriptions/11111111-1111-1111-1111-111111111111/resourceGroups/group1/providers/Microsoft.Network/vpnSites/site1", + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/VPNSITES/VPNSITE1", Error: true, }, - { - Name: "Correct", - Input: "/subscriptions/11111111-1111-1111-1111-111111111111/resourceGroups/group1/providers/Microsoft.Network/vpnSites/site1/vpnSiteLinks/link1", - Expect: &VpnSiteLinkId{ - ResourceGroup: "group1", - Site: "site1", - Name: "link1", - }, - }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) - actual, err := VpnSiteLinkID(v.Input) + actual, err := VpnSiteID(v.Input) if err != nil { if v.Error { continue @@ -165,19 +96,17 @@ func TestVpnSiteLinkID(t *testing.T) { t.Fatalf("Expect a value but got an error: %s", err) } if v.Error { - t.Fatal("Expect an error but didn't get") + t.Fatal("Expect an error but didn't get one") } - if actual.ResourceGroup != 
v.Expect.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expect.ResourceGroup, actual.ResourceGroup) + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) } - - if actual.Site != v.Expect.Site { - t.Fatalf("Expected %q but got %q for Site", v.Expect.Site, actual.Site) + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) } - - if actual.Name != v.Expect.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expect.Name, actual.Name) + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) } } } diff --git a/azurerm/internal/services/network/point_to_site_vpn_gateway_resource.go b/azurerm/internal/services/network/point_to_site_vpn_gateway_resource.go index 58a8120a84c7..eeffce930d30 100644 --- a/azurerm/internal/services/network/point_to_site_vpn_gateway_resource.go +++ b/azurerm/internal/services/network/point_to_site_vpn_gateway_resource.go @@ -52,7 +52,7 @@ func resourceArmPointToSiteVPNGateway() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate2.ValidateVirtualHubID, + ValidateFunc: validate2.VirtualHubID, }, "vpn_server_configuration_id": { @@ -93,6 +93,49 @@ func resourceArmPointToSiteVPNGateway() *schema.Resource { }, }, }, + + "route": { + Type: schema.TypeList, + Optional: true, + Computed: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "associated_route_table_id": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate2.HubRouteTableID, + }, + + "propagated_route_table": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "ids": { + Type: schema.TypeList, + Required: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validate2.HubRouteTableID, + }, + }, + + "labels": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + }, + }, + }, + }, + }, + }, }, }, }, @@ -103,6 +146,15 @@ func resourceArmPointToSiteVPNGateway() *schema.Resource { ValidateFunc: validation.IntAtLeast(0), }, + "dns_servers": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.IsIPv4Address, + }, + }, + "tags": tags.Schema(), }, } @@ -152,6 +204,10 @@ func resourceArmPointToSiteVPNGatewayCreateUpdate(d *schema.ResourceData, meta i }, Tags: tags.Expand(t), } + customDNSServers := utils.ExpandStringSlice(d.Get("dns_servers").([]interface{})) + if len(*customDNSServers) != 0 { + parameters.P2SVpnGatewayProperties.CustomDNSServers = customDNSServers + } future, err := client.CreateOrUpdate(ctx, resourceGroup, name, parameters) if err != nil { @@ -201,6 +257,7 @@ func resourceArmPointToSiteVPNGatewayRead(d *schema.ResourceData, meta interface } if props := resp.P2SVpnGatewayProperties; props != nil { + d.Set("dns_servers", utils.FlattenStringSlice(props.CustomDNSServers)) flattenedConfigurations := flattenPointToSiteVPNGatewayConnectionConfiguration(props.P2SConnectionConfigurations) if err := d.Set("connection_configuration", flattenedConfigurations); err != nil { return fmt.Errorf("Error setting `connection_configuration`: %+v", err) @@ -275,6 +332,7 @@ func 
expandPointToSiteVPNGatewayConnectionConfiguration(input []interface{}) *[] VpnClientAddressPool: &network.AddressSpace{ AddressPrefixes: &addressPrefixes, }, + RoutingConfiguration: expandPointToSiteVPNGatewayConnectionRouteConfiguration(raw["route"].([]interface{})), }, }) } @@ -282,6 +340,37 @@ func expandPointToSiteVPNGatewayConnectionConfiguration(input []interface{}) *[] return &configurations } +func expandPointToSiteVPNGatewayConnectionRouteConfiguration(input []interface{}) *network.RoutingConfiguration { + if len(input) == 0 { + return nil + } + v := input[0].(map[string]interface{}) + return &network.RoutingConfiguration{ + AssociatedRouteTable: &network.SubResource{ + ID: utils.String(v["associated_route_table_id"].(string)), + }, + PropagatedRouteTables: expandPointToSiteVPNGatewayConnectionRouteConfigurationPropagatedRouteTable(v["propagated_route_table"].([]interface{})), + } +} + +func expandPointToSiteVPNGatewayConnectionRouteConfigurationPropagatedRouteTable(input []interface{}) *network.PropagatedRouteTable { + if len(input) == 0 { + return nil + } + v := input[0].(map[string]interface{}) + idRaws := utils.ExpandStringSlice(v["ids"].([]interface{})) + ids := make([]network.SubResource, len(*idRaws)) + for i, item := range *idRaws { + ids[i] = network.SubResource{ + ID: utils.String(item), + } + } + return &network.PropagatedRouteTable{ + Labels: utils.ExpandStringSlice(v["labels"].(*schema.Set).List()), + Ids: &ids, + } +} + func flattenPointToSiteVPNGatewayConnectionConfiguration(input *[]network.P2SConnectionConfiguration) []interface{} { if input == nil { return []interface{}{} @@ -315,8 +404,45 @@ func flattenPointToSiteVPNGatewayConnectionConfiguration(input *[]network.P2SCon "address_prefixes": addressPrefixes, }, }, + "route": flattenPointToSiteVPNGatewayConnectionRouteConfiguration(v.RoutingConfiguration), }) } return output } + +func flattenPointToSiteVPNGatewayConnectionRouteConfiguration(input *network.RoutingConfiguration) []interface{} { + if input == nil { + return []interface{}{} + } + var associatedRouteTableId string + if input.AssociatedRouteTable != nil && input.AssociatedRouteTable.ID != nil { + associatedRouteTableId = *input.AssociatedRouteTable.ID + } + return []interface{}{ + map[string]interface{}{ + "associated_route_table_id": associatedRouteTableId, + "propagated_route_table": flattenPointToSiteVPNGatewayConnectionRouteConfigurationPropagatedRouteTable(input.PropagatedRouteTables), + }, + } +} + +func flattenPointToSiteVPNGatewayConnectionRouteConfigurationPropagatedRouteTable(input *network.PropagatedRouteTable) []interface{} { + if input == nil { + return []interface{}{} + } + ids := make([]string, 0) + if input.Ids != nil { + for _, item := range *input.Ids { + if item.ID != nil { + ids = append(ids, *item.ID) + } + } + } + return []interface{}{ + map[string]interface{}{ + "ids": ids, + "labels": utils.FlattenStringSlice(input.Labels), + }, + } +} diff --git a/azurerm/internal/services/network/private_endpoint_resource.go b/azurerm/internal/services/network/private_endpoint_resource.go index 0e1f1b598894..e575de62ec24 100644 --- a/azurerm/internal/services/network/private_endpoint_resource.go +++ b/azurerm/internal/services/network/private_endpoint_resource.go @@ -1,6 +1,7 @@ package network import ( + "context" "fmt" "log" "strings" @@ -13,21 +14,26 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" + privateDnsParse "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/parse" + privateDnsValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) func resourceArmPrivateEndpoint() *schema.Resource { return &schema.Resource{ - Create: resourceArmPrivateEndpointCreateUpdate, + Create: resourceArmPrivateEndpointCreate, Read: resourceArmPrivateEndpointRead, - Update: resourceArmPrivateEndpointCreateUpdate, + Update: resourceArmPrivateEndpointUpdate, Delete: resourceArmPrivateEndpointDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.PrivateEndpointID(id) + return err + }), Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(60 * time.Minute), @@ -75,7 +81,7 @@ func resourceArmPrivateEndpoint() *schema.Resource { Required: true, Elem: &schema.Schema{ Type: schema.TypeString, - ValidateFunc: parse.ValidatePrivateDnsZoneResourceID, + ValidateFunc: privateDnsValidate.PrivateDnsZoneID, }, }, }, @@ -204,30 +210,28 @@ func resourceArmPrivateEndpoint() *schema.Resource { } } -func resourceArmPrivateEndpointCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceArmPrivateEndpointCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Network.PrivateEndpointClient dnsClient := meta.(*clients.Client).Network.PrivateDnsZoneGroupClient - ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() - name := d.Get("name").(string) - resourceGroup := d.Get("resource_group_name").(string) + id := parse.NewPrivateEndpointID(subscriptionId, d.Get("resource_group_name").(string), d.Get("name").(string)) if err := ValidatePrivateEndpointSettings(d); err != nil { - return fmt.Errorf("validating the configuration for the Private Endpoint %q (Resource Group %q): %+v", name, resourceGroup, err) + return fmt.Errorf("validating the configuration for the Private Endpoint %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } - if d.IsNewResource() { - existing, err := client.Get(ctx, resourceGroup, name, "") - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for presence of existing Private Endpoint %q (Resource Group %q): %+v", name, resourceGroup, err) - } + existing, err := client.Get(ctx, id.ResourceGroup, id.Name, "") + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for presence of existing Private Endpoint %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } + } - if existing.ID != nil && *existing.ID != "" { - return 
tf.ImportAsExistsError("azurerm_private_endpoint", *existing.ID) - } + if existing.PrivateEndpointProperties != nil { + return tf.ImportAsExistsError("azurerm_private_endpoint", id.ID()) } location := azure.NormalizeLocation(d.Get("location").(string)) @@ -247,101 +251,121 @@ func resourceArmPrivateEndpointCreateUpdate(d *schema.ResourceData, meta interfa Tags: tags.Expand(d.Get("tags").(map[string]interface{})), } - future, err := client.CreateOrUpdate(ctx, resourceGroup, name, parameters) + future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.Name, parameters) if err != nil { if strings.EqualFold(err.Error(), "is missing required parameter 'group Id'") { - return fmt.Errorf("creating Private Endpoint %q (Resource Group %q) due to missing 'group Id', ensure that the 'subresource_names' type is populated: %+v", name, resourceGroup, err) + return fmt.Errorf("creating Private Endpoint %q (Resource Group %q) due to missing 'group Id', ensure that the 'subresource_names' type is populated: %+v", id.Name, id.ResourceGroup, err) } else { - return fmt.Errorf("creating Private Endpoint %q (Resource Group %q): %+v", name, resourceGroup, err) + return fmt.Errorf("creating Private Endpoint %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("waiting for creation of Private Endpoint %q (Resource Group %q): %+v", name, resourceGroup, err) + return fmt.Errorf("waiting for creation of Private Endpoint %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } - resp, err := client.Get(ctx, resourceGroup, name, "") + d.SetId(id.ID()) + + // 1 Private Endpoint can have 1 Private DNS Zone Group + // since this is a new resource, there shouldn't be an existing one - so there's no need to delete it + if len(privateDnsZoneGroup) > 0 { + log.Printf("[DEBUG] Creating Private DNS Zone Group associated with Private Endpoint %q / Resource Group %q..", id.Name, id.ResourceGroup) + if err := createPrivateDnsZoneGroupForPrivateEndpoint(ctx, dnsClient, id, privateDnsZoneGroup); err != nil { + return err + } + log.Printf("[DEBUG] Created the Existing Private DNS Zone Group associated with Private Endpoint %q / Resource Group %q.", id.Name, id.ResourceGroup) + } + + return resourceArmPrivateEndpointRead(d, meta) +} + +func resourceArmPrivateEndpointUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Network.PrivateEndpointClient + dnsClient := meta.(*clients.Client).Network.PrivateDnsZoneGroupClient + ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.PrivateEndpointID(d.Id()) if err != nil { - return fmt.Errorf("retrieving Private Endpoint %q (Resource Group %q): %+v", name, resourceGroup, err) + return err } - if resp.ID == nil || *resp.ID == "" { - return fmt.Errorf("API returns a nil/empty id on Private Endpoint %q (Resource Group %q): %+v", name, resourceGroup, err) + + if err := ValidatePrivateEndpointSettings(d); err != nil { + return fmt.Errorf("validating the configuration for the Private Endpoint %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } - d.SetId(*resp.ID) - // now create the dns zone group - // first I have to see if the dns zone group exists, if it does I need to delete it an re-create it because you can only have one per private endpoint - if d.HasChange("private_dns_zone_group") || d.IsNewResource() { - oldRaw, newRaw := d.GetChange("private_dns_zone_group") - 
oldPrivateDnsZoneGroup := make(map[string]interface{}) - if oldRaw != nil { - for _, v := range oldRaw.([]interface{}) { - oldPrivateDnsZoneGroup = v.(map[string]interface{}) - } - } + location := azure.NormalizeLocation(d.Get("location").(string)) + privateDnsZoneGroup := d.Get("private_dns_zone_group").([]interface{}) + privateServiceConnections := d.Get("private_service_connection").([]interface{}) + subnetId := d.Get("subnet_id").(string) - newPrivateDnsZoneGroup := make(map[string]interface{}) - if newRaw != nil { - for _, v := range newRaw.([]interface{}) { - newPrivateDnsZoneGroup = v.(map[string]interface{}) - } - } + // TODO: in future it'd be nice to support conditional updates here, but one problem at a time + parameters := network.PrivateEndpoint{ + Location: utils.String(location), + PrivateEndpointProperties: &network.PrivateEndpointProperties{ + PrivateLinkServiceConnections: expandArmPrivateLinkEndpointServiceConnection(privateServiceConnections, false), + ManualPrivateLinkServiceConnections: expandArmPrivateLinkEndpointServiceConnection(privateServiceConnections, true), + Subnet: &network.Subnet{ + ID: utils.String(subnetId), + }, + }, + Tags: tags.Expand(d.Get("tags").(map[string]interface{})), + } - if len(newPrivateDnsZoneGroup) == 0 && len(oldPrivateDnsZoneGroup) != 0 { - if err := resourceArmPrivateDnsZoneGroupDelete(d, meta, oldPrivateDnsZoneGroup["id"].(string)); err != nil { - return err - } - } else if len(newPrivateDnsZoneGroup) != 0 && len(oldPrivateDnsZoneGroup) != 0 { - if oldPrivateDnsZoneGroup["name"].(string) != newPrivateDnsZoneGroup["name"].(string) { - if err := resourceArmPrivateDnsZoneGroupDelete(d, meta, oldPrivateDnsZoneGroup["id"].(string)); err != nil { - return err - } - } + future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.Name, parameters) + if err != nil { + if strings.EqualFold(err.Error(), "is missing required parameter 'group Id'") { + return fmt.Errorf("updating Private Endpoint %q (Resource Group %q) due to missing 'group Id', ensure that the 'subresource_names' type is populated: %+v", id.Name, id.ResourceGroup, err) + } else { + return fmt.Errorf("updating Private Endpoint %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } } + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for update of Private Endpoint %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } - for _, v := range privateDnsZoneGroup { - item := v.(map[string]interface{}) - dnsGroupName := item["name"].(string) - privateDnsZoneIds := item["private_dns_zone_ids"].([]interface{}) - privateDnsZones, err := parse.PrivateDnsZoneResourceIDs(privateDnsZoneIds) + // 1 Private Endpoint can have 1 Private DNS Zone Group - so to update we need to Delete & Recreate + if d.HasChange("private_dns_zone_group") { + existingDnsZoneGroups, err := retrievePrivateDnsZoneGroupsForPrivateEndpoint(ctx, dnsClient, *id) if err != nil { return err } - privateDnsZoneConfigs := make([]network.PrivateDNSZoneConfig, 0) - - for _, item := range *privateDnsZones { - v := network.PrivateDNSZoneConfig{ - Name: utils.String(item.Name), - PrivateDNSZonePropertiesFormat: &network.PrivateDNSZonePropertiesFormat{ - PrivateDNSZoneID: utils.String(item.ID), - }, - } - - privateDnsZoneConfigs = append(privateDnsZoneConfigs, v) + newDnsZoneGroups := d.Get("private_dns_zone_group").([]interface{}) + newDnsZoneName := "" + if len(newDnsZoneGroups) > 0 { + groupRaw := newDnsZoneGroups[0].(map[string]interface{}) + newDnsZoneName = 
groupRaw["name"].(string) } - parameters := network.PrivateDNSZoneGroup{} - parameters.Name = utils.String(name) - parameters.PrivateDNSZoneGroupPropertiesFormat = &network.PrivateDNSZoneGroupPropertiesFormat{ - PrivateDNSZoneConfigs: &privateDnsZoneConfigs, - } + needToRemove := newDnsZoneName == "" + nameHasChanged := false + if existingDnsZoneGroups != nil && newDnsZoneName != "" { + needToRemove = len(*existingDnsZoneGroups) > 0 && len(newDnsZoneGroups) == 0 - future, err := dnsClient.CreateOrUpdate(ctx, resourceGroup, name, dnsGroupName, parameters) - if err != nil { - return fmt.Errorf("creating Private DNS Zone Group %q Private Endpoint %q (Resource Group %q): %+v", dnsGroupName, name, resourceGroup, err) - } - if err = future.WaitForCompletionRef(ctx, dnsClient.Client); err != nil { - return fmt.Errorf("waiting for creation of Private DNS Zone Group %q Private Endpoint %q (Resource Group %q): %+v", dnsGroupName, name, resourceGroup, err) + // there should only be a single one, but there's no harm checking all returned + for _, existing := range *existingDnsZoneGroups { + if existing.Name != newDnsZoneName { + nameHasChanged = true + break + } + } } - resp, err := dnsClient.Get(ctx, resourceGroup, name, dnsGroupName) - if err != nil { - return fmt.Errorf("retrieving Private DNS Zone Group %q (Resource Group %q): %+v", dnsGroupName, resourceGroup, err) + if needToRemove || nameHasChanged { + log.Printf("[DEBUG] Deleting the Existing Private DNS Zone Group associated with Private Endpoint %q / Resource Group %q..", id.Name, id.ResourceGroup) + if err := deletePrivateDnsZoneGroupForPrivateEndpoint(ctx, dnsClient, *id); err != nil { + return err + } + log.Printf("[DEBUG] Deleted the Existing Private DNS Zone Group associated with Private Endpoint %q / Resource Group %q.", id.Name, id.ResourceGroup) } - if resp.ID == nil || *resp.ID == "" { - return fmt.Errorf("API returns a nil/empty id on Private DNS Zone Group %q (Resource Group %q): %+v", dnsGroupName, resourceGroup, err) + } + + if len(privateDnsZoneGroup) > 0 { + log.Printf("[DEBUG] Creating Private DNS Zone Group associated with Private Endpoint %q / Resource Group %q..", id.Name, id.ResourceGroup) + if err := createPrivateDnsZoneGroupForPrivateEndpoint(ctx, dnsClient, *id, privateDnsZoneGroup); err != nil { + return err } + log.Printf("[DEBUG] Created the Existing Private DNS Zone Group associated with Private Endpoint %q / Resource Group %q.", id.Name, id.ResourceGroup) } return resourceArmPrivateEndpointRead(d, meta) @@ -354,159 +378,114 @@ func resourceArmPrivateEndpointRead(d *schema.ResourceData, meta interface{}) er ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.PrivateEndpointID(d.Id()) if err != nil { return err } - resourceGroup := id.ResourceGroup - name := id.Path["privateEndpoints"] - resp, err := client.Get(ctx, resourceGroup, name, "") + resp, err := client.Get(ctx, id.ResourceGroup, id.Name, "") if err != nil { if utils.ResponseWasNotFound(resp.Response) { log.Printf("[INFO] Private Endpoint %q does not exist - removing from state", d.Id()) d.SetId("") return nil } - return fmt.Errorf("reading Private Endpoint %q (Resource Group %q): %+v", name, resourceGroup, err) + return fmt.Errorf("reading Private Endpoint %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } - d.Set("name", resp.Name) - d.Set("resource_group_name", resourceGroup) - if location := resp.Location; location != nil { - 
d.Set("location", azure.NormalizeLocation(*location)) + privateDnsZoneIds, err := retrievePrivateDnsZoneGroupsForPrivateEndpoint(ctx, dnsClient, *id) + if err != nil { + return err } + d.Set("name", id.Name) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("location", location.NormalizeNilable(resp.Location)) + if props := resp.PrivateEndpointProperties; props != nil { - privateIpAddress := "" + if err := d.Set("custom_dns_configs", flattenArmCustomDnsConfigs(props.CustomDNSConfigs)); err != nil { + return fmt.Errorf("setting `custom_dns_configs`: %+v", err) + } + privateIpAddress := "" if nics := props.NetworkInterfaces; nics != nil && len(*nics) > 0 { nic := (*nics)[0] if nic.ID != nil && *nic.ID != "" { privateIpAddress = getPrivateIpAddress(ctx, nicsClient, *nic.ID) } } - - flattenedConnection := flattenArmPrivateLinkEndpointServiceConnection(props.PrivateLinkServiceConnections, props.ManualPrivateLinkServiceConnections) - for _, item := range flattenedConnection { - v := item.(map[string]interface{}) - v["private_ip_address"] = privateIpAddress - } + flattenedConnection := flattenArmPrivateLinkEndpointServiceConnection(props.PrivateLinkServiceConnections, props.ManualPrivateLinkServiceConnections, privateIpAddress) if err := d.Set("private_service_connection", flattenedConnection); err != nil { return fmt.Errorf("setting `private_service_connection`: %+v", err) } subnetId := "" - if subnet := props.Subnet; subnet != nil { - subnetId = *subnet.ID + if props.Subnet != nil && props.Subnet.ID != nil { + subnetId = *props.Subnet.ID } d.Set("subnet_id", subnetId) - d.Set("custom_dns_configs", flattenArmCustomDnsConfigs(props.CustomDNSConfigs)) } - // DNS Zone Read Here... - privateDnsZoneGroup := d.Get("private_dns_zone_group").([]interface{}) - if len(privateDnsZoneGroup) > 0 { - for _, v := range privateDnsZoneGroup { - dnsZoneGroup := v.(map[string]interface{}) - - dnsResp, err := dnsClient.Get(ctx, resourceGroup, name, dnsZoneGroup["name"].(string)) + privateDnsZoneConfigs := make([]interface{}, 0) + privateDnsZoneGroups := make([]interface{}, 0) + if privateDnsZoneIds != nil { + for _, dnsZoneId := range *privateDnsZoneIds { + flattened, err := retrieveAndFlattenPrivateDnsZone(ctx, dnsClient, dnsZoneId) if err != nil { - return fmt.Errorf("reading Private DNS Zone Group %q (Resource Group %q): %+v", dnsZoneGroup["name"].(string), resourceGroup, err) - } - - if err := d.Set("private_dns_zone_group", flattenArmPrivateDnsZoneGroup(dnsResp)); err != nil { - return err + return nil } - // now split out the private dns zone configs into there own block - if props := dnsResp.PrivateDNSZoneGroupPropertiesFormat; props != nil { - if err := d.Set("private_dns_zone_configs", flattenArmPrivateDnsZoneConfigs(props.PrivateDNSZoneConfigs, *dnsResp.ID)); err != nil { - return fmt.Errorf("setting private_dns_zone_configs : %+v", err) - } + // an exceptional case but no harm in handling + if flattened == nil { + continue } - } - } else { - // remove associated configs, if any - d.Set("private_dns_zone_configs", make([]interface{}, 0)) - } - return tags.FlattenAndSet(d, resp.Tags) -} -func resourceArmPrivateEndpointDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Network.PrivateEndpointClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - // delete private dns zone first if it exists - dnsRaw := d.Get("private_dns_zone_group") - privateDnsZoneGroup := make(map[string]interface{}) - if dnsRaw != nil { - for 
_, v := range dnsRaw.([]interface{}) { - privateDnsZoneGroup = v.(map[string]interface{}) + privateDnsZoneConfigs = append(privateDnsZoneConfigs, flattened.DnsZoneConfig...) + privateDnsZoneGroups = append(privateDnsZoneGroups, flattened.DnsZoneGroup) } } - - if len(privateDnsZoneGroup) != 0 { - if err := resourceArmPrivateDnsZoneGroupDelete(d, meta, privateDnsZoneGroup["id"].(string)); err != nil { - return err - } + if err := d.Set("private_dns_zone_configs", privateDnsZoneConfigs); err != nil { + return fmt.Errorf("setting `private_dns_zone_configs`: %+v", err) } - - privateEndpoint, err := parse.PrivateEndpointResourceID(d.Id()) - if err != nil { - return err + if err := d.Set("private_dns_zone_group", privateDnsZoneGroups); err != nil { + return fmt.Errorf("setting `private_dns_zone_group`: %+v", err) } - future, err := client.Delete(ctx, privateEndpoint.ResourceGroup, privateEndpoint.Name) - if err != nil { - if response.WasNotFound(future.Response()) { - return nil - } - return fmt.Errorf("deleting Private Endpoint %q (Resource Group %q): %+v", privateEndpoint.Name, privateEndpoint.ResourceGroup, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - if !response.WasNotFound(future.Response()) { - return fmt.Errorf("waiting for deletion of Private Endpoint %q (Resource Group %q): %+v", privateEndpoint.Name, privateEndpoint.ResourceGroup, err) - } - } - - return nil + return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmPrivateDnsZoneGroupDelete(d *schema.ResourceData, meta interface{}, oldId string) error { - client := meta.(*clients.Client).Network.PrivateDnsZoneGroupClient +func resourceArmPrivateEndpointDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Network.PrivateEndpointClient + dnsZoneGroupsClient := meta.(*clients.Client).Network.PrivateDnsZoneGroupClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - if oldId == "" { - return nil - } - - privateEndpoint, err := parse.PrivateEndpointResourceID(d.Id()) + id, err := parse.PrivateEndpointID(d.Id()) if err != nil { return err } - privateDnsZoneGroupId, err := parse.PrivateDnsZoneGroupResourceID(oldId) - if err != nil { + log.Printf("[DEBUG] Deleting the Private DNS Zone Group associated with Private Endpoint %q / Resource Group %q..", id.Name, id.ResourceGroup) + if err := deletePrivateDnsZoneGroupForPrivateEndpoint(ctx, dnsZoneGroupsClient, *id); err != nil { return err } + log.Printf("[DEBUG] Deleted the Private DNS Zone Group associated with Private Endpoint %q / Resource Group %q.", id.Name, id.ResourceGroup) - future, err := client.Delete(ctx, privateEndpoint.ResourceGroup, privateEndpoint.Name, privateDnsZoneGroupId.Name) + log.Printf("[DEBUG] Deleting the Private Endpoint %q / Resource Group %q..", id.Name, id.ResourceGroup) + future, err := client.Delete(ctx, id.ResourceGroup, id.Name) if err != nil { if response.WasNotFound(future.Response()) { return nil } - return fmt.Errorf("deleting Private DNS Zone Group %q (Resource Group %q): %+v", privateDnsZoneGroupId.Name, privateEndpoint.ResourceGroup, err) + return fmt.Errorf("deleting Private Endpoint %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { if !response.WasNotFound(future.Response()) { - return fmt.Errorf("waiting for deletion of Private DNS Zone Group %q (Resource Group %q): %+v", privateDnsZoneGroupId.Name, privateEndpoint.ResourceGroup, err) + return 
fmt.Errorf("waiting for deletion of Private Endpoint %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } } + log.Printf("[DEBUG] Deleted the Private Endpoint %q / Resource Group %q.", id.Name, id.ResourceGroup) return nil } @@ -542,24 +521,6 @@ func expandArmPrivateLinkEndpointServiceConnection(input []interface{}, parseMan return &results } -func flattenArmPrivateDnsZoneGroup(input network.PrivateDNSZoneGroup) []interface{} { - output := make([]interface{}, 0) - result := make(map[string]interface{}) - - if id := input.ID; id != nil { - result["id"] = *id - } - if name := input.Name; name != nil { - result["name"] = *name - } - - if props := input.PrivateDNSZoneGroupPropertiesFormat; props != nil { - result["private_dns_zone_ids"] = flattenArmPrivateDnsZoneIds(props.PrivateDNSZoneConfigs) - } - output = append(output, result) - return output -} - func flattenArmCustomDnsConfigs(customDnsConfigs *[]network.CustomDNSConfigPropertiesFormat) []interface{} { results := make([]interface{}, 0) if customDnsConfigs == nil { @@ -576,90 +537,7 @@ func flattenArmCustomDnsConfigs(customDnsConfigs *[]network.CustomDNSConfigPrope return results } -func flattenArmPrivateDnsZoneIds(input *[]network.PrivateDNSZoneConfig) []string { - results := make([]string, 0) - if input == nil { - return results - } - - for _, item := range *input { - if props := item.PrivateDNSZonePropertiesFormat; props != nil { - if privateDnsZoneId := props.PrivateDNSZoneID; privateDnsZoneId != nil { - results = append(results, *props.PrivateDNSZoneID) - } - } - } - - return results -} - -func flattenArmPrivateDnsZoneConfigs(input *[]network.PrivateDNSZoneConfig, zoneGroupId string) []interface{} { - output := make([]interface{}, 0) - if input == nil { - return output - } - - for _, v := range *input { - result := make(map[string]interface{}) - - if name := v.Name; name != nil { - result["name"] = *name - // I have to consturct this because the SDK does not expose it in its PrivateDNSZoneConfig struct - result["id"] = fmt.Sprintf("%s/privateDnsZoneConfigs/%s", zoneGroupId, *name) - } - - if props := v.PrivateDNSZonePropertiesFormat; props != nil { - if zoneId := props.PrivateDNSZoneID; zoneId != nil { - result["private_dns_zone_id"] = *zoneId - } - - if recordSets := props.RecordSets; recordSets != nil { - result["record_sets"] = flattenArmPrivateDnsZoneRecordSets(recordSets) - } - } - - output = append(output, result) - } - - return output -} - -func flattenArmPrivateDnsZoneRecordSets(input *[]network.RecordSet) []map[string]interface{} { - output := make([]map[string]interface{}, 0) - if input == nil { - return output - } - - for _, v := range *input { - result := make(map[string]interface{}) - - if recordName := v.RecordSetName; recordName != nil { - result["name"] = *recordName - } - - if recordType := v.RecordType; recordType != nil { - result["type"] = *recordType - } - - if fqdn := v.Fqdn; fqdn != nil { - result["fqdn"] = *fqdn - } - - if ttl := v.TTL; ttl != nil { - result["ttl"] = int(*ttl) - } - - if ipAddresses := v.IPAddresses; ipAddresses != nil { - result["ip_addresses"] = *ipAddresses - } - - output = append(output, result) - } - - return output -} - -func flattenArmPrivateLinkEndpointServiceConnection(serviceConnections *[]network.PrivateLinkServiceConnection, manualServiceConnections *[]network.PrivateLinkServiceConnection) []interface{} { +func flattenArmPrivateLinkEndpointServiceConnection(serviceConnections *[]network.PrivateLinkServiceConnection, manualServiceConnections 
*[]network.PrivateLinkServiceConnection, privateIPAddress string) []interface{} { results := make([]interface{}, 0) if serviceConnections == nil && manualServiceConnections == nil { return results @@ -687,6 +565,7 @@ func flattenArmPrivateLinkEndpointServiceConnection(serviceConnections *[]networ "name": name, "is_manual_connection": false, "private_connection_resource_id": privateConnectionId, + "private_ip_address": privateIPAddress, "subresource_names": subResourceNames, }) } @@ -719,6 +598,7 @@ func flattenArmPrivateLinkEndpointServiceConnection(serviceConnections *[]networ "name": name, "is_manual_connection": true, "private_connection_resource_id": privateConnectionId, + "private_ip_address": privateIPAddress, "request_message": requestMessage, "subresource_names": subResourceNames, }) @@ -727,3 +607,203 @@ func flattenArmPrivateLinkEndpointServiceConnection(serviceConnections *[]networ return results } + +func createPrivateDnsZoneGroupForPrivateEndpoint(ctx context.Context, client *network.PrivateDNSZoneGroupsClient, id parse.PrivateEndpointId, inputRaw []interface{}) error { + if len(inputRaw) != 1 { + return fmt.Errorf("expected a single Private DNS Zone Groups but got %d", len(inputRaw)) + } + item := inputRaw[0].(map[string]interface{}) + + dnsGroupName := item["name"].(string) + privateDnsZoneIdsRaw := item["private_dns_zone_ids"].([]interface{}) + privateDnsZoneConfigs := make([]network.PrivateDNSZoneConfig, 0) + for _, item := range privateDnsZoneIdsRaw { + v := item.(string) + + privateDnsZone, err := privateDnsParse.PrivateDnsZoneID(v) + if err != nil { + return err + } + + privateDnsZoneConfigs = append(privateDnsZoneConfigs, network.PrivateDNSZoneConfig{ + Name: utils.String(privateDnsZone.Name), + PrivateDNSZonePropertiesFormat: &network.PrivateDNSZonePropertiesFormat{ + PrivateDNSZoneID: utils.String(privateDnsZone.ID()), + }, + }) + } + + parameters := network.PrivateDNSZoneGroup{ + Name: utils.String(id.Name), + PrivateDNSZoneGroupPropertiesFormat: &network.PrivateDNSZoneGroupPropertiesFormat{ + PrivateDNSZoneConfigs: &privateDnsZoneConfigs, + }, + } + future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.Name, dnsGroupName, parameters) + if err != nil { + return fmt.Errorf("creating Private DNS Zone Group %q for Private Endpoint %q (Resource Group %q): %+v", dnsGroupName, id.Name, id.ResourceGroup, err) + } + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for creation of Private DNS Zone Group %q for Private Endpoint %q (Resource Group %q): %+v", dnsGroupName, id.Name, id.ResourceGroup, err) + } + + return nil +} + +func deletePrivateDnsZoneGroupForPrivateEndpoint(ctx context.Context, client *network.PrivateDNSZoneGroupsClient, id parse.PrivateEndpointId) error { + // lookup and delete the (should be, Single) Private DNS Zone Group associated with this Private Endpoint + privateDnsZoneIds, err := retrievePrivateDnsZoneGroupsForPrivateEndpoint(ctx, client, id) + if err != nil { + return err + } + + for _, privateDnsZoneId := range *privateDnsZoneIds { + log.Printf("[DEBUG] Deleting Private DNS Zone Group %q (Private Endpoint %q / Resource Group %q)..", privateDnsZoneId.Name, privateDnsZoneId.PrivateEndpointName, privateDnsZoneId.ResourceGroup) + future, err := client.Delete(ctx, privateDnsZoneId.ResourceGroup, privateDnsZoneId.PrivateEndpointName, privateDnsZoneId.Name) + if err != nil { + if !response.WasNotFound(future.Response()) { + return fmt.Errorf("deleting Private DNS Zone Group %q (Private Endpoint 
%q / Resource Group %q): %+v", privateDnsZoneId.Name, privateDnsZoneId.PrivateEndpointName, privateDnsZoneId.ResourceGroup, err) + } + } + + if !response.WasNotFound(future.Response()) { + log.Printf("[DEBUG] Waiting for deletion of Private DNS Zone Group %q (Private Endpoint %q / Resource Group %q)..", privateDnsZoneId.Name, privateDnsZoneId.PrivateEndpointName, privateDnsZoneId.ResourceGroup) + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + if !response.WasNotFound(future.Response()) { + return fmt.Errorf("waiting for deletion of Private DNS Zone Group %q (Private Endpoint %q / Resource Group %q): %+v", privateDnsZoneId.Name, privateDnsZoneId.PrivateEndpointName, privateDnsZoneId.ResourceGroup, err) + } + } + log.Printf("[DEBUG] Deleted Private DNS Zone Group %q (Private Endpoint %q / Resource Group %q).", privateDnsZoneId.Name, privateDnsZoneId.PrivateEndpointName, privateDnsZoneId.ResourceGroup) + } + } + + return nil +} + +func retrievePrivateDnsZoneGroupsForPrivateEndpoint(ctx context.Context, client *network.PrivateDNSZoneGroupsClient, id parse.PrivateEndpointId) (*[]parse.PrivateDnsZoneGroupId, error) { + output := make([]parse.PrivateDnsZoneGroupId, 0) + + dnsZones, err := client.ListComplete(ctx, id.Name, id.ResourceGroup) // looks odd.. matches the SDK method + if err != nil { + if utils.ResponseWasNotFound(dnsZones.Response().Response) { + return &output, nil + } + + return nil, fmt.Errorf("retrieving Private DNS Zone Groups for Private Endpoint %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + for dnsZones.NotDone() { + privateDnsZoneGroup := dnsZones.Value() + if privateDnsZoneGroup.ID != nil { + groupId, err := parse.PrivateDnsZoneGroupID(*privateDnsZoneGroup.ID) + if err != nil { + return nil, err + } + + output = append(output, *groupId) + } + + if err := dnsZones.NextWithContext(ctx); err != nil { + return nil, err + } + } + + return &output, nil +} + +type flattenedPrivateDnsZoneGroup struct { + DnsZoneConfig []interface{} + DnsZoneGroup map[string]interface{} +} + +func retrieveAndFlattenPrivateDnsZone(ctx context.Context, client *network.PrivateDNSZoneGroupsClient, id parse.PrivateDnsZoneGroupId) (*flattenedPrivateDnsZoneGroup, error) { + resp, err := client.Get(ctx, id.ResourceGroup, id.PrivateEndpointName, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return nil, nil + } + + return nil, fmt.Errorf("retrieving Private DNS Zone Group %q (Private Endpoint %q / Resource Group %q): %+v", id.Name, id.PrivateEndpointName, id.ResourceGroup, err) + } + + privateDnsZoneIds := make([]string, 0) + dnsZoneConfigs := make([]interface{}, 0) + + if resp.PrivateDNSZoneGroupPropertiesFormat != nil && resp.PrivateDNSZoneGroupPropertiesFormat.PrivateDNSZoneConfigs != nil { + for _, config := range *resp.PrivateDNSZoneGroupPropertiesFormat.PrivateDNSZoneConfigs { + if config.Name == nil { + // necessary to build up the ID + continue + } + if config.PrivateDNSZonePropertiesFormat == nil || config.PrivateDNSZonePropertiesFormat.PrivateDNSZoneID == nil { + // necessary for a bunch of other things + continue + } + props := *config.PrivateDNSZonePropertiesFormat + name := *config.Name + privateDnsZoneId := *props.PrivateDNSZoneID + + privateDnsZoneIds = append(privateDnsZoneIds, privateDnsZoneId) + + recordSets := flattenPrivateDnsZoneGroupRecordSets(props.RecordSets) + dnsZoneConfigs = append(dnsZoneConfigs, map[string]interface{}{ + "id": parse.NewPrivateDnsZoneConfigID(id.SubscriptionId, id.ResourceGroup, 
id.PrivateEndpointName, id.Name, name).ID(), + "name": name, + "private_dns_zone_id": privateDnsZoneId, + "record_sets": recordSets, + }) + } + } + + return &flattenedPrivateDnsZoneGroup{ + DnsZoneConfig: dnsZoneConfigs, + DnsZoneGroup: map[string]interface{}{ + "id": id.ID(), + "name": id.Name, + "private_dns_zone_ids": privateDnsZoneIds, + }, + }, nil +} + +func flattenPrivateDnsZoneGroupRecordSets(input *[]network.RecordSet) []interface{} { + output := make([]interface{}, 0) + if input == nil { + return output + } + + for _, v := range *input { + fqdn := "" + if v.Fqdn != nil { + fqdn = *v.Fqdn + } + + name := "" + if v.RecordSetName != nil { + name = *v.RecordSetName + } + + recordType := "" + if v.RecordType != nil { + recordType = *v.RecordType + } + + ttl := 0 + if v.TTL != nil { + ttl = int(*v.TTL) + } + + ipAddresses := make([]string, 0) + if v.IPAddresses != nil { + ipAddresses = *v.IPAddresses + } + + output = append(output, map[string]interface{}{ + "fqdn": fqdn, + "ip_addresses": ipAddresses, + "name": name, + "ttl": ttl, + "type": recordType, + }) + } + + return output +} diff --git a/azurerm/internal/services/network/private_link_service_resource.go b/azurerm/internal/services/network/private_link_service_resource.go index e1a73279210f..769082723da3 100644 --- a/azurerm/internal/services/network/private_link_service_resource.go +++ b/azurerm/internal/services/network/private_link_service_resource.go @@ -200,14 +200,6 @@ func resourceArmPrivateLinkServiceCreateUpdate(d *schema.ResourceData, meta inte return fmt.Errorf("Error waiting for creation of Private Link Service %q (Resource Group %q): %+v", name, resourceGroup, err) } - resp, err := client.Get(ctx, resourceGroup, name, "") - if err != nil { - return fmt.Errorf("Error retrieving Private Link Service %q (Resource Group %q): %+v", name, resourceGroup, err) - } - if resp.ID == nil || *resp.ID == "" { - return fmt.Errorf("API returns a nil/empty id on Private Link Service %q (Resource Group %q): %+v", name, resourceGroup, err) - } - // we can't rely on the use of the Future here due to the resource being successfully completed but now the service is applying those values. // currently being tracked with issue #6466: https://github.com/Azure/azure-sdk-for-go/issues/6466 log.Printf("[DEBUG] Waiting for Private Link Service to %q (Resource Group %q) to finish applying", name, resourceGroup) @@ -228,6 +220,15 @@ func resourceArmPrivateLinkServiceCreateUpdate(d *schema.ResourceData, meta inte return fmt.Errorf("Error waiting for Private Link Service %q (Resource Group %q) to become available: %s", name, resourceGroup, err) } + // TODO: switch over to using an ID parser + resp, err := client.Get(ctx, resourceGroup, name, "") + if err != nil { + return fmt.Errorf("Error retrieving Private Link Service %q (Resource Group %q): %+v", name, resourceGroup, err) + } + if resp.ID == nil || *resp.ID == "" { + return fmt.Errorf("API returns a nil/empty id on Private Link Service %q (Resource Group %q): %+v", name, resourceGroup, err) + } + d.SetId(*resp.ID) return resourceArmPrivateLinkServiceRead(d, meta) @@ -441,6 +442,11 @@ func privateLinkServiceWaitForReadyRefreshFunc(ctx context.Context, client *netw return func() (interface{}, string, error) { res, err := client.Get(ctx, resourceGroupName, name, "") if err != nil { + // the API is eventually consistent during recreates.. 
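The hunk above notes that the API is eventually consistent during recreates, so the refresh function now reports a transient "Pending" state when the service briefly returns a 404 (the branch continues just below). For context, a refresh function with this (interface{}, string, error) shape is what the plugin SDK's resource.StateChangeConf polls until a target state is reached. The following is only a hedged sketch of that wiring, assuming the usual fmt, time, helper/resource and helper/schema imports; the wrapper name, state names and timeouts are illustrative assumptions, not the provider's exact code.

// waitForPrivateLinkServiceSketch is an illustrative wrapper (not part of this
// change) showing how a resource.StateRefreshFunc such as
// privateLinkServiceWaitForReadyRefreshFunc is typically polled.
func waitForPrivateLinkServiceSketch(d *schema.ResourceData, refresh resource.StateRefreshFunc, name, resourceGroup string) error {
	stateConf := &resource.StateChangeConf{
		Pending:    []string{"Pending", "Updating"}, // assumed state names
		Target:     []string{"Available"},           // assumed target state
		Refresh:    refresh,
		MinTimeout: 15 * time.Second,
		Timeout:    d.Timeout(schema.TimeoutCreate),
	}
	if _, err := stateConf.WaitForState(); err != nil {
		return fmt.Errorf("waiting for Private Link Service %q (Resource Group %q) to become available: %+v", name, resourceGroup, err)
	}
	return nil
}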
+ if utils.ResponseWasNotFound(res.Response) { + return res, "Pending", nil + } + return nil, "Error", fmt.Errorf("Error issuing read request in privateLinkServiceWaitForReadyRefreshFunc %q (Resource Group %q): %s", name, resourceGroupName, err) } if props := res.PrivateLinkServiceProperties; props != nil { diff --git a/azurerm/internal/services/network/registration.go b/azurerm/internal/services/network/registration.go index 9d5fbf82c84e..d3503faa8766 100644 --- a/azurerm/internal/services/network/registration.go +++ b/azurerm/internal/services/network/registration.go @@ -14,7 +14,6 @@ func (r Registration) Name() string { // WebsiteCategories returns a list of categories which can be used for the sidebar func (r Registration) WebsiteCategories() []string { return []string{ - "Load Balancer", "Network", } } @@ -24,12 +23,7 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource { return map[string]*schema.Resource{ "azurerm_application_security_group": dataSourceArmApplicationSecurityGroup(), "azurerm_express_route_circuit": dataSourceArmExpressRouteCircuit(), - "azurerm_firewall": dataSourceArmFirewall(), - "azurerm_firewall_policy": dataSourceArmFirewallPolicy(), "azurerm_ip_group": dataSourceArmIpGroup(), - "azurerm_lb": dataSourceArmLoadBalancer(), - "azurerm_lb_backend_address_pool": dataSourceArmLoadBalancerBackendAddressPool(), - "azurerm_lb_rule": dataSourceArmLoadBalancerRule(), "azurerm_nat_gateway": dataSourceArmNatGateway(), "azurerm_network_ddos_protection_plan": dataSourceNetworkDDoSProtectionPlan(), "azurerm_network_interface": dataSourceArmNetworkInterface(), @@ -50,37 +44,26 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource { "azurerm_virtual_network_gateway_connection": dataSourceArmVirtualNetworkGatewayConnection(), "azurerm_virtual_network": dataSourceArmVirtualNetwork(), "azurerm_web_application_firewall_policy": dataArmWebApplicationFirewallPolicy(), + "azurerm_virtual_wan": dataSourceArmVirtualWan(), } } // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_application_gateway": resourceArmApplicationGateway(), - "azurerm_application_security_group": resourceArmApplicationSecurityGroup(), - "azurerm_bastion_host": resourceArmBastionHost(), - "azurerm_express_route_circuit_authorization": resourceArmExpressRouteCircuitAuthorization(), - "azurerm_express_route_circuit_peering": resourceArmExpressRouteCircuitPeering(), - "azurerm_express_route_circuit": resourceArmExpressRouteCircuit(), - "azurerm_express_route_gateway": resourceArmExpressRouteGateway(), - "azurerm_firewall_application_rule_collection": resourceArmFirewallApplicationRuleCollection(), - "azurerm_firewall_policy": resourceArmFirewallPolicy(), - "azurerm_firewall_nat_rule_collection": resourceArmFirewallNatRuleCollection(), - "azurerm_firewall_network_rule_collection": resourceArmFirewallNetworkRuleCollection(), - "azurerm_firewall": resourceArmFirewall(), - "azurerm_ip_group": resourceArmIpGroup(), - "azurerm_local_network_gateway": resourceArmLocalNetworkGateway(), - "azurerm_lb_backend_address_pool": resourceArmLoadBalancerBackendAddressPool(), - "azurerm_lb_nat_pool": resourceArmLoadBalancerNatPool(), - "azurerm_lb_nat_rule": resourceArmLoadBalancerNatRule(), - "azurerm_lb_probe": resourceArmLoadBalancerProbe(), - "azurerm_lb_outbound_rule": resourceArmLoadBalancerOutboundRule(), - "azurerm_lb_rule": 
resourceArmLoadBalancerRule(), - "azurerm_lb": resourceArmLoadBalancer(), - "azurerm_nat_gateway": resourceArmNatGateway(), - "azurerm_network_connection_monitor": resourceArmNetworkConnectionMonitor(), - "azurerm_network_ddos_protection_plan": resourceArmNetworkDDoSProtectionPlan(), - "azurerm_network_interface": resourceArmNetworkInterface(), + "azurerm_application_gateway": resourceArmApplicationGateway(), + "azurerm_application_security_group": resourceArmApplicationSecurityGroup(), + "azurerm_bastion_host": resourceArmBastionHost(), + "azurerm_express_route_circuit_authorization": resourceArmExpressRouteCircuitAuthorization(), + "azurerm_express_route_circuit_peering": resourceArmExpressRouteCircuitPeering(), + "azurerm_express_route_circuit": resourceArmExpressRouteCircuit(), + "azurerm_express_route_gateway": resourceArmExpressRouteGateway(), + "azurerm_ip_group": resourceArmIpGroup(), + "azurerm_local_network_gateway": resourceArmLocalNetworkGateway(), + "azurerm_nat_gateway": resourceArmNatGateway(), + "azurerm_network_connection_monitor": resourceArmNetworkConnectionMonitor(), + "azurerm_network_ddos_protection_plan": resourceArmNetworkDDoSProtectionPlan(), + "azurerm_network_interface": resourceArmNetworkInterface(), "azurerm_network_interface_application_gateway_backend_address_pool_association": resourceArmNetworkInterfaceApplicationGatewayBackendAddressPoolAssociation(), "azurerm_network_interface_application_security_group_association": resourceArmNetworkInterfaceApplicationSecurityGroupAssociation(), "azurerm_network_interface_backend_address_pool_association": resourceArmNetworkInterfaceBackendAddressPoolAssociation(), @@ -102,18 +85,24 @@ func (r Registration) SupportedResources() map[string]*schema.Resource { "azurerm_route_filter": resourceArmRouteFilter(), "azurerm_route_table": resourceArmRouteTable(), "azurerm_route": resourceArmRoute(), + "azurerm_virtual_hub_security_partner_provider": resourceArmVirtualHubSecurityPartnerProvider(), + "azurerm_subnet_service_endpoint_storage_policy": resourceArmSubnetServiceEndpointStoragePolicy(), "azurerm_subnet_network_security_group_association": resourceArmSubnetNetworkSecurityGroupAssociation(), "azurerm_subnet_route_table_association": resourceArmSubnetRouteTableAssociation(), "azurerm_subnet_nat_gateway_association": resourceArmSubnetNatGatewayAssociation(), "azurerm_subnet": resourceArmSubnet(), "azurerm_virtual_hub": resourceArmVirtualHub(), + "azurerm_virtual_hub_bgp_connection": resourceArmVirtualHubBgpConnection(), "azurerm_virtual_hub_connection": resourceArmVirtualHubConnection(), + "azurerm_virtual_hub_ip": resourceArmVirtualHubIP(), + "azurerm_virtual_hub_route_table": resourceArmVirtualHubRouteTable(), "azurerm_virtual_network_gateway_connection": resourceArmVirtualNetworkGatewayConnection(), "azurerm_virtual_network_gateway": resourceArmVirtualNetworkGateway(), "azurerm_virtual_network_peering": resourceArmVirtualNetworkPeering(), "azurerm_virtual_network": resourceArmVirtualNetwork(), "azurerm_virtual_wan": resourceArmVirtualWan(), "azurerm_vpn_gateway": resourceArmVPNGateway(), + "azurerm_vpn_gateway_connection": resourceArmVPNGatewayConnection(), "azurerm_vpn_server_configuration": resourceArmVPNServerConfiguration(), "azurerm_vpn_site": resourceArmVpnSite(), "azurerm_web_application_firewall_policy": resourceArmWebApplicationFirewallPolicy(), diff --git a/azurerm/internal/services/network/resourceids.go b/azurerm/internal/services/network/resourceids.go new file mode 100644 index 000000000000..f99f942caf3f 
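The resourceids.go file added just below holds only go:generate directives for the internal generator-resource-id tool; the generated structs and parsers are what the rest of this change consumes (for example parse.PrivateEndpointID(d.Id()), parse.NewSubnetServiceEndpointStoragePolicyID(...) and the id.ID() calls). As a rough sketch of the shape that generated code typically takes - the names and parsing details below are illustrative assumptions, the generator's actual output is authoritative:

package parse // illustrative sketch only; the real file is produced by generator-resource-id

import (
	"fmt"

	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure"
)

// RouteFilterId mirrors the segments of the -id example passed to the generator.
type RouteFilterId struct {
	SubscriptionId string
	ResourceGroup  string
	Name           string
}

// NewRouteFilterID builds an ID struct from its components (assumed constructor name).
func NewRouteFilterID(subscriptionId, resourceGroup, name string) RouteFilterId {
	return RouteFilterId{SubscriptionId: subscriptionId, ResourceGroup: resourceGroup, Name: name}
}

// ID formats the struct back into an Azure resource ID string.
func (id RouteFilterId) ID() string {
	return fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/routeFilters/%s", id.SubscriptionId, id.ResourceGroup, id.Name)
}

// RouteFilterID parses a resource ID back into the struct above; this is the
// kind of helper that route_filter_resource.go switches over to in this change.
func RouteFilterID(input string) (*RouteFilterId, error) {
	id, err := azure.ParseAzureResourceID(input)
	if err != nil {
		return nil, fmt.Errorf("parsing Route Filter ID %q: %+v", input, err)
	}

	routeFilter := RouteFilterId{
		SubscriptionId: id.SubscriptionID,
		ResourceGroup:  id.ResourceGroup,
	}
	if routeFilter.Name, err = id.PopSegment("routeFilters"); err != nil {
		return nil, err
	}
	if err := id.ValidateNoEmptySegments(input); err != nil {
		return nil, err
	}
	return &routeFilter, nil
}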
--- /dev/null
+++ b/azurerm/internal/services/network/resourceids.go
@@ -0,0 +1,42 @@
+package network
+
+// Core bits and pieces
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=IpGroup -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/ipGroups/group1
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=NetworkInterface -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/networkInterfaces/networkInterface1
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=PublicIpAddress -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/publicIPAddresses/publicIpAddress1
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Subnet -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualNetworks/network1/subnets/subnet1
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=VirtualNetwork -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualNetworks/network1
+
+// NAT Gateway
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=NatGateway -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/natGateways/gateway1
+// NOTE: the Nat Gateway <-> Public IP Association can't be generated at this time
+
+// Network Watcher
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=ConnectionMonitor -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/networkWatchers/watcher1/connectionMonitors/connectionMonitor1
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=NetworkWatcher -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/networkWatchers/watcher1
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=PacketCapture -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/networkWatchers/watcher1/packetCaptures/capture1
+
+// Private Link
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=PrivateEndpoint -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/endpoint1
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=PrivateDnsZoneConfig -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/endpoint1/privateDnsZoneGroups/privateDnsZoneGroup1/privateDnsZoneConfigs/privateDnsZoneConfig1
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=PrivateDnsZoneGroup -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/endpoint1/privateDnsZoneGroups/privateDnsZoneGroup1
+// ^ these two look like they should be in Private DNS - alas no, they're actually nested and entirely managed within the Private Endpoint
+
+// Routing
+//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=RouteFilter
-id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/routeFilters/filter1 + +// Virtual Hubs +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=BgpConnection -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1/bgpConnections/connection1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=HubRouteTable -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1/hubRouteTables/routeTable1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=HubVirtualNetworkConnection -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1/hubVirtualNetworkConnections/hubConnection1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=SecurityPartnerProvider -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/securityPartnerProviders/partnerProvider1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=VirtualHub -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=VirtualHubIpConfiguration -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1/ipConfigurations/ipConfiguration1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=VirtualWan -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualWans/virtualWan1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=VpnGateway -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnGateways/vpnGateway1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=VpnConnection -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnGateways/vpnGateway1/vpnConnections/vpnConnection1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=VpnSite -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnSites/vpnSite1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=VpnSiteLink -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnSites/vpnSite1/vpnSiteLinks/vpnSiteLink1 + +// Subnet Service Endpoint Policy +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=SubnetServiceEndpointStoragePolicy -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/serviceEndpointPolicies/policy1 diff --git a/azurerm/internal/services/network/route_filter.go b/azurerm/internal/services/network/route_filter.go deleted file mode 100644 index f5711a48a8cd..000000000000 --- a/azurerm/internal/services/network/route_filter.go +++ /dev/null @@ -1,36 +0,0 @@ -package network - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -// NOTE: there's some 
nice things we can do with this around validation -// since these top level objects exist - -type RouteFilterResourceID struct { - ResourceGroup string - Name string -} - -func ParseRouteFilterID(input string) (*RouteFilterResourceID, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Route Filter ID %q: %+v", input, err) - } - - routeFilter := RouteFilterResourceID{ - ResourceGroup: id.ResourceGroup, - } - - if routeFilter.Name, err = id.PopSegment("routeFilters"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &routeFilter, nil -} diff --git a/azurerm/internal/services/network/route_filter_resource.go b/azurerm/internal/services/network/route_filter_resource.go index 011412e3cdca..252313ff53db 100644 --- a/azurerm/internal/services/network/route_filter_resource.go +++ b/azurerm/internal/services/network/route_filter_resource.go @@ -12,6 +12,7 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" @@ -26,7 +27,7 @@ func resourceArmRouteFilter() *schema.Resource { Delete: resourceArmRouteFilterDelete, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := ParseRouteFilterID(id) + _, err := parse.RouteFilterID(id) return err }), @@ -158,7 +159,7 @@ func resourceArmRouteFilterRead(d *schema.ResourceData, meta interface{}) error ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := ParseRouteFilterID(d.Id()) + id, err := parse.RouteFilterID(d.Id()) if err != nil { return err } @@ -192,7 +193,7 @@ func resourceArmRouteFilterDelete(d *schema.ResourceData, meta interface{}) erro ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := ParseRouteFilterID(d.Id()) + id, err := parse.RouteFilterID(d.Id()) if err != nil { return err } diff --git a/azurerm/internal/services/network/route_filter_test.go b/azurerm/internal/services/network/route_filter_test.go deleted file mode 100644 index 9b9f17c05319..000000000000 --- a/azurerm/internal/services/network/route_filter_test.go +++ /dev/null @@ -1,58 +0,0 @@ -package network - -import ( - "testing" -) - -func TestParseRouteFilter(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *RouteFilterResourceID - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Route Filter Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo", - Expected: nil, - }, - { - Name: "No Route Filter Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/routeFilters/", - Expected: nil, - }, - { - Name: "Completed", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/routeFilters/example", - Expected: &RouteFilterResourceID{ - Name: "example", - ResourceGroup: "foo", - }, - }, - } - - for _, v := range 
testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := ParseRouteFilterID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/network/subnet_network_security_group_association_resource.go b/azurerm/internal/services/network/subnet_network_security_group_association_resource.go index 05ed264cc955..f721d9ff13d9 100644 --- a/azurerm/internal/services/network/subnet_network_security_group_association_resource.go +++ b/azurerm/internal/services/network/subnet_network_security_group_association_resource.go @@ -137,7 +137,6 @@ func resourceArmSubnetNetworkSecurityGroupAssociationRead(d *schema.ResourceData subnetName := id.Path["subnets"] resp, err := client.Get(ctx, resourceGroup, virtualNetworkName, subnetName, "") - if err != nil { if utils.ResponseWasNotFound(resp.Response) { log.Printf("[DEBUG] Subnet %q (Virtual Network %q / Resource Group %q) could not be found - removing from state!", subnetName, virtualNetworkName, resourceGroup) diff --git a/azurerm/internal/services/network/subnet_resource.go b/azurerm/internal/services/network/subnet_resource.go index beae3205cefe..d40f42d19865 100644 --- a/azurerm/internal/services/network/subnet_resource.go +++ b/azurerm/internal/services/network/subnet_resource.go @@ -6,6 +6,8 @@ import ( "strings" "time" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/validate" + "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" @@ -77,6 +79,16 @@ func resourceArmSubnet() *schema.Resource { Elem: &schema.Schema{Type: schema.TypeString}, }, + "service_endpoint_policy_ids": { + Type: schema.TypeSet, + Optional: true, + MinItems: 1, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validate.SubnetServiceEndpointStoragePolicyID, + }, + }, + "delegation": { Type: schema.TypeList, Optional: true, @@ -215,6 +227,9 @@ func resourceArmSubnetCreate(d *schema.ResourceData, meta interface{}) error { serviceEndpointsRaw := d.Get("service_endpoints").([]interface{}) properties.ServiceEndpoints = expandSubnetServiceEndpoints(serviceEndpointsRaw) + serviceEndpointPoliciesRaw := d.Get("service_endpoint_policy_ids").(*schema.Set).List() + properties.ServiceEndpointPolicies = expandSubnetServiceEndpointPolicies(serviceEndpointPoliciesRaw) + delegationsRaw := d.Get("delegation").([]interface{}) properties.Delegations = expandSubnetDelegation(delegationsRaw) @@ -304,6 +319,11 @@ func resourceArmSubnetUpdate(d *schema.ResourceData, meta interface{}) error { props.ServiceEndpoints = expandSubnetServiceEndpoints(serviceEndpointsRaw) } + if d.HasChange("service_endpoint_policy_ids") { + serviceEndpointPoliciesRaw := d.Get("service_endpoint_policy_ids").(*schema.Set).List() + props.ServiceEndpointPolicies = expandSubnetServiceEndpointPolicies(serviceEndpointPoliciesRaw) + } + subnet := network.Subnet{ Name: utils.String(name), SubnetPropertiesFormat: &props, @@ -335,7 +355,6 @@ func resourceArmSubnetRead(d *schema.ResourceData, meta interface{}) error { name := 
id.Path["subnets"] resp, err := client.Get(ctx, resourceGroup, networkName, name, "") - if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") @@ -372,6 +391,11 @@ func resourceArmSubnetRead(d *schema.ResourceData, meta interface{}) error { if err := d.Set("service_endpoints", serviceEndpoints); err != nil { return fmt.Errorf("Error setting `service_endpoints`: %+v", err) } + + serviceEndpointPolicies := flattenSubnetServiceEndpointPolicies(props.ServiceEndpointPolicies) + if err := d.Set("service_endpoint_policy_ids", serviceEndpointPolicies); err != nil { + return fmt.Errorf("Error setting `service_endpoint_policy_ids`: %+v", err) + } } return nil @@ -528,3 +552,28 @@ func flattenSubnetPrivateLinkNetworkPolicy(input *string) bool { return strings.EqualFold(*input, "Disabled") } + +func expandSubnetServiceEndpointPolicies(input []interface{}) *[]network.ServiceEndpointPolicy { + output := make([]network.ServiceEndpointPolicy, 0) + for _, policy := range input { + policy := policy.(string) + output = append(output, network.ServiceEndpointPolicy{ID: &policy}) + } + return &output +} + +func flattenSubnetServiceEndpointPolicies(input *[]network.ServiceEndpointPolicy) []interface{} { + if input == nil { + return nil + } + + var output []interface{} + for _, policy := range *input { + id := "" + if policy.ID != nil { + id = *policy.ID + } + output = append(output, id) + } + return output +} diff --git a/azurerm/internal/services/network/subnet_route_table_association_resource.go b/azurerm/internal/services/network/subnet_route_table_association_resource.go index 9c5383239c0e..f4d7aa357bd4 100644 --- a/azurerm/internal/services/network/subnet_route_table_association_resource.go +++ b/azurerm/internal/services/network/subnet_route_table_association_resource.go @@ -134,7 +134,6 @@ func resourceArmSubnetRouteTableAssociationRead(d *schema.ResourceData, meta int subnetName := id.Path["subnets"] resp, err := client.Get(ctx, resourceGroup, virtualNetworkName, subnetName, "") - if err != nil { if utils.ResponseWasNotFound(resp.Response) { log.Printf("[DEBUG] Subnet %q (Virtual Network %q / Resource Group %q) could not be found - removing from state!", subnetName, virtualNetworkName, resourceGroup) diff --git a/azurerm/internal/services/network/subnet_service_endpoint_storage_policy_resource.go b/azurerm/internal/services/network/subnet_service_endpoint_storage_policy_resource.go new file mode 100644 index 000000000000..c0081f0cdb48 --- /dev/null +++ b/azurerm/internal/services/network/subnet_service_endpoint_storage_policy_resource.go @@ -0,0 +1,244 @@ +package network + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" + + mgValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/managementgroup/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/validate" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmSubnetServiceEndpointStoragePolicy() *schema.Resource { + return &schema.Resource{ + Create: resourceArmSubnetServiceEndpointStoragePolicyCreateUpdate, + Read: resourceArmSubnetServiceEndpointStoragePolicyRead, + Update: resourceArmSubnetServiceEndpointStoragePolicyCreateUpdate, + Delete: resourceArmSubnetServiceEndpointStoragePolicyDelete, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.SubnetServiceEndpointStoragePolicyID(id) + return err + }), + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.SubnetServiceEndpointStoragePolicyName, + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "location": location.Schema(), + + "definition": { + Type: schema.TypeList, + Optional: true, + MinItems: 1, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.SubnetServiceEndpointStoragePolicyDefinitionName, + }, + + "service_resources": { + Type: schema.TypeSet, + Required: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.Any( + azure.ValidateResourceID, + mgValidate.ManagementGroupID, + ), + }, + }, + + "description": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringLenBetween(0, 140), + }, + }, + }, + }, + + "tags": tags.Schema(), + }, + } +} + +func resourceArmSubnetServiceEndpointStoragePolicyCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Network.ServiceEndpointPoliciesClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + resourceId := parse.NewSubnetServiceEndpointStoragePolicyID(subscriptionId, d.Get("resource_group_name").(string), d.Get("name").(string)) + if d.IsNewResource() { + resp, err := client.Get(ctx, resourceId.ResourceGroup, resourceId.ServiceEndpointPolicyName, "") + if err != nil { + if !utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("checking for existing %s: %+v", resourceId, err) + } + } + + if !utils.ResponseWasNotFound(resp.Response) { + return tf.ImportAsExistsError("azurerm_subnet_service_endpoint_storage_policy", resourceId.ID()) + } + } + + location := azure.NormalizeLocation(d.Get("location").(string)) + param := network.ServiceEndpointPolicy{ + Location: &location, + ServiceEndpointPolicyPropertiesFormat: &network.ServiceEndpointPolicyPropertiesFormat{ + ServiceEndpointPolicyDefinitions: expandServiceEndpointPolicyDefinitions(d.Get("definition").([]interface{})), + }, + Tags: tags.Expand(d.Get("tags").(map[string]interface{})), + } + + future, err := client.CreateOrUpdate(ctx, resourceId.ResourceGroup, resourceId.ServiceEndpointPolicyName, param) + if err != nil { + 
return fmt.Errorf("creating Subnet Service Endpoint Storage Policy %q (Resource Group %q): %+v", resourceId.ServiceEndpointPolicyName, resourceId.ResourceGroup, err) + } + + if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for creation of Subnet Service Endpoint Storage Policy %q (Resource Group %q): %+v", resourceId.ServiceEndpointPolicyName, resourceId.ResourceGroup, err) + } + + d.SetId(resourceId.ID()) + + return resourceArmSubnetServiceEndpointStoragePolicyRead(d, meta) +} + +func resourceArmSubnetServiceEndpointStoragePolicyRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Network.ServiceEndpointPoliciesClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.SubnetServiceEndpointStoragePolicyID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.ServiceEndpointPolicyName, "") + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[DEBUG] Subnet Service Endpoint Storage Policy %q was not found in Resource Group %q - removing from state!", id.ServiceEndpointPolicyName, id.ResourceGroup) + d.SetId("") + return nil + } + + return fmt.Errorf("retrieving Subnet Service Endpoint Storage Policy %q (Resource Group %q): %+v", id.ServiceEndpointPolicyName, id.ResourceGroup, err) + } + + d.Set("name", id.ServiceEndpointPolicyName) + d.Set("resource_group_name", id.ResourceGroup) + if location := resp.Location; location != nil { + d.Set("location", azure.NormalizeLocation(*location)) + } + if prop := resp.ServiceEndpointPolicyPropertiesFormat; prop != nil { + if err := d.Set("definition", flattenServiceEndpointPolicyDefinitions(prop.ServiceEndpointPolicyDefinitions)); err != nil { + return fmt.Errorf("setting `definition`: %v", err) + } + } + + return tags.FlattenAndSet(d, resp.Tags) +} + +func resourceArmSubnetServiceEndpointStoragePolicyDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Network.ServiceEndpointPoliciesClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.SubnetServiceEndpointStoragePolicyID(d.Id()) + if err != nil { + return err + } + + if _, err := client.Delete(ctx, id.ResourceGroup, id.ServiceEndpointPolicyName); err != nil { + return fmt.Errorf("deleting Subnet Service Endpoint Storage Policy %q (Resource Group %q): %+v", id.ServiceEndpointPolicyName, id.ResourceGroup, err) + } + + return nil +} + +func expandServiceEndpointPolicyDefinitions(input []interface{}) *[]network.ServiceEndpointPolicyDefinition { + if len(input) == 0 { + return nil + } + + output := make([]network.ServiceEndpointPolicyDefinition, 0) + for _, e := range input { + e := e.(map[string]interface{}) + output = append(output, network.ServiceEndpointPolicyDefinition{ + Name: utils.String(e["name"].(string)), + ServiceEndpointPolicyDefinitionPropertiesFormat: &network.ServiceEndpointPolicyDefinitionPropertiesFormat{ + Description: utils.String(e["description"].(string)), + Service: utils.String("Microsoft.Storage"), + ServiceResources: utils.ExpandStringSlice(e["service_resources"].(*schema.Set).List()), + }, + }) + } + + return &output +} + +func flattenServiceEndpointPolicyDefinitions(input *[]network.ServiceEndpointPolicyDefinition) []interface{} { + if input == nil { + return []interface{}{} + } + + output := make([]interface{}, 0) + for _, e := range *input { + name := "" + if 
e.Name != nil { + name = *e.Name + } + + var ( + description = "" + serviceResource = []interface{}{} + ) + if b := e.ServiceEndpointPolicyDefinitionPropertiesFormat; b != nil { + if b.Description != nil { + description = *b.Description + } + serviceResource = utils.FlattenStringSlice(b.ServiceResources) + } + + output = append(output, map[string]interface{}{ + "name": name, + "description": description, + "service_resources": serviceResource, + }) + } + + return output +} diff --git a/azurerm/internal/services/network/tests/application_gateway_resource_test.go b/azurerm/internal/services/network/tests/application_gateway_resource_test.go index 8fb0c340d571..f9c16fd0b6a9 100644 --- a/azurerm/internal/services/network/tests/application_gateway_resource_test.go +++ b/azurerm/internal/services/network/tests/application_gateway_resource_test.go @@ -867,7 +867,7 @@ func TestAccAzureRMApplicationGateway_webApplicationFirewall_exclusions(t *testi resource.TestCheckResourceAttr(data.ResourceName, "waf_configuration.0.rule_set_version", "3.0"), resource.TestCheckResourceAttr(data.ResourceName, "waf_configuration.0.request_body_check", "true"), resource.TestCheckResourceAttr(data.ResourceName, "waf_configuration.0.max_request_body_size_kb", "128"), - resource.TestCheckResourceAttr(data.ResourceName, "waf_configuration.0.file_upload_limit_mb", "100"), + resource.TestCheckResourceAttr(data.ResourceName, "waf_configuration.0.file_upload_limit_mb", "750"), resource.TestCheckResourceAttr(data.ResourceName, "waf_configuration.0.exclusion.0.match_variable", "RequestArgNames"), resource.TestCheckResourceAttr(data.ResourceName, "waf_configuration.0.exclusion.0.selector_match_operator", "Equals"), resource.TestCheckResourceAttr(data.ResourceName, "waf_configuration.0.exclusion.0.selector", "displayNameHtml"), @@ -898,7 +898,7 @@ func TestAccAzureRMApplicationGateway_webApplicationFirewall_exclusions(t *testi resource.TestCheckResourceAttr(data.ResourceName, "waf_configuration.0.rule_set_version", "3.0"), resource.TestCheckResourceAttr(data.ResourceName, "waf_configuration.0.request_body_check", "true"), resource.TestCheckResourceAttr(data.ResourceName, "waf_configuration.0.max_request_body_size_kb", "128"), - resource.TestCheckResourceAttr(data.ResourceName, "waf_configuration.0.file_upload_limit_mb", "100"), + resource.TestCheckResourceAttr(data.ResourceName, "waf_configuration.0.file_upload_limit_mb", "750"), resource.TestCheckResourceAttr(data.ResourceName, "waf_configuration.0.exclusion.0.match_variable", "RequestArgNames"), resource.TestCheckResourceAttr(data.ResourceName, "waf_configuration.0.exclusion.0.selector_match_operator", "Equals"), resource.TestCheckResourceAttr(data.ResourceName, "waf_configuration.0.exclusion.0.selector", "displayNameHtml"), @@ -1147,7 +1147,6 @@ func testCheckAzureRMApplicationGatewayDestroy(s *terraform.State) error { resourceGroup := rs.Primary.Attributes["resource_group_name"] resp, err := client.Get(ctx, resourceGroup, gatewayName) - if err != nil { if utils.ResponseWasNotFound(resp.Response) { return nil @@ -4419,6 +4418,7 @@ resource "azurerm_application_gateway" "test" { } `, template, data.RandomInteger) } + func testAccAzureRMApplicationGateway_webApplicationFirewall_disabledRuleGroups(data acceptance.TestData) string { template := testAccAzureRMApplicationGateway_template(data) return fmt.Sprintf(` @@ -4661,7 +4661,7 @@ resource "azurerm_application_gateway" "test" { rule_set_version = "3.0" request_body_check = true max_request_body_size_kb = 128 - 
file_upload_limit_mb = 100 + file_upload_limit_mb = 750 exclusion { match_variable = "RequestArgNames" @@ -4736,6 +4736,7 @@ resource "azurerm_application_gateway" "test" { } `, template, data.RandomInteger, data.RandomInteger) } + func testAccAzureRMApplicationGateway_webApplicationFirewall_exclusions_one(data acceptance.TestData) string { template := testAccAzureRMApplicationGateway_template(data) return fmt.Sprintf(` @@ -4777,7 +4778,7 @@ resource "azurerm_application_gateway" "test" { rule_set_version = "3.0" request_body_check = true max_request_body_size_kb = 128 - file_upload_limit_mb = 100 + file_upload_limit_mb = 750 exclusion { match_variable = "RequestArgNames" diff --git a/azurerm/internal/services/network/tests/application_security_group_resource_test.go b/azurerm/internal/services/network/tests/application_security_group_resource_test.go index 14ab538411f3..90d50309b4da 100644 --- a/azurerm/internal/services/network/tests/application_security_group_resource_test.go +++ b/azurerm/internal/services/network/tests/application_security_group_resource_test.go @@ -112,7 +112,6 @@ func testCheckAzureRMApplicationSecurityGroupDestroy(s *terraform.State) error { resourceGroup := rs.Primary.Attributes["resource_group_name"] resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { if utils.ResponseWasNotFound(resp.Response) { return nil @@ -145,7 +144,6 @@ func testCheckAzureRMApplicationSecurityGroupExists(resourceName string) resourc } resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { if utils.ResponseWasNotFound(resp.Response) { return fmt.Errorf("Application Security Group %q (resource group: %q) was not found: %+v", name, resourceGroup, err) diff --git a/azurerm/internal/services/network/tests/express_route_circuit_authorization_resource_test.go b/azurerm/internal/services/network/tests/express_route_circuit_authorization_resource_test.go index e977f57616cd..4d8bcdfe72dc 100644 --- a/azurerm/internal/services/network/tests/express_route_circuit_authorization_resource_test.go +++ b/azurerm/internal/services/network/tests/express_route_circuit_authorization_resource_test.go @@ -121,7 +121,6 @@ func testCheckAzureRMExpressRouteCircuitAuthorizationDestroy(s *terraform.State) resourceGroup := rs.Primary.Attributes["resource_group_name"] resp, err := client.Get(ctx, resourceGroup, expressRouteCircuitName, name) - if err != nil { return nil } diff --git a/azurerm/internal/services/network/tests/express_route_circuit_peering_resource_test.go b/azurerm/internal/services/network/tests/express_route_circuit_peering_resource_test.go index acb10bc042d2..27fdf878d444 100644 --- a/azurerm/internal/services/network/tests/express_route_circuit_peering_resource_test.go +++ b/azurerm/internal/services/network/tests/express_route_circuit_peering_resource_test.go @@ -73,6 +73,63 @@ func testAccAzureRMExpressRouteCircuitPeering_microsoftPeering(t *testing.T) { }) } +func testAccAzureRMExpressRouteCircuitPeering_microsoftPeeringIpv6(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_express_route_circuit_peering", "test") + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMExpressRouteCircuitPeeringDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMExpressRouteCircuitPeering_msPeeringIpv6(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMExpressRouteCircuitPeeringExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, 
+ }) +} + +func testAccAzureRMExpressRouteCircuitPeering_microsoftPeeringIpv6CustomerRouting(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_express_route_circuit_peering", "test") + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMExpressRouteCircuitPeeringDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMExpressRouteCircuitPeering_msPeeringIpv6CustomerRouting(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMExpressRouteCircuitPeeringExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func testAccAzureRMExpressRouteCircuitPeering_microsoftPeeringIpv6WithRouteFilter(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_express_route_circuit_peering", "test") + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMExpressRouteCircuitPeeringDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMExpressRouteCircuitPeering_msPeeringIpv6WithRouteFilter(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMExpressRouteCircuitPeeringExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + func testAccAzureRMExpressRouteCircuitPeering_microsoftPeeringCustomerRouting(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_express_route_circuit_peering", "test") @@ -191,7 +248,6 @@ func testCheckAzureRMExpressRouteCircuitPeeringDestroy(s *terraform.State) error resourceGroup := rs.Primary.Attributes["resource_group_name"] resp, err := client.Get(ctx, resourceGroup, circuitName, peeringType) - if err != nil { return nil } @@ -311,6 +367,189 @@ resource "azurerm_express_route_circuit_peering" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger) } +func testAccAzureRMExpressRouteCircuitPeering_msPeeringIpv6(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-expressroute-%d" + location = "%s" +} + +resource "azurerm_express_route_circuit" "test" { + name = "acctest-erc-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + service_provider_name = "Equinix" + peering_location = "Silicon Valley" + bandwidth_in_mbps = 50 + + sku { + tier = "Premium" + family = "MeteredData" + } + + tags = { + Env = "Test" + Purpose = "AcceptanceTests" + } +} + +resource "azurerm_express_route_circuit_peering" "test" { + peering_type = "MicrosoftPeering" + express_route_circuit_name = azurerm_express_route_circuit.test.name + resource_group_name = azurerm_resource_group.test.name + peer_asn = 100 + primary_peer_address_prefix = "192.168.3.0/30" + secondary_peer_address_prefix = "192.168.4.0/30" + vlan_id = 300 + + microsoft_peering_config { + advertised_public_prefixes = ["123.2.0.0/24"] + } + + ipv6 { + primary_peer_address_prefix = "2002:db01::/126" + secondary_peer_address_prefix = "2003:db01::/126" + + microsoft_peering { + advertised_public_prefixes = ["2002:db01::/126"] + } + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func testAccAzureRMExpressRouteCircuitPeering_msPeeringIpv6CustomerRouting(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = 
"acctestRG-expressroute-%d" + location = "%s" +} + +resource "azurerm_express_route_circuit" "test" { + name = "acctest-erc-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + service_provider_name = "Equinix" + peering_location = "Silicon Valley" + bandwidth_in_mbps = 50 + + sku { + tier = "Premium" + family = "MeteredData" + } + + tags = { + Env = "Test" + Purpose = "AcceptanceTests" + } +} + +resource "azurerm_express_route_circuit_peering" "test" { + peering_type = "MicrosoftPeering" + express_route_circuit_name = azurerm_express_route_circuit.test.name + resource_group_name = azurerm_resource_group.test.name + peer_asn = 100 + primary_peer_address_prefix = "192.168.3.0/30" + secondary_peer_address_prefix = "192.168.4.0/30" + vlan_id = 300 + + microsoft_peering_config { + advertised_public_prefixes = ["123.2.0.0/24"] + } + ipv6 { + primary_peer_address_prefix = "2002:db01::/126" + secondary_peer_address_prefix = "2003:db01::/126" + + microsoft_peering { + advertised_public_prefixes = ["2002:db01::/126"] + customer_asn = 64511 + routing_registry_name = "ARIN" + } + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func testAccAzureRMExpressRouteCircuitPeering_msPeeringIpv6WithRouteFilter(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-expressroute-%d" + location = "%s" +} + +resource "azurerm_route_filter" "test" { + name = "acctestrf%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + rule { + name = "acctestrule%d" + access = "Allow" + rule_type = "Community" + communities = ["12076:52005", "12076:52006"] + } +} + +resource "azurerm_express_route_circuit" "test" { + name = "acctest-erc-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + service_provider_name = "Equinix" + peering_location = "Silicon Valley" + bandwidth_in_mbps = 50 + + sku { + tier = "Premium" + family = "MeteredData" + } + + tags = { + Env = "Test" + Purpose = "AcceptanceTests" + } +} + +resource "azurerm_express_route_circuit_peering" "test" { + peering_type = "MicrosoftPeering" + express_route_circuit_name = azurerm_express_route_circuit.test.name + resource_group_name = azurerm_resource_group.test.name + peer_asn = 100 + primary_peer_address_prefix = "192.168.3.0/30" + secondary_peer_address_prefix = "192.168.4.0/30" + vlan_id = 300 + route_filter_id = azurerm_route_filter.test.id + + microsoft_peering_config { + advertised_public_prefixes = ["123.2.0.0/24"] + } + + ipv6 { + primary_peer_address_prefix = "2002:db01::/126" + secondary_peer_address_prefix = "2003:db01::/126" + route_filter_id = azurerm_route_filter.test.id + + microsoft_peering { + advertised_public_prefixes = ["2002:db01::/126"] + customer_asn = 64511 + routing_registry_name = "ARIN" + } + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + func testAccAzureRMExpressRouteCircuitPeering_msPeeringCustomerRouting(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { diff --git a/azurerm/internal/services/network/tests/express_route_circuit_resource_test.go b/azurerm/internal/services/network/tests/express_route_circuit_resource_test.go index 4551ecf9f814..43f7f4a6fe6f 100644 --- 
a/azurerm/internal/services/network/tests/express_route_circuit_resource_test.go +++ b/azurerm/internal/services/network/tests/express_route_circuit_resource_test.go @@ -36,9 +36,12 @@ func TestAccAzureRMExpressRouteCircuit(t *testing.T) { "requiresImport": testAccAzureRMExpressRouteCircuitPeering_requiresImport, }, "MicrosoftPeering": { - "microsoftPeering": testAccAzureRMExpressRouteCircuitPeering_microsoftPeering, - "microsoftPeeringCustomerRouting": testAccAzureRMExpressRouteCircuitPeering_microsoftPeeringCustomerRouting, - "microsoftPeeringWithRouteFilter": testAccAzureRMExpressRouteCircuitPeering_microsoftPeeringWithRouteFilter, + "microsoftPeering": testAccAzureRMExpressRouteCircuitPeering_microsoftPeering, + "microsoftPeeringCustomerRouting": testAccAzureRMExpressRouteCircuitPeering_microsoftPeeringCustomerRouting, + "microsoftPeeringWithRouteFilter": testAccAzureRMExpressRouteCircuitPeering_microsoftPeeringWithRouteFilter, + "microsoftPeeringIpv6": testAccAzureRMExpressRouteCircuitPeering_microsoftPeeringIpv6, + "microsoftPeeringIpv6CustomerRouting": testAccAzureRMExpressRouteCircuitPeering_microsoftPeeringIpv6CustomerRouting, + "microsoftPeeringIpv6WithRouteFilter": testAccAzureRMExpressRouteCircuitPeering_microsoftPeeringIpv6WithRouteFilter, }, "authorization": { "basic": testAccAzureRMExpressRouteCircuitAuthorization_basic, @@ -346,7 +349,6 @@ func testCheckAzureRMExpressRouteCircuitDestroy(s *terraform.State) error { resourceGroup := rs.Primary.Attributes["resource_group_name"] resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { return nil } diff --git a/azurerm/internal/services/network/tests/firewall_application_rule_collection_resource_test.go b/azurerm/internal/services/network/tests/firewall_application_rule_collection_resource_test.go deleted file mode 100644 index 0c20474fce41..000000000000 --- a/azurerm/internal/services/network/tests/firewall_application_rule_collection_resource_test.go +++ /dev/null @@ -1,952 +0,0 @@ -package tests - -import ( - "fmt" - "regexp" - "testing" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - - "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" -) - -func TestAccAzureRMFirewallApplicationRuleCollection_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_application_rule_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallApplicationRuleCollection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallApplicationRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestarc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.0.name", "rule1"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.0.source_addresses.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.0.target_fqdns.#", "1"), - 
resource.TestCheckResourceAttr(data.ResourceName, "rule.0.protocol.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.0.protocol.0.port", "443"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.0.protocol.0.type", "Https"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMFirewallApplicationRuleCollection_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_application_rule_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallApplicationRuleCollection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallApplicationRuleCollectionExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMFirewallApplicationRuleCollection_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_firewall_application_rule_collection"), - }, - }, - }) -} - -func TestAccAzureRMFirewallApplicationRuleCollection_updatedName(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_application_rule_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallApplicationRuleCollection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallApplicationRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestarc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.0.name", "rule1"), - ), - }, - { - Config: testAccAzureRMFirewallApplicationRuleCollection_updatedName(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallApplicationRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestarc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.0.name", "rule2"), - ), - }, - }, - }) -} - -func TestAccAzureRMFirewallApplicationRuleCollection_multipleRuleCollections(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_application_rule_collection", "test") - - secondRule := "azurerm_firewall_application_rule_collection.test_add" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallApplicationRuleCollection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallApplicationRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestarc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - 
resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - ), - }, - { - Config: testAccAzureRMFirewallApplicationRuleCollection_multiple(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallApplicationRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestarc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - testCheckAzureRMFirewallApplicationRuleCollectionExists(secondRule), - resource.TestCheckResourceAttr(secondRule, "name", "acctestarc_add"), - resource.TestCheckResourceAttr(secondRule, "priority", "200"), - resource.TestCheckResourceAttr(secondRule, "action", "Deny"), - resource.TestCheckResourceAttr(secondRule, "rule.#", "1"), - ), - }, - { - Config: testAccAzureRMFirewallApplicationRuleCollection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallApplicationRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestarc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - testCheckAzureRMFirewallApplicationRuleCollectionDoesNotExist("azurerm_firewall.test", "acctestarc_add"), - ), - }, - }, - }) -} - -func TestAccAzureRMFirewallApplicationRuleCollection_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_application_rule_collection", "test") - secondResourceName := "azurerm_firewall_application_rule_collection.test_add" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallApplicationRuleCollection_multiple(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallApplicationRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestarc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - testCheckAzureRMFirewallApplicationRuleCollectionExists(secondResourceName), - resource.TestCheckResourceAttr(secondResourceName, "name", "acctestarc_add"), - resource.TestCheckResourceAttr(secondResourceName, "priority", "200"), - resource.TestCheckResourceAttr(secondResourceName, "action", "Deny"), - resource.TestCheckResourceAttr(secondResourceName, "rule.#", "1"), - ), - }, - { - Config: testAccAzureRMFirewallApplicationRuleCollection_multipleUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallApplicationRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestarc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "300"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Deny"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - testCheckAzureRMFirewallApplicationRuleCollectionExists(secondResourceName), - resource.TestCheckResourceAttr(secondResourceName, "name", "acctestarc_add"), - resource.TestCheckResourceAttr(secondResourceName, 
"priority", "400"), - resource.TestCheckResourceAttr(secondResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(secondResourceName, "rule.#", "1"), - ), - }, - }, - }) -} - -func TestAccAzureRMFirewallApplicationRuleCollection_disappears(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_application_rule_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallApplicationRuleCollection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallApplicationRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestarc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - testCheckAzureRMFirewallApplicationRuleCollectionDisappears(data.ResourceName), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func TestAccAzureRMFirewallApplicationRuleCollection_multipleRules(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_application_rule_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallApplicationRuleCollection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallApplicationRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestarc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - ), - }, - { - Config: testAccAzureRMFirewallApplicationRuleCollection_multipleRules(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallApplicationRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestarc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "2"), - ), - }, - { - Config: testAccAzureRMFirewallApplicationRuleCollection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallApplicationRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestarc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - ), - }, - }, - }) -} - -func TestAccAzureRMFirewallApplicationRuleCollection_multipleProtocols(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_application_rule_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: 
testAccAzureRMFirewallApplicationRuleCollection_multipleProtocols(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallApplicationRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestarc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.0.protocol.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.0.protocol.0.port", "8000"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.0.protocol.0.type", "Http"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.0.protocol.1.port", "8001"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.0.protocol.1.type", "Https"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMFirewallApplicationRuleCollection_updateProtocols(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_application_rule_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallApplicationRuleCollection_multipleProtocols(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallApplicationRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestarc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.0.protocol.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.0.protocol.0.port", "8000"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.0.protocol.0.type", "Http"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.0.protocol.1.port", "8001"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.0.protocol.1.type", "Https"), - ), - }, - { - Config: testAccAzureRMFirewallApplicationRuleCollection_multipleProtocolsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallApplicationRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestarc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.0.protocol.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.0.protocol.0.port", "9000"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.0.protocol.0.type", "Https"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.0.protocol.1.port", "9001"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.0.protocol.1.type", "Http"), - ), - }, - { - Config: testAccAzureRMFirewallApplicationRuleCollection_multipleProtocols(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallApplicationRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestarc"), - resource.TestCheckResourceAttr(data.ResourceName, 
"priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.0.protocol.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.0.protocol.0.port", "8000"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.0.protocol.0.type", "Http"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.0.protocol.1.port", "8001"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.0.protocol.1.type", "Https"), - ), - }, - }, - }) -} - -func TestAccAzureRMFirewallApplicationRuleCollection_updateFirewallTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_application_rule_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallApplicationRuleCollection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallApplicationRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestarc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.0.name", "rule1"), - ), - }, - { - Config: testAccAzureRMFirewallApplicationRuleCollection_updateFirewallTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallApplicationRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestarc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.0.name", "rule1"), - ), - }, - }, - }) -} - -func TestAccAzureRMFirewallApplicationRuleCollection_ipGroups(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_application_rule_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallApplicationRuleCollection_ipGroups(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallApplicationRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMFirewallApplicationRuleCollection_noSource(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_application_rule_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallApplicationRuleCollection_noSource(data), - ExpectError: regexp.MustCompile(fmt.Sprintf("at least one of %q and %q must be specified", "source_addresses", "source_ip_groups")), - }, - }, - }) -} - -func 
testCheckAzureRMFirewallApplicationRuleCollectionExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Network.AzureFirewallsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - firewallName := rs.Primary.Attributes["azure_firewall_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - read, err := client.Get(ctx, resourceGroup, firewallName) - if err != nil { - return err - } - - found := false - for _, collection := range *read.AzureFirewallPropertiesFormat.ApplicationRuleCollections { - if *collection.Name == name { - found = true - break - } - } - - if !found { - return fmt.Errorf("Expected Application Rule Collection %q (Firewall %q / Resource Group %q) to exist but it didn't", name, firewallName, resourceGroup) - } - - return nil - } -} - -func testCheckAzureRMFirewallApplicationRuleCollectionDoesNotExist(resourceName string, collectionName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Network.AzureFirewallsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - firewallName := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - read, err := client.Get(ctx, resourceGroup, firewallName) - if err != nil { - return err - } - - for _, collection := range *read.AzureFirewallPropertiesFormat.ApplicationRuleCollections { - if *collection.Name == collectionName { - return fmt.Errorf("Application Rule Collection %q exists in Firewall %q: %+v", collectionName, firewallName, collection) - } - } - - return nil - } -} - -func testCheckAzureRMFirewallApplicationRuleCollectionDisappears(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Network.AzureFirewallsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - firewallName := rs.Primary.Attributes["azure_firewall_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - read, err := client.Get(ctx, resourceGroup, firewallName) - if err != nil { - return err - } - - rules := make([]network.AzureFirewallApplicationRuleCollection, 0) - for _, collection := range *read.AzureFirewallPropertiesFormat.ApplicationRuleCollections { - if *collection.Name != name { - rules = append(rules, collection) - } - } - - read.AzureFirewallPropertiesFormat.ApplicationRuleCollections = &rules - future, err := client.CreateOrUpdate(ctx, resourceGroup, firewallName, read) - if err != nil { - return fmt.Errorf("Error removing Application Rule Collection from Firewall: %+v", err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for the 
removal of Application Rule Collection from Firewall: %+v", err) - } - - _, err = client.Get(ctx, resourceGroup, firewallName) - return err - } -} - -func testAccAzureRMFirewallApplicationRuleCollection_basic(data acceptance.TestData) string { - template := testAccAzureRMFirewall_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_application_rule_collection" "test" { - name = "acctestarc" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 100 - action = "Allow" - - rule { - name = "rule1" - - source_addresses = [ - "10.0.0.0/16", - ] - - target_fqdns = [ - "*.google.com", - ] - - protocol { - port = 443 - type = "Https" - } - } -} -`, template) -} - -func testAccAzureRMFirewallApplicationRuleCollection_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMFirewallApplicationRuleCollection_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_application_rule_collection" "import" { - name = azurerm_firewall_application_rule_collection.test.name - azure_firewall_name = azurerm_firewall_application_rule_collection.test.azure_firewall_name - resource_group_name = azurerm_firewall_application_rule_collection.test.resource_group_name - priority = 100 - action = "Allow" - - rule { - name = "rule1" - - source_addresses = [ - "10.0.0.0/16", - ] - - target_fqdns = [ - "*.google.com", - ] - - protocol { - port = 443 - type = "Https" - } - } -} -`, template) -} - -func testAccAzureRMFirewallApplicationRuleCollection_updatedName(data acceptance.TestData) string { - template := testAccAzureRMFirewall_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_application_rule_collection" "test" { - name = "acctestarc" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 100 - action = "Allow" - - rule { - name = "rule2" - - source_addresses = [ - "10.0.0.0/16", - ] - - target_fqdns = [ - "*.google.com", - ] - - protocol { - port = 443 - type = "Https" - } - } -} -`, template) -} - -func testAccAzureRMFirewallApplicationRuleCollection_multiple(data acceptance.TestData) string { - template := testAccAzureRMFirewall_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_application_rule_collection" "test" { - name = "acctestarc" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 100 - action = "Allow" - - rule { - name = "rule1" - - source_addresses = [ - "10.0.0.0/16", - ] - - target_fqdns = [ - "*.google.com", - ] - - protocol { - port = 443 - type = "Https" - } - } -} - -resource "azurerm_firewall_application_rule_collection" "test_add" { - name = "acctestarc_add" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 200 - action = "Deny" - - rule { - name = "acctestruleadd" - - source_addresses = [ - "192.168.0.1", - ] - - target_fqdns = [ - "*.microsoft.com", - ] - - protocol { - port = 80 - type = "Http" - } - } -} -`, template) -} - -func testAccAzureRMFirewallApplicationRuleCollection_multipleUpdate(data acceptance.TestData) string { - template := testAccAzureRMFirewall_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_application_rule_collection" "test" { - name = "acctestarc" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 300 - action = "Deny" - - rule { - 
name = "rule1" - - source_addresses = [ - "10.0.0.0/16", - ] - - target_fqdns = [ - "*.google.com", - ] - - protocol { - port = 443 - type = "Https" - } - } -} - -resource "azurerm_firewall_application_rule_collection" "test_add" { - name = "acctestarc_add" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 400 - action = "Allow" - - rule { - name = "acctestruleadd" - - source_addresses = [ - "192.168.0.1", - ] - - target_fqdns = [ - "*.microsoft.com", - ] - - protocol { - port = 80 - type = "Http" - } - } -} -`, template) -} - -func testAccAzureRMFirewallApplicationRuleCollection_multipleRules(data acceptance.TestData) string { - template := testAccAzureRMFirewall_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_application_rule_collection" "test" { - name = "acctestarc" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 100 - action = "Allow" - - rule { - name = "rule1" - - source_addresses = [ - "10.0.0.0/16", - ] - - target_fqdns = [ - "*.google.com", - ] - - protocol { - port = 443 - type = "Https" - } - } - - rule { - name = "acctestruleadd" - - source_addresses = [ - "192.168.0.1", - ] - - target_fqdns = [ - "*.microsoft.com", - ] - - protocol { - port = 80 - type = "Http" - } - } -} -`, template) -} - -func testAccAzureRMFirewallApplicationRuleCollection_multipleProtocols(data acceptance.TestData) string { - template := testAccAzureRMFirewall_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_application_rule_collection" "test" { - name = "acctestarc" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 100 - action = "Allow" - - rule { - name = "rule1" - - source_addresses = [ - "10.0.0.0/16", - ] - - target_fqdns = [ - "*.google.com", - ] - - protocol { - port = 8000 - type = "Http" - } - - protocol { - port = 8001 - type = "Https" - } - } -} -`, template) -} - -func testAccAzureRMFirewallApplicationRuleCollection_multipleProtocolsUpdate(data acceptance.TestData) string { - template := testAccAzureRMFirewall_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_application_rule_collection" "test" { - name = "acctestarc" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 100 - action = "Allow" - - rule { - name = "rule1" - - source_addresses = [ - "10.0.0.0/16", - ] - - target_fqdns = [ - "*.google.com", - ] - - protocol { - port = 9000 - type = "Https" - } - - protocol { - port = 9001 - type = "Http" - } - } -} -`, template) -} - -func testAccAzureRMFirewallApplicationRuleCollection_updateFirewallTags(data acceptance.TestData) string { - template := testAccAzureRMFirewall_withTags(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_application_rule_collection" "test" { - name = "acctestarc" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 100 - action = "Allow" - - rule { - name = "rule1" - - source_addresses = [ - "10.0.0.0/16", - ] - - target_fqdns = [ - "*.google.com", - ] - - protocol { - port = 443 - type = "Https" - } - } -} -`, template) -} - -func testAccAzureRMFirewallApplicationRuleCollection_ipGroups(data acceptance.TestData) string { - template := testAccAzureRMFirewall_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_ip_group" "test" { - name = 
"acctestIpGroupForFirewallAppRules" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - cidrs = ["192.168.0.0/25", "192.168.0.192/26"] -} - -resource "azurerm_firewall_application_rule_collection" "test" { - name = "acctestarc" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 100 - action = "Allow" - - rule { - name = "rule1" - - source_ip_groups = [ - azurerm_ip_group.test.id, - ] - - target_fqdns = [ - "*.google.com", - ] - - protocol { - port = 443 - type = "Https" - } - } -} -`, template) -} - -func testAccAzureRMFirewallApplicationRuleCollection_noSource(data acceptance.TestData) string { - template := testAccAzureRMFirewall_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_application_rule_collection" "test" { - name = "acctestarc" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 100 - action = "Allow" - - rule { - name = "rule1" - - target_fqdns = [ - "*.google.com", - ] - - protocol { - port = 443 - type = "Https" - } - } -} -`, template) -} diff --git a/azurerm/internal/services/network/tests/firewall_data_source_test.go b/azurerm/internal/services/network/tests/firewall_data_source_test.go deleted file mode 100644 index 7144d209f6e2..000000000000 --- a/azurerm/internal/services/network/tests/firewall_data_source_test.go +++ /dev/null @@ -1,80 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMFirewall_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_firewall", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceFirewall_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "ip_configuration.0.name", "configuration"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ip_configuration.0.private_ip_address"), - ), - }, - }, - }) -} - -func testAccDataSourceFirewall_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctestvirtnet%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "AzureFirewallSubnet" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.1.0/24" -} - -resource "azurerm_public_ip" "test" { - name = "acctestpip%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" - sku = "Standard" -} - -resource "azurerm_firewall" "test" { - name = "acctestfirewall%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - ip_configuration { - name = "configuration" - subnet_id = azurerm_subnet.test.id - public_ip_address_id = 
azurerm_public_ip.test.id - } -} - -data "azurerm_firewall" "test" { - name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/network/tests/firewall_nat_rule_collection_resource_test.go b/azurerm/internal/services/network/tests/firewall_nat_rule_collection_resource_test.go deleted file mode 100644 index 4a07f72e749b..000000000000 --- a/azurerm/internal/services/network/tests/firewall_nat_rule_collection_resource_test.go +++ /dev/null @@ -1,797 +0,0 @@ -package tests - -import ( - "fmt" - "regexp" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMFirewallNatRuleCollection_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_nat_rule_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallNatRuleCollection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNatRuleCollectionExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMFirewallNatRuleCollection_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_nat_rule_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallNatRuleCollection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNatRuleCollectionExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMFirewallNatRuleCollection_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_firewall_nat_rule_collection"), - }, - }, - }) -} - -func TestAccAzureRMFirewallNatRuleCollection_updatedName(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_nat_rule_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallNatRuleCollection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNatRuleCollectionExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMFirewallNatRuleCollection_updatedName(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNatRuleCollectionExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMFirewallNatRuleCollection_multipleRuleCollections(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_nat_rule_collection", "test") - secondRule := "azurerm_firewall_nat_rule_collection.test_add" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { 
acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallNatRuleCollection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNatRuleCollectionExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMFirewallNatRuleCollection_multiple(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNatRuleCollectionExists(data.ResourceName), - testCheckAzureRMFirewallNatRuleCollectionExists(secondRule), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMFirewallNatRuleCollection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNatRuleCollectionExists(data.ResourceName), - testCheckAzureRMFirewallNatRuleCollectionDoesNotExist("azurerm_firewall.test", "acctestnrc_add"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMFirewallNatRuleCollection_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_nat_rule_collection", "test") - secondResourceName := "azurerm_firewall_nat_rule_collection.test_add" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallNatRuleCollection_multiple(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNatRuleCollectionExists(data.ResourceName), - testCheckAzureRMFirewallNatRuleCollectionExists(secondResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMFirewallNatRuleCollection_multipleUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNatRuleCollectionExists(data.ResourceName), - testCheckAzureRMFirewallNatRuleCollectionExists(secondResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMFirewallNatRuleCollection_disappears(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_nat_rule_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallNatRuleCollection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNatRuleCollectionExists(data.ResourceName), - testCheckAzureRMFirewallNatRuleCollectionDisappears(data.ResourceName), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func TestAccAzureRMFirewallNatRuleCollection_multipleRules(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_nat_rule_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallNatRuleCollection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNatRuleCollectionExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMFirewallNatRuleCollection_multipleRules(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNatRuleCollectionExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMFirewallNatRuleCollection_basic(data), - Check: 
resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNatRuleCollectionExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMFirewallNatRuleCollection_updateFirewallTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_nat_rule_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallNatRuleCollection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNatRuleCollectionExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMFirewallNatRuleCollection_updateFirewallTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNatRuleCollectionExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMFirewallNatRuleCollection_ipGroup(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_nat_rule_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallNatRuleCollection_ipGroup(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNatRuleCollectionExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMFirewallNatRuleCollection_noSource(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_nat_rule_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallNatRuleCollection_noSource(data), - ExpectError: regexp.MustCompile(fmt.Sprintf("at least one of %q and %q must be specified", "source_addresses", "source_ip_groups")), - }, - }, - }) -} - -func testCheckAzureRMFirewallNatRuleCollectionExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Network.AzureFirewallsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - firewallName := rs.Primary.Attributes["azure_firewall_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - read, err := client.Get(ctx, resourceGroup, firewallName) - if err != nil { - return err - } - - found := false - for _, collection := range *read.AzureFirewallPropertiesFormat.NatRuleCollections { - if *collection.Name == name { - found = true - break - } - } - - if !found { - return fmt.Errorf("Expected NAT Rule Collection %q (Firewall %q / Resource Group %q) to exist but it didn't", name, firewallName, resourceGroup) - } - - return nil - } -} - -func testCheckAzureRMFirewallNatRuleCollectionDoesNotExist(resourceName string, collectionName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Network.AzureFirewallsClient - 
ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - firewallName := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - read, err := client.Get(ctx, resourceGroup, firewallName) - if err != nil { - return err - } - - for _, collection := range *read.AzureFirewallPropertiesFormat.NatRuleCollections { - if *collection.Name == collectionName { - return fmt.Errorf("NAT Rule Collection %q exists in Firewall %q: %+v", collectionName, firewallName, collection) - } - } - - return nil - } -} - -func testCheckAzureRMFirewallNatRuleCollectionDisappears(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Network.AzureFirewallsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - firewallName := rs.Primary.Attributes["azure_firewall_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - read, err := client.Get(ctx, resourceGroup, firewallName) - if err != nil { - return err - } - - rules := make([]network.AzureFirewallNatRuleCollection, 0) - for _, collection := range *read.AzureFirewallPropertiesFormat.NatRuleCollections { - if *collection.Name != name { - rules = append(rules, collection) - } - } - - read.AzureFirewallPropertiesFormat.NatRuleCollections = &rules - - future, err := client.CreateOrUpdate(ctx, resourceGroup, firewallName, read) - if err != nil { - return fmt.Errorf("Error removing NAT Rule Collection from Firewall: %+v", err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for the removal of NAT Rule Collection from Firewall: %+v", err) - } - - _, err = client.Get(ctx, resourceGroup, firewallName) - return err - } -} - -func testAccAzureRMFirewallNatRuleCollection_basic(data acceptance.TestData) string { - template := testAccAzureRMFirewall_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_nat_rule_collection" "test" { - name = "acctestnrc-%d" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 100 - action = "Dnat" - - rule { - name = "rule1" - - source_addresses = [ - "10.0.0.0/16", - ] - - destination_ports = [ - "53", - ] - - destination_addresses = [ - azurerm_public_ip.test.ip_address, - ] - - protocols = [ - "Any", - ] - - translated_port = 53 - translated_address = "8.8.8.8" - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMFirewallNatRuleCollection_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMFirewallNatRuleCollection_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_nat_rule_collection" "import" { - name = azurerm_firewall_nat_rule_collection.test.name - azure_firewall_name = azurerm_firewall_nat_rule_collection.test.azure_firewall_name - resource_group_name = azurerm_firewall_nat_rule_collection.test.resource_group_name - priority = 100 - action = "Dnat" - - rule { - name = "rule1" - - source_addresses = [ - "10.0.0.0/16", - ] - - 
destination_ports = [ - "53", - ] - - destination_addresses = [ - azurerm_public_ip.test.ip_address, - ] - - protocols = [ - "Any", - ] - - translated_port = 53 - translated_address = "8.8.8.8" - } -} -`, template) -} - -func testAccAzureRMFirewallNatRuleCollection_updatedName(data acceptance.TestData) string { - template := testAccAzureRMFirewall_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_nat_rule_collection" "test" { - name = "acctestnrc-%d" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 100 - action = "Dnat" - - rule { - name = "rule2" - - source_addresses = [ - "10.0.0.0/16", - ] - - destination_ports = [ - "53", - ] - - destination_addresses = [ - azurerm_public_ip.test.ip_address, - ] - - protocols = [ - "TCP", - ] - - translated_port = 53 - translated_address = "8.8.8.8" - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMFirewallNatRuleCollection_multiple(data acceptance.TestData) string { - template := testAccAzureRMFirewall_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_nat_rule_collection" "test" { - name = "acctestnrc-%d" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 100 - action = "Dnat" - - rule { - name = "acctestrule" - - source_addresses = [ - "10.0.0.0/16", - ] - - destination_ports = [ - "53", - ] - - destination_addresses = [ - azurerm_public_ip.test.ip_address, - ] - - protocols = [ - "TCP", - ] - - translated_port = 53 - translated_address = "8.8.8.8" - } -} - -resource "azurerm_firewall_nat_rule_collection" "test_add" { - name = "acctestnrc_add-%d" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 200 - action = "Dnat" - - rule { - name = "acctestruleadd" - - source_addresses = [ - "10.0.0.0/8", - ] - - destination_ports = [ - "8080", - ] - - destination_addresses = [ - azurerm_public_ip.test.ip_address, - ] - - protocols = [ - "TCP", - ] - - translated_port = 8080 - translated_address = "8.8.4.4" - } -} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMFirewallNatRuleCollection_multipleUpdate(data acceptance.TestData) string { - template := testAccAzureRMFirewall_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_nat_rule_collection" "test" { - name = "acctestnrc-%d" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 300 - action = "Dnat" - - rule { - name = "acctestrule" - - source_addresses = [ - "10.0.0.0/16", - ] - - destination_ports = [ - "53", - ] - - destination_addresses = [ - azurerm_public_ip.test.ip_address, - ] - - protocols = [ - "TCP", - ] - - translated_port = 53 - translated_address = "10.0.0.1" - } -} - -resource "azurerm_firewall_nat_rule_collection" "test_add" { - name = "acctestnrc_add-%d" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 400 - action = "Dnat" - - rule { - name = "acctestruleadd" - - source_addresses = [ - "10.0.0.0/8", - ] - - destination_ports = [ - "8080", - ] - - destination_addresses = [ - azurerm_public_ip.test.ip_address, - ] - - protocols = [ - "TCP", - ] - - translated_port = 8080 - translated_address = "10.0.0.1" - } -} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMFirewallNatRuleCollection_multipleRules(data 
acceptance.TestData) string { - template := testAccAzureRMFirewall_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_nat_rule_collection" "test" { - name = "acctestnrc-%d" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 100 - action = "Dnat" - - rule { - name = "acctestrule" - - source_addresses = [ - "10.0.0.0/16", - ] - - destination_ports = [ - "53", - ] - - destination_addresses = [ - azurerm_public_ip.test.ip_address, - ] - - protocols = [ - "TCP", - ] - - translated_port = 53 - translated_address = "10.0.0.1" - } - - rule { - name = "acctestrule_add" - - source_addresses = [ - "192.168.0.1", - ] - - destination_ports = [ - "8888", - ] - - destination_addresses = [ - azurerm_public_ip.test.ip_address, - ] - - protocols = [ - "TCP", - ] - - translated_port = 8888 - translated_address = "192.168.0.1" - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMFirewallNatRuleCollection_updateFirewallTags(data acceptance.TestData) string { - template := testAccAzureRMFirewall_withTags(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_nat_rule_collection" "test" { - name = "acctestnrc-%d" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 100 - action = "Dnat" - - rule { - name = "rule1" - - source_addresses = [ - "10.0.0.0/16", - ] - - destination_ports = [ - "53", - ] - - destination_addresses = [ - azurerm_public_ip.test.ip_address, - ] - - protocols = [ - "TCP", - ] - - translated_port = 53 - translated_address = "10.0.0.1" - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMFirewallNatRuleCollection_ipGroup(data acceptance.TestData) string { - template := testAccAzureRMFirewall_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_ip_group" "test" { - name = "acctestIpGroupForFirewallNatRules" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - cidrs = ["192.168.0.0/25", "192.168.0.192/26"] -} - -resource "azurerm_firewall_nat_rule_collection" "test" { - name = "acctestnrc-%d" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 100 - action = "Dnat" - - rule { - name = "rule1" - - source_ip_groups = [ - azurerm_ip_group.test.id, - ] - - destination_ports = [ - "53", - ] - - destination_addresses = [ - azurerm_public_ip.test.ip_address, - ] - - protocols = [ - "Any", - ] - - translated_port = 53 - translated_address = "8.8.8.8" - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMFirewallNatRuleCollection_noSource(data acceptance.TestData) string { - template := testAccAzureRMFirewall_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_nat_rule_collection" "test" { - name = "acctestnrc-%d" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 100 - action = "Dnat" - - rule { - name = "rule1" - - destination_ports = [ - "53", - ] - - destination_addresses = [ - azurerm_public_ip.test.ip_address, - ] - - protocols = [ - "Any", - ] - - translated_port = 53 - translated_address = "8.8.8.8" - } -} -`, template, data.RandomInteger) -} diff --git a/azurerm/internal/services/network/tests/firewall_network_rule_collection_resource_test.go b/azurerm/internal/services/network/tests/firewall_network_rule_collection_resource_test.go deleted file mode 100644 index 
2cf989a34da8..000000000000 --- a/azurerm/internal/services/network/tests/firewall_network_rule_collection_resource_test.go +++ /dev/null @@ -1,932 +0,0 @@ -package tests - -import ( - "fmt" - "regexp" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMFirewallNetworkRuleCollection_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_network_rule_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallNetworkRuleCollection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNetworkRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestnrc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMFirewallNetworkRuleCollection_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_network_rule_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallNetworkRuleCollection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNetworkRuleCollectionExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMFirewallNetworkRuleCollection_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_firewall_network_rule_collection"), - }, - }, - }) -} - -func TestAccAzureRMFirewallNetworkRuleCollection_updatedName(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_network_rule_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallNetworkRuleCollection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNetworkRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestnrc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.3765122797.name", "rule1"), - ), - }, - { - Config: testAccAzureRMFirewallNetworkRuleCollection_updatedName(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNetworkRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestnrc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", 
"100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.1700340761.name", "rule2"), - ), - }, - }, - }) -} - -func TestAccAzureRMFirewallNetworkRuleCollection_multipleRuleCollections(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_network_rule_collection", "test") - secondRule := "azurerm_firewall_network_rule_collection.test_add" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallNetworkRuleCollection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNetworkRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestnrc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - ), - }, - { - Config: testAccAzureRMFirewallNetworkRuleCollection_multiple(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNetworkRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestnrc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - testCheckAzureRMFirewallNetworkRuleCollectionExists(secondRule), - resource.TestCheckResourceAttr(secondRule, "name", "acctestnrc_add"), - resource.TestCheckResourceAttr(secondRule, "priority", "200"), - resource.TestCheckResourceAttr(secondRule, "action", "Deny"), - resource.TestCheckResourceAttr(secondRule, "rule.#", "1"), - ), - }, - { - Config: testAccAzureRMFirewallNetworkRuleCollection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNetworkRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestnrc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - testCheckAzureRMFirewallNetworkRuleCollectionDoesNotExist("azurerm_firewall.test", "acctestnrc_add"), - ), - }, - }, - }) -} - -func TestAccAzureRMFirewallNetworkRuleCollection_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_network_rule_collection", "test") - secondResourceName := "azurerm_firewall_network_rule_collection.test_add" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallNetworkRuleCollection_multiple(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNetworkRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestnrc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), 
- testCheckAzureRMFirewallNetworkRuleCollectionExists(secondResourceName), - resource.TestCheckResourceAttr(secondResourceName, "name", "acctestnrc_add"), - resource.TestCheckResourceAttr(secondResourceName, "priority", "200"), - resource.TestCheckResourceAttr(secondResourceName, "action", "Deny"), - resource.TestCheckResourceAttr(secondResourceName, "rule.#", "1"), - ), - }, - { - Config: testAccAzureRMFirewallNetworkRuleCollection_multipleUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNetworkRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestnrc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "300"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Deny"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - testCheckAzureRMFirewallNetworkRuleCollectionExists(secondResourceName), - resource.TestCheckResourceAttr(secondResourceName, "name", "acctestnrc_add"), - resource.TestCheckResourceAttr(secondResourceName, "priority", "400"), - resource.TestCheckResourceAttr(secondResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(secondResourceName, "rule.#", "1"), - ), - }, - }, - }) -} - -func TestAccAzureRMFirewallNetworkRuleCollection_disappears(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_network_rule_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallNetworkRuleCollection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNetworkRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestnrc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - testCheckAzureRMFirewallNetworkRuleCollectionDisappears(data.ResourceName), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func TestAccAzureRMFirewallNetworkRuleCollection_multipleRules(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_network_rule_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallNetworkRuleCollection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNetworkRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestnrc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - ), - }, - { - Config: testAccAzureRMFirewallNetworkRuleCollection_multipleRules(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNetworkRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestnrc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - 
resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "2"), - ), - }, - { - Config: testAccAzureRMFirewallNetworkRuleCollection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNetworkRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestnrc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - ), - }, - }, - }) -} - -func TestAccAzureRMFirewallNetworkRuleCollection_updateFirewallTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_network_rule_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallNetworkRuleCollection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNetworkRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestnrc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - ), - }, - { - Config: testAccAzureRMFirewallNetworkRuleCollection_updateFirewallTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNetworkRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestnrc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - ), - }, - }, - }) -} - -func TestAccAzureRMFirewallNetworkRuleCollection_serviceTag(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_network_rule_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallNetworkRuleCollection_serviceTag(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNetworkRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "name", "acctestnrc"), - resource.TestCheckResourceAttr(data.ResourceName, "priority", "100"), - resource.TestCheckResourceAttr(data.ResourceName, "action", "Allow"), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMFirewallNetworkRuleCollection_ipGroup(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall_network_rule_collection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewallNetworkRuleCollection_ipGroup(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallNetworkRuleCollectionExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "rule.#", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func 
TestAccAzureRMFirewallNetworkRuleCollection_noSource(t *testing.T) {
-  data := acceptance.BuildTestData(t, "azurerm_firewall_network_rule_collection", "test")
-
-  resource.ParallelTest(t, resource.TestCase{
-    PreCheck:     func() { acceptance.PreCheck(t) },
-    Providers:    acceptance.SupportedProviders,
-    CheckDestroy: testCheckAzureRMFirewallDestroy,
-    Steps: []resource.TestStep{
-      {
-        Config:      testAccAzureRMFirewallNetworkRuleCollection_noSource(data),
-        ExpectError: regexp.MustCompile(fmt.Sprintf("at least one of %q and %q must be specified", "source_addresses", "source_ip_groups")),
-      },
-    },
-  })
-}
-
-func TestAccAzureRMFirewallNetworkRuleCollection_noDestination(t *testing.T) {
-  data := acceptance.BuildTestData(t, "azurerm_firewall_network_rule_collection", "test")
-
-  resource.ParallelTest(t, resource.TestCase{
-    PreCheck:     func() { acceptance.PreCheck(t) },
-    Providers:    acceptance.SupportedProviders,
-    CheckDestroy: testCheckAzureRMFirewallDestroy,
-    Steps: []resource.TestStep{
-      {
-        Config:      testAccAzureRMFirewallNetworkRuleCollection_noDestination(data),
-        ExpectError: regexp.MustCompile(fmt.Sprintf("at least one of %q and %q must be specified", "destination_addresses", "destination_ip_groups")),
-      },
-    },
-  })
-}
-
-func testCheckAzureRMFirewallNetworkRuleCollectionExists(resourceName string) resource.TestCheckFunc {
-  return func(s *terraform.State) error {
-    client := acceptance.AzureProvider.Meta().(*clients.Client).Network.AzureFirewallsClient
-    ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext
-
-    // Ensure we have enough information in state to look up in API
-    rs, ok := s.RootModule().Resources[resourceName]
-    if !ok {
-      return fmt.Errorf("Not found: %s", resourceName)
-    }
-
-    name := rs.Primary.Attributes["name"]
-    firewallName := rs.Primary.Attributes["azure_firewall_name"]
-    resourceGroup := rs.Primary.Attributes["resource_group_name"]
-
-    read, err := client.Get(ctx, resourceGroup, firewallName)
-    if err != nil {
-      return err
-    }
-
-    found := false
-    for _, collection := range *read.AzureFirewallPropertiesFormat.NetworkRuleCollections {
-      if *collection.Name == name {
-        found = true
-        break
-      }
-    }
-
-    if !found {
-      return fmt.Errorf("Expected Network Rule Collection %q (Firewall %q / Resource Group %q) to exist but it didn't", name, firewallName, resourceGroup)
-    }
-
-    return nil
-  }
-}
-
-func testCheckAzureRMFirewallNetworkRuleCollectionDoesNotExist(resourceName string, collectionName string) resource.TestCheckFunc {
-  return func(s *terraform.State) error {
-    client := acceptance.AzureProvider.Meta().(*clients.Client).Network.AzureFirewallsClient
-    ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext
-
-    // Ensure we have enough information in state to look up in API
-    rs, ok := s.RootModule().Resources[resourceName]
-    if !ok {
-      return fmt.Errorf("Not found: %s", resourceName)
-    }
-
-    firewallName := rs.Primary.Attributes["name"]
-    resourceGroup := rs.Primary.Attributes["resource_group_name"]
-
-    read, err := client.Get(ctx, resourceGroup, firewallName)
-    if err != nil {
-      return err
-    }
-
-    for _, collection := range *read.AzureFirewallPropertiesFormat.NetworkRuleCollections {
-      if *collection.Name == collectionName {
-        return fmt.Errorf("Network Rule Collection %q exists in Firewall %q: %+v", collectionName, firewallName, collection)
-      }
-    }
-
-    return nil
-  }
-}
-
-func testCheckAzureRMFirewallNetworkRuleCollectionDisappears(resourceName string) resource.TestCheckFunc {
-  return func(s *terraform.State) error {
-    client :=
acceptance.AzureProvider.Meta().(*clients.Client).Network.AzureFirewallsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - firewallName := rs.Primary.Attributes["azure_firewall_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - read, err := client.Get(ctx, resourceGroup, firewallName) - if err != nil { - return err - } - - rules := make([]network.AzureFirewallNetworkRuleCollection, 0) - for _, collection := range *read.AzureFirewallPropertiesFormat.NetworkRuleCollections { - if *collection.Name != name { - rules = append(rules, collection) - } - } - - read.AzureFirewallPropertiesFormat.NetworkRuleCollections = &rules - - future, err := client.CreateOrUpdate(ctx, resourceGroup, firewallName, read) - if err != nil { - return fmt.Errorf("Error removing Network Rule Collection from Firewall: %+v", err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for the removal of Network Rule Collection from Firewall: %+v", err) - } - - _, err = client.Get(ctx, resourceGroup, firewallName) - return err - } -} - -func testAccAzureRMFirewallNetworkRuleCollection_basic(data acceptance.TestData) string { - template := testAccAzureRMFirewall_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_network_rule_collection" "test" { - name = "acctestnrc" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 100 - action = "Allow" - - rule { - name = "rule1" - - source_addresses = [ - "10.0.0.0/16", - ] - - destination_ports = [ - "53", - ] - - destination_addresses = [ - "8.8.8.8", - ] - - protocols = [ - "Any", - ] - } -} -`, template) -} - -func testAccAzureRMFirewallNetworkRuleCollection_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMFirewallNetworkRuleCollection_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_network_rule_collection" "import" { - name = azurerm_firewall_network_rule_collection.test.name - azure_firewall_name = azurerm_firewall_network_rule_collection.test.azure_firewall_name - resource_group_name = azurerm_firewall_network_rule_collection.test.resource_group_name - priority = 100 - action = "Allow" - - rule { - name = "rule1" - - source_addresses = [ - "10.0.0.0/16", - ] - - destination_ports = [ - "53", - ] - - destination_addresses = [ - "8.8.8.8", - ] - - protocols = [ - "Any", - ] - } -} -`, template) -} - -func testAccAzureRMFirewallNetworkRuleCollection_updatedName(data acceptance.TestData) string { - template := testAccAzureRMFirewall_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_network_rule_collection" "test" { - name = "acctestnrc" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 100 - action = "Allow" - - rule { - name = "rule2" - - source_addresses = [ - "10.0.0.0/16", - ] - - destination_ports = [ - "53", - ] - - destination_addresses = [ - "8.8.8.8", - ] - - protocols = [ - "Any", - ] - } -} -`, template) -} - -func testAccAzureRMFirewallNetworkRuleCollection_multiple(data acceptance.TestData) string { - template := testAccAzureRMFirewall_basic(data) - return fmt.Sprintf(` -%s - -resource 
"azurerm_firewall_network_rule_collection" "test" { - name = "acctestnrc" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 100 - action = "Allow" - - rule { - name = "acctestrule" - - source_addresses = [ - "10.0.0.0/16", - ] - - destination_ports = [ - "53", - ] - - destination_addresses = [ - "8.8.8.8", - ] - - protocols = [ - "Any", - ] - } -} - -resource "azurerm_firewall_network_rule_collection" "test_add" { - name = "acctestnrc_add" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 200 - action = "Deny" - - rule { - name = "acctestruleadd" - - source_addresses = [ - "10.0.0.0/8", - ] - - destination_ports = [ - "8080", - ] - - destination_addresses = [ - "8.8.4.4", - ] - - protocols = [ - "TCP", - ] - } -} -`, template) -} - -func testAccAzureRMFirewallNetworkRuleCollection_multipleUpdate(data acceptance.TestData) string { - template := testAccAzureRMFirewall_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_network_rule_collection" "test" { - name = "acctestnrc" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 300 - action = "Deny" - - rule { - name = "acctestrule" - - source_addresses = [ - "10.0.0.0/16", - ] - - destination_ports = [ - "53", - ] - - destination_addresses = [ - "8.8.8.8", - ] - - protocols = [ - "Any", - ] - } -} - -resource "azurerm_firewall_network_rule_collection" "test_add" { - name = "acctestnrc_add" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 400 - action = "Allow" - - rule { - name = "acctestruleadd" - - source_addresses = [ - "10.0.0.0/8", - ] - - destination_ports = [ - "8080", - ] - - destination_addresses = [ - "8.8.4.4", - ] - - protocols = [ - "TCP", - ] - } -} -`, template) -} - -func testAccAzureRMFirewallNetworkRuleCollection_multipleRules(data acceptance.TestData) string { - template := testAccAzureRMFirewall_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_network_rule_collection" "test" { - name = "acctestnrc" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 100 - action = "Allow" - - rule { - name = "acctestrule" - - source_addresses = [ - "10.0.0.0/16", - ] - - destination_ports = [ - "53", - ] - - destination_addresses = [ - "8.8.8.8", - ] - - protocols = [ - "Any", - ] - } - - rule { - name = "acctestrule_add" - - source_addresses = [ - "192.168.0.1", - ] - - destination_ports = [ - "8888", - ] - - destination_addresses = [ - "1.1.1.1", - ] - - protocols = [ - "TCP", - ] - } -} -`, template) -} - -func testAccAzureRMFirewallNetworkRuleCollection_updateFirewallTags(data acceptance.TestData) string { - template := testAccAzureRMFirewall_withTags(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_network_rule_collection" "test" { - name = "acctestnrc" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 100 - action = "Allow" - - rule { - name = "rule1" - - source_addresses = [ - "10.0.0.0/16", - ] - - destination_ports = [ - "53", - ] - - destination_addresses = [ - "8.8.8.8", - ] - - protocols = [ - "Any", - ] - } -} -`, template) -} - -func testAccAzureRMFirewallNetworkRuleCollection_serviceTag(data acceptance.TestData) string { - template := 
testAccAzureRMFirewall_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_network_rule_collection" "test" { - name = "acctestnrc" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 100 - action = "Allow" - - rule { - name = "rule1" - - source_addresses = [ - "10.0.0.0/16", - ] - - destination_ports = [ - "53", - ] - - destination_addresses = [ - "ApiManagement", - ] - - protocols = [ - "Any", - ] - } -} -`, template) -} - -func testAccAzureRMFirewallNetworkRuleCollection_ipGroup(data acceptance.TestData) string { - template := testAccAzureRMFirewall_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_ip_group" "test_source" { - name = "acctestIpGroupForFirewallNetworkRulesSource" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - cidrs = ["1.2.3.4/32", "12.34.56.0/24"] -} - -resource "azurerm_ip_group" "test_destination" { - name = "acctestIpGroupForFirewallNetworkRulesDestination" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - cidrs = ["192.168.0.0/25", "192.168.0.192/26"] -} - -resource "azurerm_firewall_network_rule_collection" "test" { - name = "acctestnrc" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 100 - action = "Allow" - - rule { - name = "rule1" - - source_ip_groups = [ - azurerm_ip_group.test_source.id, - ] - - destination_ports = [ - "53", - ] - - destination_ip_groups = [ - azurerm_ip_group.test_destination.id, - ] - - protocols = [ - "Any", - ] - } -} -`, template) -} - -func testAccAzureRMFirewallNetworkRuleCollection_noSource(data acceptance.TestData) string { - template := testAccAzureRMFirewall_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_network_rule_collection" "test" { - name = "acctestnrc" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 100 - action = "Allow" - - rule { - name = "rule1" - - destination_ports = [ - "53", - ] - - destination_addresses = [ - "8.8.8.8", - ] - - protocols = [ - "Any", - ] - } -} -`, template) -} - -func testAccAzureRMFirewallNetworkRuleCollection_noDestination(data acceptance.TestData) string { - template := testAccAzureRMFirewall_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_network_rule_collection" "test" { - name = "acctestnrc" - azure_firewall_name = azurerm_firewall.test.name - resource_group_name = azurerm_resource_group.test.name - priority = 100 - action = "Allow" - - rule { - name = "rule1" - - source_addresses = [ - "10.0.0.0/16", - ] - - destination_ports = [ - "53", - ] - - protocols = [ - "Any", - ] - } -} -`, template) -} diff --git a/azurerm/internal/services/network/tests/firewall_policy_data_source_test.go b/azurerm/internal/services/network/tests/firewall_policy_data_source_test.go deleted file mode 100644 index a92eb003ccde..000000000000 --- a/azurerm/internal/services/network/tests/firewall_policy_data_source_test.go +++ /dev/null @@ -1,61 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) 
-
-func TestAccDataSourceFirewallPolicy_basic(t *testing.T) {
-  data := acceptance.BuildTestData(t, "data.azurerm_firewall_policy", "test")
-  dataParent := acceptance.BuildTestData(t, "data.azurerm_firewall_policy", "test-parent")
-
-  resource.ParallelTest(t, resource.TestCase{
-    PreCheck:  func() { acceptance.PreCheck(t) },
-    Providers: acceptance.SupportedProviders,
-    Steps: []resource.TestStep{
-      {
-        Config: testAccDataSourceFirewallPolicy_basic(data),
-        Check: resource.ComposeTestCheckFunc(
-          resource.TestCheckResourceAttrSet(data.ResourceName, "name"),
-          resource.TestCheckResourceAttrSet(data.ResourceName, "resource_group_name"),
-          resource.TestCheckResourceAttr(data.ResourceName, "location", location.Normalize(data.Locations.Primary)),
-          resource.TestCheckResourceAttrSet(data.ResourceName, "base_policy_id"),
-          resource.TestCheckResourceAttr(dataParent.ResourceName, "child_policies.#", "1"),
-          resource.TestCheckResourceAttr(data.ResourceName, "dns.0.proxy_enabled", "true"),
-          resource.TestCheckResourceAttr(data.ResourceName, "dns.0.network_rule_fqdn_enabled", "true"),
-          resource.TestCheckResourceAttr(data.ResourceName, "dns.0.servers.#", "2"),
-          resource.TestCheckResourceAttr(data.ResourceName, "threat_intelligence_mode", string(network.AzureFirewallThreatIntelModeAlert)),
-          resource.TestCheckResourceAttr(data.ResourceName, "threat_intelligence_allowlist.0.ip_addresses.#", "2"),
-          resource.TestCheckResourceAttr(data.ResourceName, "threat_intelligence_allowlist.0.fqdns.#", "2"),
-        ),
-      },
-    },
-  })
-}
-
-func testAccDataSourceFirewallPolicy_basic(data acceptance.TestData) string {
-  config := testAccAzureRMFirewallPolicy_inherit(data)
-
-  // We deliberately add a dependency between "data.azurerm_firewall_policy.test-parent"
-  // and "azurerm_firewall_policy.test" so that we can test "data.azurerm_firewall_policy.test-parent.child_policies"
-  return fmt.Sprintf(`
-%s
-
-data "azurerm_firewall_policy" "test-parent" {
-  name                = azurerm_firewall_policy.test-parent.name
-  resource_group_name = azurerm_firewall_policy.test.resource_group_name
-}
-
-data "azurerm_firewall_policy" "test" {
-  name                = azurerm_firewall_policy.test.name
-  resource_group_name = azurerm_firewall_policy.test.resource_group_name
-}
-`, config)
-}
diff --git a/azurerm/internal/services/network/tests/firewall_policy_resource_test.go b/azurerm/internal/services/network/tests/firewall_policy_resource_test.go
deleted file mode 100644
index 013f9ea4807f..000000000000
--- a/azurerm/internal/services/network/tests/firewall_policy_resource_test.go
+++ /dev/null
@@ -1,273 +0,0 @@
-package tests
-
-import (
-  "fmt"
-  "testing"
-
-  "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
-  "github.com/hashicorp/terraform-plugin-sdk/terraform"
-  "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance"
-  "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
-  "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse"
-  "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
-)
-
-func TestAccAzureRMFirewallPolicy_basic(t *testing.T) {
-  data := acceptance.BuildTestData(t, "azurerm_firewall_policy", "test")
-
-  resource.ParallelTest(t, resource.TestCase{
-    PreCheck:     func() { acceptance.PreCheck(t) },
-    Providers:    acceptance.SupportedProviders,
-    CheckDestroy: testCheckAzureRMFirewallPolicyDestroy,
-    Steps: []resource.TestStep{
-      {
-        Config: testAccAzureRMFirewallPolicy_basic(data),
-        Check: resource.ComposeTestCheckFunc(
-          testCheckAzureRMFirewallPolicyExists(data.ResourceName),
-        ),
-      },
-      data.ImportStep(),
-    },
-  })
-}
-
-func TestAccAzureRMFirewallPolicy_complete(t *testing.T) {
-  data := acceptance.BuildTestData(t, "azurerm_firewall_policy", "test")
-
-  resource.ParallelTest(t, resource.TestCase{
-    PreCheck:     func() { acceptance.PreCheck(t) },
-    Providers:    acceptance.SupportedProviders,
-    CheckDestroy: testCheckAzureRMFirewallPolicyDestroy,
-    Steps: []resource.TestStep{
-      {
-        Config: testAccAzureRMFirewallPolicy_complete(data),
-        Check: resource.ComposeTestCheckFunc(
-          testCheckAzureRMFirewallPolicyExists(data.ResourceName),
-        ),
-      },
-      data.ImportStep(),
-    },
-  })
-}
-
-func TestAccAzureRMFirewallPolicy_update(t *testing.T) {
-  data := acceptance.BuildTestData(t, "azurerm_firewall_policy", "test")
-
-  resource.ParallelTest(t, resource.TestCase{
-    PreCheck:     func() { acceptance.PreCheck(t) },
-    Providers:    acceptance.SupportedProviders,
-    CheckDestroy: testCheckAzureRMFirewallPolicyDestroy,
-    Steps: []resource.TestStep{
-      {
-        Config: testAccAzureRMFirewallPolicy_basic(data),
-        Check: resource.ComposeTestCheckFunc(
-          testCheckAzureRMFirewallPolicyExists(data.ResourceName),
-        ),
-      },
-      data.ImportStep(),
-      {
-        Config: testAccAzureRMFirewallPolicy_complete(data),
-        Check: resource.ComposeTestCheckFunc(
-          testCheckAzureRMFirewallPolicyExists(data.ResourceName),
-        ),
-      },
-      data.ImportStep(),
-      {
-        Config: testAccAzureRMFirewallPolicy_basic(data),
-        Check: resource.ComposeTestCheckFunc(
-          testCheckAzureRMFirewallPolicyExists(data.ResourceName),
-        ),
-      },
-      data.ImportStep(),
-    },
-  })
-}
-
-func TestAccAzureRMFirewallPolicy_requiresImport(t *testing.T) {
-  data := acceptance.BuildTestData(t, "azurerm_firewall_policy", "test")
-
-  resource.ParallelTest(t, resource.TestCase{
-    PreCheck:     func() { acceptance.PreCheck(t) },
-    Providers:    acceptance.SupportedProviders,
-    CheckDestroy: testCheckAzureRMFirewallPolicyDestroy,
-    Steps: []resource.TestStep{
-      {
-        Config: testAccAzureRMFirewallPolicy_basic(data),
-        Check: resource.ComposeTestCheckFunc(
-          testCheckAzureRMFirewallPolicyExists(data.ResourceName),
-        ),
-      },
-      data.RequiresImportErrorStep(testAccAzureRMFirewallPolicy_requiresImport),
-    },
-  })
-}
-
-func TestAccAzureRMFirewallPolicy_inherit(t *testing.T) {
-  data := acceptance.BuildTestData(t, "azurerm_firewall_policy", "test")
-
-  resource.ParallelTest(t, resource.TestCase{
-    PreCheck:     func() { acceptance.PreCheck(t) },
-    Providers:    acceptance.SupportedProviders,
-    CheckDestroy: testCheckAzureRMFirewallPolicyDestroy,
-    Steps: []resource.TestStep{
-      {
-        Config: testAccAzureRMFirewallPolicy_inherit(data),
-        Check: resource.ComposeTestCheckFunc(
-          testCheckAzureRMFirewallPolicyExists(data.ResourceName),
-        ),
-      },
-      data.ImportStep(),
-    },
-  })
-}
-
-func testCheckAzureRMFirewallPolicyExists(resourceName string) resource.TestCheckFunc {
-  return func(s *terraform.State) error {
-    client := acceptance.AzureProvider.Meta().(*clients.Client).Network.FirewallPolicyClient
-    ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext
-
-    rs, ok := s.RootModule().Resources[resourceName]
-    if !ok {
-      return fmt.Errorf("Firewall Policy not found: %s", resourceName)
-    }
-
-    id, err := parse.FirewallPolicyID(rs.Primary.ID)
-    if err != nil {
-      return err
-    }
-
-    if resp, err := client.Get(ctx, id.ResourceGroup, id.Name, ""); err != nil {
-      if utils.ResponseWasNotFound(resp.Response) {
-        return fmt.Errorf("Firewall Policy %q (Resource Group %q) does not exist", id.Name,
id.ResourceGroup) - } - return fmt.Errorf("Getting on Network.FirewallPolicies: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMFirewallPolicyDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Network.FirewallPolicyClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_firewall_policy" { - continue - } - - id, err := parse.FirewallPolicyID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.Name, "") - if err == nil { - return fmt.Errorf("Network.FirewallPolicies still exists") - } - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Getting on Network.FirewallPolicies: %+v", err) - } - - return nil - } - - return nil -} - -func testAccAzureRMFirewallPolicy_basic(data acceptance.TestData) string { - template := testAccAzureRMFirewallPolicy_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_policy" "test" { - name = "acctest-networkfw-Policy-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location -} -`, template, data.RandomInteger) -} - -func testAccAzureRMFirewallPolicy_complete(data acceptance.TestData) string { - template := testAccAzureRMFirewallPolicy_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_policy" "test" { - name = "acctest-networkfw-Policy-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - threat_intelligence_mode = "Off" - threat_intelligence_allowlist { - ip_addresses = ["1.1.1.1", "2.2.2.2"] - fqdns = ["foo.com", "bar.com"] - } - dns { - servers = ["1.1.1.1", "2.2.2.2"] - proxy_enabled = true - network_rule_fqdn_enabled = true - } - tags = { - env = "Test" - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMFirewallPolicy_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMFirewallPolicy_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_policy" "import" { - name = azurerm_firewall_policy.test.name - resource_group_name = azurerm_firewall_policy.test.resource_group_name - location = azurerm_firewall_policy.test.location -} -`, template) -} - -func testAccAzureRMFirewallPolicy_inherit(data acceptance.TestData) string { - template := testAccAzureRMFirewallPolicy_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall_policy" "test-parent" { - name = "acctest-networkfw-Policy-%d-parent" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location -} - -resource "azurerm_firewall_policy" "test" { - name = "acctest-networkfw-Policy-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - base_policy_id = azurerm_firewall_policy.test-parent.id - threat_intelligence_allowlist { - ip_addresses = ["1.1.1.1", "2.2.2.2"] - fqdns = ["foo.com", "bar.com"] - } - dns { - servers = ["1.1.1.1", "2.2.2.2"] - proxy_enabled = true - network_rule_fqdn_enabled = true - } - tags = { - env = "Test" - } -} -`, template, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMFirewallPolicy_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-networkfw-%d" - location = "%s" -} -`, data.RandomInteger, 
data.Locations.Primary) -} diff --git a/azurerm/internal/services/network/tests/firewall_resource_test.go b/azurerm/internal/services/network/tests/firewall_resource_test.go deleted file mode 100644 index 6bb1a781bb12..000000000000 --- a/azurerm/internal/services/network/tests/firewall_resource_test.go +++ /dev/null @@ -1,726 +0,0 @@ -package tests - -import ( - "fmt" - "strings" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestValidateFirewallName(t *testing.T) { - // The name must begin with a letter or number, end with a letter, number or underscore, and may contain only letters, numbers, underscores, periods, or hyphens. - validNames := []string{ - "a", - "abc123", - "a_b_c", - "hy-ph-en", - "valid_", - "v-a_l1.d_", - strings.Repeat("w", 65), - } - for _, v := range validNames { - _, errors := network.ValidateAzureFirewallName(v, "name") - if len(errors) != 0 { - t.Fatalf("%q should be a valid Firewall Name: %q", v, errors) - } - } - - invalidNames := []string{ - "_invalid", - "-invalid", - ".invalid", - "!invalid", - "hel!!o", - "invalid.", - "invalid-", - "invalid!", - } - for _, v := range invalidNames { - _, errors := network.ValidateAzureFirewallName(v, "name") - if len(errors) == 0 { - t.Fatalf("%q should be an invalid Firewall Name", v) - } - } -} - -func TestAccAzureRMFirewall_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewall_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "ip_configuration.0.name", "configuration"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ip_configuration.0.private_ip_address"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMFirewall_withManagementIp(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewall_withManagementIp(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "ip_configuration.0.name", "configuration"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ip_configuration.0.private_ip_address"), - resource.TestCheckResourceAttr(data.ResourceName, "management_ip_configuration.0.name", "management_configuration"), - resource.TestCheckResourceAttrSet(data.ResourceName, "management_ip_configuration.0.public_ip_address_id"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMFirewall_withMultiplePublicIPs(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall", "test") - - 
resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewall_multiplePublicIps(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "ip_configuration.0.name", "configuration"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ip_configuration.0.private_ip_address"), - resource.TestCheckResourceAttr(data.ResourceName, "ip_configuration.1.name", "configuration_2"), - resource.TestCheckResourceAttrSet(data.ResourceName, "ip_configuration.1.public_ip_address_id"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMFirewall_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewall_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMFirewall_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_firewall"), - }, - }, - }) -} - -func TestAccAzureRMFirewall_withTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewall_withTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.environment", "Production"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.cost_center", "MSFT"), - ), - }, - { - Config: testAccAzureRMFirewall_withUpdatedTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.environment", "staging"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMFirewall_withZones(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall", "test") - zones := []string{"1"} - zonesUpdate := []string{"1", "3"} - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewall_withZones(data, zones), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "zones.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "zones.0", "1"), - ), - }, - { - Config: testAccAzureRMFirewall_withZones(data, zonesUpdate), - Check: resource.ComposeTestCheckFunc( - - testCheckAzureRMFirewallExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "zones.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "zones.0", "1"), - 
resource.TestCheckResourceAttr(data.ResourceName, "zones.1", "3"), - ), - }, - }, - }) -} - -func TestAccAzureRMFirewall_withoutZone(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewall_withoutZone(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMFirewall_disappears(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_firewall", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMFirewallDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMFirewall_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMFirewallExists(data.ResourceName), - testCheckAzureRMFirewallDisappears(data.ResourceName), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func testCheckAzureRMFirewallExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Network.AzureFirewallsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Azure Firewall: %q", name) - } - - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Azure Firewall %q (Resource Group: %q) does not exist", name, resourceGroup) - } - - return fmt.Errorf("Bad: Get on azureFirewallsClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMFirewallDisappears(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Network.AzureFirewallsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Azure Firewall: %q", name) - } - - future, err := client.Delete(ctx, resourceGroup, name) - if err != nil { - return fmt.Errorf("Bad: Delete on azureFirewallsClient: %+v", err) - } - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Bad: waiting for Deletion on azureFirewallsClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMFirewallDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Network.AzureFirewallsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs 
:= range s.RootModule().Resources { - if rs.Type != "azurerm_firewall" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - - return fmt.Errorf("Firewall still exists:\n%#v", resp.AzureFirewallPropertiesFormat) - } - - return nil -} - -func testAccAzureRMFirewall_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctestvirtnet%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "AzureFirewallSubnet" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefixes = ["10.0.1.0/24"] -} - -resource "azurerm_public_ip" "test" { - name = "acctestpip%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" - sku = "Standard" -} - -resource "azurerm_firewall" "test" { - name = "acctestfirewall%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - ip_configuration { - name = "configuration" - subnet_id = azurerm_subnet.test.id - public_ip_address_id = azurerm_public_ip.test.id - } - threat_intel_mode = "Deny" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMFirewall_withManagementIp(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctestvirtnet%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "AzureFirewallSubnet" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefixes = ["10.0.1.0/24"] -} - -resource "azurerm_public_ip" "test" { - name = "acctestpip%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" - sku = "Standard" -} - -resource "azurerm_subnet" "test_mgmt" { - name = "AzureFirewallManagementSubnet" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefixes = ["10.0.2.0/24"] -} - -resource "azurerm_public_ip" "test_mgmt" { - name = "acctestmgmtpip%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" - sku = "Standard" -} - -resource "azurerm_firewall" "test" { - name = "acctestfirewall%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - ip_configuration { - name = "configuration" - subnet_id = azurerm_subnet.test.id - public_ip_address_id = azurerm_public_ip.test.id - } - - management_ip_configuration { - name = 
"management_configuration" - subnet_id = azurerm_subnet.test_mgmt.id - public_ip_address_id = azurerm_public_ip.test_mgmt.id - } - - threat_intel_mode = "Alert" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMFirewall_multiplePublicIps(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctestvirtnet%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "AzureFirewallSubnet" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefixes = ["10.0.1.0/24"] -} - -resource "azurerm_public_ip" "test" { - name = "acctestpip%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" - sku = "Standard" -} - -resource "azurerm_public_ip" "test_2" { - name = "acctestpip2%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" - sku = "Standard" -} - -resource "azurerm_firewall" "test" { - name = "acctestfirewall%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - ip_configuration { - name = "configuration" - subnet_id = azurerm_subnet.test.id - public_ip_address_id = azurerm_public_ip.test.id - } - - ip_configuration { - name = "configuration_2" - public_ip_address_id = azurerm_public_ip.test_2.id - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMFirewall_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMFirewall_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_firewall" "import" { - name = azurerm_firewall.test.name - location = azurerm_firewall.test.location - resource_group_name = azurerm_firewall.test.resource_group_name - - ip_configuration { - name = "configuration" - subnet_id = azurerm_subnet.test.id - public_ip_address_id = azurerm_public_ip.test.id - } - threat_intel_mode = azurerm_firewall.test.threat_intel_mode -} -`, template) -} - -func testAccAzureRMFirewall_withTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctestvirtnet%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "AzureFirewallSubnet" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefixes = ["10.0.1.0/24"] -} - -resource "azurerm_public_ip" "test" { - name = "acctestpip%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" - sku = "Standard" -} - -resource "azurerm_firewall" "test" { - name = "acctestfirewall%d" - location = 
azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - ip_configuration { - name = "configuration" - subnet_id = azurerm_subnet.test.id - public_ip_address_id = azurerm_public_ip.test.id - } - - tags = { - environment = "Production" - cost_center = "MSFT" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMFirewall_withUpdatedTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctestvirtnet%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "AzureFirewallSubnet" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefixes = ["10.0.1.0/24"] -} - -resource "azurerm_public_ip" "test" { - name = "acctestpip%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" - sku = "Standard" -} - -resource "azurerm_firewall" "test" { - name = "acctestfirewall%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - ip_configuration { - name = "configuration" - subnet_id = azurerm_subnet.test.id - public_ip_address_id = azurerm_public_ip.test.id - } - - tags = { - environment = "staging" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMFirewall_withZones(data acceptance.TestData, zones []string) string { - zoneString := strings.Join(zones, ",") - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctestvirtnet%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "AzureFirewallSubnet" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefixes = ["10.0.1.0/24"] -} - -resource "azurerm_public_ip" "test" { - name = "acctestpip%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" - sku = "Standard" -} - -resource "azurerm_firewall" "test" { - name = "acctestfirewall%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - ip_configuration { - name = "configuration" - subnet_id = azurerm_subnet.test.id - public_ip_address_id = azurerm_public_ip.test.id - } - - zones = [%s] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, zoneString) -} - -func testAccAzureRMFirewall_withoutZone(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctestvirtnet%d" - address_space = ["10.0.0.0/16"] - 
location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "AzureFirewallSubnet" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefixes = ["10.0.1.0/24"] -} - -resource "azurerm_public_ip" "test" { - name = "acctestpip%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" - sku = "Standard" -} - -resource "azurerm_firewall" "test" { - name = "acctestfirewall%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - ip_configuration { - name = "configuration" - subnet_id = azurerm_subnet.test.id - public_ip_address_id = azurerm_public_ip.test.id - } - - zones = [] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/network/tests/ip_group_resource_test.go b/azurerm/internal/services/network/tests/ip_group_resource_test.go index 60d4ff8607d2..1a7713f719ac 100644 --- a/azurerm/internal/services/network/tests/ip_group_resource_test.go +++ b/azurerm/internal/services/network/tests/ip_group_resource_test.go @@ -166,7 +166,6 @@ func testCheckAzureRMIpGroupDestroy(s *terraform.State) error { resourceGroup := rs.Primary.Attributes["resource_group_name"] resp, err := client.Get(ctx, resourceGroup, name, "") - if err != nil { if utils.ResponseWasNotFound(resp.Response) { return nil diff --git a/azurerm/internal/services/network/tests/ip_group_test.go b/azurerm/internal/services/network/tests/ip_group_test.go index fc038ea56c58..130f51b22ba7 100644 --- a/azurerm/internal/services/network/tests/ip_group_test.go +++ b/azurerm/internal/services/network/tests/ip_group_test.go @@ -3,14 +3,14 @@ package tests import ( "testing" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" ) func TestParseIpGroup(t *testing.T) { testData := []struct { Name string Input string - Expected *network.IpGroupResourceID + Expected *parse.IpGroupId }{ { Name: "Empty", @@ -30,7 +30,7 @@ func TestParseIpGroup(t *testing.T) { { Name: "Completed", Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/ipGroups/example", - Expected: &network.IpGroupResourceID{ + Expected: &parse.IpGroupId{ Name: "example", ResourceGroup: "foo", }, @@ -40,7 +40,7 @@ func TestParseIpGroup(t *testing.T) { for _, v := range testData { t.Logf("[DEBUG] Testing %q", v.Name) - actual, err := network.ParseIpGroupID(v.Input) + actual, err := parse.IpGroupID(v.Input) if err != nil { if v.Expected == nil { continue diff --git a/azurerm/internal/services/network/tests/loadbalancer_backend_address_pool_data_source_test.go b/azurerm/internal/services/network/tests/loadbalancer_backend_address_pool_data_source_test.go deleted file mode 100644 index 8fc7d6de8dfa..000000000000 --- a/azurerm/internal/services/network/tests/loadbalancer_backend_address_pool_data_source_test.go +++ /dev/null @@ -1,41 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" -) - -func TestAccAzureRMDataSourceLoadBalancerBackEndAddressPool_basic(t 
*testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_lb_backend_address_pool", "test") - addressPoolName := fmt.Sprintf("%d-address-pool", data.RandomInteger) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataSourceLoadBalancerBackEndAddressPool_basic(data, addressPoolName), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "id"), - ), - }, - }, - }) -} - -func testAccAzureRMDataSourceLoadBalancerBackEndAddressPool_basic(data acceptance.TestData, name string) string { - resource := testAccAzureRMLoadBalancerBackEndAddressPool_basic(data, name) - return fmt.Sprintf(` -%s - -data "azurerm_lb_backend_address_pool" "test" { - name = azurerm_lb_backend_address_pool.test.name - loadbalancer_id = azurerm_lb_backend_address_pool.test.loadbalancer_id -} -`, resource) -} diff --git a/azurerm/internal/services/network/tests/loadbalancer_backend_address_pool_resource_test.go b/azurerm/internal/services/network/tests/loadbalancer_backend_address_pool_resource_test.go deleted file mode 100644 index 9ff6dfce5b1d..000000000000 --- a/azurerm/internal/services/network/tests/loadbalancer_backend_address_pool_resource_test.go +++ /dev/null @@ -1,244 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-03-01/network" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - nw "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network" -) - -func TestAccAzureRMLoadBalancerBackEndAddressPool_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_backend_address_pool", "test") - - var lb network.LoadBalancer - addressPoolName := fmt.Sprintf("%d-address-pool", data.RandomInteger) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerBackEndAddressPool_basic(data, addressPoolName), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerBackEndAddressPoolExists(addressPoolName, &lb), - ), - }, - data.ImportStep(), - }, - }) -} -func TestAccAzureRMLoadBalancerBackEndAddressPool_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_backend_address_pool", "test") - - var lb network.LoadBalancer - addressPoolName := fmt.Sprintf("%d-address-pool", data.RandomInteger) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerBackEndAddressPool_basic(data, addressPoolName), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - 
testCheckAzureRMLoadBalancerBackEndAddressPoolExists(addressPoolName, &lb), - ), - }, - { - Config: testAccAzureRMLoadBalancerBackEndAddressPool_requiresImport(data, addressPoolName), - ExpectError: acceptance.RequiresImportError(data.ResourceType), - }, - }, - }) -} - -func TestAccAzureRMLoadBalancerBackEndAddressPool_removal(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_backend_address_pool", "test") - - var lb network.LoadBalancer - addressPoolName := fmt.Sprintf("%d-address-pool", data.RandomInteger) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerBackEndAddressPool_removal(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerBackEndAddressPoolNotExists(addressPoolName, &lb), - ), - }, - }, - }) -} - -func TestAccAzureRMLoadBalancerBackEndAddressPool_disappears(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_backend_address_pool", "test") - - var lb network.LoadBalancer - addressPoolName := fmt.Sprintf("%d-address-pool", data.RandomInteger) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerBackEndAddressPool_basic(data, addressPoolName), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerBackEndAddressPoolExists(addressPoolName, &lb), - testCheckAzureRMLoadBalancerBackEndAddressPoolDisappears(addressPoolName, &lb), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func testCheckAzureRMLoadBalancerBackEndAddressPoolExists(addressPoolName string, lb *network.LoadBalancer) resource.TestCheckFunc { - return func(s *terraform.State) error { - _, _, exists := nw.FindLoadBalancerBackEndAddressPoolByName(lb, addressPoolName) - if !exists { - return fmt.Errorf("A BackEnd Address Pool with name %q cannot be found.", addressPoolName) - } - - return nil - } -} - -func testCheckAzureRMLoadBalancerBackEndAddressPoolNotExists(addressPoolName string, lb *network.LoadBalancer) resource.TestCheckFunc { - return func(s *terraform.State) error { - _, _, exists := nw.FindLoadBalancerBackEndAddressPoolByName(lb, addressPoolName) - if exists { - return fmt.Errorf("A BackEnd Address Pool with name %q has been found.", addressPoolName) - } - - return nil - } -} - -func testCheckAzureRMLoadBalancerBackEndAddressPoolDisappears(addressPoolName string, lb *network.LoadBalancer) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Network.LoadBalancersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - _, i, exists := nw.FindLoadBalancerBackEndAddressPoolByName(lb, addressPoolName) - if !exists { - return fmt.Errorf("A BackEnd Address Pool with name %q cannot be found.", addressPoolName) - } - - currentPools := *lb.LoadBalancerPropertiesFormat.BackendAddressPools - pools := append(currentPools[:i], currentPools[i+1:]...) 
- lb.LoadBalancerPropertiesFormat.BackendAddressPools = &pools - - id, err := azure.ParseAzureResourceID(*lb.ID) - if err != nil { - return err - } - - future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, *lb.Name, *lb) - if err != nil { - return fmt.Errorf("Error Creating/Updating Load Balancer %+v", err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error Creating/Updating Load Balancer %+v", err) - } - - _, err = client.Get(ctx, id.ResourceGroup, *lb.Name, "") - return err - } -} - -func testAccAzureRMLoadBalancerBackEndAddressPool_basic(data acceptance.TestData, addressPoolName string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_public_ip" "test" { - name = "test-ip-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" -} - -resource "azurerm_lb" "test" { - name = "arm-test-loadbalancer-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - frontend_ip_configuration { - name = "one-%d" - public_ip_address_id = azurerm_public_ip.test.id - } -} - -resource "azurerm_lb_backend_address_pool" "test" { - resource_group_name = azurerm_resource_group.test.name - loadbalancer_id = azurerm_lb.test.id - name = "%s" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, addressPoolName) -} - -func testAccAzureRMLoadBalancerBackEndAddressPool_requiresImport(data acceptance.TestData, name string) string { - template := testAccAzureRMLoadBalancerBackEndAddressPool_basic(data, name) - return fmt.Sprintf(` -%s - -resource "azurerm_lb_backend_address_pool" "import" { - name = azurerm_lb_backend_address_pool.test.name - loadbalancer_id = azurerm_lb_backend_address_pool.test.loadbalancer_id - resource_group_name = azurerm_lb_backend_address_pool.test.resource_group_name -} -`, template) -} - -func testAccAzureRMLoadBalancerBackEndAddressPool_removal(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_public_ip" "test" { - name = "test-ip-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" -} - -resource "azurerm_lb" "test" { - name = "arm-test-loadbalancer-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - frontend_ip_configuration { - name = "one-%d" - public_ip_address_id = azurerm_public_ip.test.id - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/network/tests/loadbalancer_data_source_test.go b/azurerm/internal/services/network/tests/loadbalancer_data_source_test.go deleted file mode 100644 index 2d9f89ba71c0..000000000000 --- a/azurerm/internal/services/network/tests/loadbalancer_data_source_test.go +++ /dev/null @@ -1,40 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" -) - -func TestAccAzureRMDataSourceLoadBalancer_basic(t 
*testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_lb", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataSourceLoadBalancer_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - ), - }, - }, - }) -} - -func testAccAzureRMDataSourceLoadBalancer_basic(data acceptance.TestData) string { - resource := testAccAzureRMLoadBalancer_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_lb" "test" { - name = azurerm_lb.test.name - resource_group_name = azurerm_lb.test.resource_group_name -} -`, resource) -} diff --git a/azurerm/internal/services/network/tests/loadbalancer_nat_pool_resource_test.go b/azurerm/internal/services/network/tests/loadbalancer_nat_pool_resource_test.go deleted file mode 100644 index 309a54d5658a..000000000000 --- a/azurerm/internal/services/network/tests/loadbalancer_nat_pool_resource_test.go +++ /dev/null @@ -1,425 +0,0 @@ -package tests - -import ( - "fmt" - "os" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-03-01/network" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - nw "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network" -) - -func TestAccAzureRMLoadBalancerNatPool_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_nat_pool", "test") - - var lb network.LoadBalancer - natPoolName := fmt.Sprintf("NatPool-%d", data.RandomInteger) - - subscriptionID := os.Getenv("ARM_SUBSCRIPTION_ID") - natPoolId := fmt.Sprintf( - "/subscriptions/%s/resourceGroups/acctestRG-%d/providers/Microsoft.Network/loadBalancers/arm-test-loadbalancer-%d/inboundNatPools/%s", - subscriptionID, data.RandomInteger, data.RandomInteger, natPoolName) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerNatPool_basic(data, natPoolName), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerNatPoolExists(natPoolName, &lb), - resource.TestCheckResourceAttr( - "azurerm_lb_nat_pool.test", "id", natPoolId), - ), - }, - { - ResourceName: "azurerm_lb.test", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func TestAccAzureRMLoadBalancerNatPool_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_nat_pool", "test") - - var lb network.LoadBalancer - natPoolName := fmt.Sprintf("NatPool-%d", data.RandomInteger) - - subscriptionID := os.Getenv("ARM_SUBSCRIPTION_ID") - natPoolId := fmt.Sprintf( - "/subscriptions/%s/resourceGroups/acctestRG-%d/providers/Microsoft.Network/loadBalancers/arm-test-loadbalancer-%d/inboundNatPools/%s", - subscriptionID, data.RandomInteger, data.RandomInteger, natPoolName) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { 
acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerNatPool_basic(data, natPoolName), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerNatPoolExists(natPoolName, &lb), - resource.TestCheckResourceAttr( - "azurerm_lb_nat_pool.test", "id", natPoolId), - ), - }, - { - Config: testAccAzureRMLoadBalancerNatPool_requiresImport(data, natPoolName), - ExpectError: acceptance.RequiresImportError("azurerm_lb_nat_pool"), - }, - }, - }) -} - -func TestAccAzureRMLoadBalancerNatPool_removal(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_nat_pool", "test") - - var lb network.LoadBalancer - natPoolName := fmt.Sprintf("NatPool-%d", data.RandomInteger) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerNatPool_basic(data, natPoolName), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerNatPoolExists(natPoolName, &lb), - ), - }, - { - Config: testAccAzureRMLoadBalancerNatPool_removal(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerNatPoolNotExists(natPoolName, &lb), - ), - }, - }, - }) -} - -func TestAccAzureRMLoadBalancerNatPool_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_nat_pool", "test") - data2 := acceptance.BuildTestData(t, "azurerm_lb_nat_pool", "test2") - - var lb network.LoadBalancer - natPoolName := fmt.Sprintf("NatPool-%d", data.RandomInteger) - natPool2Name := fmt.Sprintf("NatPool-%d", data2.RandomInteger) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerNatPool_multiplePools(data, natPoolName, natPool2Name), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerNatPoolExists(natPoolName, &lb), - testCheckAzureRMLoadBalancerNatPoolExists(natPool2Name, &lb), - resource.TestCheckResourceAttr("azurerm_lb_nat_pool.test2", "backend_port", "3390"), - ), - }, - { - Config: testAccAzureRMLoadBalancerNatPool_multiplePoolsUpdate(data, natPoolName, natPool2Name), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerNatPoolExists(natPoolName, &lb), - testCheckAzureRMLoadBalancerNatPoolExists(natPool2Name, &lb), - resource.TestCheckResourceAttr("azurerm_lb_nat_pool.test2", "backend_port", "3391"), - ), - }, - }, - }) -} - -func TestAccAzureRMLoadBalancerNatPool_disappears(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_nat_pool", "test") - - var lb network.LoadBalancer - natPoolName := fmt.Sprintf("NatPool-%d", data.RandomInteger) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: 
testAccAzureRMLoadBalancerNatPool_basic(data, natPoolName), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerNatPoolExists(natPoolName, &lb), - testCheckAzureRMLoadBalancerNatPoolDisappears(natPoolName, &lb), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func testCheckAzureRMLoadBalancerNatPoolExists(natPoolName string, lb *network.LoadBalancer) resource.TestCheckFunc { - return func(s *terraform.State) error { - _, _, exists := nw.FindLoadBalancerNatPoolByName(lb, natPoolName) - if !exists { - return fmt.Errorf("A NAT Pool with name %q cannot be found.", natPoolName) - } - - return nil - } -} - -func testCheckAzureRMLoadBalancerNatPoolNotExists(natPoolName string, lb *network.LoadBalancer) resource.TestCheckFunc { - return func(s *terraform.State) error { - _, _, exists := nw.FindLoadBalancerNatPoolByName(lb, natPoolName) - if exists { - return fmt.Errorf("A NAT Pool with name %q has been found.", natPoolName) - } - - return nil - } -} - -func testCheckAzureRMLoadBalancerNatPoolDisappears(natPoolName string, lb *network.LoadBalancer) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Network.LoadBalancersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - _, i, exists := nw.FindLoadBalancerNatPoolByName(lb, natPoolName) - if !exists { - return fmt.Errorf("A Nat Pool with name %q cannot be found.", natPoolName) - } - - currentPools := *lb.LoadBalancerPropertiesFormat.InboundNatPools - pools := append(currentPools[:i], currentPools[i+1:]...) - lb.LoadBalancerPropertiesFormat.InboundNatPools = &pools - - id, err := azure.ParseAzureResourceID(*lb.ID) - if err != nil { - return err - } - - future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, *lb.Name, *lb) - if err != nil { - return fmt.Errorf("Error Creating/Updating Load Balancer %+v", err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for the completion of Load Balancer %+v", err) - } - - _, err = client.Get(ctx, id.ResourceGroup, *lb.Name, "") - return err - } -} - -func testAccAzureRMLoadBalancerNatPool_basic(data acceptance.TestData, natPoolName string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_public_ip" "test" { - name = "test-ip-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" -} - -resource "azurerm_lb" "test" { - name = "arm-test-loadbalancer-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - frontend_ip_configuration { - name = "one-%d" - public_ip_address_id = azurerm_public_ip.test.id - } -} - -resource "azurerm_lb_nat_pool" "test" { - resource_group_name = azurerm_resource_group.test.name - loadbalancer_id = azurerm_lb.test.id - name = "%s" - protocol = "Tcp" - frontend_port_start = 80 - frontend_port_end = 81 - backend_port = 3389 - frontend_ip_configuration_name = "one-%d" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, natPoolName, data.RandomInteger) -} - -func testAccAzureRMLoadBalancerNatPool_requiresImport(data acceptance.TestData, name string) string { - template := 
testAccAzureRMLoadBalancerNatPool_basic(data, name) - return fmt.Sprintf(` -%s - -resource "azurerm_lb_nat_pool" "import" { - name = azurerm_lb_nat_pool.test.name - loadbalancer_id = azurerm_lb_nat_pool.test.loadbalancer_id - resource_group_name = azurerm_lb_nat_pool.test.resource_group_name - frontend_ip_configuration_name = azurerm_lb_nat_pool.test.frontend_ip_configuration_name - protocol = "Tcp" - frontend_port_start = 80 - frontend_port_end = 81 - backend_port = 3389 -} -`, template) -} - -func testAccAzureRMLoadBalancerNatPool_removal(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_public_ip" "test" { - name = "test-ip-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" -} - -resource "azurerm_lb" "test" { - name = "arm-test-loadbalancer-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - frontend_ip_configuration { - name = "one-%d" - public_ip_address_id = azurerm_public_ip.test.id - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMLoadBalancerNatPool_multiplePools(data acceptance.TestData, natPoolName, natPool2Name string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_public_ip" "test" { - name = "test-ip-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - allocation_method = "Static" -} - -resource "azurerm_lb" "test" { - name = "arm-test-loadbalancer-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - frontend_ip_configuration { - name = "one-%d" - public_ip_address_id = azurerm_public_ip.test.id - } -} - -resource "azurerm_lb_nat_pool" "test" { - resource_group_name = azurerm_resource_group.test.name - loadbalancer_id = azurerm_lb.test.id - name = "%s" - protocol = "Tcp" - frontend_port_start = 80 - frontend_port_end = 81 - backend_port = 3389 - - frontend_ip_configuration_name = "one-%d" -} - -resource "azurerm_lb_nat_pool" "test2" { - resource_group_name = azurerm_resource_group.test.name - loadbalancer_id = azurerm_lb.test.id - name = "%s" - protocol = "Tcp" - frontend_port_start = 82 - frontend_port_end = 83 - backend_port = 3390 - - frontend_ip_configuration_name = "one-%d" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, natPoolName, data.RandomInteger, natPool2Name, data.RandomInteger) -} - -func testAccAzureRMLoadBalancerNatPool_multiplePoolsUpdate(data acceptance.TestData, natPoolName, natPool2Name string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_public_ip" "test" { - name = "test-ip-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" -} - -resource "azurerm_lb" "test" { - name = "arm-test-loadbalancer-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - 
frontend_ip_configuration { - name = "one-%d" - public_ip_address_id = azurerm_public_ip.test.id - } -} - -resource "azurerm_lb_nat_pool" "test" { - resource_group_name = azurerm_resource_group.test.name - loadbalancer_id = azurerm_lb.test.id - name = "%s" - protocol = "Tcp" - frontend_port_start = 80 - frontend_port_end = 81 - backend_port = 3389 - frontend_ip_configuration_name = "one-%d" -} - -resource "azurerm_lb_nat_pool" "test2" { - resource_group_name = azurerm_resource_group.test.name - loadbalancer_id = azurerm_lb.test.id - name = "%s" - protocol = "Tcp" - frontend_port_start = 82 - frontend_port_end = 83 - backend_port = 3391 - frontend_ip_configuration_name = "one-%d" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, natPoolName, data.RandomInteger, natPool2Name, data.RandomInteger) -} diff --git a/azurerm/internal/services/network/tests/loadbalancer_nat_rule_resource_test.go b/azurerm/internal/services/network/tests/loadbalancer_nat_rule_resource_test.go deleted file mode 100644 index 81f11082f5b1..000000000000 --- a/azurerm/internal/services/network/tests/loadbalancer_nat_rule_resource_test.go +++ /dev/null @@ -1,435 +0,0 @@ -package tests - -import ( - "fmt" - "os" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-03-01/network" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - nw "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network" -) - -func TestAccAzureRMLoadBalancerNatRule_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_nat_rule", "test") - - var lb network.LoadBalancer - natRuleName := fmt.Sprintf("NatRule-%d", data.RandomInteger) - - subscriptionID := os.Getenv("ARM_SUBSCRIPTION_ID") - natRuleId := fmt.Sprintf( - "/subscriptions/%s/resourceGroups/acctestRG-lb-%d/providers/Microsoft.Network/loadBalancers/arm-test-loadbalancer-%d/inboundNatRules/%s", - subscriptionID, data.RandomInteger, data.RandomInteger, natRuleName) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerNatRule_basic(data, natRuleName, "Basic"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerNatRuleExists(natRuleName, &lb), - resource.TestCheckResourceAttr(data.ResourceName, "id", natRuleId), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMLoadBalancerNatRule_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_nat_rule", "test") - - var lb network.LoadBalancer - natRuleName := fmt.Sprintf("NatRule-%d", data.RandomInteger) - - subscriptionID := os.Getenv("ARM_SUBSCRIPTION_ID") - natRuleId := fmt.Sprintf( - "/subscriptions/%s/resourceGroups/acctestRG-lb-%d/providers/Microsoft.Network/loadBalancers/arm-test-loadbalancer-%d/inboundNatRules/%s", - subscriptionID, data.RandomInteger, data.RandomInteger, natRuleName) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { 
acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerNatRule_complete(data, natRuleName), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerNatRuleExists(natRuleName, &lb), - resource.TestCheckResourceAttr(data.ResourceName, "id", natRuleId), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMLoadBalancerNatRule_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_nat_rule", "test") - - var lb network.LoadBalancer - natRuleName := fmt.Sprintf("NatRule-%d", data.RandomInteger) - - subscriptionID := os.Getenv("ARM_SUBSCRIPTION_ID") - natRuleId := fmt.Sprintf( - "/subscriptions/%s/resourceGroups/acctestRG-lb-%d/providers/Microsoft.Network/loadBalancers/arm-test-loadbalancer-%d/inboundNatRules/%s", - subscriptionID, data.RandomInteger, data.RandomInteger, natRuleName) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerNatRule_basic(data, natRuleName, "Standard"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerNatRuleExists(natRuleName, &lb), - resource.TestCheckResourceAttr(data.ResourceName, "id", natRuleId), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMLoadBalancerNatRule_complete(data, natRuleName), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerNatRuleExists(natRuleName, &lb), - resource.TestCheckResourceAttr( - "azurerm_lb_nat_rule.test", "id", natRuleId), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMLoadBalancerNatRule_basic(data, natRuleName, "Standard"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerNatRuleExists(natRuleName, &lb), - resource.TestCheckResourceAttr(data.ResourceName, "id", natRuleId), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMLoadBalancerNatRule_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_nat_rule", "test") - - var lb network.LoadBalancer - natRuleName := fmt.Sprintf("NatRule-%d", data.RandomInteger) - - subscriptionID := os.Getenv("ARM_SUBSCRIPTION_ID") - natRuleId := fmt.Sprintf( - "/subscriptions/%s/resourceGroups/acctestRG-lb-%d/providers/Microsoft.Network/loadBalancers/arm-test-loadbalancer-%d/inboundNatRules/%s", - subscriptionID, data.RandomInteger, data.RandomInteger, natRuleName) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerNatRule_basic(data, natRuleName, "Basic"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerNatRuleExists(natRuleName, &lb), - resource.TestCheckResourceAttr(data.ResourceName, "id", natRuleId), - ), - }, - { - Config: testAccAzureRMLoadBalancerNatRule_requiresImport(data, natRuleName), - ExpectError: 
acceptance.RequiresImportError("azurerm_lb_nat_rule"), - }, - }, - }) -} - -func TestAccAzureRMLoadBalancerNatRule_removal(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_nat_rule", "test") - var lb network.LoadBalancer - natRuleName := fmt.Sprintf("NatRule-%d", data.RandomInteger) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerNatRule_basic(data, natRuleName, "Basic"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerNatRuleExists(natRuleName, &lb), - ), - }, - { - Config: testAccAzureRMLoadBalancerNatRule_template(data, "Basic"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerNatRuleNotExists(natRuleName, &lb), - ), - }, - }, - }) -} - -func TestAccAzureRMLoadBalancerNatRule_updateMultipleRules(t *testing.T) { - data1 := acceptance.BuildTestData(t, "azurerm_lb_nat_rule", "test") - data2 := acceptance.BuildTestData(t, "azurerm_lb_nat_rule", "test2") - var lb network.LoadBalancer - natRuleName := fmt.Sprintf("NatRule-%d", data1.RandomInteger) - natRule2Name := fmt.Sprintf("NatRule-%d", data2.RandomInteger) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerNatRule_multipleRules(data1, natRuleName, natRule2Name), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerNatRuleExists(natRuleName, &lb), - testCheckAzureRMLoadBalancerNatRuleExists(natRule2Name, &lb), - resource.TestCheckResourceAttr(data2.ResourceName, "frontend_port", "3390"), - resource.TestCheckResourceAttr(data2.ResourceName, "backend_port", "3390"), - ), - }, - { - Config: testAccAzureRMLoadBalancerNatRule_multipleRulesUpdate(data1, natRuleName, natRule2Name), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerNatRuleExists(natRuleName, &lb), - testCheckAzureRMLoadBalancerNatRuleExists(natRule2Name, &lb), - resource.TestCheckResourceAttr(data2.ResourceName, "frontend_port", "3391"), - resource.TestCheckResourceAttr(data2.ResourceName, "backend_port", "3391"), - ), - }, - }, - }) -} - -func TestAccAzureRMLoadBalancerNatRule_disappears(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_nat_rule", "test") - - var lb network.LoadBalancer - natRuleName := fmt.Sprintf("NatRule-%d", data.RandomInteger) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerNatRule_basic(data, natRuleName, "Basic"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerNatRuleExists(natRuleName, &lb), - testCheckAzureRMLoadBalancerNatRuleDisappears(natRuleName, &lb), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func testCheckAzureRMLoadBalancerNatRuleExists(natRuleName string, lb 
*network.LoadBalancer) resource.TestCheckFunc { - return func(s *terraform.State) error { - _, _, exists := nw.FindLoadBalancerNatRuleByName(lb, natRuleName) - if !exists { - return fmt.Errorf("A NAT Rule with name %q cannot be found.", natRuleName) - } - - return nil - } -} - -func testCheckAzureRMLoadBalancerNatRuleNotExists(natRuleName string, lb *network.LoadBalancer) resource.TestCheckFunc { - return func(s *terraform.State) error { - _, _, exists := nw.FindLoadBalancerNatRuleByName(lb, natRuleName) - if exists { - return fmt.Errorf("A NAT Rule with name %q has been found.", natRuleName) - } - - return nil - } -} - -func testCheckAzureRMLoadBalancerNatRuleDisappears(natRuleName string, lb *network.LoadBalancer) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Network.LoadBalancersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - _, i, exists := nw.FindLoadBalancerNatRuleByName(lb, natRuleName) - if !exists { - return fmt.Errorf("A Nat Rule with name %q cannot be found.", natRuleName) - } - - currentRules := *lb.LoadBalancerPropertiesFormat.InboundNatRules - rules := append(currentRules[:i], currentRules[i+1:]...) - lb.LoadBalancerPropertiesFormat.InboundNatRules = &rules - - id, err := azure.ParseAzureResourceID(*lb.ID) - if err != nil { - return err - } - - future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, *lb.Name, *lb) - if err != nil { - return fmt.Errorf("Error Creating/Updating Load Balancer %+v", err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for the completion of Load Balancer %q (Resource Group %q): %+v", *lb.Name, id.ResourceGroup, err) - } - - _, err = client.Get(ctx, id.ResourceGroup, *lb.Name, "") - return err - } -} - -func testAccAzureRMLoadBalancerNatRule_template(data acceptance.TestData, sku string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-lb-%[1]d" - location = "%[2]s" -} - -resource "azurerm_public_ip" "test" { - name = "test-ip-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" - sku = "%[3]s" -} - -resource "azurerm_lb" "test" { - name = "arm-test-loadbalancer-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "%[3]s" - - frontend_ip_configuration { - name = "one-%[1]d" - public_ip_address_id = azurerm_public_ip.test.id - } -} -`, data.RandomInteger, data.Locations.Primary, sku) -} - -func testAccAzureRMLoadBalancerNatRule_basic(data acceptance.TestData, natRuleName string, sku string) string { - return fmt.Sprintf(` -%s - -resource "azurerm_lb_nat_rule" "test" { - resource_group_name = "${azurerm_resource_group.test.name}" - loadbalancer_id = "${azurerm_lb.test.id}" - name = "%s" - protocol = "Tcp" - frontend_port = 3389 - backend_port = 3389 - frontend_ip_configuration_name = azurerm_lb.test.frontend_ip_configuration.0.name -} -`, testAccAzureRMLoadBalancerNatRule_template(data, sku), natRuleName) -} - -func testAccAzureRMLoadBalancerNatRule_complete(data acceptance.TestData, natRuleName string) string { - return fmt.Sprintf(` -%s - -resource "azurerm_lb_nat_rule" "test" { - name = "%s" - resource_group_name = "${azurerm_resource_group.test.name}" - loadbalancer_id = "${azurerm_lb.test.id}" - - protocol = "Tcp" - 
frontend_port = 3389 - backend_port = 3389 - - enable_floating_ip = true - enable_tcp_reset = true - idle_timeout_in_minutes = 10 - - frontend_ip_configuration_name = azurerm_lb.test.frontend_ip_configuration.0.name -} -`, testAccAzureRMLoadBalancerNatRule_template(data, "Standard"), natRuleName) -} - -func testAccAzureRMLoadBalancerNatRule_requiresImport(data acceptance.TestData, name string) string { - template := testAccAzureRMLoadBalancerNatRule_basic(data, name, "Basic") - return fmt.Sprintf(` -%s - -resource "azurerm_lb_nat_rule" "import" { - name = azurerm_lb_nat_rule.test.name - loadbalancer_id = azurerm_lb_nat_rule.test.loadbalancer_id - resource_group_name = azurerm_lb_nat_rule.test.resource_group_name - frontend_ip_configuration_name = azurerm_lb_nat_rule.test.frontend_ip_configuration_name - protocol = "Tcp" - frontend_port = 3389 - backend_port = 3389 -} -`, template) -} - -func testAccAzureRMLoadBalancerNatRule_multipleRules(data acceptance.TestData, natRuleName, natRule2Name string) string { - return fmt.Sprintf(` -%s - -resource "azurerm_lb_nat_rule" "test" { - resource_group_name = "${azurerm_resource_group.test.name}" - loadbalancer_id = "${azurerm_lb.test.id}" - name = "%s" - protocol = "Tcp" - frontend_port = 3389 - backend_port = 3389 - frontend_ip_configuration_name = azurerm_lb.test.frontend_ip_configuration.0.name -} - -resource "azurerm_lb_nat_rule" "test2" { - resource_group_name = "${azurerm_resource_group.test.name}" - loadbalancer_id = "${azurerm_lb.test.id}" - name = "%s" - protocol = "Tcp" - frontend_port = 3390 - backend_port = 3390 - frontend_ip_configuration_name = azurerm_lb.test.frontend_ip_configuration.0.name -} -`, testAccAzureRMLoadBalancerNatRule_template(data, "Basic"), natRuleName, natRule2Name) -} - -func testAccAzureRMLoadBalancerNatRule_multipleRulesUpdate(data acceptance.TestData, natRuleName, natRule2Name string) string { - return fmt.Sprintf(` -%s -resource "azurerm_lb_nat_rule" "test" { - resource_group_name = "${azurerm_resource_group.test.name}" - loadbalancer_id = "${azurerm_lb.test.id}" - name = "%s" - protocol = "Tcp" - frontend_port = 3389 - backend_port = 3389 - frontend_ip_configuration_name = azurerm_lb.test.frontend_ip_configuration.0.name -} - -resource "azurerm_lb_nat_rule" "test2" { - resource_group_name = "${azurerm_resource_group.test.name}" - loadbalancer_id = "${azurerm_lb.test.id}" - name = "%s" - protocol = "Tcp" - frontend_port = 3391 - backend_port = 3391 - frontend_ip_configuration_name = azurerm_lb.test.frontend_ip_configuration.0.name -} -`, testAccAzureRMLoadBalancerNatRule_template(data, "Basic"), natRuleName, natRule2Name) -} diff --git a/azurerm/internal/services/network/tests/loadbalancer_outbound_rule_resource_test.go b/azurerm/internal/services/network/tests/loadbalancer_outbound_rule_resource_test.go deleted file mode 100644 index ba68109a7340..000000000000 --- a/azurerm/internal/services/network/tests/loadbalancer_outbound_rule_resource_test.go +++ /dev/null @@ -1,574 +0,0 @@ -package tests - -import ( - "fmt" - "os" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-03-01/network" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - nw 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network" -) - -func TestAccAzureRMLoadBalancerOutboundRule_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_nat_rule", "test") - - var lb network.LoadBalancer - outboundRuleName := fmt.Sprintf("OutboundRule-%d", data.RandomInteger) - - subscriptionID := os.Getenv("ARM_SUBSCRIPTION_ID") - outboundRuleId := fmt.Sprintf( - "/subscriptions/%s/resourceGroups/acctestRG-%d/providers/Microsoft.Network/loadBalancers/arm-test-loadbalancer-%d/outboundRules/%s", - subscriptionID, data.RandomInteger, data.RandomInteger, outboundRuleName) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerOutboundRule_basic(data, outboundRuleName), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerOutboundRuleExists(outboundRuleName, &lb), - resource.TestCheckResourceAttr( - "azurerm_lb_outbound_rule.test", "id", outboundRuleId), - ), - }, - { - ResourceName: "azurerm_lb.test", - ImportState: true, - ImportStateVerify: true, - // location is deprecated and was never actually used - ImportStateVerifyIgnore: []string{"location"}, - }, - }, - }) -} - -func TestAccAzureRMLoadBalancerOutboundRule_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_nat_rule", "test") - - var lb network.LoadBalancer - outboundRuleName := fmt.Sprintf("OutboundRule-%d", data.RandomInteger) - - subscriptionID := os.Getenv("ARM_SUBSCRIPTION_ID") - outboundRuleId := fmt.Sprintf( - "/subscriptions/%s/resourceGroups/acctestRG-%d/providers/Microsoft.Network/loadBalancers/arm-test-loadbalancer-%d/outboundRules/%s", - subscriptionID, data.RandomInteger, data.RandomInteger, outboundRuleName) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerOutboundRule_basic(data, outboundRuleName), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerOutboundRuleExists(outboundRuleName, &lb), - resource.TestCheckResourceAttr( - "azurerm_lb_outbound_rule.test", "id", outboundRuleId), - ), - }, - { - Config: testAccAzureRMLoadBalancerOutboundRule_requiresImport(data, outboundRuleName), - ExpectError: acceptance.RequiresImportError("azurerm_lb_outbound_rule"), - }, - }, - }) -} - -func TestAccAzureRMLoadBalancerOutboundRule_removal(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_nat_rule", "test") - - var lb network.LoadBalancer - outboundRuleName := fmt.Sprintf("OutboundRule-%d", data.RandomInteger) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerOutboundRule_basic(data, outboundRuleName), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerOutboundRuleExists(outboundRuleName, &lb), - ), - }, - { - Config: 
testAccAzureRMLoadBalancerOutboundRule_removal(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerOutboundRuleNotExists(outboundRuleName, &lb), - ), - }, - }, - }) -} - -func TestAccAzureRMLoadBalancerOutboundRule_update(t *testing.T) { - data1 := acceptance.BuildTestData(t, "azurerm_lb_outbound_rule", "test") - data2 := acceptance.BuildTestData(t, "azurerm_lb_outbound_rule", "test2") - - var lb network.LoadBalancer - outboundRuleName := fmt.Sprintf("OutboundRule-%d", data1.RandomInteger) - outboundRule2Name := fmt.Sprintf("OutboundRule-%d", data2.RandomInteger) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerOutboundRule_multipleRules(data1, outboundRuleName, outboundRule2Name), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerOutboundRuleExists(outboundRuleName, &lb), - testCheckAzureRMLoadBalancerOutboundRuleExists(outboundRule2Name, &lb), - ), - }, - data1.ImportStep(), - data2.ImportStep(), - { - Config: testAccAzureRMLoadBalancerOutboundRule_multipleRulesUpdate(data1, outboundRuleName, outboundRule2Name), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerOutboundRuleExists(outboundRuleName, &lb), - testCheckAzureRMLoadBalancerOutboundRuleExists(outboundRule2Name, &lb), - ), - }, - data1.ImportStep(), - data2.ImportStep(), - }, - }) -} - -func TestAccAzureRMLoadBalancerOutboundRule_withPublicIPPrefix(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_nat_rule", "test") - - var lb network.LoadBalancer - outboundRuleName := fmt.Sprintf("OutboundRule-%d", data.RandomInteger) - - subscriptionID := os.Getenv("ARM_SUBSCRIPTION_ID") - outboundRuleId := fmt.Sprintf( - "/subscriptions/%s/resourceGroups/acctestRG-%d/providers/Microsoft.Network/loadBalancers/arm-test-loadbalancer-%d/outboundRules/%s", - subscriptionID, data.RandomInteger, data.RandomInteger, outboundRuleName) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerOutboundRule_withPublicIPPrefix(data, outboundRuleName), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerOutboundRuleExists(outboundRuleName, &lb), - resource.TestCheckResourceAttr( - "azurerm_lb_outbound_rule.test", "id", outboundRuleId), - ), - }, - { - ResourceName: "azurerm_lb.test", - ImportState: true, - ImportStateVerify: true, - // location is deprecated and was never actually used - ImportStateVerifyIgnore: []string{"location"}, - }, - }, - }) -} - -func TestAccAzureRMLoadBalancerOutboundRule_disappears(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_nat_rule", "test") - - var lb network.LoadBalancer - outboundRuleName := fmt.Sprintf("OutboundRule-%d", data.RandomInteger) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: 
[]resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerOutboundRule_basic(data, outboundRuleName), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerOutboundRuleExists(outboundRuleName, &lb), - testCheckAzureRMLoadBalancerOutboundRuleDisappears(outboundRuleName, &lb), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func testCheckAzureRMLoadBalancerOutboundRuleExists(outboundRuleName string, lb *network.LoadBalancer) resource.TestCheckFunc { - return func(s *terraform.State) error { - if _, _, exists := nw.FindLoadBalancerOutboundRuleByName(lb, outboundRuleName); !exists { - return fmt.Errorf("A Load Balancer Outbound Rule with name %q cannot be found.", outboundRuleName) - } - - return nil - } -} - -func testCheckAzureRMLoadBalancerOutboundRuleNotExists(outboundRuleName string, lb *network.LoadBalancer) resource.TestCheckFunc { - return func(s *terraform.State) error { - if _, _, exists := nw.FindLoadBalancerOutboundRuleByName(lb, outboundRuleName); exists { - return fmt.Errorf("A Load Balancer Outbound Rule with name %q has been found.", outboundRuleName) - } - - return nil - } -} - -func testCheckAzureRMLoadBalancerOutboundRuleDisappears(ruleName string, lb *network.LoadBalancer) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Network.LoadBalancersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - _, i, exists := nw.FindLoadBalancerOutboundRuleByName(lb, ruleName) - if !exists { - return fmt.Errorf("A Outbound Rule with name %q cannot be found.", ruleName) - } - - currentRules := *lb.LoadBalancerPropertiesFormat.OutboundRules - rules := append(currentRules[:i], currentRules[i+1:]...) 
- lb.LoadBalancerPropertiesFormat.OutboundRules = &rules - - id, err := azure.ParseAzureResourceID(*lb.ID) - if err != nil { - return err - } - - future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, *lb.Name, *lb) - if err != nil { - return fmt.Errorf("Error Creating/Updating Load Balancer %q (Resource Group %q): %+v", *lb.Name, id.ResourceGroup, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for completion of Load Balancer %q (Resource Group %q): %+v", *lb.Name, id.ResourceGroup, err) - } - - _, err = client.Get(ctx, id.ResourceGroup, *lb.Name, "") - return err - } -} - -func testAccAzureRMLoadBalancerOutboundRule_basic(data acceptance.TestData, outboundRuleName string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_public_ip" "test" { - name = "test-ip-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" - sku = "Standard" -} - -resource "azurerm_lb" "test" { - name = "arm-test-loadbalancer-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - - frontend_ip_configuration { - name = "one-%d" - public_ip_address_id = azurerm_public_ip.test.id - } -} - -resource "azurerm_lb_backend_address_pool" "test" { - resource_group_name = azurerm_resource_group.test.name - loadbalancer_id = azurerm_lb.test.id - name = "be-%d" -} - -resource "azurerm_lb_outbound_rule" "test" { - resource_group_name = azurerm_resource_group.test.name - loadbalancer_id = azurerm_lb.test.id - name = "%s" - backend_address_pool_id = azurerm_lb_backend_address_pool.test.id - protocol = "All" - - frontend_ip_configuration { - name = "one-%d" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, outboundRuleName, data.RandomInteger) -} - -func testAccAzureRMLoadBalancerOutboundRule_requiresImport(data acceptance.TestData, name string) string { - template := testAccAzureRMLoadBalancerOutboundRule_basic(data, name) - return fmt.Sprintf(` -%s - -resource "azurerm_lb_outbound_rule" "import" { - name = azurerm_lb_outbound_rule.test.name - resource_group_name = azurerm_lb_outbound_rule.test.resource_group_name - loadbalancer_id = azurerm_lb_outbound_rule.test.loadbalancer_id - backend_address_pool_id = azurerm_lb_backend_address_pool.test.id - protocol = "All" - - frontend_ip_configuration { - name = azurerm_lb_outbound_rule.test.frontend_ip_configuration[0].name - } -} -`, template) -} - -func testAccAzureRMLoadBalancerOutboundRule_removal(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_public_ip" "test" { - name = "test-ip-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" - sku = "Standard" -} - -resource "azurerm_lb_backend_address_pool" "test" { - resource_group_name = azurerm_resource_group.test.name - loadbalancer_id = azurerm_lb.test.id - name = "be-%d" -} - -resource "azurerm_lb" "test" { - name = "arm-test-loadbalancer-%d" - location = azurerm_resource_group.test.location - resource_group_name = 
azurerm_resource_group.test.name - sku = "Standard" - - frontend_ip_configuration { - name = "one-%d" - public_ip_address_id = azurerm_public_ip.test.id - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMLoadBalancerOutboundRule_multipleRules(data acceptance.TestData, outboundRuleName, outboundRule2Name string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_public_ip" "test1" { - name = "test-ip-1-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" - sku = "Standard" -} - -resource "azurerm_public_ip" "test2" { - name = "test-ip-2-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" - sku = "Standard" -} - -resource "azurerm_lb" "test" { - name = "arm-test-loadbalancer-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - - frontend_ip_configuration { - name = "fe1-%d" - public_ip_address_id = azurerm_public_ip.test1.id - } - - frontend_ip_configuration { - name = "fe2-%d" - public_ip_address_id = azurerm_public_ip.test2.id - } -} - -resource "azurerm_lb_backend_address_pool" "test" { - resource_group_name = azurerm_resource_group.test.name - loadbalancer_id = azurerm_lb.test.id - name = "be-%d" -} - -resource "azurerm_lb_outbound_rule" "test" { - resource_group_name = azurerm_resource_group.test.name - loadbalancer_id = azurerm_lb.test.id - name = "%s" - protocol = "Tcp" - backend_address_pool_id = azurerm_lb_backend_address_pool.test.id - - frontend_ip_configuration { - name = "fe1-%d" - } -} - -resource "azurerm_lb_outbound_rule" "test2" { - resource_group_name = azurerm_resource_group.test.name - loadbalancer_id = azurerm_lb.test.id - name = "%s" - protocol = "Udp" - backend_address_pool_id = azurerm_lb_backend_address_pool.test.id - - frontend_ip_configuration { - name = "fe2-%d" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, outboundRuleName, data.RandomInteger, outboundRule2Name, data.RandomInteger) -} - -func testAccAzureRMLoadBalancerOutboundRule_multipleRulesUpdate(data acceptance.TestData, outboundRuleName, outboundRule2Name string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_public_ip" "test1" { - name = "test-ip-1-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" - sku = "Standard" -} - -resource "azurerm_public_ip" "test2" { - name = "test-ip-2-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" - sku = "Standard" -} - -resource "azurerm_lb" "test" { - name = "arm-test-loadbalancer-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - - frontend_ip_configuration { - name = "fe1-%d" - public_ip_address_id = azurerm_public_ip.test1.id - } - - frontend_ip_configuration 
{ - name = "fe2-%d" - public_ip_address_id = azurerm_public_ip.test2.id - } -} - -resource "azurerm_lb_backend_address_pool" "test" { - resource_group_name = azurerm_resource_group.test.name - loadbalancer_id = azurerm_lb.test.id - name = "be-%d" -} - -resource "azurerm_lb_outbound_rule" "test" { - resource_group_name = azurerm_resource_group.test.name - loadbalancer_id = azurerm_lb.test.id - name = "%s" - protocol = "All" - backend_address_pool_id = azurerm_lb_backend_address_pool.test.id - - frontend_ip_configuration { - name = "fe1-%d" - } -} - -resource "azurerm_lb_outbound_rule" "test2" { - resource_group_name = azurerm_resource_group.test.name - loadbalancer_id = azurerm_lb.test.id - name = "%s" - protocol = "All" - backend_address_pool_id = azurerm_lb_backend_address_pool.test.id - - frontend_ip_configuration { - name = "fe2-%d" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, outboundRuleName, data.RandomInteger, outboundRule2Name, data.RandomInteger) -} - -func testAccAzureRMLoadBalancerOutboundRule_withPublicIPPrefix(data acceptance.TestData, outboundRuleName string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_public_ip_prefix" "test" { - name = "test-ip-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - prefix_length = 31 -} - -resource "azurerm_lb" "test" { - name = "arm-test-loadbalancer-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - - frontend_ip_configuration { - name = "one-%d" - public_ip_prefix_id = azurerm_public_ip_prefix.test.id - } -} - -resource "azurerm_lb_backend_address_pool" "test" { - resource_group_name = azurerm_resource_group.test.name - loadbalancer_id = azurerm_lb.test.id - name = "be-%d" -} - -resource "azurerm_lb_outbound_rule" "test" { - resource_group_name = azurerm_resource_group.test.name - loadbalancer_id = azurerm_lb.test.id - name = "%s" - backend_address_pool_id = azurerm_lb_backend_address_pool.test.id - protocol = "All" - - frontend_ip_configuration { - name = "one-%d" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, outboundRuleName, data.RandomInteger) -} diff --git a/azurerm/internal/services/network/tests/loadbalancer_probe_resource_test.go b/azurerm/internal/services/network/tests/loadbalancer_probe_resource_test.go deleted file mode 100644 index 3c9d9a8d9469..000000000000 --- a/azurerm/internal/services/network/tests/loadbalancer_probe_resource_test.go +++ /dev/null @@ -1,504 +0,0 @@ -package tests - -import ( - "fmt" - "os" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-03-01/network" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - nw "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network" -) - -func TestAccAzureRMLoadBalancerProbe_basic(t *testing.T) { - 
data := acceptance.BuildTestData(t, "azurerm_lb_probe", "test") - - var lb network.LoadBalancer - probeName := fmt.Sprintf("probe-%d", data.RandomInteger) - - subscriptionID := os.Getenv("ARM_SUBSCRIPTION_ID") - probeId := fmt.Sprintf( - "/subscriptions/%s/resourceGroups/acctestRG-%d/providers/Microsoft.Network/loadBalancers/arm-test-loadbalancer-%d/probes/%s", - subscriptionID, data.RandomInteger, data.RandomInteger, probeName) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerProbe_basic(data, probeName), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerProbeExists(probeName, &lb), - resource.TestCheckResourceAttr( - "azurerm_lb_probe.test", "id", probeId), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMLoadBalancerProbe_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_probe", "test") - - var lb network.LoadBalancer - probeName := fmt.Sprintf("probe-%d", data.RandomInteger) - - subscriptionID := os.Getenv("ARM_SUBSCRIPTION_ID") - probeId := fmt.Sprintf( - "/subscriptions/%s/resourceGroups/acctestRG-%d/providers/Microsoft.Network/loadBalancers/arm-test-loadbalancer-%d/probes/%s", - subscriptionID, data.RandomInteger, data.RandomInteger, probeName) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerProbe_basic(data, probeName), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerProbeExists(probeName, &lb), - resource.TestCheckResourceAttr( - "azurerm_lb_probe.test", "id", probeId), - ), - }, - { - Config: testAccAzureRMLoadBalancerProbe_requiresImport(data, probeName), - ExpectError: acceptance.RequiresImportError("azurerm_lb_probe"), - }, - }, - }) -} - -func TestAccAzureRMLoadBalancerProbe_removal(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_probe", "test") - - var lb network.LoadBalancer - probeName := fmt.Sprintf("probe-%d", data.RandomInteger) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerProbe_basic(data, probeName), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerProbeExists(probeName, &lb), - ), - }, - { - Config: testAccAzureRMLoadBalancerProbe_removal(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerProbeNotExists(probeName, &lb), - ), - }, - }, - }) -} - -func TestAccAzureRMLoadBalancerProbe_update(t *testing.T) { - data1 := acceptance.BuildTestData(t, "azurerm_lb_probe", "test") - data2 := acceptance.BuildTestData(t, "azurerm_lb_probe", "test2") - - var lb network.LoadBalancer - probeName := fmt.Sprintf("probe-%d", data1.RandomInteger) - probe2Name := fmt.Sprintf("probe-%d", data2.RandomInteger) - - resource.ParallelTest(t, resource.TestCase{ - 
PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerProbe_multipleProbes(data1, probeName, probe2Name), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerProbeExists(probeName, &lb), - testCheckAzureRMLoadBalancerProbeExists(probe2Name, &lb), - resource.TestCheckResourceAttr(data2.ResourceName, "port", "80"), - ), - }, - { - Config: testAccAzureRMLoadBalancerProbe_multipleProbesUpdate(data1, probeName, probe2Name), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerProbeExists(probeName, &lb), - testCheckAzureRMLoadBalancerProbeExists(probe2Name, &lb), - resource.TestCheckResourceAttr(data2.ResourceName, "port", "8080"), - ), - }, - }, - }) -} - -func TestAccAzureRMLoadBalancerProbe_updateProtocol(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_probe", "test") - - var lb network.LoadBalancer - probeName := fmt.Sprintf("probe-%d", data.RandomInteger) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerProbe_updateProtocolBefore(data, probeName), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerProbeExists(probeName, &lb), - resource.TestCheckResourceAttr("azurerm_lb_probe.test", "protocol", "Http"), - ), - }, - { - Config: testAccAzureRMLoadBalancerProbe_updateProtocolAfter(data, probeName), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerProbeExists(probeName, &lb), - resource.TestCheckResourceAttr("azurerm_lb_probe.test", "protocol", "Tcp"), - ), - }, - }, - }) -} - -func TestAccAzureRMLoadBalancerProbe_disappears(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_probe", "test") - - var lb network.LoadBalancer - probeName := fmt.Sprintf("probe-%d", data.RandomInteger) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerProbe_basic(data, probeName), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerProbeExists(probeName, &lb), - testCheckAzureRMLoadBalancerProbeDisappears(probeName, &lb), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func testCheckAzureRMLoadBalancerProbeExists(natRuleName string, lb *network.LoadBalancer) resource.TestCheckFunc { - return func(s *terraform.State) error { - _, _, exists := nw.FindLoadBalancerProbeByName(lb, natRuleName) - if !exists { - return fmt.Errorf("A Probe with name %q cannot be found.", natRuleName) - } - - return nil - } -} - -func testCheckAzureRMLoadBalancerProbeNotExists(natRuleName string, lb *network.LoadBalancer) resource.TestCheckFunc { - return func(s *terraform.State) error { - _, _, exists := nw.FindLoadBalancerProbeByName(lb, natRuleName) - if exists { - return fmt.Errorf("A Probe with name %q has been found.", 
natRuleName) - } - - return nil - } -} - -func testCheckAzureRMLoadBalancerProbeDisappears(addressPoolName string, lb *network.LoadBalancer) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Network.LoadBalancersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - _, i, exists := nw.FindLoadBalancerProbeByName(lb, addressPoolName) - if !exists { - return fmt.Errorf("A Probe with name %q cannot be found.", addressPoolName) - } - - currentProbes := *lb.LoadBalancerPropertiesFormat.Probes - probes := append(currentProbes[:i], currentProbes[i+1:]...) - lb.LoadBalancerPropertiesFormat.Probes = &probes - - id, err := azure.ParseAzureResourceID(*lb.ID) - if err != nil { - return err - } - - future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, *lb.Name, *lb) - if err != nil { - return fmt.Errorf("Error Creating/Updating LoadBalancer: %+v", err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for completion for LoadBalancer: %+v", err) - } - - _, err = client.Get(ctx, id.ResourceGroup, *lb.Name, "") - return err - } -} - -func testAccAzureRMLoadBalancerProbe_basic(data acceptance.TestData, probeName string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_public_ip" "test" { - name = "test-ip-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" -} - -resource "azurerm_lb" "test" { - name = "arm-test-loadbalancer-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - frontend_ip_configuration { - name = "one-%d" - public_ip_address_id = azurerm_public_ip.test.id - } -} - -resource "azurerm_lb_probe" "test" { - resource_group_name = azurerm_resource_group.test.name - loadbalancer_id = azurerm_lb.test.id - name = "%s" - port = 22 -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, probeName) -} - -func testAccAzureRMLoadBalancerProbe_requiresImport(data acceptance.TestData, name string) string { - template := testAccAzureRMLoadBalancerProbe_basic(data, name) - return fmt.Sprintf(` -%s - -resource "azurerm_lb_probe" "import" { - name = azurerm_lb_probe.test.name - loadbalancer_id = azurerm_lb_probe.test.loadbalancer_id - resource_group_name = azurerm_lb_probe.test.resource_group_name - port = 22 -} -`, template) -} - -func testAccAzureRMLoadBalancerProbe_removal(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_public_ip" "test" { - name = "test-ip-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" -} - -resource "azurerm_lb" "test" { - name = "arm-test-loadbalancer-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - frontend_ip_configuration { - name = "one-%d" - public_ip_address_id = azurerm_public_ip.test.id - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func 
testAccAzureRMLoadBalancerProbe_multipleProbes(data acceptance.TestData, probeName, probe2Name string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_public_ip" "test" { - name = "test-ip-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" -} - -resource "azurerm_lb" "test" { - name = "arm-test-loadbalancer-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - frontend_ip_configuration { - name = "one-%d" - public_ip_address_id = azurerm_public_ip.test.id - } -} - -resource "azurerm_lb_probe" "test" { - resource_group_name = azurerm_resource_group.test.name - loadbalancer_id = azurerm_lb.test.id - name = "%s" - port = 22 -} - -resource "azurerm_lb_probe" "test2" { - resource_group_name = azurerm_resource_group.test.name - loadbalancer_id = azurerm_lb.test.id - name = "%s" - port = 80 -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, probeName, probe2Name) -} - -func testAccAzureRMLoadBalancerProbe_multipleProbesUpdate(data acceptance.TestData, probeName, probe2Name string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_public_ip" "test" { - name = "test-ip-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" -} - -resource "azurerm_lb" "test" { - name = "arm-test-loadbalancer-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - frontend_ip_configuration { - name = "one-%d" - public_ip_address_id = azurerm_public_ip.test.id - } -} - -resource "azurerm_lb_probe" "test" { - resource_group_name = azurerm_resource_group.test.name - loadbalancer_id = azurerm_lb.test.id - name = "%s" - port = 22 -} - -resource "azurerm_lb_probe" "test2" { - resource_group_name = azurerm_resource_group.test.name - loadbalancer_id = azurerm_lb.test.id - name = "%s" - port = 8080 -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, probeName, probe2Name) -} - -func testAccAzureRMLoadBalancerProbe_updateProtocolBefore(data acceptance.TestData, probeName string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_public_ip" "test" { - name = "test-ip-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" -} - -resource "azurerm_lb" "test" { - name = "arm-test-loadbalancer-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - frontend_ip_configuration { - name = "one-%d" - public_ip_address_id = azurerm_public_ip.test.id - } -} - -resource "azurerm_lb_probe" "test" { - resource_group_name = azurerm_resource_group.test.name - loadbalancer_id = azurerm_lb.test.id - name = "%s" - protocol = "Http" - request_path = "/" - port = 80 -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, probeName) -} - -func 
testAccAzureRMLoadBalancerProbe_updateProtocolAfter(data acceptance.TestData, probeName string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_public_ip" "test" { - name = "test-ip-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" -} - -resource "azurerm_lb" "test" { - name = "arm-test-loadbalancer-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - frontend_ip_configuration { - name = "one-%d" - public_ip_address_id = azurerm_public_ip.test.id - } -} - -resource "azurerm_lb_probe" "test" { - resource_group_name = azurerm_resource_group.test.name - loadbalancer_id = azurerm_lb.test.id - name = "%s" - protocol = "Tcp" - port = 80 -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, probeName) -} diff --git a/azurerm/internal/services/network/tests/loadbalancer_resource_test.go b/azurerm/internal/services/network/tests/loadbalancer_resource_test.go deleted file mode 100644 index a962c628d8a2..000000000000 --- a/azurerm/internal/services/network/tests/loadbalancer_resource_test.go +++ /dev/null @@ -1,586 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-03-01/network" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMLoadBalancer_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb", "test") - var lb network.LoadBalancer - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancer_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - ), - }, - { - ResourceName: "azurerm_lb.test", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func TestAccAzureRMLoadBalancer_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb", "test") - var lb network.LoadBalancer - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancer_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - ), - }, - { - Config: testAccAzureRMLoadBalancer_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_lb"), - }, - }, - }) -} - -func TestAccAzureRMLoadBalancer_standard(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb", "test") - var lb network.LoadBalancer - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: 
testAccAzureRMLoadBalancer_standard(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - ), - }, - { - ResourceName: "azurerm_lb.test", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} -func TestAccAzureRMLoadBalancer_frontEndConfigPublicIPPrefix(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb", "test") - var lb network.LoadBalancer - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancer_frontEndConfigPublicIPPrefix(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists(data.ResourceName, &lb), - resource.TestCheckResourceAttr(data.ResourceName, "frontend_ip_configuration.#", "1"), - ), - }, - { - ResourceName: "azurerm_lb.test", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} -func TestAccAzureRMLoadBalancer_frontEndConfig(t *testing.T) { - var lb network.LoadBalancer - data := acceptance.BuildTestData(t, "azurerm_lb", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancer_frontEndConfig(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists(data.ResourceName, &lb), - resource.TestCheckResourceAttr(data.ResourceName, "frontend_ip_configuration.#", "2"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMLoadBalancer_frontEndConfigRemovalWithIP(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists(data.ResourceName, &lb), - resource.TestCheckResourceAttr(data.ResourceName, "frontend_ip_configuration.#", "1"), - ), - }, - { - Config: testAccAzureRMLoadBalancer_frontEndConfigRemoval(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists(data.ResourceName, &lb), - resource.TestCheckResourceAttr(data.ResourceName, "frontend_ip_configuration.#", "1"), - ), - }, - }, - }) -} - -func TestAccAzureRMLoadBalancer_tags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb", "test") - var lb network.LoadBalancer - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancer_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists(data.ResourceName, &lb), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.Environment", "production"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.Purpose", "AcceptanceTests"), - ), - }, - { - Config: testAccAzureRMLoadBalancer_updatedTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists(data.ResourceName, &lb), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.Purpose", "AcceptanceTests"), - ), - }, - }, - }) -} - -func TestAccAzureRMLoadBalancer_emptyPrivateIP(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb", "test") - var lb network.LoadBalancer - - 
resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancer_emptyPrivateIPAddress(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists(data.ResourceName, &lb), - resource.TestCheckResourceAttrSet(data.ResourceName, "frontend_ip_configuration.0.private_ip_address"), - ), - }, - }, - }) -} - -func TestAccAzureRMLoadBalancer_privateIP(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb", "test") - var lb network.LoadBalancer - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancer_privateIPAddress(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists(data.ResourceName, &lb), - resource.TestCheckResourceAttrSet(data.ResourceName, "frontend_ip_configuration.0.private_ip_address"), - ), - }, - }, - }) -} - -func testCheckAzureRMLoadBalancerExists(resourceName string, lb *network.LoadBalancer) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Network.LoadBalancersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - loadBalancerName := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for loadbalancer: %s", loadBalancerName) - } - - resp, err := client.Get(ctx, resourceGroup, loadBalancerName, "") - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Load Balancer %q (resource group: %q) does not exist", loadBalancerName, resourceGroup) - } - - return fmt.Errorf("Bad: Get on loadBalancerClient: %+v", err) - } - - *lb = resp - - return nil - } -} - -func testCheckAzureRMLoadBalancerDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Network.LoadBalancersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_lb" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, name, "") - - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("LoadBalancer still exists:\n%#v", resp.LoadBalancerPropertiesFormat) - } - } - - return nil -} - -func testAccAzureRMLoadBalancer_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-lb-%d" - location = "%s" -} - -resource "azurerm_lb" "test" { - name = "acctest-loadbalancer-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - tags = { - Environment = "production" - Purpose = "AcceptanceTests" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func 
testAccAzureRMLoadBalancer_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMLoadBalancer_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_lb" "import" { - name = azurerm_lb.test.name - location = azurerm_lb.test.location - resource_group_name = azurerm_lb.test.resource_group_name - - tags = { - Environment = "production" - Purpose = "AcceptanceTests" - } -} -`, template) -} - -func testAccAzureRMLoadBalancer_standard(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-lb-%d" - location = "%s" -} - -resource "azurerm_lb" "test" { - name = "acctest-loadbalancer-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - - tags = { - Environment = "production" - Purpose = "AcceptanceTests" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMLoadBalancer_updatedTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-lb-%d" - location = "%s" -} - -resource "azurerm_lb" "test" { - name = "acctest-loadbalancer-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - tags = { - Purpose = "AcceptanceTests" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMLoadBalancer_frontEndConfig(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-lb-%d" - location = "%s" -} - -resource "azurerm_public_ip" "test" { - name = "test-ip-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" -} - -resource "azurerm_public_ip" "test1" { - name = "another-test-ip-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" -} - -resource "azurerm_lb" "test" { - name = "acctest-loadbalancer-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - frontend_ip_configuration { - name = "one-%d" - public_ip_address_id = azurerm_public_ip.test.id - } - - frontend_ip_configuration { - name = "two-%d" - public_ip_address_id = azurerm_public_ip.test1.id - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMLoadBalancer_frontEndConfigRemovalWithIP(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-lb-%d" - location = "%s" -} - -resource "azurerm_public_ip" "test" { - name = "test-ip-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" -} - -resource "azurerm_public_ip" "test1" { - name = "another-test-ip-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" -} - -resource "azurerm_lb" "test" { - name = "acctest-loadbalancer-%d" - location = azurerm_resource_group.test.location - resource_group_name = 
azurerm_resource_group.test.name - - frontend_ip_configuration { - name = "one-%d" - public_ip_address_id = azurerm_public_ip.test.id - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMLoadBalancer_frontEndConfigPublicIPPrefix(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-lb-%d" - location = "%s" -} - -resource "azurerm_public_ip_prefix" "test" { - name = "test-ip-prefix-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - prefix_length = 31 -} - -resource "azurerm_lb" "test" { - name = "acctest-loadbalancer-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - - frontend_ip_configuration { - name = "prefix-%d" - public_ip_prefix_id = azurerm_public_ip_prefix.test.id - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMLoadBalancer_frontEndConfigRemoval(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-lb-%d" - location = "%s" -} - -resource "azurerm_public_ip" "test" { - name = "test-ip-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" -} - -resource "azurerm_lb" "test" { - name = "acctest-loadbalancer-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - frontend_ip_configuration { - name = "one-%d" - public_ip_address_id = azurerm_public_ip.test.id - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMLoadBalancer_emptyPrivateIPAddress(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-lb-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctvn-%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctsub-%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.2.0/24" -} - -resource "azurerm_lb" "test" { - name = "acctestlb-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - sku = "Basic" - - frontend_ip_configuration { - name = "Internal" - private_ip_address_allocation = "Dynamic" - private_ip_address = "" - subnet_id = azurerm_subnet.test.id - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMLoadBalancer_privateIPAddress(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-lb-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctvn-%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = 
azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctsub-%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.2.0/24" -} - -resource "azurerm_lb" "test" { - name = "acctestlb-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - sku = "Basic" - - frontend_ip_configuration { - name = "Internal" - private_ip_address_allocation = "Static" - private_ip_address_version = "IPv4" - private_ip_address = "10.0.2.7" - subnet_id = azurerm_subnet.test.id - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/network/tests/loadbalancer_rule_data_source_test.go b/azurerm/internal/services/network/tests/loadbalancer_rule_data_source_test.go deleted file mode 100644 index d8bbfd2afa0f..000000000000 --- a/azurerm/internal/services/network/tests/loadbalancer_rule_data_source_test.go +++ /dev/null @@ -1,124 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - - "github.com/hashicorp/terraform-plugin-sdk/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" -) - -func TestAccAzureRMDataSourceLoadBalancerRule_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_lb_rule", "test") - lbRuleName := fmt.Sprintf("LbRule-%s", acctest.RandStringFromCharSet(8, acctest.CharSetAlpha)) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataSourceLoadBalancerRule_basic(data, lbRuleName), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "frontend_ip_configuration_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "protocol"), - resource.TestCheckResourceAttrSet(data.ResourceName, "frontend_port"), - resource.TestCheckResourceAttrSet(data.ResourceName, "backend_port"), - ), - }, - }, - }) -} - -func TestAccAzureRMDataSourceLoadBalancerRule_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_lb_rule", "test") - backendPoolName := fmt.Sprintf("LbPool-%s", acctest.RandStringFromCharSet(8, acctest.CharSetAlpha)) - lbRuleName := fmt.Sprintf("LbRule-%s", acctest.RandStringFromCharSet(8, acctest.CharSetAlpha)) - probeName := fmt.Sprintf("LbProbe-%s", acctest.RandStringFromCharSet(8, acctest.CharSetAlpha)) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMDataSourceLoadBalancerRule_complete(data, lbRuleName, backendPoolName, probeName), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(data.ResourceName, "id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "frontend_ip_configuration_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "protocol"), - resource.TestCheckResourceAttrSet(data.ResourceName, "frontend_port"), - resource.TestCheckResourceAttrSet(data.ResourceName, "backend_port"), - 
resource.TestCheckResourceAttrSet(data.ResourceName, "backend_address_pool_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "probe_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "enable_floating_ip"), - resource.TestCheckResourceAttrSet(data.ResourceName, "enable_tcp_reset"), - resource.TestCheckResourceAttrSet(data.ResourceName, "disable_outbound_snat"), - resource.TestCheckResourceAttrSet(data.ResourceName, "idle_timeout_in_minutes"), - resource.TestCheckResourceAttrSet(data.ResourceName, "load_distribution"), - ), - }, - }, - }) -} - -func testAccAzureRMDataSourceLoadBalancerRule_basic(data acceptance.TestData, name string) string { - resource := testAccAzureRMLoadBalancerRule_basic(data, name, "Basic") - return fmt.Sprintf(` -%s - -data "azurerm_lb_rule" "test" { - name = azurerm_lb_rule.test.name - resource_group_name = azurerm_lb_rule.test.resource_group_name - loadbalancer_id = azurerm_lb_rule.test.loadbalancer_id -} -`, resource) -} - -func testAccAzureRMDataSourceLoadBalancerRule_complete(data acceptance.TestData, lbRuleName string, backendPoolName string, probeName string) string { - return fmt.Sprintf(` -%s -resource "azurerm_lb_backend_address_pool" "test" { - name = "%s" - resource_group_name = azurerm_resource_group.test.name - loadbalancer_id = azurerm_lb.test.id -} - -resource "azurerm_lb_probe" "test" { - name = "%s" - resource_group_name = azurerm_resource_group.test.name - loadbalancer_id = azurerm_lb.test.id - protocol = "Tcp" - port = 443 -} - -resource "azurerm_lb_rule" "test" { - name = "%s" - resource_group_name = azurerm_resource_group.test.name - loadbalancer_id = azurerm_lb.test.id - - protocol = "Tcp" - frontend_port = 3389 - backend_port = 3389 - - disable_outbound_snat = true - enable_floating_ip = true - enable_tcp_reset = true - idle_timeout_in_minutes = 10 - - backend_address_pool_id = azurerm_lb_backend_address_pool.test.id - probe_id = azurerm_lb_probe.test.id - - frontend_ip_configuration_name = azurerm_lb.test.frontend_ip_configuration.0.name -} - -data "azurerm_lb_rule" "test" { - name = azurerm_lb_rule.test.name - resource_group_name = azurerm_lb_rule.test.resource_group_name - loadbalancer_id = azurerm_lb_rule.test.loadbalancer_id -} -`, testAccAzureRMLoadBalancerRule_template(data, "Standard"), backendPoolName, probeName, lbRuleName) -} diff --git a/azurerm/internal/services/network/tests/loadbalancer_rule_resource_test.go b/azurerm/internal/services/network/tests/loadbalancer_rule_resource_test.go deleted file mode 100644 index 2903b94d26ff..000000000000 --- a/azurerm/internal/services/network/tests/loadbalancer_rule_resource_test.go +++ /dev/null @@ -1,569 +0,0 @@ -package tests - -import ( - "fmt" - "os" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-03-01/network" - "github.com/hashicorp/terraform-plugin-sdk/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - nw "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network" -) - -func TestResourceAzureRMLoadBalancerRuleNameLabel_validation(t *testing.T) { - cases := []struct { - Value string - ErrCount int - }{ - { - Value: "-word", - ErrCount: 1, - }, - { - Value: 
"testing-", - ErrCount: 1, - }, - { - Value: "test#test", - ErrCount: 1, - }, - { - Value: acctest.RandStringFromCharSet(81, "abcdedfed"), - ErrCount: 1, - }, - { - Value: "test.rule", - ErrCount: 0, - }, - { - Value: "test_rule", - ErrCount: 0, - }, - { - Value: "test-rule", - ErrCount: 0, - }, - { - Value: "TestRule", - ErrCount: 0, - }, - { - Value: "Test123Rule", - ErrCount: 0, - }, - { - Value: "TestRule", - ErrCount: 0, - }, - } - - for _, tc := range cases { - _, errors := nw.ValidateArmLoadBalancerRuleName(tc.Value, "azurerm_lb_rule") - - if len(errors) != tc.ErrCount { - t.Fatalf("Expected the Azure RM Load Balancer Rule Name Label to trigger a validation error") - } - } -} - -func TestAccAzureRMLoadBalancerRule_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_rule", "test") - var lb network.LoadBalancer - lbRuleName := fmt.Sprintf("LbRule-%s", acctest.RandStringFromCharSet(8, acctest.CharSetAlpha)) - - subscriptionID := os.Getenv("ARM_SUBSCRIPTION_ID") - lbRule_id := fmt.Sprintf( - "/subscriptions/%s/resourceGroups/acctestRG-lb-%d/providers/Microsoft.Network/loadBalancers/arm-test-loadbalancer-%d/loadBalancingRules/%s", - subscriptionID, data.RandomInteger, data.RandomInteger, lbRuleName) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerRule_basic(data, lbRuleName, "Basic"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerRuleExists(lbRuleName, &lb), - resource.TestCheckResourceAttr( - "azurerm_lb_rule.test", "id", lbRule_id), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMLoadBalancerRule_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_rule", "test") - var lb network.LoadBalancer - lbRuleName := fmt.Sprintf("LbRule-%s", acctest.RandStringFromCharSet(8, acctest.CharSetAlpha)) - - subscriptionID := os.Getenv("ARM_SUBSCRIPTION_ID") - lbRule_id := fmt.Sprintf( - "/subscriptions/%s/resourceGroups/acctestRG-lb-%d/providers/Microsoft.Network/loadBalancers/arm-test-loadbalancer-%d/loadBalancingRules/%s", - subscriptionID, data.RandomInteger, data.RandomInteger, lbRuleName) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerRule_complete(data, lbRuleName), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerRuleExists(lbRuleName, &lb), - resource.TestCheckResourceAttr( - "azurerm_lb_rule.test", "id", lbRule_id), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMLoadBalancerRule_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_rule", "test") - var lb network.LoadBalancer - lbRuleName := fmt.Sprintf("LbRule-%s", acctest.RandStringFromCharSet(8, acctest.CharSetAlpha)) - - subscriptionID := os.Getenv("ARM_SUBSCRIPTION_ID") - lbRule_id := fmt.Sprintf( - "/subscriptions/%s/resourceGroups/acctestRG-lb-%d/providers/Microsoft.Network/loadBalancers/arm-test-loadbalancer-%d/loadBalancingRules/%s", - subscriptionID, data.RandomInteger, data.RandomInteger, lbRuleName) - - resource.ParallelTest(t, resource.TestCase{ - 
PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerRule_basic(data, lbRuleName, "Basic"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerRuleExists(lbRuleName, &lb), - resource.TestCheckResourceAttr( - "azurerm_lb_rule.test", "id", lbRule_id), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMLoadBalancerRule_complete(data, lbRuleName), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerRuleExists(lbRuleName, &lb), - resource.TestCheckResourceAttr( - "azurerm_lb_rule.test", "id", lbRule_id), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMLoadBalancerRule_basic(data, lbRuleName, "Basic"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerRuleExists(lbRuleName, &lb), - resource.TestCheckResourceAttr( - "azurerm_lb_rule.test", "id", lbRule_id), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMLoadBalancerRule_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb", "test") - - var lb network.LoadBalancer - lbRuleName := fmt.Sprintf("LbRule-%s", acctest.RandStringFromCharSet(8, acctest.CharSetAlpha)) - - subscriptionID := os.Getenv("ARM_SUBSCRIPTION_ID") - lbRule_id := fmt.Sprintf( - "/subscriptions/%s/resourceGroups/acctestRG-lb-%d/providers/Microsoft.Network/loadBalancers/arm-test-loadbalancer-%d/loadBalancingRules/%s", - subscriptionID, data.RandomInteger, data.RandomInteger, lbRuleName) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerRule_basic(data, lbRuleName, "Basic"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerRuleExists(lbRuleName, &lb), - resource.TestCheckResourceAttr( - "azurerm_lb_rule.test", "id", lbRule_id), - ), - }, - { - Config: testAccAzureRMLoadBalancerRule_requiresImport(data, lbRuleName), - ExpectError: acceptance.RequiresImportError("azurerm_lb_rule"), - }, - }, - }) -} - -func TestAccAzureRMLoadBalancerRule_removal(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_rule", "test") - var lb network.LoadBalancer - lbRuleName := fmt.Sprintf("LbRule-%s", acctest.RandStringFromCharSet(8, acctest.CharSetAlpha)) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerRule_basic(data, lbRuleName, "Basic"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerRuleExists(lbRuleName, &lb), - ), - }, - { - Config: testAccAzureRMLoadBalancerRule_template(data, "Basic"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerRuleNotExists(lbRuleName, &lb), - ), - }, - }, - }) -} - -// https://github.com/hashicorp/terraform/issues/9424 -func 
TestAccAzureRMLoadBalancerRule_inconsistentReads(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_rule", "test") - var lb network.LoadBalancer - backendPoolName := fmt.Sprintf("LbPool-%s", acctest.RandStringFromCharSet(8, acctest.CharSetAlpha)) - lbRuleName := fmt.Sprintf("LbRule-%s", acctest.RandStringFromCharSet(8, acctest.CharSetAlpha)) - probeName := fmt.Sprintf("LbProbe-%s", acctest.RandStringFromCharSet(8, acctest.CharSetAlpha)) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerRule_inconsistentRead(data, backendPoolName, probeName, lbRuleName), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerBackEndAddressPoolExists(backendPoolName, &lb), - testCheckAzureRMLoadBalancerRuleExists(lbRuleName, &lb), - testCheckAzureRMLoadBalancerProbeExists(probeName, &lb), - ), - }, - }, - }) -} - -func TestAccAzureRMLoadBalancerRule_updateMultipleRules(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb_rule", "test") - - var lb network.LoadBalancer - lbRuleName := fmt.Sprintf("LbRule-%s", acctest.RandStringFromCharSet(8, acctest.CharSetAlpha)) - lbRule2Name := fmt.Sprintf("LbRule-%s", acctest.RandStringFromCharSet(8, acctest.CharSetAlpha)) - - subscriptionID := os.Getenv("ARM_SUBSCRIPTION_ID") - lbRuleID := fmt.Sprintf( - "/subscriptions/%s/resourceGroups/acctestRG-lb-%d/providers/Microsoft.Network/loadBalancers/arm-test-loadbalancer-%d/loadBalancingRules/%s", - subscriptionID, data.RandomInteger, data.RandomInteger, lbRuleName) - - lbRule2ID := fmt.Sprintf( - "/subscriptions/%s/resourceGroups/acctestRG-lb-%d/providers/Microsoft.Network/loadBalancers/arm-test-loadbalancer-%d/loadBalancingRules/%s", - subscriptionID, data.RandomInteger, data.RandomInteger, lbRule2Name) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerRule_multipleRules(data, lbRuleName, lbRule2Name), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerRuleExists(lbRuleName, &lb), - testCheckAzureRMLoadBalancerRuleExists(lbRule2Name, &lb), - resource.TestCheckResourceAttr("azurerm_lb_rule.test", "id", lbRuleID), - resource.TestCheckResourceAttr("azurerm_lb_rule.test2", "id", lbRule2ID), - resource.TestCheckResourceAttr("azurerm_lb_rule.test2", "frontend_port", "3390"), - resource.TestCheckResourceAttr("azurerm_lb_rule.test2", "backend_port", "3390"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMLoadBalancerRule_multipleRulesUpdate(data, lbRuleName, lbRule2Name), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerRuleExists(lbRuleName, &lb), - testCheckAzureRMLoadBalancerRuleExists(lbRule2Name, &lb), - resource.TestCheckResourceAttr("azurerm_lb_rule.test", "id", lbRuleID), - resource.TestCheckResourceAttr("azurerm_lb_rule.test2", "id", lbRule2ID), - resource.TestCheckResourceAttr("azurerm_lb_rule.test2", "frontend_port", "3391"), - resource.TestCheckResourceAttr("azurerm_lb_rule.test2", "backend_port", "3391"), - ), - }, - 
data.ImportStep(), - }, - }) -} - -func TestAccAzureRMLoadBalancerRule_disappears(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_lb", "test") - - var lb network.LoadBalancer - lbRuleName := fmt.Sprintf("LbRule-%s", acctest.RandStringFromCharSet(8, acctest.CharSetAlpha)) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMLoadBalancerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMLoadBalancerRule_basic(data, lbRuleName, "Basic"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMLoadBalancerExists("azurerm_lb.test", &lb), - testCheckAzureRMLoadBalancerRuleExists(lbRuleName, &lb), - testCheckAzureRMLoadBalancerRuleDisappears(lbRuleName, &lb), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func testCheckAzureRMLoadBalancerRuleExists(lbRuleName string, lb *network.LoadBalancer) resource.TestCheckFunc { - return func(s *terraform.State) error { - _, _, exists := nw.FindLoadBalancerRuleByName(lb, lbRuleName) - if !exists { - return fmt.Errorf("A Load Balancer Rule with name %q cannot be found.", lbRuleName) - } - - return nil - } -} - -func testCheckAzureRMLoadBalancerRuleNotExists(lbRuleName string, lb *network.LoadBalancer) resource.TestCheckFunc { - return func(s *terraform.State) error { - _, _, exists := nw.FindLoadBalancerRuleByName(lb, lbRuleName) - if exists { - return fmt.Errorf("A Load Balancer Rule with name %q has been found.", lbRuleName) - } - - return nil - } -} - -func testCheckAzureRMLoadBalancerRuleDisappears(ruleName string, lb *network.LoadBalancer) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Network.LoadBalancersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - _, i, exists := nw.FindLoadBalancerRuleByName(lb, ruleName) - if !exists { - return fmt.Errorf("A Rule with name %q cannot be found.", ruleName) - } - - currentRules := *lb.LoadBalancerPropertiesFormat.LoadBalancingRules - rules := append(currentRules[:i], currentRules[i+1:]...) 
- lb.LoadBalancerPropertiesFormat.LoadBalancingRules = &rules - - id, err := azure.ParseAzureResourceID(*lb.ID) - if err != nil { - return err - } - - future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, *lb.Name, *lb) - if err != nil { - return fmt.Errorf("Error Creating/Updating Load Balancer %q (Resource Group %q): %+v", *lb.Name, id.ResourceGroup, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for completion of Load Balancer %q (Resource Group %q): %+v", *lb.Name, id.ResourceGroup, err) - } - - _, err = client.Get(ctx, id.ResourceGroup, *lb.Name, "") - return err - } -} - -func testAccAzureRMLoadBalancerRule_template(data acceptance.TestData, sku string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-lb-%[1]d" - location = "%[2]s" -} - -resource "azurerm_public_ip" "test" { - name = "test-ip-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Static" - sku = "%[3]s" -} - -resource "azurerm_lb" "test" { - name = "arm-test-loadbalancer-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "%[3]s" - - frontend_ip_configuration { - name = "one-%[1]d" - public_ip_address_id = azurerm_public_ip.test.id - } -} -`, data.RandomInteger, data.Locations.Primary, sku) -} - -// nolint: unparam -func testAccAzureRMLoadBalancerRule_basic(data acceptance.TestData, lbRuleName, sku string) string { - return fmt.Sprintf(` -%s - -resource "azurerm_lb_rule" "test" { - resource_group_name = "${azurerm_resource_group.test.name}" - loadbalancer_id = "${azurerm_lb.test.id}" - name = "%s" - protocol = "Tcp" - frontend_port = 3389 - backend_port = 3389 - frontend_ip_configuration_name = azurerm_lb.test.frontend_ip_configuration.0.name -} -`, testAccAzureRMLoadBalancerRule_template(data, sku), lbRuleName) -} - -func testAccAzureRMLoadBalancerRule_complete(data acceptance.TestData, lbRuleName string) string { - return fmt.Sprintf(` -%s - -resource "azurerm_lb_rule" "test" { - name = "%s" - resource_group_name = "${azurerm_resource_group.test.name}" - loadbalancer_id = "${azurerm_lb.test.id}" - - protocol = "Tcp" - frontend_port = 3389 - backend_port = 3389 - - disable_outbound_snat = true - enable_floating_ip = true - enable_tcp_reset = true - idle_timeout_in_minutes = 10 - - frontend_ip_configuration_name = azurerm_lb.test.frontend_ip_configuration.0.name -} -`, testAccAzureRMLoadBalancerRule_template(data, "Standard"), lbRuleName) -} - -func testAccAzureRMLoadBalancerRule_requiresImport(data acceptance.TestData, name string) string { - template := testAccAzureRMLoadBalancerRule_basic(data, name, "Basic") - return fmt.Sprintf(` -%s - -resource "azurerm_lb_rule" "import" { - name = azurerm_lb_rule.test.name - resource_group_name = azurerm_lb_rule.test.resource_group_name - loadbalancer_id = azurerm_lb_rule.test.loadbalancer_id - frontend_ip_configuration_name = azurerm_lb_rule.test.frontend_ip_configuration_name - protocol = "Tcp" - frontend_port = 3389 - backend_port = 3389 -} -`, template) -} - -// https://github.com/hashicorp/terraform/issues/9424 -func testAccAzureRMLoadBalancerRule_inconsistentRead(data acceptance.TestData, backendPoolName, probeName, lbRuleName string) string { - return fmt.Sprintf(` -%s - -resource "azurerm_lb_backend_address_pool" "teset" { - name = "%s" - 
resource_group_name = "${azurerm_resource_group.test.name}" - loadbalancer_id = "${azurerm_lb.test.id}" -} - -resource "azurerm_lb_probe" "test" { - name = "%s" - resource_group_name = "${azurerm_resource_group.test.name}" - loadbalancer_id = "${azurerm_lb.test.id}" - protocol = "Tcp" - port = 443 -} - -resource "azurerm_lb_rule" "test" { - name = "%s" - resource_group_name = "${azurerm_resource_group.test.name}" - loadbalancer_id = "${azurerm_lb.test.id}" - protocol = "Tcp" - frontend_port = 3389 - backend_port = 3389 - frontend_ip_configuration_name = azurerm_lb.test.frontend_ip_configuration.0.name -} -`, testAccAzureRMLoadBalancerRule_template(data, "Basic"), backendPoolName, probeName, lbRuleName) -} - -func testAccAzureRMLoadBalancerRule_multipleRules(data acceptance.TestData, lbRuleName, lbRule2Name string) string { - return fmt.Sprintf(` -%s - -resource "azurerm_lb_rule" "test" { - resource_group_name = "${azurerm_resource_group.test.name}" - loadbalancer_id = "${azurerm_lb.test.id}" - name = "%s" - protocol = "Udp" - frontend_port = 3389 - backend_port = 3389 - frontend_ip_configuration_name = azurerm_lb.test.frontend_ip_configuration.0.name -} - -resource "azurerm_lb_rule" "test2" { - resource_group_name = "${azurerm_resource_group.test.name}" - loadbalancer_id = "${azurerm_lb.test.id}" - name = "%s" - protocol = "Udp" - frontend_port = 3390 - backend_port = 3390 - frontend_ip_configuration_name = azurerm_lb.test.frontend_ip_configuration.0.name -} -`, testAccAzureRMLoadBalancerRule_template(data, "Basic"), lbRuleName, lbRule2Name) -} - -func testAccAzureRMLoadBalancerRule_multipleRulesUpdate(data acceptance.TestData, lbRuleName, lbRule2Name string) string { - return fmt.Sprintf(` -%s - -resource "azurerm_lb_rule" "test" { - resource_group_name = "${azurerm_resource_group.test.name}" - loadbalancer_id = "${azurerm_lb.test.id}" - name = "%s" - protocol = "Udp" - frontend_port = 3389 - backend_port = 3389 - frontend_ip_configuration_name = azurerm_lb.test.frontend_ip_configuration.0.name -} - -resource "azurerm_lb_rule" "test2" { - resource_group_name = "${azurerm_resource_group.test.name}" - loadbalancer_id = "${azurerm_lb.test.id}" - name = "%s" - protocol = "Udp" - frontend_port = 3391 - backend_port = 3391 - frontend_ip_configuration_name = azurerm_lb.test.frontend_ip_configuration.0.name -} -`, testAccAzureRMLoadBalancerRule_template(data, "Basic"), lbRuleName, lbRule2Name) -} diff --git a/azurerm/internal/services/network/tests/local_network_gateway_resource_test.go b/azurerm/internal/services/network/tests/local_network_gateway_resource_test.go index 9ac2df6dbce3..acefbe73b66e 100644 --- a/azurerm/internal/services/network/tests/local_network_gateway_resource_test.go +++ b/azurerm/internal/services/network/tests/local_network_gateway_resource_test.go @@ -236,6 +236,32 @@ func TestAccAzureRMLocalNetworkGateway_updateAddressSpace(t *testing.T) { }) } +func TestAccAzureRMLocalNetworkGateway_fqdn(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_local_network_gateway", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMLocalNetworkGatewayDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMLocalNetworkGatewayConfig_fqdn(data, "www.foo.com"), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMLocalNetworkGatewayExists(data.ResourceName), + ), + }, + data.ImportStep(), + { + Config: 
testAccAzureRMLocalNetworkGatewayConfig_fqdn(data, "www.bar.com"), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMLocalNetworkGatewayExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + // testCheckAzureRMLocalNetworkGatewayExists returns the resource.TestCheckFunc // which checks whether or not the expected local network gateway exists both // in the schema, and on Azure. @@ -325,7 +351,6 @@ func testCheckAzureRMLocalNetworkGatewayDestroy(s *terraform.State) error { resourceGroup := id.ResourceGroup resp, err := client.Get(ctx, resourceGroup, localNetName) - if err != nil { if utils.ResponseWasNotFound(resp.Response) { return nil @@ -495,3 +520,24 @@ resource "azurerm_local_network_gateway" "test" { } `, data.RandomInteger, data.Locations.Primary, data.RandomInteger) } + +func testAccAzureRMLocalNetworkGatewayConfig_fqdn(data acceptance.TestData, fqdn string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-network-%d" + location = "%s" +} + +resource "azurerm_local_network_gateway" "test" { + name = "acctestlng-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + gateway_fqdn = %q + address_space = ["127.0.0.0/8"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, fqdn) +} diff --git a/azurerm/internal/services/network/tests/network_connection_monitor_resource_test.go b/azurerm/internal/services/network/tests/network_connection_monitor_resource_test.go index 0da2b740393d..43d85845ea16 100644 --- a/azurerm/internal/services/network/tests/network_connection_monitor_resource_test.go +++ b/azurerm/internal/services/network/tests/network_connection_monitor_resource_test.go @@ -8,9 +8,9 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" ) func testAccAzureRMNetworkConnectionMonitor_addressBasic(t *testing.T) { @@ -25,10 +25,6 @@ func testAccAzureRMNetworkConnectionMonitor_addressBasic(t *testing.T) { Config: testAccAzureRMNetworkConnectionMonitor_basicAddressConfig(data), Check: resource.ComposeTestCheckFunc( testCheckAzureRMNetworkConnectionMonitorExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "resource_group_name"), - resource.TestCheckResourceAttr(data.ResourceName, "location", azure.NormalizeLocation(data.Locations.Primary)), - resource.TestCheckResourceAttr(data.ResourceName, "auto_start", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "interval_in_seconds", "60"), ), }, data.ImportStep(), @@ -61,22 +57,15 @@ func testAccAzureRMNetworkConnectionMonitor_requiresImport(t *testing.T) { func testAccAzureRMNetworkConnectionMonitor_addressComplete(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_network_connection_monitor", "test") - autoStart := "false" - resource.Test(t, resource.TestCase{ PreCheck: func() { acceptance.PreCheck(t) }, Providers: acceptance.SupportedProviders, CheckDestroy: testCheckAzureRMNetworkConnectionMonitorDestroy, Steps: []resource.TestStep{ { - Config: 
testAccAzureRMNetworkConnectionMonitor_completeAddressConfig(data, autoStart), + Config: testAccAzureRMNetworkConnectionMonitor_completeAddressConfig(data), Check: resource.ComposeTestCheckFunc( testCheckAzureRMNetworkConnectionMonitorExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "auto_start", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "interval_in_seconds", "30"), - resource.TestCheckResourceAttr(data.ResourceName, "source.0.port", "20020"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.env", "test"), ), }, data.ImportStep(), @@ -87,8 +76,6 @@ func testAccAzureRMNetworkConnectionMonitor_addressComplete(t *testing.T) { func testAccAzureRMNetworkConnectionMonitor_addressUpdate(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_network_connection_monitor", "test") - autoStart := "true" - resource.Test(t, resource.TestCase{ PreCheck: func() { acceptance.PreCheck(t) }, Providers: acceptance.SupportedProviders, @@ -101,14 +88,9 @@ func testAccAzureRMNetworkConnectionMonitor_addressUpdate(t *testing.T) { ), }, { - Config: testAccAzureRMNetworkConnectionMonitor_completeAddressConfig(data, autoStart), + Config: testAccAzureRMNetworkConnectionMonitor_completeAddressConfig(data), Check: resource.ComposeTestCheckFunc( testCheckAzureRMNetworkConnectionMonitorExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "auto_start", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "interval_in_seconds", "30"), - resource.TestCheckResourceAttr(data.ResourceName, "source.0.port", "20020"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.env", "test"), ), }, data.ImportStep(), @@ -128,10 +110,6 @@ func testAccAzureRMNetworkConnectionMonitor_vmBasic(t *testing.T) { Config: testAccAzureRMNetworkConnectionMonitor_basicVmConfig(data), Check: resource.ComposeTestCheckFunc( testCheckAzureRMNetworkConnectionMonitorExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "resource_group_name"), - resource.TestCheckResourceAttr(data.ResourceName, "location", azure.NormalizeLocation(data.Locations.Primary)), - resource.TestCheckResourceAttr(data.ResourceName, "auto_start", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "interval_in_seconds", "60"), ), }, data.ImportStep(), @@ -142,22 +120,15 @@ func testAccAzureRMNetworkConnectionMonitor_vmBasic(t *testing.T) { func testAccAzureRMNetworkConnectionMonitor_vmComplete(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_network_connection_monitor", "test") - autoStart := "false" - resource.Test(t, resource.TestCase{ PreCheck: func() { acceptance.PreCheck(t) }, Providers: acceptance.SupportedProviders, CheckDestroy: testCheckAzureRMNetworkConnectionMonitorDestroy, Steps: []resource.TestStep{ { - Config: testAccAzureRMNetworkConnectionMonitor_completeVmConfig(data, autoStart), + Config: testAccAzureRMNetworkConnectionMonitor_completeVmConfig(data), Check: resource.ComposeTestCheckFunc( testCheckAzureRMNetworkConnectionMonitorExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "auto_start", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "interval_in_seconds", "30"), - resource.TestCheckResourceAttr(data.ResourceName, "source.0.port", "20020"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - 
resource.TestCheckResourceAttr(data.ResourceName, "tags.env", "test"), ), }, data.ImportStep(), @@ -180,14 +151,9 @@ func testAccAzureRMNetworkConnectionMonitor_vmUpdate(t *testing.T) { ), }, { - Config: testAccAzureRMNetworkConnectionMonitor_completeVmConfig(data, "true"), + Config: testAccAzureRMNetworkConnectionMonitor_completeVmConfig(data), Check: resource.ComposeTestCheckFunc( testCheckAzureRMNetworkConnectionMonitorExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "auto_start", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "interval_in_seconds", "30"), - resource.TestCheckResourceAttr(data.ResourceName, "source.0.port", "20020"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.env", "test"), ), }, data.ImportStep(), @@ -207,21 +173,18 @@ func testAccAzureRMNetworkConnectionMonitor_destinationUpdate(t *testing.T) { Config: testAccAzureRMNetworkConnectionMonitor_basicAddressConfig(data), Check: resource.ComposeTestCheckFunc( testCheckAzureRMNetworkConnectionMonitorExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "destination.0.address"), ), }, { Config: testAccAzureRMNetworkConnectionMonitor_basicVmConfig(data), Check: resource.ComposeTestCheckFunc( testCheckAzureRMNetworkConnectionMonitorExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "destination.0.virtual_machine_id"), ), }, { Config: testAccAzureRMNetworkConnectionMonitor_basicAddressConfig(data), Check: resource.ComposeTestCheckFunc( testCheckAzureRMNetworkConnectionMonitorExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "destination.0.address"), ), }, data.ImportStep(), @@ -239,7 +202,7 @@ func testAccAzureRMNetworkConnectionMonitor_missingDestination(t *testing.T) { Steps: []resource.TestStep{ { Config: testAccAzureRMNetworkConnectionMonitor_missingDestinationConfig(data), - ExpectError: regexp.MustCompile("Error: either `destination.virtual_machine_id` or `destination.address` must be specified"), + ExpectError: regexp.MustCompile("must have at least 2 endpoints"), }, }, }) @@ -255,8 +218,65 @@ func testAccAzureRMNetworkConnectionMonitor_conflictingDestinations(t *testing.T Steps: []resource.TestStep{ { Config: testAccAzureRMNetworkConnectionMonitor_conflictingDestinationsConfig(data), - ExpectError: regexp.MustCompile("conflicts with destination.0.address"), + ExpectError: regexp.MustCompile("don't allow creating different endpoints for the same VM"), + }, + }, + }) +} + +func testAccAzureRMNetworkConnectionMonitor_withAddressAndVirtualMachineId(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_network_connection_monitor", "test") + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMNetworkConnectionMonitorDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMNetworkConnectionMonitor_withAddressAndVirtualMachineIdConfig(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMNetworkConnectionMonitorExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func testAccAzureRMNetworkConnectionMonitor_httpConfiguration(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_network_connection_monitor", "test") + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + 
CheckDestroy: testCheckAzureRMNetworkConnectionMonitorDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMNetworkConnectionMonitor_httpConfigurationConfig(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMNetworkConnectionMonitorExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func testAccAzureRMNetworkConnectionMonitor_icmpConfiguration(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_network_connection_monitor", "test") + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMNetworkConnectionMonitorDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMNetworkConnectionMonitor_icmpConfigurationConfig(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMNetworkConnectionMonitorExists(data.ResourceName), + ), }, + data.ImportStep(), }, }) } @@ -271,17 +291,18 @@ func testCheckAzureRMNetworkConnectionMonitorExists(resourceName string) resourc return fmt.Errorf("Not found: %s", resourceName) } - resourceGroup := rs.Primary.Attributes["resource_group_name"] - watcherName := rs.Primary.Attributes["network_watcher_name"] - NetworkConnectionMonitorName := rs.Primary.Attributes["name"] + id, err := parse.ConnectionMonitorID(rs.Primary.ID) + if err != nil { + return err + } - resp, err := client.Get(ctx, resourceGroup, watcherName, NetworkConnectionMonitorName) + resp, err := client.Get(ctx, id.ResourceGroup, id.NetworkWatcherName, id.Name) if err != nil { return fmt.Errorf("Bad: Get on NetworkConnectionMonitorsClient: %s", err) } if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Connection Monitor does not exist: %s", NetworkConnectionMonitorName) + return fmt.Errorf("Connection Monitor does not exist: %s", id.Name) } return nil @@ -297,12 +318,12 @@ func testCheckAzureRMNetworkConnectionMonitorDestroy(s *terraform.State) error { continue } - resourceGroup := rs.Primary.Attributes["resource_group_name"] - watcherName := rs.Primary.Attributes["network_watcher_name"] - NetworkConnectionMonitorName := rs.Primary.Attributes["name"] - - resp, err := client.Get(ctx, resourceGroup, watcherName, NetworkConnectionMonitorName) + id, err := parse.ConnectionMonitorID(rs.Primary.ID) + if err != nil { + return err + } + resp, err := client.Get(ctx, id.ResourceGroup, id.NetworkWatcherName, id.Name) if err != nil { return nil } @@ -322,18 +343,18 @@ provider "azurerm" { } resource "azurerm_resource_group" "test" { - name = "acctestRG-watcher-%d" + name = "acctestRG-Watcher-%d" location = "%s" } resource "azurerm_network_watcher" "test" { - name = "acctnw-%d" + name = "acctest-Watcher-%d" location = azurerm_resource_group.test.location resource_group_name = azurerm_resource_group.test.name } resource "azurerm_virtual_network" "test" { - name = "acctvn-%d" + name = "acctest-Vnet-%d" address_space = ["10.0.0.0/16"] location = azurerm_resource_group.test.location resource_group_name = azurerm_resource_group.test.name @@ -347,7 +368,7 @@ resource "azurerm_subnet" "test" { } resource "azurerm_network_interface" "src" { - name = "acctni-src%d" + name = "acctest-SrcNIC-%d" location = azurerm_resource_group.test.location resource_group_name = azurerm_resource_group.test.name @@ -359,7 +380,7 @@ resource "azurerm_network_interface" "src" { } resource "azurerm_virtual_machine" "src" { - name = "acctvm-src%d" + name = "acctest-SrcVM-%d" location = azurerm_resource_group.test.location resource_group_name = 
azurerm_resource_group.test.name network_interface_ids = [azurerm_network_interface.src.id] @@ -391,7 +412,7 @@ resource "azurerm_virtual_machine" "src" { } resource "azurerm_virtual_machine_extension" "src" { - name = "network-watcher" + name = "acctest-VMExtension" virtual_machine_id = azurerm_virtual_machine.src.id publisher = "Microsoft.Azure.NetworkWatcher" type = "NetworkWatcherAgentLinux" @@ -407,7 +428,7 @@ func testAccAzureRMNetworkConnectionMonitor_baseWithDestConfig(data acceptance.T %s resource "azurerm_network_interface" "dest" { - name = "acctni-dest%d" + name = "acctest-DestNic-%d" location = azurerm_resource_group.test.location resource_group_name = azurerm_resource_group.test.name @@ -419,7 +440,7 @@ resource "azurerm_network_interface" "dest" { } resource "azurerm_virtual_machine" "dest" { - name = "acctvm-dest%d" + name = "acctest-DestVM-%d" location = azurerm_resource_group.test.location resource_group_name = azurerm_resource_group.test.name network_interface_ids = [azurerm_network_interface.dest.id] @@ -458,18 +479,34 @@ func testAccAzureRMNetworkConnectionMonitor_basicAddressConfig(data acceptance.T %s resource "azurerm_network_connection_monitor" "test" { - name = "acctestcm-%d" - network_watcher_name = azurerm_network_watcher.test.name - resource_group_name = azurerm_resource_group.test.name - location = azurerm_network_watcher.test.location + name = "acctest-CM-%d" + network_watcher_id = azurerm_network_watcher.test.id + location = azurerm_network_watcher.test.location - source { + endpoint { + name = "source" virtual_machine_id = azurerm_virtual_machine.src.id } - destination { + endpoint { + name = "destination" address = "terraform.io" - port = 80 + } + + test_configuration { + name = "tcp" + protocol = "Tcp" + + tcp_configuration { + port = 80 + } + } + + test_group { + name = "testtg" + destination_endpoints = ["destination"] + source_endpoints = ["source"] + test_configuration_names = ["tcp"] } depends_on = [azurerm_virtual_machine_extension.src] @@ -477,37 +514,77 @@ resource "azurerm_network_connection_monitor" "test" { `, config, data.RandomInteger) } -func testAccAzureRMNetworkConnectionMonitor_completeAddressConfig(data acceptance.TestData, autoStart string) string { +func testAccAzureRMNetworkConnectionMonitor_completeAddressConfig(data acceptance.TestData) string { config := testAccAzureRMNetworkConnectionMonitor_baseConfig(data) return fmt.Sprintf(` %s -resource "azurerm_network_connection_monitor" "test" { - name = "acctestcm-%d" - network_watcher_name = azurerm_network_watcher.test.name - resource_group_name = azurerm_resource_group.test.name - location = azurerm_network_watcher.test.location +resource "azurerm_log_analytics_workspace" "test" { + name = "acctest-LAW-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "pergb2018" +} - auto_start = %s - interval_in_seconds = 30 +resource "azurerm_network_connection_monitor" "test" { + name = "acctest-CM-%d" + network_watcher_id = azurerm_network_watcher.test.id + location = azurerm_network_watcher.test.location - source { + endpoint { + name = "source" virtual_machine_id = azurerm_virtual_machine.src.id - port = 20020 + + filter { + item { + address = azurerm_virtual_machine.src.id + type = "AgentAddress" + } + + type = "Include" + } } - destination { + endpoint { + name = "destination" address = "terraform.io" - port = 443 } + test_configuration { + name = "tcp" + protocol = "Tcp" + test_frequency_in_seconds = 40 + 
preferred_ip_version = "IPv4" + + tcp_configuration { + port = 80 + } + + success_threshold { + checks_failed_percent = 50 + round_trip_time_ms = 40 + } + } + + test_group { + name = "testtg" + destination_endpoints = ["destination"] + source_endpoints = ["source"] + test_configuration_names = ["tcp"] + enabled = true + } + + notes = "testNote" + + output_workspace_resource_ids = [azurerm_log_analytics_workspace.test.id] + tags = { - env = "test" + ENv = "Test" } depends_on = [azurerm_virtual_machine_extension.src] } -`, config, data.RandomInteger, autoStart) +`, config, data.RandomInteger, data.RandomInteger) } func testAccAzureRMNetworkConnectionMonitor_basicVmConfig(data acceptance.TestData) string { @@ -516,18 +593,34 @@ func testAccAzureRMNetworkConnectionMonitor_basicVmConfig(data acceptance.TestDa %s resource "azurerm_network_connection_monitor" "test" { - name = "acctestcm-%d" - network_watcher_name = azurerm_network_watcher.test.name - resource_group_name = azurerm_resource_group.test.name - location = azurerm_network_watcher.test.location + name = "acctest-CM-%d" + network_watcher_id = azurerm_network_watcher.test.id + location = azurerm_network_watcher.test.location - source { + endpoint { + name = "source" virtual_machine_id = azurerm_virtual_machine.src.id } - destination { + endpoint { + name = "destination" virtual_machine_id = azurerm_virtual_machine.dest.id - port = 80 + } + + test_configuration { + name = "tcp" + protocol = "Tcp" + + tcp_configuration { + port = 80 + } + } + + test_group { + name = "testtg" + destination_endpoints = ["destination"] + source_endpoints = ["source"] + test_configuration_names = ["tcp"] } depends_on = [azurerm_virtual_machine_extension.src] @@ -535,37 +628,102 @@ resource "azurerm_network_connection_monitor" "test" { `, config, data.RandomInteger) } -func testAccAzureRMNetworkConnectionMonitor_completeVmConfig(data acceptance.TestData, autoStart string) string { +func testAccAzureRMNetworkConnectionMonitor_withAddressAndVirtualMachineIdConfig(data acceptance.TestData) string { config := testAccAzureRMNetworkConnectionMonitor_baseWithDestConfig(data) return fmt.Sprintf(` %s resource "azurerm_network_connection_monitor" "test" { - name = "acctestcm-%d" - network_watcher_name = azurerm_network_watcher.test.name - resource_group_name = azurerm_resource_group.test.name - location = azurerm_network_watcher.test.location + name = "acctest-CM-%d" + network_watcher_id = azurerm_network_watcher.test.id + location = azurerm_network_watcher.test.location - auto_start = %s - interval_in_seconds = 30 + endpoint { + name = "source" + virtual_machine_id = azurerm_virtual_machine.src.id + } + + endpoint { + name = "destination" + virtual_machine_id = azurerm_virtual_machine.dest.id + address = azurerm_network_interface.dest.private_ip_address + } + + test_configuration { + name = "tcp" + protocol = "Tcp" + + tcp_configuration { + port = 80 + } + } + + test_group { + name = "testtg" + destination_endpoints = ["destination"] + source_endpoints = ["source"] + test_configuration_names = ["tcp"] + } + + depends_on = [azurerm_virtual_machine_extension.src] +} +`, config, data.RandomInteger) +} - source { +func testAccAzureRMNetworkConnectionMonitor_completeVmConfig(data acceptance.TestData) string { + config := testAccAzureRMNetworkConnectionMonitor_baseWithDestConfig(data) + return fmt.Sprintf(` +%s + +resource "azurerm_network_connection_monitor" "test" { + name = "acctest-CM-%d" + network_watcher_id = azurerm_network_watcher.test.id + location = 
azurerm_network_watcher.test.location + + endpoint { + name = "source" virtual_machine_id = azurerm_virtual_machine.src.id - port = 20020 + + filter { + item { + address = azurerm_virtual_machine.src.id + type = "AgentAddress" + } + + type = "Include" + } } - destination { + endpoint { + name = "destination" virtual_machine_id = azurerm_virtual_machine.dest.id - port = 443 + } + + test_configuration { + name = "tcp" + protocol = "Tcp" + test_frequency_in_seconds = 40 + + tcp_configuration { + port = 80 + } + } + + test_group { + name = "testtg" + destination_endpoints = ["destination"] + source_endpoints = ["source"] + test_configuration_names = ["tcp"] + enabled = true } tags = { - env = "test" + ENv = "Test" } depends_on = [azurerm_virtual_machine_extension.src] } -`, config, data.RandomInteger, autoStart) +`, config, data.RandomInteger) } func testAccAzureRMNetworkConnectionMonitor_missingDestinationConfig(data acceptance.TestData) string { @@ -574,17 +732,29 @@ func testAccAzureRMNetworkConnectionMonitor_missingDestinationConfig(data accept %s resource "azurerm_network_connection_monitor" "test" { - name = "acctestcm-%d" - network_watcher_name = azurerm_network_watcher.test.name - resource_group_name = azurerm_resource_group.test.name - location = azurerm_network_watcher.test.location + name = "acctest-CM-%d" + network_watcher_id = azurerm_network_watcher.test.id + location = azurerm_network_watcher.test.location - source { + endpoint { + name = "source" virtual_machine_id = azurerm_virtual_machine.src.id } - destination { - port = 80 + test_configuration { + name = "tcp" + protocol = "Tcp" + + tcp_configuration { + port = 80 + } + } + + test_group { + name = "testtg" + destination_endpoints = [] + source_endpoints = ["source"] + test_configuration_names = ["tcp"] } depends_on = [azurerm_virtual_machine_extension.src] @@ -598,19 +768,34 @@ func testAccAzureRMNetworkConnectionMonitor_conflictingDestinationsConfig(data a %s resource "azurerm_network_connection_monitor" "test" { - name = "acctestcm-%d" - network_watcher_name = azurerm_network_watcher.test.name - resource_group_name = azurerm_resource_group.test.name - location = azurerm_network_watcher.test.location + name = "acctest-CM-%d" + network_watcher_id = azurerm_network_watcher.test.id + location = azurerm_network_watcher.test.location - source { + endpoint { + name = "source" virtual_machine_id = azurerm_virtual_machine.src.id } - destination { - address = "terraform.io" + endpoint { + name = "destination" virtual_machine_id = azurerm_virtual_machine.src.id - port = 80 + } + + test_configuration { + name = "tcp" + protocol = "Tcp" + + tcp_configuration { + port = 80 + } + } + + test_group { + name = "testtg" + destination_endpoints = ["destination"] + source_endpoints = ["source"] + test_configuration_names = ["tcp"] } depends_on = [azurerm_virtual_machine_extension.src] @@ -624,21 +809,128 @@ func testAccAzureRMNetworkConnectionMonitor_requiresImportConfig(data acceptance %s resource "azurerm_network_connection_monitor" "import" { - name = azurerm_network_connection_monitor.test.name - network_watcher_name = azurerm_network_connection_monitor.test.network_watcher_name - resource_group_name = azurerm_network_connection_monitor.test.resource_group_name - location = azurerm_network_connection_monitor.test.location + name = azurerm_network_connection_monitor.test.name + network_watcher_id = azurerm_network_connection_monitor.test.network_watcher_id + location = azurerm_network_connection_monitor.test.location - source { + endpoint 
{ + name = "source" virtual_machine_id = azurerm_virtual_machine.src.id } - destination { + endpoint { + name = "destination" address = "terraform.io" - port = 80 + } + + test_configuration { + name = "tcp" + protocol = "Tcp" + + tcp_configuration { + port = 80 + } + } + + test_group { + name = "testtg" + destination_endpoints = ["destination"] + source_endpoints = ["source"] + test_configuration_names = ["tcp"] } depends_on = [azurerm_virtual_machine_extension.src] } `, config) } + +func testAccAzureRMNetworkConnectionMonitor_httpConfigurationConfig(data acceptance.TestData) string { + config := testAccAzureRMNetworkConnectionMonitor_baseConfig(data) + return fmt.Sprintf(` +%s + +resource "azurerm_network_connection_monitor" "test" { + name = "acctest-CM-%d" + network_watcher_id = azurerm_network_watcher.test.id + location = azurerm_network_watcher.test.location + + endpoint { + name = "source" + virtual_machine_id = azurerm_virtual_machine.src.id + } + + endpoint { + name = "destination" + address = "terraform.io" + } + + test_configuration { + name = "tcp" + protocol = "Http" + + http_configuration { + method = "Get" + port = 80 + path = "/a/b" + prefer_https = false + valid_status_code_ranges = ["200"] + + request_header { + name = "testHeader" + value = "testVal" + } + } + } + + test_group { + name = "testtg" + destination_endpoints = ["destination"] + source_endpoints = ["source"] + test_configuration_names = ["tcp"] + } + + depends_on = [azurerm_virtual_machine_extension.src] +} +`, config, data.RandomInteger) +} + +func testAccAzureRMNetworkConnectionMonitor_icmpConfigurationConfig(data acceptance.TestData) string { + config := testAccAzureRMNetworkConnectionMonitor_baseConfig(data) + return fmt.Sprintf(` +%s + +resource "azurerm_network_connection_monitor" "test" { + name = "acctest-CM-%d" + network_watcher_id = azurerm_network_watcher.test.id + location = azurerm_network_watcher.test.location + + endpoint { + name = "source" + virtual_machine_id = azurerm_virtual_machine.src.id + } + + endpoint { + name = "destination" + address = "terraform.io" + } + + test_configuration { + name = "tcp" + protocol = "Icmp" + + icmp_configuration { + trace_route_enabled = true + } + } + + test_group { + name = "testtg" + destination_endpoints = ["destination"] + source_endpoints = ["source"] + test_configuration_names = ["tcp"] + } + + depends_on = [azurerm_virtual_machine_extension.src] +} +`, config, data.RandomInteger) +} diff --git a/azurerm/internal/services/network/tests/network_packet_capture_resource_test.go b/azurerm/internal/services/network/tests/network_packet_capture_resource_test.go index 79323bb48970..6308b3be9782 100644 --- a/azurerm/internal/services/network/tests/network_packet_capture_resource_test.go +++ b/azurerm/internal/services/network/tests/network_packet_capture_resource_test.go @@ -51,6 +51,7 @@ func testAccAzureRMNetworkPacketCapture_requiresImport(t *testing.T) { }, }) } + func testAccAzureRMNetworkPacketCapture_storageAccount(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_network_packet_capture", "test") @@ -149,7 +150,6 @@ func testCheckAzureRMNetworkPacketCaptureDestroy(s *terraform.State) error { NetworkPacketCaptureName := rs.Primary.Attributes["name"] resp, err := client.Get(ctx, resourceGroup, watcherName, NetworkPacketCaptureName) - if err != nil { return nil } diff --git a/azurerm/internal/services/network/tests/network_security_group_resource_test.go b/azurerm/internal/services/network/tests/network_security_group_resource_test.go index 
6ec29d2375b3..f990b44ed7cc 100644 --- a/azurerm/internal/services/network/tests/network_security_group_resource_test.go +++ b/azurerm/internal/services/network/tests/network_security_group_resource_test.go @@ -320,7 +320,6 @@ func testCheckAzureRMNetworkSecurityGroupDestroy(s *terraform.State) error { resourceGroup := rs.Primary.Attributes["resource_group_name"] resp, err := client.Get(ctx, resourceGroup, name, "") - if err != nil { if utils.ResponseWasNotFound(resp.Response) { return nil diff --git a/azurerm/internal/services/network/tests/network_security_rule_resource_test.go b/azurerm/internal/services/network/tests/network_security_rule_resource_test.go index 436e1901beed..a24cd4e51cdd 100644 --- a/azurerm/internal/services/network/tests/network_security_rule_resource_test.go +++ b/azurerm/internal/services/network/tests/network_security_rule_resource_test.go @@ -205,7 +205,6 @@ func testCheckAzureRMNetworkSecurityRuleDestroy(s *terraform.State) error { resourceGroup := rs.Primary.Attributes["resource_group_name"] resp, err := client.Get(ctx, resourceGroup, sgName, sgrName) - if err != nil { return nil } @@ -396,14 +395,26 @@ resource "azurerm_resource_group" "test" { location = "%s" } -resource "azurerm_application_security_group" "first" { - name = "acctest-first%d" +resource "azurerm_application_security_group" "source1" { + name = "acctest-source1-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_application_security_group" "source2" { + name = "acctest-source2-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_application_security_group" "destination1" { + name = "acctest-destination1-%d" location = azurerm_resource_group.test.location resource_group_name = azurerm_resource_group.test.name } -resource "azurerm_application_security_group" "second" { - name = "acctest-second%d" +resource "azurerm_application_security_group" "destination2" { + name = "acctest-destination2-%d" location = azurerm_resource_group.test.location resource_group_name = azurerm_resource_group.test.name } @@ -422,10 +433,10 @@ resource "azurerm_network_security_rule" "test1" { direction = "Outbound" access = "Allow" protocol = "Tcp" - source_application_security_group_ids = [azurerm_application_security_group.first.id] - destination_application_security_group_ids = [azurerm_application_security_group.second.id] + source_application_security_group_ids = [azurerm_application_security_group.source1.id, azurerm_application_security_group.source2.id] + destination_application_security_group_ids = [azurerm_application_security_group.destination1.id, azurerm_application_security_group.destination2.id] source_port_ranges = ["10000-40000"] destination_port_ranges = ["80", "443", "8080", "8190"] } -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) } diff --git a/azurerm/internal/services/network/tests/network_watcher_resource_test.go b/azurerm/internal/services/network/tests/network_watcher_resource_test.go index d54bc015c381..6a72493e9886 100644 --- a/azurerm/internal/services/network/tests/network_watcher_resource_test.go +++ b/azurerm/internal/services/network/tests/network_watcher_resource_test.go @@ -9,6 +9,7 @@ import ( 
"github.com/hashicorp/terraform-plugin-sdk/terraform" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) @@ -35,16 +36,19 @@ func TestAccAzureRMNetworkWatcher(t *testing.T) { "requiresImport": testAccAzureRMPacketCapture_requiresImport, }, "ConnectionMonitor": { - "addressBasic": testAccAzureRMNetworkConnectionMonitor_addressBasic, - "addressComplete": testAccAzureRMNetworkConnectionMonitor_addressComplete, - "addressUpdate": testAccAzureRMNetworkConnectionMonitor_addressUpdate, - "vmBasic": testAccAzureRMNetworkConnectionMonitor_vmBasic, - "vmComplete": testAccAzureRMNetworkConnectionMonitor_vmComplete, - "vmUpdate": testAccAzureRMNetworkConnectionMonitor_vmUpdate, - "destinationUpdate": testAccAzureRMNetworkConnectionMonitor_destinationUpdate, - "missingDestinationInvalid": testAccAzureRMNetworkConnectionMonitor_missingDestination, - "bothDestinationsInvalid": testAccAzureRMNetworkConnectionMonitor_conflictingDestinations, - "requiresImport": testAccAzureRMNetworkConnectionMonitor_requiresImport, + "addressBasic": testAccAzureRMNetworkConnectionMonitor_addressBasic, + "addressComplete": testAccAzureRMNetworkConnectionMonitor_addressComplete, + "addressUpdate": testAccAzureRMNetworkConnectionMonitor_addressUpdate, + "vmBasic": testAccAzureRMNetworkConnectionMonitor_vmBasic, + "vmComplete": testAccAzureRMNetworkConnectionMonitor_vmComplete, + "vmUpdate": testAccAzureRMNetworkConnectionMonitor_vmUpdate, + "destinationUpdate": testAccAzureRMNetworkConnectionMonitor_destinationUpdate, + "missingDestinationInvalid": testAccAzureRMNetworkConnectionMonitor_missingDestination, + "bothDestinationsInvalid": testAccAzureRMNetworkConnectionMonitor_conflictingDestinations, + "requiresImport": testAccAzureRMNetworkConnectionMonitor_requiresImport, + "httpConfiguration": testAccAzureRMNetworkConnectionMonitor_httpConfiguration, + "icmpConfiguration": testAccAzureRMNetworkConnectionMonitor_icmpConfiguration, + "bothAddressAndVirtualMachineId": testAccAzureRMNetworkConnectionMonitor_withAddressAndVirtualMachineId, }, "PacketCapture": { "localDisk": testAccAzureRMNetworkPacketCapture_localDisk, @@ -189,16 +193,18 @@ func testCheckAzureRMNetworkWatcherExists(resourceName string) resource.TestChec return fmt.Errorf("Not found: %s", resourceName) } - name := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Network Watcher: %q", name) + id, err := parse.NetworkWatcherID(rs.Primary.ID) + if err != nil { + return err + } + if id.ResourceGroup == "" { + return fmt.Errorf("Bad: no resource group found in state for Network Watcher: %q", id.Name) } - resp, err := client.Get(ctx, resourceGroup, name) + resp, err := client.Get(ctx, id.ResourceGroup, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Network Watcher %q (resource group: %q) does not exist", name, resourceGroup) + return fmt.Errorf("Bad: Network Watcher %q (resource group: %q) does not exist", id.Name, id.ResourceGroup) } return fmt.Errorf("Bad: Get on watcherClient: %+v", err) } @@ -217,13 +223,15 @@ func testCheckAzureRMNetworkWatcherDisappears(resourceName string) 
resource.Test return fmt.Errorf("Not found: %q", resourceName) } - name := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Network Watcher: %q", name) + id, err := parse.NetworkWatcherID(rs.Primary.ID) + if err != nil { + return err + } + if id.ResourceGroup == "" { + return fmt.Errorf("Bad: no resource group found in state for Network Watcher: %q", id.Name) } - future, err := client.Delete(ctx, resourceGroup, name) + future, err := client.Delete(ctx, id.ResourceGroup, id.Name) if err != nil { if !response.WasNotFound(future.Response()) { return fmt.Errorf("Bad: Delete on watcherClient: %+v", err) @@ -247,11 +255,12 @@ func testCheckAzureRMNetworkWatcherDestroy(s *terraform.State) error { continue } - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, name) + id, err := parse.NetworkWatcherID(rs.Primary.ID) + if err != nil { + return err + } + resp, err := client.Get(ctx, id.ResourceGroup, id.Name) if err != nil { if !utils.ResponseWasNotFound(resp.Response) { return fmt.Errorf("Network Watcher still exists:\n%#v", resp) diff --git a/azurerm/internal/services/network/tests/packet_capture_resource_test.go b/azurerm/internal/services/network/tests/packet_capture_resource_test.go index a3bd0fad6879..1922d97e1eab 100644 --- a/azurerm/internal/services/network/tests/packet_capture_resource_test.go +++ b/azurerm/internal/services/network/tests/packet_capture_resource_test.go @@ -51,6 +51,7 @@ func testAccAzureRMPacketCapture_requiresImport(t *testing.T) { }, }) } + func testAccAzureRMPacketCapture_storageAccount(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_packet_capture", "test") @@ -149,7 +150,6 @@ func testCheckAzureRMPacketCaptureDestroy(s *terraform.State) error { packetCaptureName := rs.Primary.Attributes["name"] resp, err := client.Get(ctx, resourceGroup, watcherName, packetCaptureName) - if err != nil { return nil } diff --git a/azurerm/internal/services/network/tests/point_to_site_vpn_gateway_resource_test.go b/azurerm/internal/services/network/tests/point_to_site_vpn_gateway_resource_test.go index 09702bf3494e..bfb0d4ee7f10 100644 --- a/azurerm/internal/services/network/tests/point_to_site_vpn_gateway_resource_test.go +++ b/azurerm/internal/services/network/tests/point_to_site_vpn_gateway_resource_test.go @@ -174,6 +174,11 @@ func testAccAzureRMAzureRMPointToSiteVPNGateway_updated(data acceptance.TestData return fmt.Sprintf(` %s +resource "azurerm_virtual_hub_route_table" "test" { + name = "acctest-RouteTable-%d" + virtual_hub_id = azurerm_virtual_hub.test.id +} + resource "azurerm_point_to_site_vpn_gateway" "test" { name = "acctestp2sVPNG-%d" location = azurerm_resource_group.test.location @@ -181,15 +186,25 @@ resource "azurerm_point_to_site_vpn_gateway" "test" { virtual_hub_id = azurerm_virtual_hub.test.id vpn_server_configuration_id = azurerm_vpn_server_configuration.test.id scale_unit = 2 + dns_servers = ["3.3.3.3"] connection_configuration { name = "first" vpn_client_address_pool { address_prefixes = ["172.100.0.0/14", "10.100.0.0/14"] } + + route { + associated_route_table_id = azurerm_virtual_hub_route_table.test.id + + propagated_route_table { + ids = [azurerm_virtual_hub_route_table.test.id] + labels = ["label1", "label2"] + } + } } } -`, template, data.RandomInteger) +`, template, data.RandomInteger, 
data.RandomInteger) } func testAccAzureRMAzureRMPointToSiteVPNGateway_requiresImport(data acceptance.TestData) string { diff --git a/azurerm/internal/services/network/tests/public_ip_prefix_resource_test.go b/azurerm/internal/services/network/tests/public_ip_prefix_resource_test.go index 544527d30370..9467887dafdd 100644 --- a/azurerm/internal/services/network/tests/public_ip_prefix_resource_test.go +++ b/azurerm/internal/services/network/tests/public_ip_prefix_resource_test.go @@ -84,7 +84,6 @@ func testCheckAzureRMPublicIPPrefixDestroy(s *terraform.State) error { resourceGroup := rs.Primary.Attributes["resource_group_name"] resp, err := client.Get(ctx, resourceGroup, name, "") - if err != nil { return nil } diff --git a/azurerm/internal/services/network/tests/public_ip_resource_test.go b/azurerm/internal/services/network/tests/public_ip_resource_test.go index b2ae48a31fa2..39500d9f3170 100644 --- a/azurerm/internal/services/network/tests/public_ip_resource_test.go +++ b/azurerm/internal/services/network/tests/public_ip_resource_test.go @@ -484,7 +484,6 @@ func testCheckAzureRMPublicIpDestroy(s *terraform.State) error { resourceGroup := rs.Primary.Attributes["resource_group_name"] resp, err := client.Get(ctx, resourceGroup, name, "") - if err != nil { return nil } diff --git a/azurerm/internal/services/network/tests/route_resource_test.go b/azurerm/internal/services/network/tests/route_resource_test.go index da848c4dc19e..79705a2cbf10 100644 --- a/azurerm/internal/services/network/tests/route_resource_test.go +++ b/azurerm/internal/services/network/tests/route_resource_test.go @@ -207,7 +207,6 @@ func testCheckAzureRMRouteDestroy(s *terraform.State) error { resourceGroup := rs.Primary.Attributes["resource_group_name"] resp, err := client.Get(ctx, resourceGroup, rtName, name) - if err != nil { return nil } diff --git a/azurerm/internal/services/network/tests/subnet_resource_test.go b/azurerm/internal/services/network/tests/subnet_resource_test.go index 095912aab2b5..f75f280aab91 100644 --- a/azurerm/internal/services/network/tests/subnet_resource_test.go +++ b/azurerm/internal/services/network/tests/subnet_resource_test.go @@ -240,6 +240,38 @@ func TestAccAzureRMSubnet_serviceEndpoints(t *testing.T) { }) } +func TestAccAzureRMSubnet_serviceEndpointPolicy(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_subnet", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMSubnetDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMSubnet_serviceEndpointPolicyBasic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMSubnetExists(data.ResourceName), + ), + }, + data.ImportStep(), + { + Config: testAccAzureRMSubnet_serviceEndpointPolicyUpdate(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMSubnetExists(data.ResourceName), + ), + }, + data.ImportStep(), + { + Config: testAccAzureRMSubnet_serviceEndpointPolicyBasic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMSubnetExists(data.ResourceName), + ), + }, + }, + }) +} + func TestAccAzureRMSubnet_updateAddressPrefix(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_subnet", "test") @@ -520,6 +552,48 @@ resource "azurerm_subnet" "test" { `, template) } +func testAccAzureRMSubnet_serviceEndpointPolicyBasic(data acceptance.TestData) string { + template := testAccAzureRMSubnet_template(data) + return fmt.Sprintf(` +%s + +resource 
"azurerm_subnet_service_endpoint_storage_policy" "test" { + name = "acctestSEP-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location +} + +resource "azurerm_subnet" "test" { + name = "internal" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" +} +`, template, data.RandomInteger) +} + +func testAccAzureRMSubnet_serviceEndpointPolicyUpdate(data acceptance.TestData) string { + template := testAccAzureRMSubnet_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_subnet_service_endpoint_storage_policy" "test" { + name = "acctestSEP-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location +} + +resource "azurerm_subnet" "test" { + name = "internal" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.2.0/24" + service_endpoints = ["Microsoft.Sql"] + service_endpoint_policy_ids = [azurerm_subnet_service_endpoint_storage_policy.test.id] +} +`, template, data.RandomInteger) +} + func testAccAzureRMSubnet_updatedAddressPrefix(data acceptance.TestData) string { template := testAccAzureRMSubnet_template(data) return fmt.Sprintf(` diff --git a/azurerm/internal/services/network/tests/subnet_service_endpoint_policy_storage_resource_test.go b/azurerm/internal/services/network/tests/subnet_service_endpoint_policy_storage_resource_test.go new file mode 100644 index 000000000000..d696c0c56531 --- /dev/null +++ b/azurerm/internal/services/network/tests/subnet_service_endpoint_policy_storage_resource_test.go @@ -0,0 +1,229 @@ +package tests + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func TestAccAzureRMSubnetServiceEndpointStoragePolicy_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_subnet_service_endpoint_storage_policy", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMSubnetServiceEndpointStoragePolicyDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMSubnetServiceEndpointStoragePolicy_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMSubnetServiceEndpointStoragePolicyExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMSubnetServiceEndpointStoragePolicy_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_subnet_service_endpoint_storage_policy", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMSubnetServiceEndpointStoragePolicyDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMSubnetServiceEndpointStoragePolicy_complete(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMSubnetServiceEndpointStoragePolicyExists(data.ResourceName), + ), + }, + 
data.ImportStep(), + }, + }) +} + +func TestAccAzureRMSubnetServiceEndpointStoragePolicy_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_subnet_service_endpoint_storage_policy", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMSubnetServiceEndpointStoragePolicyDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMSubnetServiceEndpointStoragePolicy_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMSubnetServiceEndpointStoragePolicyExists(data.ResourceName), + ), + }, + data.ImportStep(), + { + Config: testAccAzureRMSubnetServiceEndpointStoragePolicy_complete(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMSubnetServiceEndpointStoragePolicyExists(data.ResourceName), + ), + }, + data.ImportStep(), + { + Config: testAccAzureRMSubnetServiceEndpointStoragePolicy_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMSubnetServiceEndpointStoragePolicyExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMSubnetServiceEndpointStoragePolicy_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_subnet_service_endpoint_storage_policy", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMSubnetServiceEndpointStoragePolicyDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMSubnetServiceEndpointStoragePolicy_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMSubnetServiceEndpointStoragePolicyExists(data.ResourceName), + ), + }, + data.RequiresImportErrorStep(testAccAzureRMSubnetServiceEndpointStoragePolicy_requiresImport), + }, + }) +} + +func testCheckAzureRMSubnetServiceEndpointStoragePolicyExists(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).Network.ServiceEndpointPoliciesClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Subnet Service Endpoint Storage Policy not found: %s", resourceName) + } + + id, err := parse.SubnetServiceEndpointStoragePolicyID(rs.Primary.ID) + if err != nil { + return err + } + + if resp, err := client.Get(ctx, id.ResourceGroup, id.ServiceEndpointPolicyName, ""); err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Subnet Service Endpoint Storage Policy %q (Resource Group %q) does not exist", id.ServiceEndpointPolicyName, id.ResourceGroup) + } + return fmt.Errorf("Getting on Subnet Service Endpoint Storage Policy: %+v", err) + } + + return nil + } +} + +func testCheckAzureRMSubnetServiceEndpointStoragePolicyDestroy(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).Network.ServiceEndpointPoliciesClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + for _, rs := range s.RootModule().Resources { + if rs.Type != "azurerm_subnet_service_endpoint_storage_policy" { + continue + } + + id, err := parse.SubnetServiceEndpointStoragePolicyID(rs.Primary.ID) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.ServiceEndpointPolicyName, "") + if err == nil { + return fmt.Errorf("Subnet Service Endpoint Storage 
Policy still exists") + } + if !utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Getting on Subnet Service Endpoint Storage Policy: %+v", err) + } + return nil + } + + return nil +} + +func testAccAzureRMSubnetServiceEndpointStoragePolicy_basic(data acceptance.TestData) string { + template := testAccAzureRMSubnetServiceEndpointStoragePolicy_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_subnet_service_endpoint_storage_policy" "test" { + name = "acctestSEP-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location +} +`, template, data.RandomInteger) +} + +func testAccAzureRMSubnetServiceEndpointStoragePolicy_complete(data acceptance.TestData) string { + template := testAccAzureRMSubnetServiceEndpointStoragePolicy_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_storage_account" "test" { + name = "acctestasasepd%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "GRS" +} + +resource "azurerm_subnet_service_endpoint_storage_policy" "test" { + name = "acctestSEP-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + definition { + name = "def1" + description = "test definition1" + service_resources = [ + "/subscriptions/%s", + azurerm_resource_group.test.id, + azurerm_storage_account.test.id + ] + } + tags = { + environment = "Production" + cost_center = "MSFT" + } +} +`, template, data.RandomString, data.RandomInteger, data.Client().SubscriptionID) +} + +func testAccAzureRMSubnetServiceEndpointStoragePolicy_requiresImport(data acceptance.TestData) string { + template := testAccAzureRMSubnetServiceEndpointStoragePolicy_basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_subnet_service_endpoint_storage_policy" "import" { + name = azurerm_subnet_service_endpoint_storage_policy.test.name + resource_group_name = azurerm_subnet_service_endpoint_storage_policy.test.resource_group_name + location = azurerm_subnet_service_endpoint_storage_policy.test.location +} +`, template) +} + +func testAccAzureRMSubnetServiceEndpointStoragePolicy_template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} +`, data.RandomInteger, data.Locations.Primary) +} diff --git a/azurerm/internal/services/network/tests/virtual_hub_bgp_connection_resource_test.go b/azurerm/internal/services/network/tests/virtual_hub_bgp_connection_resource_test.go new file mode 100644 index 000000000000..ef980572bc46 --- /dev/null +++ b/azurerm/internal/services/network/tests/virtual_hub_bgp_connection_resource_test.go @@ -0,0 +1,190 @@ +package tests + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func TestAccAzureRMVirtualHubBgpConnection_basic(t *testing.T) { + if true { + t.Skip("Skipping due to API issue preventing deletion") + return + } + data := 
acceptance.BuildTestData(t, "azurerm_virtual_hub_bgp_connection", "test") + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMVirtualHubBgpConnectionDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMVirtualHubBgpConnection_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualHubBgpConnectionExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMVirtualHubBgpConnection_requiresImport(t *testing.T) { + if true { + t.Skip("Skipping due to API issue preventing deletion") + return + } + data := acceptance.BuildTestData(t, "azurerm_virtual_hub_bgp_connection", "test") + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMVirtualHubBgpConnectionDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMVirtualHubBgpConnection_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualHubBgpConnectionExists(data.ResourceName), + ), + }, + data.RequiresImportErrorStep(testAccAzureRMVirtualHubBgpConnection_requiresImport), + }, + }) +} + +func testCheckAzureRMVirtualHubBgpConnectionExists(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).Network.VirtualHubBgpConnectionClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("virtualHubBgpConnection not found: %s", resourceName) + } + + id, err := parse.BgpConnectionID(rs.Primary.ID) + if err != nil { + return err + } + + if resp, err := client.Get(ctx, id.ResourceGroup, id.VirtualHubName, id.Name); err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("bad: Network VirtualHubBgpConnection %q does not exist", id.Name) + } + + return fmt.Errorf("bad: Get on Network.VirtualHubBgpConnectionClient: %+v", err) + } + + return nil + } +} + +func testCheckAzureRMVirtualHubBgpConnectionDestroy(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).Network.VirtualHubBgpConnectionClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + for _, rs := range s.RootModule().Resources { + if rs.Type != "azurerm_virtual_hub_bgp_connection" { + continue + } + + id, err := parse.BgpConnectionID(rs.Primary.ID) + if err != nil { + return err + } + + if resp, err := client.Get(ctx, id.ResourceGroup, id.VirtualHubName, id.Name); err != nil { + if !utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("bad: Get on Network.VirtualHubBgpConnectionClient: %+v", err) + } + } + + return nil + } + + return nil +} + +func testAccAzureRMVirtualHubBgpConnection_template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-VHub-%d" + location = "%s" +} + +resource "azurerm_virtual_hub" "test" { + name = "acctest-VHub-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + sku = "Standard" +} + +resource "azurerm_public_ip" "test" { + name = "acctest-PIP-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Dynamic" 
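+  # Illustrative annotation (not in the original patch): this public IP is consumed below by
+  # azurerm_virtual_hub_ip.test via public_ip_address_id, which the BGP connection depends on.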
+} + +resource "azurerm_virtual_network" "test" { + name = "acctest-VNet-%d" + address_space = ["10.5.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctest-Subnet-%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.5.1.0/24" +} + +resource "azurerm_virtual_hub_ip" "test" { + name = "acctest-VHub-IP-%d" + virtual_hub_id = azurerm_virtual_hub.test.id + private_ip_address = "10.5.1.18" + private_ip_allocation_method = "Static" + public_ip_address_id = azurerm_public_ip.test.id + subnet_id = azurerm_subnet.test.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func testAccAzureRMVirtualHubBgpConnection_basic(data acceptance.TestData) string { + template := testAccAzureRMVirtualHubBgpConnection_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_hub_bgp_connection" "test" { + name = "acctest-VHub-BgpConnection-%d" + virtual_hub_id = azurerm_virtual_hub.test.id + peer_asn = 65514 + peer_ip = "169.254.21.5" + + depends_on = [azurerm_virtual_hub_ip.test] +} +`, template, data.RandomInteger) +} + +func testAccAzureRMVirtualHubBgpConnection_requiresImport(data acceptance.TestData) string { + config := testAccAzureRMVirtualHubBgpConnection_basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_hub_bgp_connection" "import" { + name = azurerm_virtual_hub_bgp_connection.test.name + virtual_hub_id = azurerm_virtual_hub_bgp_connection.test.virtual_hub_id + peer_asn = azurerm_virtual_hub_bgp_connection.test.peer_asn + peer_ip = azurerm_virtual_hub_bgp_connection.test.peer_ip +} +`, config) +} diff --git a/azurerm/internal/services/network/tests/virtual_hub_connection_resource_test.go b/azurerm/internal/services/network/tests/virtual_hub_connection_resource_test.go index d2d9f8dfc3ed..27bb0ef7c2cd 100644 --- a/azurerm/internal/services/network/tests/virtual_hub_connection_resource_test.go +++ b/azurerm/internal/services/network/tests/virtual_hub_connection_resource_test.go @@ -1,6 +1,7 @@ package tests import ( + "context" "fmt" "testing" @@ -105,7 +106,7 @@ func TestAccAzureRMVirtualHubConnection_update(t *testing.T) { }) } -func TestAccAzureRMVirtualHubConnection_recreateWithSameConnectionName(t *testing.T) { +func TestAccAzureRMVirtualHubConnection_enableInternetSecurity(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_virtual_hub_connection", "test") resource.ParallelTest(t, resource.TestCase{ @@ -131,6 +132,144 @@ func TestAccAzureRMVirtualHubConnection_recreateWithSameConnectionName(t *testin }) } +func TestAccAzureRMVirtualHubConnection_recreateWithSameConnectionName(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_virtual_hub_connection", "test") + + vhubData := data + vhubData.ResourceName = "azurerm_virtual_hub.test" + resourceGroupName := fmt.Sprintf("acctestRG-vhub-%d", data.RandomInteger) + vhubName := fmt.Sprintf("acctest-VHUB-%d", data.RandomInteger) + vhubConnectionName := fmt.Sprintf("acctestbasicvhubconn-%d", data.RandomInteger) + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMVirtualHubConnectionDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMVirtualHubConnection_basic(data), + 
Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualHubConnectionExists(data.ResourceName), + ), + }, + data.ImportStep(), + { + Config: testAccAzureRMVirtualHubConnection_template(data), + Check: resource.ComposeTestCheckFunc( + vhubData.CheckWithClient(checkVirtualHubConnectionDoesNotExist(resourceGroupName, vhubName, vhubConnectionName)), + ), + }, + { + Config: testAccAzureRMVirtualHubConnection_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualHubConnectionExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMVirtualHubConnection_removeRoutingConfiguration(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_virtual_hub_connection", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMVirtualHubConnectionDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMVirtualHubConnection_withRoutingConfiguration(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualHubConnectionExists(data.ResourceName), + ), + }, + { + Config: testAccAzureRMVirtualHubConnection_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualHubConnectionExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMVirtualHubConnection_removePropagatedRouteTable(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_virtual_hub_connection", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMVirtualHubConnectionDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMVirtualHubConnection_withRoutingConfiguration(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualHubConnectionExists(data.ResourceName), + ), + }, + { + Config: testAccAzureRMVirtualHubConnection_withoutPropagatedRouteTable(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualHubConnectionExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMVirtualHubConnection_removeVnetStaticRoute(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_virtual_hub_connection", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMVirtualHubConnectionDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMVirtualHubConnection_withRoutingConfiguration(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualHubConnectionExists(data.ResourceName), + ), + }, + { + Config: testAccAzureRMVirtualHubConnection_withoutVnetStaticRoute(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualHubConnectionExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMVirtualHubConnection_updateRoutingConfiguration(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_virtual_hub_connection", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMVirtualHubConnectionDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMVirtualHubConnection_withRoutingConfiguration(data), + Check: resource.ComposeTestCheckFunc( + 
testCheckAzureRMVirtualHubConnectionExists(data.ResourceName), + ), + }, + { + Config: testAccAzureRMVirtualHubConnection_updateRoutingConfiguration(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualHubConnectionExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + func testCheckAzureRMVirtualHubConnectionExists(resourceName string) resource.TestCheckFunc { return func(s *terraform.State) error { client := acceptance.AzureProvider.Meta().(*clients.Client).Network.HubVirtualNetworkConnectionClient @@ -141,7 +280,7 @@ func testCheckAzureRMVirtualHubConnectionExists(resourceName string) resource.Te return fmt.Errorf("Virtual Hub Connection not found: %s", resourceName) } - id, err := parse.VirtualHubConnectionID(rs.Primary.ID) + id, err := parse.HubVirtualNetworkConnectionID(rs.Primary.ID) if err != nil { return err } @@ -157,6 +296,19 @@ func testCheckAzureRMVirtualHubConnectionExists(resourceName string) resource.Te } } +func checkVirtualHubConnectionDoesNotExist(resourceGroupName, vhubName, vhubConnectionName string) acceptance.ClientCheckFunc { + return func(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) error { + if resp, err := clients.Network.HubVirtualNetworkConnectionClient.Get(ctx, resourceGroupName, vhubName, vhubConnectionName); err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return nil + } + return fmt.Errorf("Bad: Get on network.HubVirtualNetworkConnectionClient: %+v", err) + } + + return fmt.Errorf("Bad: Virtual Hub Connection %q (Resource Group %q) still exists", vhubConnectionName, resourceGroupName) + } +} + func testCheckAzureRMVirtualHubConnectionDestroy(s *terraform.State) error { client := acceptance.AzureProvider.Meta().(*clients.Client).Network.HubVirtualNetworkConnectionClient ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext @@ -166,7 +318,7 @@ func testCheckAzureRMVirtualHubConnectionDestroy(s *terraform.State) error { continue } - id, err := parse.VirtualHubConnectionID(rs.Primary.ID) + id, err := parse.HubVirtualNetworkConnectionID(rs.Primary.ID) if err != nil { return err } @@ -320,3 +472,99 @@ resource "azurerm_virtual_hub" "test" { } `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) } + +func testAccAzureRMVirtualHubConnection_withRoutingConfiguration(data acceptance.TestData) string { + template := testAccAzureRMVirtualHubConnection_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_hub_connection" "test" { + name = "acctest-vhubconn-%d" + virtual_hub_id = azurerm_virtual_hub.test.id + remote_virtual_network_id = azurerm_virtual_network.test.id + + routing { + propagated_route_table { + labels = ["label1", "label2"] + } + + static_vnet_route { + name = "testvnetroute" + address_prefixes = ["10.0.3.0/24", "10.0.4.0/24"] + next_hop_ip_address = "10.0.3.5" + } + + static_vnet_route { + name = "testvnetroute2" + address_prefixes = ["10.0.5.0/24"] + next_hop_ip_address = "10.0.5.5" + } + } +} +`, template, data.RandomInteger) +} + +func testAccAzureRMVirtualHubConnection_withoutPropagatedRouteTable(data acceptance.TestData) string { + template := testAccAzureRMVirtualHubConnection_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_hub_connection" "test" { + name = "acctest-vhubconn-%d" + virtual_hub_id = azurerm_virtual_hub.test.id + remote_virtual_network_id = azurerm_virtual_network.test.id + + routing { + static_vnet_route 
{ + name = "testvnetroute" + address_prefixes = ["10.0.3.0/24"] + next_hop_ip_address = "10.0.3.5" + } + } +} +`, template, data.RandomInteger) +} + +func testAccAzureRMVirtualHubConnection_withoutVnetStaticRoute(data acceptance.TestData) string { + template := testAccAzureRMVirtualHubConnection_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_hub_connection" "test" { + name = "acctest-vhubconn-%d" + virtual_hub_id = azurerm_virtual_hub.test.id + remote_virtual_network_id = azurerm_virtual_network.test.id + + routing { + propagated_route_table { + labels = ["default"] + } + } +} +`, template, data.RandomInteger) +} + +func testAccAzureRMVirtualHubConnection_updateRoutingConfiguration(data acceptance.TestData) string { + template := testAccAzureRMVirtualHubConnection_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_hub_connection" "test" { + name = "acctest-vhubconn-%d" + virtual_hub_id = azurerm_virtual_hub.test.id + remote_virtual_network_id = azurerm_virtual_network.test.id + + routing { + propagated_route_table { + labels = ["label3"] + } + + static_vnet_route { + name = "testvnetroute6" + address_prefixes = ["10.0.6.0/24", "10.0.7.0/24"] + next_hop_ip_address = "10.0.6.5" + } + } +} +`, template, data.RandomInteger) +} diff --git a/azurerm/internal/services/network/tests/virtual_hub_ip_resource_test.go b/azurerm/internal/services/network/tests/virtual_hub_ip_resource_test.go new file mode 100644 index 000000000000..6ebf5fb63115 --- /dev/null +++ b/azurerm/internal/services/network/tests/virtual_hub_ip_resource_test.go @@ -0,0 +1,250 @@ +package tests + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func TestAccAzureRMVirtualHubIP_basic(t *testing.T) { + if true { + t.Skip("Skipping due to API issue preventing deletion") + return + } + data := acceptance.BuildTestData(t, "azurerm_virtual_hub_ip", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMVirtualHubIPDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMVirtualHubIP_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualHubIPExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMVirtualHubIP_requiresImport(t *testing.T) { + if true { + t.Skip("Skipping due to API issue preventing deletion") + return + } + data := acceptance.BuildTestData(t, "azurerm_virtual_hub_ip", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMVirtualHubIPDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMVirtualHubIP_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualHubIPExists(data.ResourceName), + ), + }, + { + Config: testAccAzureRMVirtualHubIP_requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_virtual_hub_ip"), + }, + }, + }) +} + +func 
TestAccAzureRMVirtualHubIP_complete(t *testing.T) { + if true { + t.Skip("Skipping due to API issue preventing deletion") + return + } + data := acceptance.BuildTestData(t, "azurerm_virtual_hub_ip", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMVirtualHubIPDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMVirtualHubIP_complete(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualHubIPExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMVirtualHubIP_update(t *testing.T) { + if true { + t.Skip("Skipping due to API issue preventing deletion") + return + } + data := acceptance.BuildTestData(t, "azurerm_virtual_hub_ip", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMVirtualHubIPDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMVirtualHubIP_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualHubIPExists(data.ResourceName), + ), + }, + data.ImportStep(), + { + Config: testAccAzureRMVirtualHubIP_complete(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualHubIPExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func testCheckAzureRMVirtualHubIPExists(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).Network.VirtualHubIPClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Virtual Hub IP not found: %s", resourceName) + } + + id, err := parse.VirtualHubIpConfigurationID(rs.Primary.ID) + if err != nil { + return err + } + + if resp, err := client.Get(ctx, id.ResourceGroup, id.VirtualHubName, id.IpConfigurationName); err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Bad: Virtual Hub IP %q (Resource Group %q) does not exist", id.IpConfigurationName, id.ResourceGroup) + } + return fmt.Errorf("Bad: Get on network.VirtualHubIPClient: %+v", err) + } + + return nil + } +} + +func testCheckAzureRMVirtualHubIPDestroy(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).Network.VirtualHubIPClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + for _, rs := range s.RootModule().Resources { + if rs.Type != "azurerm_virtual_hub_ip" { + continue + } + + id, err := parse.VirtualHubIpConfigurationID(rs.Primary.ID) + if err != nil { + return err + } + + if resp, err := client.Get(ctx, id.ResourceGroup, id.VirtualHubName, id.IpConfigurationName); err != nil { + if !utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Bad: Get on network.VirtualHubIPClient: %+v", err) + } + } + + return nil + } + + return nil +} + +func testAccAzureRMVirtualHubIP_basic(data acceptance.TestData) string { + template := testAccAzureRMVirtualHubIP_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_hub_ip" "test" { + name = "acctest-vhubipconfig-%d" + virtual_hub_id = azurerm_virtual_hub.test.id + subnet_id = azurerm_subnet.test.id +} +`, template, data.RandomInteger) +} + +func testAccAzureRMVirtualHubIP_requiresImport(data acceptance.TestData) string { + template := 
testAccAzureRMVirtualHubIP_basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_hub_ip" "import" { + name = azurerm_virtual_hub_ip.test.name + virtual_hub_id = azurerm_virtual_hub_ip.test.virtual_hub_id + subnet_id = azurerm_virtual_hub_ip.test.subnet_id +} +`, template) +} + +func testAccAzureRMVirtualHubIP_complete(data acceptance.TestData) string { + template := testAccAzureRMVirtualHubIP_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_hub_ip" "test" { + name = "acctest-vhubipconfig-%d" + virtual_hub_id = azurerm_virtual_hub.test.id + private_ip_address = "10.5.1.18" + private_ip_allocation_method = "Static" + public_ip_address_id = azurerm_public_ip.test.id + subnet_id = azurerm_subnet.test.id +} +`, template, data.RandomInteger) +} + +func testAccAzureRMVirtualHubIP_template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-vhub-%d" + location = "%s" +} + +resource "azurerm_virtual_hub" "test" { + name = "acctest-vhub-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + sku = "Standard" +} + +resource "azurerm_public_ip" "test" { + name = "acctest-pip-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Dynamic" +} + +resource "azurerm_virtual_network" "test" { + name = "acctest-vnet-%d" + address_space = ["10.5.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctest-subnet-%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.5.1.0/24" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/network/tests/virtual_hub_route_table_test.go b/azurerm/internal/services/network/tests/virtual_hub_route_table_test.go new file mode 100644 index 000000000000..d605e630b1a6 --- /dev/null +++ b/azurerm/internal/services/network/tests/virtual_hub_route_table_test.go @@ -0,0 +1,257 @@ +package tests + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func TestAccAzureRMVirtualHubRouteTable_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_virtual_hub_route_table", "test") + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMVirtualHubRouteTableDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMVirtualHubRouteTable_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualHubRouteTableExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMVirtualHubRouteTable_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, 
"azurerm_virtual_hub_route_table", "test") + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMVirtualHubRouteTableDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMVirtualHubRouteTable_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualHubRouteTableExists(data.ResourceName), + ), + }, + data.RequiresImportErrorStep(testAccAzureRMVirtualHubRouteTable_requiresImport), + }, + }) +} + +func TestAccAzureRMVirtualHubRouteTable_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_virtual_hub_route_table", "test") + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMVirtualHubRouteTableDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMVirtualHubRouteTable_complete(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualHubRouteTableExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMVirtualHubRouteTable_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_virtual_hub_route_table", "test") + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMVirtualHubRouteTableDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMVirtualHubRouteTable_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualHubRouteTableExists(data.ResourceName), + ), + }, + data.ImportStep(), + { + Config: testAccAzureRMVirtualHubRouteTable_complete(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualHubRouteTableExists(data.ResourceName), + ), + }, + data.ImportStep(), + { + Config: testAccAzureRMVirtualHubRouteTable_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualHubRouteTableExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func testCheckAzureRMVirtualHubRouteTableExists(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).Network.HubRouteTableClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("network HubRouteTable not found: %s", resourceName) + } + + id, err := parse.HubRouteTableID(rs.Primary.ID) + if err != nil { + return err + } + + if resp, err := client.Get(ctx, id.ResourceGroup, id.VirtualHubName, id.Name); err != nil { + if !utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("bad: Network HubRouteTable %q does not exist", id.Name) + } + + return fmt.Errorf("bad: Get on Network.HubRouteTableClient: %+v", err) + } + + return nil + } +} + +func testCheckAzureRMVirtualHubRouteTableDestroy(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).Network.HubRouteTableClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + for _, rs := range s.RootModule().Resources { + if rs.Type != "azurerm_network_hub_route_table" { + continue + } + + id, err := parse.HubRouteTableID(rs.Primary.ID) + if err != nil { + return err + } + + if resp, err := client.Get(ctx, id.ResourceGroup, id.VirtualHubName, id.Name); err != nil { + if 
!utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("bad: Get on Network.HubRouteTableClient: %+v", err) + } + } + + return nil + } + + return nil +} + +func testAccAzureRMVirtualHubRouteTable_template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-VHUB-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctest-VNET-%d" + address_space = ["10.5.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_network_security_group" "test" { + name = "acctest-NSG-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctest-SUBNET-%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefixes = ["10.5.1.0/24"] +} + +resource "azurerm_subnet_network_security_group_association" "test" { + subnet_id = azurerm_subnet.test.id + network_security_group_id = azurerm_network_security_group.test.id +} + +resource "azurerm_virtual_wan" "test" { + name = "acctest-VWAN-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location +} + +resource "azurerm_virtual_hub" "test" { + name = "acctest-VHUB-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + virtual_wan_id = azurerm_virtual_wan.test.id + address_prefix = "10.0.2.0/24" +} + +resource "azurerm_virtual_hub_connection" "test" { + name = "acctest-VHUBCONN-%d" + virtual_hub_id = azurerm_virtual_hub.test.id + remote_virtual_network_id = azurerm_virtual_network.test.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func testAccAzureRMVirtualHubRouteTable_basic(data acceptance.TestData) string { + template := testAccAzureRMVirtualHubRouteTable_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_hub_route_table" "test" { + name = "acctest-RouteTable-%d" + virtual_hub_id = azurerm_virtual_hub.test.id + labels = ["Label1"] +} +`, template, data.RandomInteger) +} + +func testAccAzureRMVirtualHubRouteTable_requiresImport(data acceptance.TestData) string { + config := testAccAzureRMVirtualHubRouteTable_basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_hub_route_table" "import" { + name = azurerm_virtual_hub_route_table.test.name + virtual_hub_id = azurerm_virtual_hub_route_table.test.virtual_hub_id + labels = azurerm_virtual_hub_route_table.test.labels +} +`, config) +} + +func testAccAzureRMVirtualHubRouteTable_complete(data acceptance.TestData) string { + template := testAccAzureRMVirtualHubRouteTable_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_hub_route_table" "test" { + name = "acctest-RouteTable-%d" + virtual_hub_id = azurerm_virtual_hub.test.id + labels = ["labeL1", "AnotherLabel"] + + route { + name = "VHub-Route-Test" + destinations_type = "CIDR" + destinations = ["10.0.0.0/16"] + next_hop_type = "ResourceId" + next_hop = azurerm_virtual_hub_connection.test.id + } +} +`, template, data.RandomInteger) +} diff --git a/azurerm/internal/services/network/tests/virtual_hub_security_partner_provider_resource_test.go 
b/azurerm/internal/services/network/tests/virtual_hub_security_partner_provider_resource_test.go new file mode 100644 index 000000000000..6ce9b7f9089a --- /dev/null +++ b/azurerm/internal/services/network/tests/virtual_hub_security_partner_provider_resource_test.go @@ -0,0 +1,229 @@ +package tests + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func TestAccAzureRMVirtualHubSecurityPartnerProvider_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_virtual_hub_security_partner_provider", "test") + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMVirtualHubSecurityPartnerProviderDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMVirtualHubSecurityPartnerProvider_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualHubSecurityPartnerProviderExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMVirtualHubSecurityPartnerProvider_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_virtual_hub_security_partner_provider", "test") + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMVirtualHubSecurityPartnerProviderDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMVirtualHubSecurityPartnerProvider_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualHubSecurityPartnerProviderExists(data.ResourceName), + ), + }, + data.RequiresImportErrorStep(testAccAzureRMVirtualHubSecurityPartnerProvider_requiresImport), + }, + }) +} + +func TestAccAzureRMVirtualHubSecurityPartnerProvider_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_virtual_hub_security_partner_provider", "test") + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMVirtualHubSecurityPartnerProviderDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMVirtualHubSecurityPartnerProvider_complete(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualHubSecurityPartnerProviderExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMVirtualHubSecurityPartnerProvider_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_virtual_hub_security_partner_provider", "test") + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMVirtualHubSecurityPartnerProviderDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMVirtualHubSecurityPartnerProvider_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualHubSecurityPartnerProviderExists(data.ResourceName), + ), + }, + data.ImportStep(), + { + Config: 
testAccAzureRMVirtualHubSecurityPartnerProvider_complete(data),
+ Check: resource.ComposeTestCheckFunc(
+ testCheckAzureRMVirtualHubSecurityPartnerProviderExists(data.ResourceName),
+ ),
+ },
+ data.ImportStep(),
+ },
+ })
+}
+
+func testCheckAzureRMVirtualHubSecurityPartnerProviderExists(resourceName string) resource.TestCheckFunc {
+ return func(s *terraform.State) error {
+ client := acceptance.AzureProvider.Meta().(*clients.Client).Network.SecurityPartnerProviderClient
+ ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext
+
+ rs, ok := s.RootModule().Resources[resourceName]
+ if !ok {
+ return fmt.Errorf("Security Partner Provider not found: %s", resourceName)
+ }
+
+ id, err := parse.SecurityPartnerProviderID(rs.Primary.ID)
+ if err != nil {
+ return err
+ }
+
+ // a "not found" response means the resource is missing; any other error is a failed Get
+ if resp, err := client.Get(ctx, id.ResourceGroup, id.Name); err != nil {
+ if utils.ResponseWasNotFound(resp.Response) {
+ return fmt.Errorf("bad: Security Partner Provider %q does not exist", id.Name)
+ }
+ return fmt.Errorf("bad: Get on Network.SecurityPartnerProviderClient: %+v", err)
+ }
+
+ return nil
+ }
+}
+
+func testCheckAzureRMVirtualHubSecurityPartnerProviderDestroy(s *terraform.State) error {
+ client := acceptance.AzureProvider.Meta().(*clients.Client).Network.SecurityPartnerProviderClient
+ ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext
+
+ for _, rs := range s.RootModule().Resources {
+ if rs.Type != "azurerm_virtual_hub_security_partner_provider" {
+ continue
+ }
+
+ id, err := parse.SecurityPartnerProviderID(rs.Primary.ID)
+ if err != nil {
+ return err
+ }
+
+ if resp, err := client.Get(ctx, id.ResourceGroup, id.Name); err != nil {
+ if !utils.ResponseWasNotFound(resp.Response) {
+ return fmt.Errorf("bad: Get on Network.SecurityPartnerProviderClient: %+v", err)
+ }
+ }
+
+ return nil
+ }
+
+ return nil
+}
+
+func testAccAzureRMVirtualHubSecurityPartnerProvider_template(data acceptance.TestData) string {
+ return fmt.Sprintf(`
+provider "azurerm" {
+ features {}
+}
+
+resource "azurerm_resource_group" "test" {
+ name = "acctestRG-vhub-%d"
+ location = "%s"
+}
+
+resource "azurerm_virtual_wan" "test" {
+ name = "acctest-vwan-%d"
+ resource_group_name = azurerm_resource_group.test.name
+ location = azurerm_resource_group.test.location
+}
+
+resource "azurerm_virtual_hub" "test" {
+ name = "acctest-VHUB-%d"
+ resource_group_name = azurerm_resource_group.test.name
+ location = azurerm_resource_group.test.location
+ virtual_wan_id = azurerm_virtual_wan.test.id
+ address_prefix = "10.0.2.0/24"
+}
+
+resource "azurerm_vpn_gateway" "test" {
+ name = "acctest-VPNG-%d"
+ location = azurerm_resource_group.test.location
+ resource_group_name = azurerm_resource_group.test.name
+ virtual_hub_id = azurerm_virtual_hub.test.id
+}
+`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger)
+}
+
+func testAccAzureRMVirtualHubSecurityPartnerProvider_basic(data acceptance.TestData) string {
+ template := testAccAzureRMVirtualHubSecurityPartnerProvider_template(data)
+ return fmt.Sprintf(`
+%s
+
+resource "azurerm_virtual_hub_security_partner_provider" "test" {
+ name = "acctest-SPP-%d"
+ resource_group_name = azurerm_resource_group.test.name
+ location = azurerm_resource_group.test.location
+ security_provider_name = "ZScaler"
+
+ depends_on = [azurerm_vpn_gateway.test]
+}
+`, template, data.RandomInteger)
+}
+
+func testAccAzureRMVirtualHubSecurityPartnerProvider_requiresImport(data acceptance.TestData) string {
+ config := 
testAccAzureRMVirtualHubSecurityPartnerProvider_basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_hub_security_partner_provider" "import" { + name = azurerm_virtual_hub_security_partner_provider.test.name + resource_group_name = azurerm_virtual_hub_security_partner_provider.test.resource_group_name + location = azurerm_virtual_hub_security_partner_provider.test.location + security_provider_name = azurerm_virtual_hub_security_partner_provider.test.security_provider_name +} +`, config) +} + +func testAccAzureRMVirtualHubSecurityPartnerProvider_complete(data acceptance.TestData) string { + template := testAccAzureRMVirtualHubSecurityPartnerProvider_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_hub_security_partner_provider" "test" { + name = "acctest-SPP-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + virtual_hub_id = azurerm_virtual_hub.test.id + security_provider_name = "ZScaler" + + tags = { + ENv = "Test" + } + + depends_on = [azurerm_vpn_gateway.test] +} +`, template, data.RandomInteger) +} diff --git a/azurerm/internal/services/network/tests/virtual_network_gateway_connection_resource_test.go b/azurerm/internal/services/network/tests/virtual_network_gateway_connection_resource_test.go index d55a4c1f859d..f0194f4f29b1 100644 --- a/azurerm/internal/services/network/tests/virtual_network_gateway_connection_resource_test.go +++ b/azurerm/internal/services/network/tests/virtual_network_gateway_connection_resource_test.go @@ -188,6 +188,32 @@ func TestAccAzureRMVirtualNetworkGatewayConnection_updatingSharedKey(t *testing. }) } +func TestAccAzureRMVirtualNetworkGatewayConnection_useLocalAzureIpAddressEnabled(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_virtual_network_gateway_connection", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMVirtualNetworkGatewayConnectionDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMVirtualNetworkGatewayConnection_useLocalAzureIpAddressEnabled(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualNetworkGatewayConnectionExists(data.ResourceName), + ), + }, + data.ImportStep(), + { + Config: testAccAzureRMVirtualNetworkGatewayConnection_useLocalAzureIpAddressEnabledUpdate(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualNetworkGatewayConnectionExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + func testCheckAzureRMVirtualNetworkGatewayConnectionExists(resourceName string) resource.TestCheckFunc { return func(s *terraform.State) error { client := acceptance.AzureProvider.Meta().(*clients.Client).Network.VnetGatewayConnectionsClient @@ -227,7 +253,6 @@ func testCheckAzureRMVirtualNetworkGatewayConnectionDestroy(s *terraform.State) resourceGroup := rs.Primary.Attributes["resource_group_name"] resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { return nil } @@ -787,3 +812,143 @@ resource "azurerm_virtual_network_gateway_connection" "test" { } `, data.RandomInteger, data.Locations.Primary) } + +func testAccAzureRMVirtualNetworkGatewayConnection_useLocalAzureIpAddressEnabled(data acceptance.TestData) string { + return fmt.Sprintf(` +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestvn-%d" + location = 
azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + address_space = ["10.0.0.0/16"] +} + +resource "azurerm_subnet" "test" { + name = "GatewaySubnet" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.1.0/24" +} + +resource "azurerm_public_ip" "test" { + name = "acctestip-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" + sku = "Standard" +} + +resource "azurerm_virtual_network_gateway" "test" { + name = "acctestgw-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + type = "Vpn" + vpn_type = "RouteBased" + sku = "VpnGw1" + private_ip_address_enabled = true + ip_configuration { + name = "vnetGatewayConfig" + public_ip_address_id = azurerm_public_ip.test.id + private_ip_address_allocation = "Dynamic" + subnet_id = azurerm_subnet.test.id + } +} + +resource "azurerm_local_network_gateway" "test" { + name = "acctestlgw-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + gateway_address = "168.62.225.23" + address_space = ["10.1.1.0/24"] +} + +resource "azurerm_virtual_network_gateway_connection" "test" { + name = "acctestgwc-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + local_azure_ip_address_enabled = true + + type = "IPsec" + virtual_network_gateway_id = azurerm_virtual_network_gateway.test.id + local_network_gateway_id = azurerm_local_network_gateway.test.id + + shared_key = "4-v3ry-53cr37-1p53c-5h4r3d-k3y" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func testAccAzureRMVirtualNetworkGatewayConnection_useLocalAzureIpAddressEnabledUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestvn-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + address_space = ["10.0.0.0/16"] +} + +resource "azurerm_subnet" "test" { + name = "GatewaySubnet" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.1.0/24" +} + +resource "azurerm_public_ip" "test" { + name = "acctestip-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" + sku = "Standard" +} + +resource "azurerm_virtual_network_gateway" "test" { + name = "acctestgw-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + type = "Vpn" + vpn_type = "RouteBased" + sku = "VpnGw1" + ip_configuration { + name = "vnetGatewayConfig" + public_ip_address_id = azurerm_public_ip.test.id + private_ip_address_allocation = "Dynamic" + subnet_id = azurerm_subnet.test.id + } +} + +resource "azurerm_local_network_gateway" "test" { + name = "acctestlgw-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + gateway_address = "168.62.225.23" + address_space = ["10.1.1.0/24"] +} + +resource "azurerm_virtual_network_gateway_connection" "test" { 
+ name = "acctestgwc-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + local_azure_ip_address_enabled = false + + type = "IPsec" + virtual_network_gateway_id = azurerm_virtual_network_gateway.test.id + local_network_gateway_id = azurerm_local_network_gateway.test.id + dpd_timeout_seconds = 30 + + shared_key = "4-v3ry-53cr37-1p53c-5h4r3d-k3y" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/network/tests/virtual_network_gateway_resource_test.go b/azurerm/internal/services/network/tests/virtual_network_gateway_resource_test.go index 882a2e672ff8..823c0ea34935 100644 --- a/azurerm/internal/services/network/tests/virtual_network_gateway_resource_test.go +++ b/azurerm/internal/services/network/tests/virtual_network_gateway_resource_test.go @@ -286,6 +286,32 @@ func TestAccAzureRMVirtualNetworkGateway_expressRoute(t *testing.T) { }) } +func TestAccAzureRMVirtualNetworkGateway_privateIpAddressEnabled(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_virtual_network_gateway", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMVirtualNetworkGatewayDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMVirtualNetworkGateway_privateIpAddressEnabled(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualNetworkGatewayExists("azurerm_virtual_network_gateway.test"), + ), + }, + data.ImportStep(), + { + Config: testAccAzureRMVirtualNetworkGateway_privateIpAddressEnabledUpdate(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualNetworkGatewayExists("azurerm_virtual_network_gateway.test"), + ), + }, + data.ImportStep(), + }, + }) +} + func testCheckAzureRMVirtualNetworkGatewayExists(resourceName string) resource.TestCheckFunc { return func(s *terraform.State) error { client := acceptance.AzureProvider.Meta().(*clients.Client).Network.VnetGatewayClient @@ -325,7 +351,6 @@ func testCheckAzureRMVirtualNetworkGatewayDestroy(s *terraform.State) error { resourceGroup := rs.Primary.Attributes["resource_group_name"] resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { return nil } @@ -963,3 +988,113 @@ resource "azurerm_virtual_network_gateway" "test" { } `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, generation) } + +func testAccAzureRMVirtualNetworkGateway_privateIpAddressEnabled(data acceptance.TestData) string { + return fmt.Sprintf(` +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestvn-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + address_space = ["10.0.0.0/16"] +} + +resource "azurerm_subnet" "test" { + name = "GatewaySubnet" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.1.0/24" +} + +resource "azurerm_public_ip" "test" { + name = "acctest-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" + sku = "Standard" +} + +resource "azurerm_virtual_network_gateway" "test" { + name = "acctest-%d" + 
location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + type = "Vpn" + vpn_type = "RouteBased" + sku = "VpnGw1" + private_ip_address_enabled = true + + custom_route { + address_prefixes = [ + "101.168.0.6/32" + ] + } + + ip_configuration { + name = "vnetGatewayConfig" + public_ip_address_id = azurerm_public_ip.test.id + private_ip_address_allocation = "Dynamic" + subnet_id = azurerm_subnet.test.id + } +} + `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func testAccAzureRMVirtualNetworkGateway_privateIpAddressEnabledUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestvn-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + address_space = ["10.0.0.0/16"] +} + +resource "azurerm_subnet" "test" { + name = "GatewaySubnet" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.1.0/24" +} + +resource "azurerm_public_ip" "test" { + name = "acctest-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" + sku = "Standard" +} + +resource "azurerm_virtual_network_gateway" "test" { + name = "acctest-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + type = "Vpn" + vpn_type = "RouteBased" + sku = "VpnGw1" + private_ip_address_enabled = false + + custom_route { + address_prefixes = [ + "101.168.0.6/32" + ] + } + + ip_configuration { + name = "vnetGatewayConfig" + public_ip_address_id = azurerm_public_ip.test.id + private_ip_address_allocation = "Dynamic" + subnet_id = azurerm_subnet.test.id + } +} + `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/network/tests/virtual_network_resource_test.go b/azurerm/internal/services/network/tests/virtual_network_resource_test.go index 8e27c365f4c7..9b5666a0f1ec 100644 --- a/azurerm/internal/services/network/tests/virtual_network_resource_test.go +++ b/azurerm/internal/services/network/tests/virtual_network_resource_test.go @@ -203,6 +203,79 @@ func TestAccAzureRMVirtualNetwork_deleteSubnet(t *testing.T) { }) } +func TestAccAzureRMVirtualNetwork_bgpCommunity(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_virtual_network", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMVirtualNetworkDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMVirtualNetwork_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualNetworkExists(data.ResourceName), + ), + }, + data.ImportStep(), + { + Config: testAccAzureRMVirtualNetwork_bgpCommunity(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualNetworkExists(data.ResourceName), + ), + }, + data.ImportStep(), + { + Config: testAccAzureRMVirtualNetwork_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualNetworkExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMVirtualNetwork_vmProtection(t *testing.T) { + 
data := acceptance.BuildTestData(t, "azurerm_virtual_network", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMVirtualNetworkDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMVirtualNetwork_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualNetworkExists(data.ResourceName), + ), + }, + data.ImportStep(), + { + Config: testAccAzureRMVirtualNetwork_vmProtection(data, true), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualNetworkExists(data.ResourceName), + ), + }, + data.ImportStep(), + { + Config: testAccAzureRMVirtualNetwork_vmProtection(data, false), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualNetworkExists(data.ResourceName), + ), + }, + data.ImportStep(), + { + Config: testAccAzureRMVirtualNetwork_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVirtualNetworkExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + func testCheckAzureRMVirtualNetworkExists(resourceName string) resource.TestCheckFunc { return func(s *terraform.State) error { client := acceptance.AzureProvider.Meta().(*clients.Client).Network.VnetClient @@ -277,7 +350,6 @@ func testCheckAzureRMVirtualNetworkDestroy(s *terraform.State) error { resourceGroup := rs.Primary.Attributes["resource_group_name"] resp, err := client.Get(ctx, resourceGroup, name, "") - if err != nil { return nil } @@ -480,3 +552,57 @@ resource "azurerm_virtual_network" "test" { } `, data.RandomInteger, data.Locations.Primary, data.RandomInteger) } + +func testAccAzureRMVirtualNetwork_bgpCommunity(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + subnet { + name = "subnet1" + address_prefix = "10.0.1.0/24" + } + + bgp_community = "12076:20000" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func testAccAzureRMVirtualNetwork_vmProtection(data acceptance.TestData, enabled bool) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestvirtnet%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + subnet { + name = "subnet1" + address_prefix = "10.0.1.0/24" + } + + vm_protection_enabled = %t +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, enabled) +} diff --git a/azurerm/internal/services/network/tests/virtual_wan_data_source_test.go b/azurerm/internal/services/network/tests/virtual_wan_data_source_test.go new file mode 100644 index 000000000000..855888746a97 --- /dev/null +++ b/azurerm/internal/services/network/tests/virtual_wan_data_source_test.go @@ -0,0 +1,57 @@ +package tests + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" +) + +func TestAccDataSourceVirtualWan_basic(t *testing.T) { + data := 
acceptance.BuildTestData(t, "data.azurerm_virtual_wan", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + Steps: []resource.TestStep{ + { + Config: testAccDataSourceVirtualWan_basic(data), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet(data.ResourceName, "name"), + resource.TestCheckResourceAttrSet(data.ResourceName, "resource_group_name"), + ), + }, + }, + }) +} + +func testAccDataSourceVirtualWan_basic(data acceptance.TestData) string { + template := testAccDataSourceVirtualWan_template(data) + return fmt.Sprintf(` +%s +resource "azurerm_virtual_wan" "test" { + name = "test" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location +} + +data "azurerm_virtual_wan" "test" { + name = azurerm_virtual_wan.test.name + resource_group_name = azurerm_virtual_wan.test.resource_group_name +} + `, template) +} + +func testAccDataSourceVirtualWan_template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} +`, data.RandomInteger, data.Locations.Primary) +} diff --git a/azurerm/internal/services/network/tests/vpn_gateway_connection_resource_test.go b/azurerm/internal/services/network/tests/vpn_gateway_connection_resource_test.go new file mode 100644 index 000000000000..261d8a0fada3 --- /dev/null +++ b/azurerm/internal/services/network/tests/vpn_gateway_connection_resource_test.go @@ -0,0 +1,396 @@ +package tests + +import ( + "fmt" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func TestAccAzureRMVpnGatewayConnection_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_vpn_gateway_connection", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMVpnGatewayConnectionDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMVpnGatewayConnection_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVpnGatewayConnectionExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMVpnGatewayConnection_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_vpn_gateway_connection", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMVpnGatewayConnectionDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMVpnGatewayConnection_complete(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVpnGatewayConnectionExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMVpnGatewayConnection_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_vpn_gateway_connection", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { 
acceptance.PreCheck(t) },
+ Providers: acceptance.SupportedProviders,
+ CheckDestroy: testCheckAzureRMVpnGatewayConnectionDestroy,
+ Steps: []resource.TestStep{
+ {
+ Config: testAccAzureRMVpnGatewayConnection_basic(data),
+ Check: resource.ComposeTestCheckFunc(
+ testCheckAzureRMVpnGatewayConnectionExists(data.ResourceName),
+ ),
+ },
+ data.ImportStep(),
+ {
+ Config: testAccAzureRMVpnGatewayConnection_complete(data),
+ Check: resource.ComposeTestCheckFunc(
+ testCheckAzureRMVpnGatewayConnectionExists(data.ResourceName),
+ ),
+ },
+ data.ImportStep(),
+ {
+ Config: testAccAzureRMVpnGatewayConnection_basic(data),
+ Check: resource.ComposeTestCheckFunc(
+ testCheckAzureRMVpnGatewayConnectionExists(data.ResourceName),
+ ),
+ },
+ data.ImportStep(),
+ },
+ })
+}
+
+func TestAccAzureRMVpnGatewayConnection_customRouteTable(t *testing.T) {
+ data := acceptance.BuildTestData(t, "azurerm_vpn_gateway_connection", "test")
+
+ resource.ParallelTest(t, resource.TestCase{
+ PreCheck: func() { acceptance.PreCheck(t) },
+ Providers: acceptance.SupportedProviders,
+ CheckDestroy: testCheckAzureRMVpnGatewayConnectionDestroy,
+ Steps: []resource.TestStep{
+ {
+ Config: testAccAzureRMVpnGatewayConnection_customRouteTable(data),
+ Check: resource.ComposeTestCheckFunc(
+ testCheckAzureRMVpnGatewayConnectionExists(data.ResourceName),
+ ),
+ },
+ data.ImportStep(),
+ {
+ Config: testAccAzureRMVpnGatewayConnection_customRouteTableUpdate(data),
+ Check: resource.ComposeTestCheckFunc(
+ testCheckAzureRMVpnGatewayConnectionExists(data.ResourceName),
+ ),
+ },
+ data.ImportStep(),
+ },
+ })
+}
+
+func TestAccAzureRMVpnGatewayConnection_requiresImport(t *testing.T) {
+ data := acceptance.BuildTestData(t, "azurerm_vpn_gateway_connection", "test")
+
+ resource.ParallelTest(t, resource.TestCase{
+ PreCheck: func() { acceptance.PreCheck(t) },
+ Providers: acceptance.SupportedProviders,
+ CheckDestroy: testCheckAzureRMVpnGatewayConnectionDestroy,
+ Steps: []resource.TestStep{
+ {
+ Config: testAccAzureRMVpnGatewayConnection_basic(data),
+ Check: resource.ComposeTestCheckFunc(
+ testCheckAzureRMVpnGatewayConnectionExists(data.ResourceName),
+ ),
+ },
+ data.RequiresImportErrorStep(testAccAzureRMVpnGatewayConnection_requiresImport),
+ },
+ })
+}
+
+func testCheckAzureRMVpnGatewayConnectionExists(resourceName string) resource.TestCheckFunc {
+ return func(s *terraform.State) error {
+ client := acceptance.AzureProvider.Meta().(*clients.Client).Network.VpnConnectionsClient
+ ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext
+
+ rs, ok := s.RootModule().Resources[resourceName]
+ if !ok {
+ return fmt.Errorf("Vpn Gateway Connection not found: %s", resourceName)
+ }
+
+ id, err := parse.VpnConnectionID(rs.Primary.ID)
+ if err != nil {
+ return err
+ }
+
+ if resp, err := client.Get(ctx, id.ResourceGroup, id.VpnGatewayName, id.Name); err != nil {
+ if utils.ResponseWasNotFound(resp.Response) {
+ return fmt.Errorf("Vpn Gateway Connection %q (Resource Group %q / VPN Gateway %q) does not exist", id.Name, id.ResourceGroup, id.VpnGatewayName)
+ }
+ return fmt.Errorf("Getting on Network.VpnConnections: %+v", err)
+ }
+
+ return nil
+ }
+}
+
+func testCheckAzureRMVpnGatewayConnectionDestroy(s *terraform.State) error {
+ client := acceptance.AzureProvider.Meta().(*clients.Client).Network.VpnConnectionsClient
+ ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext
+
+ for _, rs := range s.RootModule().Resources {
+ if rs.Type != "azurerm_vpn_gateway_connection" {
+ continue
+ }
+
+ id, err := 
parse.VpnConnectionID(rs.Primary.ID)
+ if err != nil {
+ return err
+ }
+
+ resp, err := client.Get(ctx, id.ResourceGroup, id.VpnGatewayName, id.Name)
+ if err == nil {
+ return fmt.Errorf("Network.VpnConnections still exists")
+ }
+ if !utils.ResponseWasNotFound(resp.Response) {
+ return fmt.Errorf("Getting on Network.VpnConnections: %+v", err)
+ }
+ return nil
+ }
+
+ return nil
+}
+
+func testAccAzureRMVpnGatewayConnection_basic(data acceptance.TestData) string {
+ template := testAccAzureRMVpnGatewayConnection_template(data)
+ return fmt.Sprintf(`
+%s
+
+resource "azurerm_vpn_gateway_connection" "test" {
+ name = "acctest-VpnGwConn-%[2]d"
+ vpn_gateway_id = azurerm_vpn_gateway.test.id
+ remote_vpn_site_id = azurerm_vpn_site.test.id
+ vpn_link {
+ name = "link1"
+ vpn_site_link_id = azurerm_vpn_site.test.link[0].id
+ }
+ vpn_link {
+ name = "link2"
+ vpn_site_link_id = azurerm_vpn_site.test.link[1].id
+ }
+}
+`, template, data.RandomInteger)
+}
+
+func testAccAzureRMVpnGatewayConnection_complete(data acceptance.TestData) string {
+ template := testAccAzureRMVpnGatewayConnection_template(data)
+ return fmt.Sprintf(`
+%s
+
+resource "azurerm_vpn_gateway_connection" "test" {
+ name = "acctest-VpnGwConn-%[2]d"
+ vpn_gateway_id = azurerm_vpn_gateway.test.id
+ remote_vpn_site_id = azurerm_vpn_site.test.id
+ vpn_link {
+ name = "link1"
+ vpn_site_link_id = azurerm_vpn_site.test.link[0].id
+ }
+ vpn_link {
+ name = "link2"
+ vpn_site_link_id = azurerm_vpn_site.test.link[1].id
+ }
+}
+`, template, data.RandomInteger)
+}
+
+func testAccAzureRMVpnGatewayConnection_customRouteTable(data acceptance.TestData) string {
+ template := testAccAzureRMVpnGatewayConnection_template(data)
+ return fmt.Sprintf(`
+%s
+
+resource "azurerm_virtual_hub_route_table" "test" {
+ name = "acctest-RouteTable-%[2]d"
+ virtual_hub_id = azurerm_virtual_hub.test.id
+}
+
+resource "azurerm_vpn_gateway_connection" "test" {
+ name = "acctest-VpnGwConn-%[2]d"
+ vpn_gateway_id = azurerm_vpn_gateway.test.id
+ remote_vpn_site_id = azurerm_vpn_site.test.id
+ routing {
+ associated_route_table = azurerm_virtual_hub_route_table.test.id
+ propagated_route_tables = [azurerm_virtual_hub_route_table.test.id]
+ }
+ vpn_link {
+ name = "link1"
+ vpn_site_link_id = azurerm_vpn_site.test.link[0].id
+ ipsec_policy {
+ sa_lifetime_sec = 300
+ sa_data_size_kb = 1024
+ encryption_algorithm = "AES256"
+ integrity_algorithm = "SHA256"
+ ike_encryption_algorithm = "AES128"
+ ike_integrity_algorithm = "SHA256"
+ dh_group = "DHGroup14"
+ pfs_group = "PFS14"
+ }
+ bandwidth_mbps = 30
+ protocol = "IKEv2"
+ ratelimit_enabled = true
+ route_weight = 2
+ shared_key = "secret"
+ local_azure_ip_address_enabled = true
+ policy_based_traffic_selector_enabled = true
+ }
+
+ vpn_link {
+ name = "link3"
+ vpn_site_link_id = azurerm_vpn_site.test.link[1].id
+ }
+}
+`, template, data.RandomInteger)
+}
+
+func testAccAzureRMVpnGatewayConnection_customRouteTableUpdate(data acceptance.TestData) string {
+ template := testAccAzureRMVpnGatewayConnection_template(data)
+ return fmt.Sprintf(`
+%s
+
+resource "azurerm_virtual_hub_route_table" "test" {
+ name = "acctest-RouteTable-%[2]d"
+ virtual_hub_id = azurerm_virtual_hub.test.id
+}
+
+resource "azurerm_virtual_hub_route_table" "test2" {
+ name = "acctest-RouteTable-%[2]d-2"
+ virtual_hub_id = azurerm_virtual_hub.test.id
+}
+
+resource "azurerm_vpn_gateway_connection" "test" {
+ name = "acctest-VpnGwConn-%[2]d"
+ vpn_gateway_id = azurerm_vpn_gateway.test.id
+ remote_vpn_site_id = azurerm_vpn_site.test.id
+ routing {
+ 
associated_route_table = azurerm_virtual_hub_route_table.test2.id + propagated_route_tables = [azurerm_virtual_hub_route_table.test2.id] + } + vpn_link { + name = "link1" + vpn_site_link_id = azurerm_vpn_site.test.link[0].id + ipsec_policy { + sa_lifetime_sec = 300 + sa_data_size_kb = 1024 + encryption_algorithm = "AES256" + integrity_algorithm = "SHA256" + ike_encryption_algorithm = "AES128" + ike_integrity_algorithm = "SHA256" + dh_group = "DHGroup14" + pfs_group = "PFS14" + } + bandwidth_mbps = 30 + protocol = "IKEv2" + ratelimit_enabled = true + route_weight = 2 + shared_key = "secret" + local_azure_ip_address_enabled = true + policy_based_traffic_selector_enabled = true + } + + vpn_link { + name = "link3" + vpn_site_link_id = azurerm_vpn_site.test.link[1].id + } +} +`, template, data.RandomInteger) +} + +func testAccAzureRMVpnGatewayConnection_requiresImport(data acceptance.TestData) string { + template := testAccAzureRMVpnGatewayConnection_basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_vpn_gateway_connection" "import" { + name = azurerm_vpn_gateway_connection.test.name + vpn_gateway_id = azurerm_vpn_gateway_connection.test.vpn_gateway_id + remote_vpn_site_id = azurerm_vpn_gateway_connection.test.remote_vpn_site_id + dynamic "vpn_link" { + for_each = azurerm_vpn_gateway_connection.test.vpn_link + iterator = v + content { + name = v.value["name"] + vpn_site_link_id = v.value["vpn_site_link_id"] + } + } +} +`, template) +} + +func testAccAzureRMVpnGatewayConnection_template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} +resource "azurerm_resource_group" "test" { + name = "acctestRG-vpn-%[1]d" + location = "%[2]s" +} + + +resource "azurerm_virtual_wan" "test" { + name = "acctest-vwan-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location +} + +resource "azurerm_virtual_hub" "test" { + name = "acctest-vhub-%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + virtual_wan_id = azurerm_virtual_wan.test.id + address_prefix = "10.0.0.0/24" +} + +resource "azurerm_vpn_gateway" "test" { + name = "acctest-vpngw-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + virtual_hub_id = azurerm_virtual_hub.test.id +} + +resource "azurerm_vpn_site" "test" { + name = "acctest-vpnsite-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + virtual_wan_id = azurerm_virtual_wan.test.id + link { + name = "link1" + ip_address = "10.0.0.1" + } + link { + name = "link2" + ip_address = "10.0.0.2" + } +} +`, data.RandomInteger, data.Locations.Primary) +} diff --git a/azurerm/internal/services/network/tests/vpn_gateway_resource_test.go b/azurerm/internal/services/network/tests/vpn_gateway_resource_test.go index 85627701ac09..4fec6452bd24 100644 --- a/azurerm/internal/services/network/tests/vpn_gateway_resource_test.go +++ b/azurerm/internal/services/network/tests/vpn_gateway_resource_test.go @@ -224,6 +224,14 @@ resource "azurerm_vpn_gateway" "test" { bgp_settings { asn = 65515 peer_weight = 0 + + instance_0_bgp_peering_address { + custom_ips = ["169.254.21.5"] + } + + instance_1_bgp_peering_address { + custom_ips = ["169.254.21.10"] + } } } `, template, data.RandomInteger) diff --git a/azurerm/internal/services/network/tests/vpn_server_configuration_resource_test.go 
b/azurerm/internal/services/network/tests/vpn_server_configuration_resource_test.go index 5a86c9a1dd48..4053e93ef0b4 100644 --- a/azurerm/internal/services/network/tests/vpn_server_configuration_resource_test.go +++ b/azurerm/internal/services/network/tests/vpn_server_configuration_resource_test.go @@ -88,7 +88,54 @@ func TestAccAzureRMVPNServerConfiguration_radius(t *testing.T) { CheckDestroy: testCheckAzureRMVPNServerConfigurationDestroy, Steps: []resource.TestStep{ { - Config: testAccAzureRMAzureRMVPNServerConfiguration_radius(data), + Config: testAccAzureRMAzureRMVPNServerConfiguration_singleRadiusDeprecated(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVPNServerConfigurationExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMVPNServerConfiguration_multipleRadius(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_vpn_server_configuration", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMVPNServerConfigurationDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMAzureRMVPNServerConfiguration_singleRadiusDeprecated(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVPNServerConfigurationExists(data.ResourceName), + ), + }, + data.ImportStep(), + { + Config: testAccAzureRMAzureRMVPNServerConfiguration_singleRadius(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVPNServerConfigurationExists(data.ResourceName), + ), + }, + data.ImportStep(), + { + Config: testAccAzureRMAzureRMVPNServerConfiguration_multipleRadius(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVPNServerConfigurationExists(data.ResourceName), + ), + }, + data.ImportStep(), + { + Config: testAccAzureRMAzureRMVPNServerConfiguration_singleRadius(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMVPNServerConfigurationExists(data.ResourceName), + ), + }, + data.ImportStep(), + { + Config: testAccAzureRMAzureRMVPNServerConfiguration_azureAD(data), Check: resource.ComposeTestCheckFunc( testCheckAzureRMVPNServerConfigurationExists(data.ResourceName), ), @@ -121,7 +168,7 @@ func TestAccAzureRMVPNServerConfiguration_multipleAuth(t *testing.T) { }, data.ImportStep(), { - Config: testAccAzureRMAzureRMVPNServerConfiguration_radius(data), + Config: testAccAzureRMAzureRMVPNServerConfiguration_singleRadius(data), Check: resource.ComposeTestCheckFunc( testCheckAzureRMVPNServerConfigurationExists(data.ResourceName), ), @@ -306,7 +353,7 @@ EOF `, template, data.RandomInteger) } -func testAccAzureRMAzureRMVPNServerConfiguration_radius(data acceptance.TestData) string { +func testAccAzureRMAzureRMVPNServerConfiguration_singleRadiusDeprecated(data acceptance.TestData) string { template := testAccAzureRMAzureRMVPNServerConfiguration_template(data) return fmt.Sprintf(` %s @@ -351,6 +398,110 @@ EOF `, template, data.RandomInteger) } +func testAccAzureRMAzureRMVPNServerConfiguration_singleRadius(data acceptance.TestData) string { + template := testAccAzureRMAzureRMVPNServerConfiguration_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_vpn_server_configuration" "test" { + name = "acctestVPNSC-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + vpn_authentication_types = ["Radius"] + + radius { + + server { + address = "10.105.1.1" + secret = "vindicators-the-return-of-worldender" + score = 15 + } + + 
server_root_certificate { + name = "DigiCert-Federated-ID-Root-CA" + public_cert_data = <%&:?/+]+$`).Match([]byte(value)); !matched { + errors = append(errors, fmt.Errorf("%q must not contain characters from %q", k, "<>&:?/+%")) + } + + return warnings, errors +} diff --git a/azurerm/internal/services/network/validate/hub_route_table_name_test.go b/azurerm/internal/services/network/validate/hub_route_table_name_test.go new file mode 100644 index 000000000000..35805f43c6e0 --- /dev/null +++ b/azurerm/internal/services/network/validate/hub_route_table_name_test.go @@ -0,0 +1,38 @@ +package validate + +import ( + "testing" +) + +func TestHubRouteTableName(t *testing.T) { + cases := []struct { + Input string + ExpectError bool + }{ + { + Input: "", + ExpectError: true, + }, + { + Input: "hello", + ExpectError: false, + }, + { + Input: "hello-world", + ExpectError: false, + }, + { + Input: "test<", + ExpectError: true, + }, + } + + for _, tc := range cases { + _, errors := HubRouteTableName(tc.Input, "name") + + hasError := len(errors) > 0 + if tc.ExpectError && !hasError { + t.Fatalf("Expected the Virtual Hub Route Table Name to trigger a validation error for '%s'", tc.Input) + } + } +} diff --git a/azurerm/internal/services/network/validate/hub_virtual_network_connection_id.go b/azurerm/internal/services/network/validate/hub_virtual_network_connection_id.go new file mode 100644 index 000000000000..a44038a11dc2 --- /dev/null +++ b/azurerm/internal/services/network/validate/hub_virtual_network_connection_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" +) + +func HubVirtualNetworkConnectionID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.HubVirtualNetworkConnectionID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/network/validate/hub_virtual_network_connection_id_test.go b/azurerm/internal/services/network/validate/hub_virtual_network_connection_id_test.go new file mode 100644 index 000000000000..67fa6a5119b9 --- /dev/null +++ b/azurerm/internal/services/network/validate/hub_virtual_network_connection_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestHubVirtualNetworkConnectionID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing VirtualHubName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for VirtualHubName + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1/hubVirtualNetworkConnections/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1/hubVirtualNetworkConnections/hubConnection1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/VIRTUALHUBS/VIRTUALHUB1/HUBVIRTUALNETWORKCONNECTIONS/HUBCONNECTION1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := HubVirtualNetworkConnectionID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/network/validate/ip_group_id.go b/azurerm/internal/services/network/validate/ip_group_id.go new file mode 100644 index 000000000000..ea86855853a9 --- /dev/null +++ b/azurerm/internal/services/network/validate/ip_group_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" +) + +func IpGroupID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.IpGroupID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/network/validate/ip_group_id_test.go b/azurerm/internal/services/network/validate/ip_group_id_test.go new file mode 100644 index 000000000000..35931f954caa --- /dev/null +++ b/azurerm/internal/services/network/validate/ip_group_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestIpGroupID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/ipGroups/", + Valid: false, + }, + + { + // valid + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/ipGroups/group1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/IPGROUPS/GROUP1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := IpGroupID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/network/validate/load_balancer.go b/azurerm/internal/services/network/validate/load_balancer.go deleted file mode 100644 index 715f8d5885fe..000000000000 --- a/azurerm/internal/services/network/validate/load_balancer.go +++ /dev/null @@ -1,22 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" -) - -func LoadBalancerID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.LoadBalancerID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} diff --git a/azurerm/internal/services/network/validate/nat_gateway.go b/azurerm/internal/services/network/validate/nat_gateway.go deleted file mode 100644 index 7ec9cdd42e6f..000000000000 --- a/azurerm/internal/services/network/validate/nat_gateway.go +++ /dev/null @@ -1,22 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" -) - -func NatGatewayID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.NatGatewayID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} diff --git a/azurerm/internal/services/network/validate/nat_gateway_id.go b/azurerm/internal/services/network/validate/nat_gateway_id.go new file mode 100644 index 000000000000..5d5ef86d5011 --- /dev/null +++ b/azurerm/internal/services/network/validate/nat_gateway_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" +) + +func NatGatewayID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.NatGatewayID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/network/validate/nat_gateway_id_test.go b/azurerm/internal/services/network/validate/nat_gateway_id_test.go new file mode 100644 index 000000000000..f573601848a0 --- /dev/null +++ b/azurerm/internal/services/network/validate/nat_gateway_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestNatGatewayID(t *testing.T) { + cases := []struct { + 
Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/natGateways/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/natGateways/gateway1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/NATGATEWAYS/GATEWAY1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := NatGatewayID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/network/validate/network_connection_monitor_endpoint_address.go b/azurerm/internal/services/network/validate/network_connection_monitor_endpoint_address.go new file mode 100644 index 000000000000..b501be273495 --- /dev/null +++ b/azurerm/internal/services/network/validate/network_connection_monitor_endpoint_address.go @@ -0,0 +1,28 @@ +package validate + +import ( + "fmt" + "net/url" +) + +func NetworkConnectionMonitorEndpointAddress(v interface{}, k string) (warnings []string, errors []error) { + value := v.(string) + + if len(value) == 0 { + errors = append(errors, fmt.Errorf("%q cannot be an empty string: %q", k, value)) + return warnings, errors + } + + url, err := url.Parse(value) + if err != nil { + errors = append(errors, fmt.Errorf("parsing %q: %q", k, value)) + return warnings, errors + } + + if url.Scheme != "" || url.RawQuery != "" { + errors = append(errors, fmt.Errorf("%q cannot contain scheme and query parameter: %q", k, value)) + return warnings, errors + } + + return warnings, errors +} diff --git a/azurerm/internal/services/network/validate/network_connection_monitor_endpoint_address_test.go b/azurerm/internal/services/network/validate/network_connection_monitor_endpoint_address_test.go new file mode 100644 index 000000000000..d3118464a8f4 --- /dev/null +++ b/azurerm/internal/services/network/validate/network_connection_monitor_endpoint_address_test.go @@ -0,0 +1,45 @@ +package validate + +import "testing" + +func TestNetworkConnectionMonitorEndpointAddressWithDomainName(t *testing.T) { + cases := []struct { + Value string + Errors int + }{ + { + Value: "", + Errors: 1, + }, + { + Value: "a-b", + Errors: 0, + }, + { + Value: "terraform.io", + Errors: 0, + }, + { + Value: "www.google.com", + Errors: 0, + }, + { + Value: "http://www.google.com", + Errors: 1, + }, + { + Value: "www.google.com/a/b?a=1", + Errors: 1, + }, + } + + for _, tc := range cases { + t.Run(tc.Value, func(t *testing.T) { + _, errors := NetworkConnectionMonitorEndpointAddress(tc.Value, "address") + + if len(errors) != tc.Errors { + 
t.Fatalf("Expected address to return %d error(s) not %d", tc.Errors, len(errors)) + } + }) + } +} diff --git a/azurerm/internal/services/network/validate/network_connection_monitor_http_path.go b/azurerm/internal/services/network/validate/network_connection_monitor_http_path.go new file mode 100644 index 000000000000..235dc7642c3e --- /dev/null +++ b/azurerm/internal/services/network/validate/network_connection_monitor_http_path.go @@ -0,0 +1,28 @@ +package validate + +import ( + "fmt" + "net/url" +) + +func NetworkConnectionMonitorHttpPath(v interface{}, k string) (warnings []string, errors []error) { + value := v.(string) + + if len(value) == 0 { + errors = append(errors, fmt.Errorf("%q cannot be an empty string: %q", k, value)) + return warnings, errors + } + + path, err := url.ParseRequestURI(value) + if err != nil { + errors = append(errors, fmt.Errorf("parsing %q: %q", k, value)) + return warnings, errors + } + + if path.IsAbs() { + errors = append(errors, fmt.Errorf("%q only accepts the absolute path: %q", k, value)) + return warnings, errors + } + + return warnings, errors +} diff --git a/azurerm/internal/services/network/validate/network_connection_monitor_http_path_test.go b/azurerm/internal/services/network/validate/network_connection_monitor_http_path_test.go new file mode 100644 index 000000000000..8053963dc9c3 --- /dev/null +++ b/azurerm/internal/services/network/validate/network_connection_monitor_http_path_test.go @@ -0,0 +1,45 @@ +package validate + +import "testing" + +func TestNetworkConnectionMonitorHttpPath(t *testing.T) { + cases := []struct { + Value string + Errors int + }{ + { + Value: "", + Errors: 1, + }, + { + Value: "a/b", + Errors: 1, + }, + { + Value: "/ab/b1/", + Errors: 0, + }, + { + Value: "/a/b", + Errors: 0, + }, + { + Value: "http://www.terraform.io", + Errors: 1, + }, + { + Value: "/a/b/", + Errors: 0, + }, + } + + for _, tc := range cases { + t.Run(tc.Value, func(t *testing.T) { + _, errors := NetworkConnectionMonitorHttpPath(tc.Value, "path") + + if len(errors) != tc.Errors { + t.Fatalf("Expected Path to return %d error(s) not %d", tc.Errors, len(errors)) + } + }) + } +} diff --git a/azurerm/internal/services/network/validate/network_connection_monitor_id.go b/azurerm/internal/services/network/validate/network_connection_monitor_id.go new file mode 100644 index 000000000000..deacc505f8b8 --- /dev/null +++ b/azurerm/internal/services/network/validate/network_connection_monitor_id.go @@ -0,0 +1,22 @@ +package validate + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" +) + +func NetworkConnectionMonitorID(i interface{}, k string) (warnings []string, errors []error) { + v, ok := i.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) + return + } + + if _, err := parse.ConnectionMonitorID(v); err != nil { + errors = append(errors, fmt.Errorf("Can not parse %q as a resource id: %v", k, err)) + return + } + + return warnings, errors +} diff --git a/azurerm/internal/services/network/validate/network_connection_monitor_valid_status_code_ranges.go b/azurerm/internal/services/network/validate/network_connection_monitor_valid_status_code_ranges.go new file mode 100644 index 000000000000..978a6c05f9dc --- /dev/null +++ b/azurerm/internal/services/network/validate/network_connection_monitor_valid_status_code_ranges.go @@ -0,0 +1,59 @@ +package validate + +import ( + "fmt" + "regexp" + "strconv" + "strings" +) + +func 
NetworkConnectionMonitorValidStatusCodeRanges(v interface{}, k string) (warnings []string, errors []error) { + value := v.(string) + + if len(value) == 0 { + errors = append(errors, fmt.Errorf("%q cannot be an empty string: %q", k, value)) + return warnings, errors + } + + if len(value) != 7 && len(value) != 3 { + errors = append(errors, fmt.Errorf("The len of %q should be 3 or 7: %q", k, value)) + return warnings, errors + } + + // Here the format of the expected code range is `301-304` + if len(value) == 7 { + if !regexp.MustCompile(`^([1-5][0-9][0-9]-([1-5][0-9][0-9]|600))$`).MatchString(value) { + errors = append(errors, fmt.Errorf("%q can contain hyphen: %q", k, value)) + return warnings, errors + } else { + vArray := strings.Split(value, "-") + + startNumber, err := strconv.Atoi(vArray[0]) + if err != nil { + errors = append(errors, fmt.Errorf("expected %s on the left of - to be an integer, got %v: %v", k, value, err)) + return warnings, errors + } + + endNumber, err := strconv.Atoi(vArray[1]) + if err != nil { + errors = append(errors, fmt.Errorf("expected %s on the right of - to be an integer, got %v: %v", k, value, err)) + return warnings, errors + } + + if startNumber >= endNumber { + errors = append(errors, fmt.Errorf("the start number of %q should less than the end number: %q", k, value)) + return warnings, errors + } + } + } + + // Here the format of the expected code ranges are `2xx` and `418` + if len(value) == 3 { + if !regexp.MustCompile(`^([1-5][0-9x][0-9x]|600)$`).MatchString(value) { + errors = append(errors, fmt.Errorf("%q can contain number with x or pure number: %q", k, value)) + return warnings, errors + } + } + + return warnings, errors +} diff --git a/azurerm/internal/services/network/validate/network_connection_monitor_valid_status_code_ranges_test.go b/azurerm/internal/services/network/validate/network_connection_monitor_valid_status_code_ranges_test.go new file mode 100644 index 000000000000..920009458c4b --- /dev/null +++ b/azurerm/internal/services/network/validate/network_connection_monitor_valid_status_code_ranges_test.go @@ -0,0 +1,77 @@ +package validate + +import "testing" + +func TestNetworkConnectionMonitorValidStatusCodeRanges(t *testing.T) { + cases := []struct { + Value string + Errors int + }{ + { + Value: "", + Errors: 1, + }, + { + Value: "100", + Errors: 0, + }, + { + Value: "599", + Errors: 0, + }, + { + Value: "600", + Errors: 0, + }, + { + Value: "1xx", + Errors: 0, + }, + { + Value: "10x", + Errors: 0, + }, + { + Value: "1x2", + Errors: 0, + }, + { + Value: "259-379", + Errors: 0, + }, + { + Value: "489-379", + Errors: 1, + }, + { + Value: "30x-4xx", + Errors: 1, + }, + { + Value: "7xx", + Errors: 1, + }, + { + Value: "99", + Errors: 1, + }, + { + Value: "888", + Errors: 1, + }, + { + Value: "1111", + Errors: 1, + }, + } + + for _, tc := range cases { + t.Run(tc.Value, func(t *testing.T) { + _, errors := NetworkConnectionMonitorValidStatusCodeRanges(tc.Value, "valid_status_code_ranges") + + if len(errors) != tc.Errors { + t.Fatalf("Expected valid_status_code_ranges to return %d error(s) not %d", tc.Errors, len(errors)) + } + }) + } +} diff --git a/azurerm/internal/services/network/validate/network_interface.go b/azurerm/internal/services/network/validate/network_interface.go deleted file mode 100644 index e25f80bb69a5..000000000000 --- a/azurerm/internal/services/network/validate/network_interface.go +++ /dev/null @@ -1,22 +0,0 @@ -package validate - -import ( - "fmt" - - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" -) - -func NetworkInterfaceID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.NetworkInterfaceID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} diff --git a/azurerm/internal/services/network/validate/network_interface_id.go b/azurerm/internal/services/network/validate/network_interface_id.go new file mode 100644 index 000000000000..1f4d37eacac3 --- /dev/null +++ b/azurerm/internal/services/network/validate/network_interface_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" +) + +func NetworkInterfaceID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.NetworkInterfaceID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/network/validate/network_interface_id_test.go b/azurerm/internal/services/network/validate/network_interface_id_test.go new file mode 100644 index 000000000000..cc3b6b42d3a1 --- /dev/null +++ b/azurerm/internal/services/network/validate/network_interface_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestNetworkInterfaceID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/networkInterfaces/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/networkInterfaces/networkInterface1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/NETWORKINTERFACES/NETWORKINTERFACE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := NetworkInterfaceID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/network/validate/network_watcher_id.go b/azurerm/internal/services/network/validate/network_watcher_id.go new file mode 
100644 index 000000000000..e538e8a6f9a8 --- /dev/null +++ b/azurerm/internal/services/network/validate/network_watcher_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" +) + +func NetworkWatcherID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.NetworkWatcherID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/network/validate/network_watcher_id_test.go b/azurerm/internal/services/network/validate/network_watcher_id_test.go new file mode 100644 index 000000000000..d98aa1a98fdf --- /dev/null +++ b/azurerm/internal/services/network/validate/network_watcher_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestNetworkWatcherID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/networkWatchers/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/networkWatchers/watcher1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/NETWORKWATCHERS/WATCHER1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := NetworkWatcherID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/network/validate/packet_capture_id.go b/azurerm/internal/services/network/validate/packet_capture_id.go new file mode 100644 index 000000000000..39d312a6575f --- /dev/null +++ b/azurerm/internal/services/network/validate/packet_capture_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" +) + +func PacketCaptureID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.PacketCaptureID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git 
a/azurerm/internal/services/network/validate/packet_capture_id_test.go b/azurerm/internal/services/network/validate/packet_capture_id_test.go new file mode 100644 index 000000000000..2f670bd2f8c6 --- /dev/null +++ b/azurerm/internal/services/network/validate/packet_capture_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestPacketCaptureID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing NetworkWatcherName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for NetworkWatcherName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/networkWatchers/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/networkWatchers/watcher1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/networkWatchers/watcher1/packetCaptures/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/networkWatchers/watcher1/packetCaptures/capture1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/NETWORKWATCHERS/WATCHER1/PACKETCAPTURES/CAPTURE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := PacketCaptureID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/network/validate/private_dns_zone_config_id.go b/azurerm/internal/services/network/validate/private_dns_zone_config_id.go new file mode 100644 index 000000000000..1268992331b1 --- /dev/null +++ b/azurerm/internal/services/network/validate/private_dns_zone_config_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" +) + +func PrivateDnsZoneConfigID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.PrivateDnsZoneConfigID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/network/validate/private_dns_zone_config_id_test.go b/azurerm/internal/services/network/validate/private_dns_zone_config_id_test.go new file mode 100644 index 
000000000000..080a01db57e1 --- /dev/null +++ b/azurerm/internal/services/network/validate/private_dns_zone_config_id_test.go @@ -0,0 +1,100 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestPrivateDnsZoneConfigID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing PrivateEndpointName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for PrivateEndpointName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/", + Valid: false, + }, + + { + // missing PrivateDnsZoneGroupName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/endpoint1/", + Valid: false, + }, + + { + // missing value for PrivateDnsZoneGroupName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/endpoint1/privateDnsZoneGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/endpoint1/privateDnsZoneGroups/privateDnsZoneGroup1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/endpoint1/privateDnsZoneGroups/privateDnsZoneGroup1/privateDnsZoneConfigs/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/endpoint1/privateDnsZoneGroups/privateDnsZoneGroup1/privateDnsZoneConfigs/privateDnsZoneConfig1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/PRIVATEENDPOINTS/ENDPOINT1/PRIVATEDNSZONEGROUPS/PRIVATEDNSZONEGROUP1/PRIVATEDNSZONECONFIGS/PRIVATEDNSZONECONFIG1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := PrivateDnsZoneConfigID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/network/validate/private_dns_zone_group_id.go b/azurerm/internal/services/network/validate/private_dns_zone_group_id.go new file mode 100644 index 000000000000..0da6a98bf580 --- /dev/null +++ b/azurerm/internal/services/network/validate/private_dns_zone_group_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" +) + +func 
PrivateDnsZoneGroupID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.PrivateDnsZoneGroupID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/network/validate/private_dns_zone_group_id_test.go b/azurerm/internal/services/network/validate/private_dns_zone_group_id_test.go new file mode 100644 index 000000000000..b8ba6dbe6a85 --- /dev/null +++ b/azurerm/internal/services/network/validate/private_dns_zone_group_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestPrivateDnsZoneGroupID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing PrivateEndpointName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for PrivateEndpointName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/endpoint1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/endpoint1/privateDnsZoneGroups/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/endpoint1/privateDnsZoneGroups/privateDnsZoneGroup1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/PRIVATEENDPOINTS/ENDPOINT1/PRIVATEDNSZONEGROUPS/PRIVATEDNSZONEGROUP1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := PrivateDnsZoneGroupID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/network/validate/private_endpoint_id.go b/azurerm/internal/services/network/validate/private_endpoint_id.go new file mode 100644 index 000000000000..05f778662971 --- /dev/null +++ b/azurerm/internal/services/network/validate/private_endpoint_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" +) + +func PrivateEndpointID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = 
append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.PrivateEndpointID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/network/validate/private_endpoint_id_test.go b/azurerm/internal/services/network/validate/private_endpoint_id_test.go new file mode 100644 index 000000000000..76cd97afa48e --- /dev/null +++ b/azurerm/internal/services/network/validate/private_endpoint_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestPrivateEndpointID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateEndpoints/endpoint1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/PRIVATEENDPOINTS/ENDPOINT1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := PrivateEndpointID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/network/validate/public_ip_address.go b/azurerm/internal/services/network/validate/public_ip_address.go deleted file mode 100644 index 6c195be9784f..000000000000 --- a/azurerm/internal/services/network/validate/public_ip_address.go +++ /dev/null @@ -1,22 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" -) - -func PublicIPAddressID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.PublicIPAddressID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} diff --git a/azurerm/internal/services/network/validate/public_ip_address_id.go b/azurerm/internal/services/network/validate/public_ip_address_id.go new file mode 100644 index 000000000000..6c0dc8cfcca0 --- /dev/null +++ b/azurerm/internal/services/network/validate/public_ip_address_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" +) + +func PublicIpAddressID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.PublicIpAddressID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/network/validate/public_ip_address_id_test.go b/azurerm/internal/services/network/validate/public_ip_address_id_test.go new file mode 100644 index 000000000000..eb490aa4a5f4 --- /dev/null +++ b/azurerm/internal/services/network/validate/public_ip_address_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestPublicIpAddressID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/publicIPAddresses/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/publicIPAddresses/publicIpAddress1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/PUBLICIPADDRESSES/PUBLICIPADDRESS1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := PublicIpAddressID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/network/validate/route_filter_id.go b/azurerm/internal/services/network/validate/route_filter_id.go new file mode 100644 index 000000000000..561a9b79475e --- /dev/null +++ b/azurerm/internal/services/network/validate/route_filter_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" +) + +func RouteFilterID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.RouteFilterID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/network/validate/route_filter_id_test.go b/azurerm/internal/services/network/validate/route_filter_id_test.go new file mode 100644 index 000000000000..2522d444defc --- /dev/null +++ 
b/azurerm/internal/services/network/validate/route_filter_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestRouteFilterID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/routeFilters/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/routeFilters/filter1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/ROUTEFILTERS/FILTER1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := RouteFilterID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/network/validate/security_partner_provider_id.go b/azurerm/internal/services/network/validate/security_partner_provider_id.go new file mode 100644 index 000000000000..c343f2b7cd45 --- /dev/null +++ b/azurerm/internal/services/network/validate/security_partner_provider_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" +) + +func SecurityPartnerProviderID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.SecurityPartnerProviderID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/network/validate/security_partner_provider_id_test.go b/azurerm/internal/services/network/validate/security_partner_provider_id_test.go new file mode 100644 index 000000000000..94bd1c6bab70 --- /dev/null +++ b/azurerm/internal/services/network/validate/security_partner_provider_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestSecurityPartnerProviderID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: 
false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/securityPartnerProviders/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/securityPartnerProviders/partnerProvider1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/SECURITYPARTNERPROVIDERS/PARTNERPROVIDER1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := SecurityPartnerProviderID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/network/validate/subnet.go b/azurerm/internal/services/network/validate/subnet.go deleted file mode 100644 index 625f73014824..000000000000 --- a/azurerm/internal/services/network/validate/subnet.go +++ /dev/null @@ -1,23 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" -) - -// SubnetID validates that the specified ID is a valid Subnet ID -func SubnetID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.SubnetID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} diff --git a/azurerm/internal/services/network/validate/subnet_id.go b/azurerm/internal/services/network/validate/subnet_id.go new file mode 100644 index 000000000000..672da2896c72 --- /dev/null +++ b/azurerm/internal/services/network/validate/subnet_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" +) + +func SubnetID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.SubnetID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/network/validate/subnet_id_test.go b/azurerm/internal/services/network/validate/subnet_id_test.go new file mode 100644 index 000000000000..f7f377a7d95a --- /dev/null +++ b/azurerm/internal/services/network/validate/subnet_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestSubnetID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", 
+ Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing VirtualNetworkName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for VirtualNetworkName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualNetworks/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualNetworks/network1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualNetworks/network1/subnets/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualNetworks/network1/subnets/subnet1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/VIRTUALNETWORKS/NETWORK1/SUBNETS/SUBNET1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := SubnetID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/network/validate/subnet_service_endpoint_storage_policy_id.go b/azurerm/internal/services/network/validate/subnet_service_endpoint_storage_policy_id.go new file mode 100644 index 000000000000..5c272f8f432e --- /dev/null +++ b/azurerm/internal/services/network/validate/subnet_service_endpoint_storage_policy_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" +) + +func SubnetServiceEndpointStoragePolicyID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.SubnetServiceEndpointStoragePolicyID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/network/validate/subnet_service_endpoint_storage_policy_id_test.go b/azurerm/internal/services/network/validate/subnet_service_endpoint_storage_policy_id_test.go new file mode 100644 index 000000000000..b5b74dcc3b41 --- /dev/null +++ b/azurerm/internal/services/network/validate/subnet_service_endpoint_storage_policy_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestSubnetServiceEndpointStoragePolicyID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ServiceEndpointPolicyName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for ServiceEndpointPolicyName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/serviceEndpointPolicies/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/serviceEndpointPolicies/policy1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/SERVICEENDPOINTPOLICIES/POLICY1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := SubnetServiceEndpointStoragePolicyID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/network/validate/subnet_service_endpoint_storage_policy_name.go b/azurerm/internal/services/network/validate/subnet_service_endpoint_storage_policy_name.go new file mode 100644 index 000000000000..892ba048ed9b --- /dev/null +++ b/azurerm/internal/services/network/validate/subnet_service_endpoint_storage_policy_name.go @@ -0,0 +1,16 @@ +package validate + +import ( + "regexp" + + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" +) + +func SubnetServiceEndpointStoragePolicyName(i interface{}, k string) (warnings []string, errors []error) { + return validation.StringMatch(regexp.MustCompile(`^[^\W_]([\w.\-]{0,78}[\w])?$`), "The name can be up to 80 characters long. It must begin with a alphnum character, and it must end with a alphnum character or with '_'. 
The name may contain alphnum characters or '.', '-', '_'.")(i, k) +} + +func SubnetServiceEndpointStoragePolicyDefinitionName(i interface{}, k string) (warnings []string, errors []error) { + // Same rule as policy + return SubnetServiceEndpointStoragePolicyName(i, k) +} diff --git a/azurerm/internal/services/network/validate/subnet_service_endpoint_storage_policy_name_test.go b/azurerm/internal/services/network/validate/subnet_service_endpoint_storage_policy_name_test.go new file mode 100644 index 000000000000..4ce342f93043 --- /dev/null +++ b/azurerm/internal/services/network/validate/subnet_service_endpoint_storage_policy_name_test.go @@ -0,0 +1,65 @@ +package validate + +import ( + "strings" + "testing" +) + +func TestServiceEndpointPolicyName(t *testing.T) { + testData := []struct { + Name string + Input string + Error bool + }{ + { + Name: "Empty", + Input: "", + Error: true, + }, + { + Name: "Longest", + Input: strings.Repeat("a", 80), + Error: false, + }, + { + Name: "Too long", + Input: strings.Repeat("a", 81), + Error: true, + }, + { + Name: "One word lone of valid char", + Input: "a", + Error: false, + }, + { + Name: "One word lone of invalid char - '_'", + Input: "_", + Error: true, + }, + { + Name: "Invalid ending char - '-'", + Input: "a-", + Error: true, + }, + { + Name: "Invalid middle char", + Input: "a%a", + Error: true, + }, + { + Name: "Valid weired name", + Input: "1.-_", + Error: false, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %s: %q", v.Name, v.Input) + + _, errors := SubnetServiceEndpointStoragePolicyName(v.Input, "") + isError := len(errors) != 0 + if v.Error != isError { + t.Fatalf("Expected %t but got %t", v.Error, isError) + } + } +} diff --git a/azurerm/internal/services/network/validate/virtual_hub.go b/azurerm/internal/services/network/validate/virtual_hub.go deleted file mode 100644 index 288ccc51a50e..000000000000 --- a/azurerm/internal/services/network/validate/virtual_hub.go +++ /dev/null @@ -1,20 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" -) - -func ValidateVirtualHubID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - return nil, []error{fmt.Errorf("expected type of %q to be string", k)} - } - - if _, err := parse.VirtualHubID(v); err != nil { - return nil, []error{err} - } - - return nil, nil -} diff --git a/azurerm/internal/services/network/validate/virtual_hub_id.go b/azurerm/internal/services/network/validate/virtual_hub_id.go new file mode 100644 index 000000000000..ff225945475c --- /dev/null +++ b/azurerm/internal/services/network/validate/virtual_hub_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" +) + +func VirtualHubID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.VirtualHubID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/network/validate/virtual_hub_id_test.go b/azurerm/internal/services/network/validate/virtual_hub_id_test.go new file mode 100644 index 000000000000..e4fa096e2f7e --- /dev/null +++ 
b/azurerm/internal/services/network/validate/virtual_hub_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestVirtualHubID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/VIRTUALHUBS/VIRTUALHUB1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := VirtualHubID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/network/validate/virtual_hub_ip_configuration_id.go b/azurerm/internal/services/network/validate/virtual_hub_ip_configuration_id.go new file mode 100644 index 000000000000..32040630d5ae --- /dev/null +++ b/azurerm/internal/services/network/validate/virtual_hub_ip_configuration_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" +) + +func VirtualHubIpConfigurationID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.VirtualHubIpConfigurationID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/network/validate/virtual_hub_ip_configuration_id_test.go b/azurerm/internal/services/network/validate/virtual_hub_ip_configuration_id_test.go new file mode 100644 index 000000000000..ef880480db58 --- /dev/null +++ b/azurerm/internal/services/network/validate/virtual_hub_ip_configuration_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestVirtualHubIpConfigurationID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing VirtualHubName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for VirtualHubName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/", + Valid: false, + }, + + { + // missing IpConfigurationName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1/", + Valid: false, + }, + + { + // missing value for IpConfigurationName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1/ipConfigurations/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualHubs/virtualHub1/ipConfigurations/ipConfiguration1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/VIRTUALHUBS/VIRTUALHUB1/IPCONFIGURATIONS/IPCONFIGURATION1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := VirtualHubIpConfigurationID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/network/validate/virtual_hub_test.go b/azurerm/internal/services/network/validate/virtual_hub_test.go deleted file mode 100644 index 7ae3bda547ef..000000000000 --- a/azurerm/internal/services/network/validate/virtual_hub_test.go +++ /dev/null @@ -1,42 +0,0 @@ -package validate - -import "testing" - -func TestValidateVirtualHub(t *testing.T) { - testData := []struct { - Name string - Input string - Valid bool - }{ - { - Name: "Empty", - Input: "", - Valid: false, - }, - { - Name: "No Virtual Hubs Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo", - Valid: false, - }, - { - Name: "No Virtual Hubs Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/virtualHubs/", - Valid: false, - }, - { - Name: "Completed", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/virtualHubs/example", - Valid: true, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Input) - - _, errors := ValidateVirtualHubID(v.Input, "virtual_hub_id") - isValid := len(errors) == 0 - if v.Valid != isValid { - t.Fatalf("Expected %t but got %t", v.Valid, isValid) - } - } -} diff --git a/azurerm/internal/services/network/validate/virtual_network.go b/azurerm/internal/services/network/validate/virtual_network.go deleted file mode 100644 index 03b10e31f2ef..000000000000 --- a/azurerm/internal/services/network/validate/virtual_network.go +++ /dev/null @@ -1,23 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" -) - -// VirtualNetworkID validates that the specified ID is a valid Virtual Network ID -func VirtualNetworkID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := 
i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.VirtualNetworkID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} diff --git a/azurerm/internal/services/network/validate/virtual_network_bgp_community.go b/azurerm/internal/services/network/validate/virtual_network_bgp_community.go new file mode 100644 index 000000000000..2f31b8139fe4 --- /dev/null +++ b/azurerm/internal/services/network/validate/virtual_network_bgp_community.go @@ -0,0 +1,42 @@ +package validate + +import ( + "fmt" + "strconv" + "strings" +) + +func VirtualNetworkBgpCommunity(i interface{}, k string) (warnings []string, errors []error) { + v, ok := i.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) + return + } + + segments := strings.Split(v, ":") + if len(segments) != 2 { + errors = append(errors, fmt.Errorf(`invalid notation of bgp community: expected "x:y"`)) + return + } + + asn, err := strconv.Atoi(segments[0]) + if err != nil { + errors = append(errors, fmt.Errorf(`converting asn %q: %v`, segments[0], err)) + return + } + if !(asn > 0 && asn < 65535) { + errors = append(errors, fmt.Errorf(`asn %d exceeds range: [0, 65535]`, asn)) + return + } + + comm, err := strconv.Atoi(segments[1]) + if err != nil { + errors = append(errors, fmt.Errorf(`converting community value %q: %v`, segments[1], err)) + return + } + if !(comm > 0 && comm < 65535) { + errors = append(errors, fmt.Errorf(`community value %d exceeds range: [0, 65535]`, comm)) + return + } + return warnings, errors +} diff --git a/azurerm/internal/services/network/validate/virtual_network_id.go b/azurerm/internal/services/network/validate/virtual_network_id.go new file mode 100644 index 000000000000..ac47b2a465d7 --- /dev/null +++ b/azurerm/internal/services/network/validate/virtual_network_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" +) + +func VirtualNetworkID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.VirtualNetworkID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/network/validate/virtual_network_id_test.go b/azurerm/internal/services/network/validate/virtual_network_id_test.go new file mode 100644 index 000000000000..527f404c9702 --- /dev/null +++ b/azurerm/internal/services/network/validate/virtual_network_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestVirtualNetworkID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualNetworks/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualNetworks/network1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/VIRTUALNETWORKS/NETWORK1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := VirtualNetworkID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/network/validate/virtual_wan.go b/azurerm/internal/services/network/validate/virtual_wan.go deleted file mode 100644 index 8f206bfd9093..000000000000 --- a/azurerm/internal/services/network/validate/virtual_wan.go +++ /dev/null @@ -1,22 +0,0 @@ -package validate - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" -) - -func VirtualWanID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.VirtualWanID(v); err != nil { - errors = append(errors, fmt.Errorf("parsing %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} diff --git a/azurerm/internal/services/network/validate/virtual_wan_id.go b/azurerm/internal/services/network/validate/virtual_wan_id.go new file mode 100644 index 000000000000..94ad4d2cd69a --- /dev/null +++ b/azurerm/internal/services/network/validate/virtual_wan_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" +) + +func VirtualWanID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.VirtualWanID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/network/validate/virtual_wan_id_test.go b/azurerm/internal/services/network/validate/virtual_wan_id_test.go new file mode 100644 index 000000000000..1794886468da --- /dev/null +++ b/azurerm/internal/services/network/validate/virtual_wan_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestVirtualWanID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + 
Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualWans/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualWans/virtualWan1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/VIRTUALWANS/VIRTUALWAN1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := VirtualWanID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/network/validate/vpn_connection_id.go b/azurerm/internal/services/network/validate/vpn_connection_id.go new file mode 100644 index 000000000000..47698a17a1a3 --- /dev/null +++ b/azurerm/internal/services/network/validate/vpn_connection_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" +) + +func VpnConnectionID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.VpnConnectionID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/network/validate/vpn_connection_id_test.go b/azurerm/internal/services/network/validate/vpn_connection_id_test.go new file mode 100644 index 000000000000..654338bd8651 --- /dev/null +++ b/azurerm/internal/services/network/validate/vpn_connection_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestVpnConnectionID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing VpnGatewayName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for VpnGatewayName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnGateways/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnGateways/vpnGateway1/", + Valid: false, + }, + + { + 
// missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnGateways/vpnGateway1/vpnConnections/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnGateways/vpnGateway1/vpnConnections/vpnConnection1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/VPNGATEWAYS/VPNGATEWAY1/VPNCONNECTIONS/VPNCONNECTION1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := VpnConnectionID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/network/validate/vpn_gateway_id.go b/azurerm/internal/services/network/validate/vpn_gateway_id.go new file mode 100644 index 000000000000..244fcfb4f5b3 --- /dev/null +++ b/azurerm/internal/services/network/validate/vpn_gateway_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" +) + +func VpnGatewayID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.VpnGatewayID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/network/validate/vpn_gateway_id_test.go b/azurerm/internal/services/network/validate/vpn_gateway_id_test.go new file mode 100644 index 000000000000..fbce31496efe --- /dev/null +++ b/azurerm/internal/services/network/validate/vpn_gateway_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestVpnGatewayID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnGateways/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnGateways/vpnGateway1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/VPNGATEWAYS/VPNGATEWAY1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := VpnGatewayID(tc.Input, 
"test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/network/validate/vpn_site.go b/azurerm/internal/services/network/validate/vpn_site.go deleted file mode 100644 index bf81d92e9815..000000000000 --- a/azurerm/internal/services/network/validate/vpn_site.go +++ /dev/null @@ -1,28 +0,0 @@ -package validate - -import ( - "fmt" - "regexp" - - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" -) - -func VpnSiteID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.VpnSiteID(v); err != nil { - errors = append(errors, fmt.Errorf("parsing %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} - -func VpnSiteName() func(i interface{}, k string) (warnings []string, errors []error) { - return validation.StringMatch(regexp.MustCompile(`^[^'<>%&:?/+]+$`), "The value must not contain characters from '<>%&:?/+.") -} diff --git a/azurerm/internal/services/network/validate/vpn_site_id.go b/azurerm/internal/services/network/validate/vpn_site_id.go new file mode 100644 index 000000000000..b65ae2a5b1ab --- /dev/null +++ b/azurerm/internal/services/network/validate/vpn_site_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" +) + +func VpnSiteID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.VpnSiteID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/network/validate/vpn_site_id_test.go b/azurerm/internal/services/network/validate/vpn_site_id_test.go new file mode 100644 index 000000000000..6f9cc5b020f7 --- /dev/null +++ b/azurerm/internal/services/network/validate/vpn_site_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestVpnSiteID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnSites/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnSites/vpnSite1", + Valid: true, + 
}, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/VPNSITES/VPNSITE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := VpnSiteID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/network/validate/vpn_site_link_id.go b/azurerm/internal/services/network/validate/vpn_site_link_id.go new file mode 100644 index 000000000000..b71605ba7465 --- /dev/null +++ b/azurerm/internal/services/network/validate/vpn_site_link_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" +) + +func VpnSiteLinkID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.VpnSiteLinkID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/network/validate/vpn_site_link_id_test.go b/azurerm/internal/services/network/validate/vpn_site_link_id_test.go new file mode 100644 index 000000000000..9c2e298c9411 --- /dev/null +++ b/azurerm/internal/services/network/validate/vpn_site_link_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestVpnSiteLinkID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing VpnSiteName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for VpnSiteName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnSites/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnSites/vpnSite1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnSites/vpnSite1/vpnSiteLinks/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/vpnSites/vpnSite1/vpnSiteLinks/vpnSiteLink1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/VPNSITES/VPNSITE1/VPNSITELINKS/VPNSITELINK1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := VpnSiteLinkID(tc.Input, 
"test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/network/validate/vpn_site_name.go b/azurerm/internal/services/network/validate/vpn_site_name.go new file mode 100644 index 000000000000..6cfbd83125c5 --- /dev/null +++ b/azurerm/internal/services/network/validate/vpn_site_name.go @@ -0,0 +1,11 @@ +package validate + +import ( + "regexp" + + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" +) + +func VpnSiteName() func(i interface{}, k string) (warnings []string, errors []error) { + return validation.StringMatch(regexp.MustCompile(`^[^'<>%&:?/+]+$`), "The value must not contain characters from '<>%&:?/+.") +} diff --git a/azurerm/internal/services/network/virtual_hub_bgp_connection_resource.go b/azurerm/internal/services/network/virtual_hub_bgp_connection_resource.go new file mode 100644 index 000000000000..af47a851b594 --- /dev/null +++ b/azurerm/internal/services/network/virtual_hub_bgp_connection_resource.go @@ -0,0 +1,184 @@ +package network + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/validate" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmVirtualHubBgpConnection() *schema.Resource { + return &schema.Resource{ + Create: resourceArmVirtualHubBgpConnectionCreate, + Read: resourceArmVirtualHubBgpConnectionRead, + Delete: resourceArmVirtualHubBgpConnectionDelete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.BgpConnectionID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "virtual_hub_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.VirtualHubID, + }, + + "peer_asn": { + Type: schema.TypeInt, + Required: true, + ForceNew: true, + ValidateFunc: validation.IntAtLeast(0), + }, + + "peer_ip": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.IsIPv4Address, + }, + }, + } +} + +func resourceArmVirtualHubBgpConnectionCreate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Network.VirtualHubBgpConnectionClient + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.VirtualHubID(d.Get("virtual_hub_id").(string)) + if err != nil { + return err + } 
+
+ locks.ByName(id.Name, virtualHubResourceName)
+ defer locks.UnlockByName(id.Name, virtualHubResourceName)
+
+ name := d.Get("name").(string)
+
+ if d.IsNewResource() {
+ existing, err := client.Get(ctx, id.ResourceGroup, id.Name, name)
+ if err != nil {
+ if !utils.ResponseWasNotFound(existing.Response) {
+ return fmt.Errorf("checking for presence of existing Virtual Hub Bgp Connection %q (Resource Group %q / Virtual Hub %q): %+v", name, id.ResourceGroup, id.Name, err)
+ }
+ }
+
+ if existing.ID != nil && *existing.ID != "" {
+ return tf.ImportAsExistsError("azurerm_virtual_hub_bgp_connection", *existing.ID)
+ }
+ }
+
+ parameters := network.BgpConnection{
+ Name: utils.String(d.Get("name").(string)),
+ BgpConnectionProperties: &network.BgpConnectionProperties{
+ PeerAsn: utils.Int64(int64(d.Get("peer_asn").(int))),
+ PeerIP: utils.String(d.Get("peer_ip").(string)),
+ },
+ }
+
+ future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.Name, name, parameters)
+ if err != nil {
+ return fmt.Errorf("creating/updating Virtual Hub Bgp Connection %q (Resource Group %q / Virtual Hub %q): %+v", name, id.ResourceGroup, id.Name, err)
+ }
+
+ if err := future.WaitForCompletionRef(ctx, client.Client); err != nil {
+ return fmt.Errorf("waiting on creating/updating future for Virtual Hub Bgp Connection %q (Resource Group %q / Virtual Hub %q): %+v", name, id.ResourceGroup, id.Name, err)
+ }
+
+ resp, err := client.Get(ctx, id.ResourceGroup, id.Name, name)
+ if err != nil {
+ return fmt.Errorf("retrieving Virtual Hub Bgp Connection %q (Resource Group %q / Virtual Hub %q): %+v", name, id.ResourceGroup, id.Name, err)
+ }
+
+ if resp.ID == nil || *resp.ID == "" {
+ return fmt.Errorf("empty or nil ID returned for Virtual Hub Bgp Connection %q (Resource Group %q / Virtual Hub %q)", name, id.ResourceGroup, id.Name)
+ }
+
+ d.SetId(*resp.ID)
+
+ return resourceArmVirtualHubBgpConnectionRead(d, meta)
+}
+
+func resourceArmVirtualHubBgpConnectionRead(d *schema.ResourceData, meta interface{}) error {
+ client := meta.(*clients.Client).Network.VirtualHubBgpConnectionClient
+ ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d)
+ defer cancel()
+
+ id, err := parse.BgpConnectionID(d.Id())
+ if err != nil {
+ return err
+ }
+
+ resp, err := client.Get(ctx, id.ResourceGroup, id.VirtualHubName, id.Name)
+ if err != nil {
+ if utils.ResponseWasNotFound(resp.Response) {
+ log.Printf("[INFO] Virtual Hub Bgp Connection %q does not exist - removing from state", d.Id())
+ d.SetId("")
+ return nil
+ }
+
+ return fmt.Errorf("retrieving Virtual Hub Bgp Connection %q (Resource Group %q / Virtual Hub %q): %+v", id.Name, id.ResourceGroup, id.VirtualHubName, err)
+ }
+
+ d.Set("name", id.Name)
+ d.Set("virtual_hub_id", parse.NewVirtualHubID(id.SubscriptionId, id.ResourceGroup, id.VirtualHubName).ID())
+
+ if props := resp.BgpConnectionProperties; props != nil {
+ d.Set("peer_asn", props.PeerAsn)
+ d.Set("peer_ip", props.PeerIP)
+ }
+
+ return nil
+}
+
+func resourceArmVirtualHubBgpConnectionDelete(d *schema.ResourceData, meta interface{}) error {
+ client := meta.(*clients.Client).Network.VirtualHubBgpConnectionClient
+ ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d)
+ defer cancel()
+
+ id, err := parse.BgpConnectionID(d.Id())
+ if err != nil {
+ return err
+ }
+
+ locks.ByName(id.VirtualHubName, virtualHubResourceName)
+ defer locks.UnlockByName(id.VirtualHubName, virtualHubResourceName)
+
+ future, err := client.Delete(ctx, id.ResourceGroup, id.VirtualHubName, id.Name)
+ if err != nil {
+
return fmt.Errorf("deleting Virtual Hub Bgp Connection %q (Resource Group %q / Virtual Hub %q): %+v", id.Name, id.ResourceGroup, id.VirtualHubName, err) + } + + if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting on deleting future for Virtual Hub Bgp Connection %q (Resource Group %q / Virtual Hub %q): %+v", id.Name, id.ResourceGroup, id.VirtualHubName, err) + } + + return nil +} diff --git a/azurerm/internal/services/network/virtual_hub_connection_resource.go b/azurerm/internal/services/network/virtual_hub_connection_resource.go index 36f34d721174..2a56fac1e478 100644 --- a/azurerm/internal/services/network/virtual_hub_connection_resource.go +++ b/azurerm/internal/services/network/virtual_hub_connection_resource.go @@ -7,11 +7,12 @@ import ( "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) @@ -46,14 +47,14 @@ func resourceArmVirtualHubConnection() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: azure.ValidateResourceID, + ValidateFunc: validate.VirtualHubID, }, "remote_virtual_network_id": { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: azure.ValidateResourceID, + ValidateFunc: validate.VirtualNetworkID, }, // TODO 3.0: remove this property @@ -76,6 +77,82 @@ func resourceArmVirtualHubConnection() *schema.Resource { ForceNew: true, Default: false, }, + + "routing": { + Type: schema.TypeList, + Optional: true, + Computed: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "associated_route_table_id": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ValidateFunc: validate.HubRouteTableID, + }, + + "propagated_route_table": { + Type: schema.TypeList, + Optional: true, + Computed: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "labels": { + Type: schema.TypeSet, + Optional: true, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + + "route_table_ids": { + Type: schema.TypeList, + Optional: true, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validate.HubRouteTableID, + }, + }, + }, + }, + }, + + "static_vnet_route": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "address_prefixes": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.IsCIDR, + }, + }, + + "next_hop_ip_address": { + Type: schema.TypeString, 
+ Optional: true, + ValidateFunc: validation.IsIPv4Address, + }, + }, + }, + }, + }, + }, + }, }, } } @@ -117,6 +194,10 @@ func resourceArmVirtualHubConnectionCreateOrUpdate(d *schema.ResourceData, meta }, } + if v, ok := d.GetOk("routing"); ok { + connection.HubVirtualNetworkConnectionProperties.RoutingConfiguration = expandArmVirtualHubConnectionRouting(v.([]interface{})) + } + future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.Name, name, connection) if err != nil { return fmt.Errorf("creating Connection %q (Virtual Hub %q / Resource Group %q): %+v", name, id.Name, id.ResourceGroup, err) @@ -139,12 +220,11 @@ func resourceArmVirtualHubConnectionCreateOrUpdate(d *schema.ResourceData, meta } func resourceArmVirtualHubConnectionRead(d *schema.ResourceData, meta interface{}) error { - subscriptionId := meta.(*clients.Client).Account.SubscriptionId client := meta.(*clients.Client).Network.HubVirtualNetworkConnectionClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.VirtualHubConnectionID(d.Id()) + id, err := parse.HubVirtualNetworkConnectionID(d.Id()) if err != nil { return err } @@ -160,7 +240,7 @@ func resourceArmVirtualHubConnectionRead(d *schema.ResourceData, meta interface{ } d.Set("name", id.Name) - d.Set("virtual_hub_id", parse.NewVirtualHubID(id.ResourceGroup, id.VirtualHubName).ID(subscriptionId)) + d.Set("virtual_hub_id", parse.NewVirtualHubID(id.SubscriptionId, id.ResourceGroup, id.VirtualHubName).ID()) if props := resp.HubVirtualNetworkConnectionProperties; props != nil { // The following two attributes are deprecated by API (which will always return `true`). @@ -175,6 +255,10 @@ func resourceArmVirtualHubConnectionRead(d *schema.ResourceData, meta interface{ remoteVirtualNetworkId = *props.RemoteVirtualNetwork.ID } d.Set("remote_virtual_network_id", remoteVirtualNetworkId) + + if err := d.Set("routing", flattenArmVirtualHubConnectionRouting(props.RoutingConfiguration)); err != nil { + return fmt.Errorf("setting `routing`: %+v", err) + } } return nil @@ -185,7 +269,7 @@ func resourceArmVirtualHubConnectionDelete(d *schema.ResourceData, meta interfac ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.VirtualHubConnectionID(d.Id()) + id, err := parse.HubVirtualNetworkConnectionID(d.Id()) if err != nil { return err } @@ -204,3 +288,185 @@ func resourceArmVirtualHubConnectionDelete(d *schema.ResourceData, meta interfac return nil } + +func expandArmVirtualHubConnectionRouting(input []interface{}) *network.RoutingConfiguration { + if len(input) == 0 { + return &network.RoutingConfiguration{} + } + + v := input[0].(map[string]interface{}) + result := network.RoutingConfiguration{} + + if associatedRouteTableId := v["associated_route_table_id"].(string); associatedRouteTableId != "" { + result.AssociatedRouteTable = &network.SubResource{ + ID: utils.String(associatedRouteTableId), + } + } + + if vnetStaticRoute := v["static_vnet_route"].([]interface{}); len(vnetStaticRoute) != 0 { + result.VnetRoutes = expandArmVirtualHubConnectionVnetStaticRoute(vnetStaticRoute) + } + + if propagatedRouteTable := v["propagated_route_table"].([]interface{}); len(propagatedRouteTable) != 0 { + result.PropagatedRouteTables = expandArmVirtualHubConnectionPropagatedRouteTable(propagatedRouteTable) + } + + return &result +} + +func expandArmVirtualHubConnectionPropagatedRouteTable(input []interface{}) *network.PropagatedRouteTable { + if len(input) == 0 { + return 
&network.PropagatedRouteTable{} + } + + v := input[0].(map[string]interface{}) + + result := network.PropagatedRouteTable{} + + if labels := v["labels"].(*schema.Set).List(); len(labels) != 0 { + result.Labels = utils.ExpandStringSlice(labels) + } + + if routeTableIds := v["route_table_ids"].([]interface{}); len(routeTableIds) != 0 { + result.Ids = expandIDsToSubResources(routeTableIds) + } + + return &result +} + +func expandArmVirtualHubConnectionVnetStaticRoute(input []interface{}) *network.VnetRoute { + if len(input) == 0 { + return &network.VnetRoute{} + } + + results := make([]network.StaticRoute, 0) + + for _, item := range input { + v := item.(map[string]interface{}) + + result := network.StaticRoute{} + + if name := v["name"].(string); name != "" { + result.Name = utils.String(name) + } + + if addressPrefixes := v["address_prefixes"].(*schema.Set).List(); len(addressPrefixes) != 0 { + result.AddressPrefixes = utils.ExpandStringSlice(addressPrefixes) + } + + if nextHopIPAddress := v["next_hop_ip_address"].(string); nextHopIPAddress != "" { + result.NextHopIPAddress = utils.String(nextHopIPAddress) + } + + results = append(results, result) + } + + return &network.VnetRoute{ + StaticRoutes: &results, + } +} + +func expandIDsToSubResources(input []interface{}) *[]network.SubResource { + ids := make([]network.SubResource, 0) + + for _, v := range input { + ids = append(ids, network.SubResource{ + ID: utils.String(v.(string)), + }) + } + + return &ids +} + +func flattenArmVirtualHubConnectionRouting(input *network.RoutingConfiguration) []interface{} { + if input == nil { + return []interface{}{} + } + + associatedRouteTableId := "" + if input.AssociatedRouteTable != nil && input.AssociatedRouteTable.ID != nil { + associatedRouteTableId = *input.AssociatedRouteTable.ID + } + + return []interface{}{ + map[string]interface{}{ + "associated_route_table_id": associatedRouteTableId, + "propagated_route_table": flattenArmVirtualHubConnectionPropagatedRouteTable(input.PropagatedRouteTables), + "static_vnet_route": flattenArmVirtualHubConnectionVnetStaticRoute(input.VnetRoutes), + }, + } +} + +func flattenArmVirtualHubConnectionPropagatedRouteTable(input *network.PropagatedRouteTable) []interface{} { + if input == nil { + return make([]interface{}, 0) + } + + labels := make([]interface{}, 0) + if input.Labels != nil { + labels = utils.FlattenStringSlice(input.Labels) + } + + routeTableIds := make([]interface{}, 0) + if input.Ids != nil { + routeTableIds = flattenSubResourcesToIDs(input.Ids) + } + + return []interface{}{ + map[string]interface{}{ + "labels": labels, + "route_table_ids": routeTableIds, + }, + } +} + +func flattenArmVirtualHubConnectionVnetStaticRoute(input *network.VnetRoute) []interface{} { + results := make([]interface{}, 0) + if input == nil || input.StaticRoutes == nil { + return results + } + + for _, item := range *input.StaticRoutes { + var name string + if item.Name != nil { + name = *item.Name + } + + var nextHopIpAddress string + if item.NextHopIPAddress != nil { + nextHopIpAddress = *item.NextHopIPAddress + } + + addressPrefixes := make([]interface{}, 0) + if item.AddressPrefixes != nil { + addressPrefixes = utils.FlattenStringSlice(item.AddressPrefixes) + } + + v := map[string]interface{}{ + "name": name, + "address_prefixes": addressPrefixes, + "next_hop_ip_address": nextHopIpAddress, + } + + results = append(results, v) + } + + return results +} + +func flattenSubResourcesToIDs(input *[]network.SubResource) []interface{} { + ids := make([]interface{}, 0) + if input 
== nil { + return ids + } + + for _, v := range *input { + if v.ID == nil { + continue + } + + ids = append(ids, *v.ID) + } + + return ids +} diff --git a/azurerm/internal/services/network/virtual_hub_ip_resource.go b/azurerm/internal/services/network/virtual_hub_ip_resource.go new file mode 100644 index 000000000000..77ebfe095459 --- /dev/null +++ b/azurerm/internal/services/network/virtual_hub_ip_resource.go @@ -0,0 +1,224 @@ +package network + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" + networkValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/validate" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmVirtualHubIP() *schema.Resource { + return &schema.Resource{ + Create: resourceArmVirtualHubIPCreateUpdate, + Read: resourceArmVirtualHubIPRead, + Update: resourceArmVirtualHubIPCreateUpdate, + Delete: resourceArmVirtualHubIPDelete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(60 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(60 * time.Minute), + Delete: schema.DefaultTimeout(60 * time.Minute), + }, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.VirtualHubIpConfigurationID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "virtual_hub_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: networkValidate.VirtualHubID, + }, + + "subnet_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: networkValidate.SubnetID, + }, + + "private_ip_address": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.IsIPv4Address, + }, + + "private_ip_allocation_method": { + Type: schema.TypeString, + Optional: true, + Default: network.Dynamic, + ValidateFunc: validation.StringInSlice([]string{ + string(network.Dynamic), + string(network.Static), + }, false), + }, + + "public_ip_address_id": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: networkValidate.PublicIpAddressID, + }, + }, + } +} + +func resourceArmVirtualHubIPCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Network.VirtualHubIPClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.VirtualHubID(d.Get("virtual_hub_id").(string)) + if err != nil { + return err + } + + locks.ByName(id.Name, virtualHubResourceName) + defer locks.UnlockByName(id.Name, virtualHubResourceName) + + name := d.Get("name").(string) + + if d.IsNewResource() { + existing, err := 
client.Get(ctx, id.ResourceGroup, id.Name, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for present of existing Virtual Hub IP %q (Resource Group %q / Virtual Hub %q): %+v", name, id.ResourceGroup, id.Name, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_virtual_hub_ip", *existing.ID) + } + } + + parameters := network.HubIPConfiguration{ + Name: utils.String(d.Get("name").(string)), + HubIPConfigurationPropertiesFormat: &network.HubIPConfigurationPropertiesFormat{ + Subnet: &network.Subnet{ + ID: utils.String(d.Get("subnet_id").(string)), + }, + }, + } + + if v, ok := d.GetOk("private_ip_address"); ok { + parameters.HubIPConfigurationPropertiesFormat.PrivateIPAddress = utils.String(v.(string)) + } + + if v, ok := d.GetOk("private_ip_allocation_method"); ok { + parameters.HubIPConfigurationPropertiesFormat.PrivateIPAllocationMethod = network.IPAllocationMethod(v.(string)) + } + + if v, ok := d.GetOk("public_ip_address_id"); ok { + parameters.HubIPConfigurationPropertiesFormat.PublicIPAddress = &network.PublicIPAddress{ + ID: utils.String(v.(string)), + } + } + + future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.Name, name, parameters) + if err != nil { + return fmt.Errorf("creating/updating Virtual Hub IP %q (Resource Group %q / Virtual Hub %q): %+v", name, id.ResourceGroup, id.Name, err) + } + + if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting on creating/updating future for Virtual Hub IP %q (Resource Group %q / Virtual Hub %q): %+v", name, id.ResourceGroup, id.Name, err) + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.Name, name) + if err != nil { + return fmt.Errorf("retrieving Virtual Hub IP %q (Resource Group %q / Virtual Hub %q): %+v", name, id.ResourceGroup, id.Name, err) + } + + if resp.ID == nil || *resp.ID == "" { + return fmt.Errorf("empty or nil ID returned for Virtual Hub IP %q (Resource Group %q / Virtual Hub %q) ID", name, id.ResourceGroup, id.Name) + } + + d.SetId(*resp.ID) + + return resourceArmVirtualHubIPRead(d, meta) +} + +func resourceArmVirtualHubIPRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Network.VirtualHubIPClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.VirtualHubIpConfigurationID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.VirtualHubName, id.IpConfigurationName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[INFO] Virtual Hub IP %q does not exist - removing from state", d.Id()) + d.SetId("") + return nil + } + + return fmt.Errorf("retrieving Virtual Hub IP %q (Resource Group %q / Virtual Hub %q): %+v", id.IpConfigurationName, id.ResourceGroup, id.VirtualHubName, err) + } + + d.Set("name", id.IpConfigurationName) + d.Set("virtual_hub_id", parse.NewVirtualHubID(id.SubscriptionId, id.ResourceGroup, id.VirtualHubName).ID()) + + if props := resp.HubIPConfigurationPropertiesFormat; props != nil { + d.Set("private_ip_address", props.PrivateIPAddress) + d.Set("private_ip_allocation_method", props.PrivateIPAllocationMethod) + + if v := props.PublicIPAddress; v != nil { + d.Set("public_ip_address_id", v.ID) + } + + if v := props.Subnet; v != nil { + d.Set("subnet_id", v.ID) + } + } + + return nil +} + +func resourceArmVirtualHubIPDelete(d *schema.ResourceData, meta interface{}) error 
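// Editor's sketch (not part of the diff): the create path above always sends the subnet, while
// private_ip_address and public_ip_address_id only land in the HubIPConfigurationPropertiesFormat
// when set. The helper and values below are made up for illustration; strPtr stands in for
// utils.String, and all SDK types/fields are the ones used in the resource above.
package main

import (
	"fmt"

	"github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network"
)

func strPtr(s string) *string { return &s }

func buildHubIPConfiguration(name, subnetID, privateIP, publicIPID string, allocation network.IPAllocationMethod) network.HubIPConfiguration {
	cfg := network.HubIPConfiguration{
		Name: strPtr(name),
		HubIPConfigurationPropertiesFormat: &network.HubIPConfigurationPropertiesFormat{
			Subnet:                    &network.Subnet{ID: strPtr(subnetID)},
			PrivateIPAllocationMethod: allocation,
		},
	}
	if privateIP != "" {
		cfg.HubIPConfigurationPropertiesFormat.PrivateIPAddress = strPtr(privateIP)
	}
	if publicIPID != "" {
		cfg.HubIPConfigurationPropertiesFormat.PublicIPAddress = &network.PublicIPAddress{ID: strPtr(publicIPID)}
	}
	return cfg
}

func main() {
	cfg := buildHubIPConfiguration(
		"example-ipconfig",
		"/subscriptions/sub-id/resourceGroups/rg/providers/Microsoft.Network/virtualNetworks/vnet/subnets/subnet1",
		"10.0.1.4",
		"",
		network.Static,
	)
	fmt.Println(*cfg.Name, cfg.HubIPConfigurationPropertiesFormat.PrivateIPAllocationMethod, *cfg.HubIPConfigurationPropertiesFormat.PrivateIPAddress)
}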
{ + client := meta.(*clients.Client).Network.VirtualHubIPClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.VirtualHubIpConfigurationID(d.Id()) + if err != nil { + return err + } + + locks.ByName(id.VirtualHubName, virtualHubResourceName) + defer locks.UnlockByName(id.VirtualHubName, virtualHubResourceName) + + future, err := client.Delete(ctx, id.ResourceGroup, id.VirtualHubName, id.IpConfigurationName) + if err != nil { + return fmt.Errorf("deleting Virtual Hub IP %q (Resource Group %q / virtualHubName %q): %+v", id.IpConfigurationName, id.ResourceGroup, id.VirtualHubName, err) + } + + if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting on deleting future for Virtual Hub IP %q (Resource Group %q / virtualHubName %q): %+v", id.IpConfigurationName, id.ResourceGroup, id.VirtualHubName, err) + } + + return nil +} diff --git a/azurerm/internal/services/network/virtual_hub_resource.go b/azurerm/internal/services/network/virtual_hub_resource.go index 8831fd931ee0..d9fe8df1e0fd 100644 --- a/azurerm/internal/services/network/virtual_hub_resource.go +++ b/azurerm/internal/services/network/virtual_hub_resource.go @@ -6,17 +6,17 @@ import ( "log" "time" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" - "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" "github.com/hashicorp/go-azure-helpers/response" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" @@ -56,14 +56,24 @@ func resourceArmVirtualHub() *schema.Resource { "address_prefix": { Type: schema.TypeString, - Required: true, + Optional: true, ForceNew: true, ValidateFunc: validate.CIDR, }, + "sku": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.StringInSlice([]string{ + "Basic", + "Standard", + }, false), + }, + "virtual_wan_id": { Type: schema.TypeString, - Required: true, + Optional: true, ForceNew: true, ValidateFunc: azure.ValidateResourceID, }, @@ -123,23 +133,31 @@ func resourceArmVirtualHubCreateUpdate(d *schema.ResourceData, meta interface{}) } location := azure.NormalizeLocation(d.Get("location").(string)) - addressPrefix := d.Get("address_prefix").(string) - virtualWanId := d.Get("virtual_wan_id").(string) route := d.Get("route").(*schema.Set).List() t := d.Get("tags").(map[string]interface{}) parameters := network.VirtualHub{ Location: utils.String(location), VirtualHubProperties: &network.VirtualHubProperties{ - AddressPrefix: utils.String(addressPrefix), - VirtualWan: &network.SubResource{ - ID: &virtualWanId, - }, 
RouteTable: expandArmVirtualHubRoute(route), }, Tags: tags.Expand(t), } + if v, ok := d.GetOk("address_prefix"); ok { + parameters.VirtualHubProperties.AddressPrefix = utils.String(v.(string)) + } + + if v, ok := d.GetOk("sku"); ok { + parameters.VirtualHubProperties.Sku = utils.String(v.(string)) + } + + if v, ok := d.GetOk("virtual_wan_id"); ok { + parameters.VirtualHubProperties.VirtualWan = &network.SubResource{ + ID: utils.String(v.(string)), + } + } + future, err := client.CreateOrUpdate(ctx, resourceGroup, name, parameters) if err != nil { return fmt.Errorf("Error creating Virtual Hub %q (Resource Group %q): %+v", name, resourceGroup, err) @@ -157,7 +175,7 @@ func resourceArmVirtualHubCreateUpdate(d *schema.ResourceData, meta interface{}) timeout, _ := ctx.Deadline() stateConf := &resource.StateChangeConf{ Pending: []string{"Provisioning"}, - Target: []string{"Provisioned", "Failed"}, + Target: []string{"Provisioned", "Failed", "None"}, Refresh: virtualHubCreateRefreshFunc(ctx, client, resourceGroup, name), PollInterval: 15 * time.Second, ContinuousTargetOccurence: 3, @@ -204,6 +222,7 @@ func resourceArmVirtualHubRead(d *schema.ResourceData, meta interface{}) error { } if props := resp.VirtualHubProperties; props != nil { d.Set("address_prefix", props.AddressPrefix) + d.Set("sku", props.Sku) if err := d.Set("route", flattenArmVirtualHubRoute(props.RouteTable)); err != nil { return fmt.Errorf("Error setting `route`: %+v", err) diff --git a/azurerm/internal/services/network/virtual_hub_route_table_resource.go b/azurerm/internal/services/network/virtual_hub_route_table_resource.go new file mode 100644 index 000000000000..28d54e33794a --- /dev/null +++ b/azurerm/internal/services/network/virtual_hub_route_table_resource.go @@ -0,0 +1,292 @@ +package network + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" + networkValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/validate" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmVirtualHubRouteTable() *schema.Resource { + return &schema.Resource{ + Create: resourceArmVirtualHubRouteTableCreateUpdate, + Read: resourceArmVirtualHubRouteTableRead, + Update: resourceArmVirtualHubRouteTableCreateUpdate, + Delete: resourceArmVirtualHubRouteTableDelete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.HubRouteTableID(id) + return err + }), + + Schema: 
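// Editor's sketch (not part of the diff): the virtual hub create path above waits for the
// provisioning state to settle with resource.StateChangeConf, and this change adds "None" to the
// accepted terminal states alongside "Provisioned" and "Failed". The standalone program below
// imitates that polling loop with a fake refresh function; the state sequence, intervals and
// timeout are invented, whereas the real Refresh calls the Azure API via virtualHubCreateRefreshFunc.
package main

import (
	"fmt"
	"time"

	"github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)

func main() {
	states := []string{"Provisioning", "Provisioning", "Provisioned", "Provisioned", "Provisioned"}
	i := 0

	stateConf := &resource.StateChangeConf{
		Pending: []string{"Provisioning"},
		Target:  []string{"Provisioned", "Failed", "None"},
		Refresh: func() (interface{}, string, error) {
			s := states[i]
			if i < len(states)-1 {
				i++
			}
			return s, s, nil // (result, current state, error)
		},
		PollInterval:              10 * time.Millisecond,
		ContinuousTargetOccurence: 3, // require the target state on three consecutive polls, as above
		Timeout:                   5 * time.Second,
	}

	if _, err := stateConf.WaitForState(); err != nil {
		panic(err)
	}
	fmt.Println("virtual hub reached a terminal provisioning state")
}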
map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: networkValidate.HubRouteTableName, + }, + + "virtual_hub_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: networkValidate.VirtualHubID, + }, + + "labels": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + + "route": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "destinations": { + Type: schema.TypeSet, + Required: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + + "destinations_type": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{ + "CIDR", + "ResourceId", + "Service", + }, false), + }, + + "next_hop": { + Type: schema.TypeString, + Required: true, + ValidateFunc: azure.ValidateResourceID, + }, + + "next_hop_type": { + Type: schema.TypeString, + Optional: true, + Default: "ResourceId", + ValidateFunc: validation.StringInSlice([]string{ + "ResourceId", + }, false), + }, + }, + }, + }, + }, + } +} + +func resourceArmVirtualHubRouteTableCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Network.HubRouteTableClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.VirtualHubID(d.Get("virtual_hub_id").(string)) + if err != nil { + return err + } + + locks.ByName(id.Name, virtualHubResourceName) + defer locks.UnlockByName(id.Name, virtualHubResourceName) + + name := d.Get("name").(string) + + if d.IsNewResource() { + existing, err := client.Get(ctx, id.ResourceGroup, id.Name, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for present of existing HubRouteTable %q (Resource Group %q / Virtual Hub %q): %+v", name, id.ResourceGroup, id.Name, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_virtual_hub_route_table", *existing.ID) + } + } + + parameters := network.HubRouteTable{ + Name: utils.String(d.Get("name").(string)), + HubRouteTableProperties: &network.HubRouteTableProperties{ + Labels: utils.ExpandStringSlice(d.Get("labels").(*schema.Set).List()), + Routes: expandArmVirtualHubRouteTableHubRoutes(d.Get("route").(*schema.Set).List()), + }, + } + + future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.Name, name, parameters) + if err != nil { + return fmt.Errorf("creating/updating HubRouteTable %q (Resource Group %q / Virtual Hub %q): %+v", name, id.ResourceGroup, id.Name, err) + } + + if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting on creating/updating future for HubRouteTable %q (Resource Group %q / Virtual Hub %q): %+v", name, id.ResourceGroup, id.Name, err) + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.Name, name) + if err != nil { + return fmt.Errorf("retrieving HubRouteTable %q (Resource Group %q / Virtual Hub %q): %+v", name, id.ResourceGroup, id.Name, err) + } + + if resp.ID == nil || *resp.ID == "" { + return fmt.Errorf("empty or nil ID returned for HubRouteTable %q (Resource Group %q / Virtual Hub %q) ID", name, id.ResourceGroup, id.Name) + } + + d.SetId(*resp.ID) + + return 
resourceArmVirtualHubRouteTableRead(d, meta) +} + +func resourceArmVirtualHubRouteTableRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Network.HubRouteTableClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.HubRouteTableID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.VirtualHubName, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[INFO] Virtual Hub Route Table %q does not exist - removing from state", d.Id()) + d.SetId("") + return nil + } + + return fmt.Errorf("retrieving HubRouteTable %q (Resource Group %q / Virtual Hub %q): %+v", id.Name, id.ResourceGroup, id.VirtualHubName, err) + } + + d.Set("name", id.Name) + d.Set("virtual_hub_id", parse.NewVirtualHubID(id.SubscriptionId, id.ResourceGroup, id.VirtualHubName).ID()) + + if props := resp.HubRouteTableProperties; props != nil { + d.Set("labels", utils.FlattenStringSlice(props.Labels)) + + if err := d.Set("route", flattenArmVirtualHubRouteTableHubRoutes(props.Routes)); err != nil { + return fmt.Errorf("setting `route`: %+v", err) + } + } + return nil +} + +func resourceArmVirtualHubRouteTableDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Network.HubRouteTableClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.HubRouteTableID(d.Id()) + if err != nil { + return err + } + + locks.ByName(id.VirtualHubName, virtualHubResourceName) + defer locks.UnlockByName(id.VirtualHubName, virtualHubResourceName) + + future, err := client.Delete(ctx, id.ResourceGroup, id.VirtualHubName, id.Name) + if err != nil { + return fmt.Errorf("deleting HubRouteTable %q (Resource Group %q / Virtual Hub %q): %+v", id.Name, id.ResourceGroup, id.VirtualHubName, err) + } + + if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting on deleting future for HubRouteTable %q (Resource Group %q / Virtual Hub %q): %+v", id.Name, id.ResourceGroup, id.VirtualHubName, err) + } + + return nil +} + +func expandArmVirtualHubRouteTableHubRoutes(input []interface{}) *[]network.HubRoute { + results := make([]network.HubRoute, 0) + + for _, item := range input { + v := item.(map[string]interface{}) + + result := network.HubRoute{ + Name: utils.String(v["name"].(string)), + DestinationType: utils.String(v["destinations_type"].(string)), + Destinations: utils.ExpandStringSlice(v["destinations"].(*schema.Set).List()), + NextHopType: utils.String(v["next_hop_type"].(string)), + NextHop: utils.String(v["next_hop"].(string)), + } + + results = append(results, result) + } + + return &results +} + +func flattenArmVirtualHubRouteTableHubRoutes(input *[]network.HubRoute) []interface{} { + results := make([]interface{}, 0) + if input == nil { + return results + } + + for _, item := range *input { + var name string + if item.Name != nil { + name = *item.Name + } + + var destinationType string + if item.DestinationType != nil { + destinationType = *item.DestinationType + } + + var nextHop string + if item.NextHop != nil { + nextHop = *item.NextHop + } + + var nextHopType string + if item.NextHopType != nil { + nextHopType = *item.NextHopType + } + + v := map[string]interface{}{ + "name": name, + "destinations": utils.FlattenStringSlice(item.Destinations), + "destinations_type": destinationType, + "next_hop": nextHop, + "next_hop_type": nextHopType, 
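// Editor's sketch (not part of the diff): each `route` block of the new
// azurerm_virtual_hub_route_table resource becomes one network.HubRoute, with `destinations`
// collapsing into a string-slice pointer. The next hop ID and CIDRs below are invented;
// strPtr/strSlicePtr stand in for utils.String and utils.ExpandStringSlice.
package main

import (
	"fmt"

	"github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network"
)

func strPtr(s string) *string { return &s }

func strSlicePtr(in []string) *[]string { return &in }

func main() {
	route := network.HubRoute{
		Name:            strPtr("example-route"),
		DestinationType: strPtr("CIDR"), // or "ResourceId" / "Service", per the schema above
		Destinations:    strSlicePtr([]string{"10.0.0.0/16", "10.1.0.0/16"}),
		NextHopType:     strPtr("ResourceId"),
		NextHop:         strPtr("/subscriptions/sub-id/resourceGroups/rg/providers/Microsoft.Network/azureFirewalls/example-fw"),
	}

	fmt.Printf("route %s -> %s (%d destinations)\n", *route.Name, *route.NextHop, len(*route.Destinations))
}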
+ } + + results = append(results, v) + } + + return results +} diff --git a/azurerm/internal/services/network/virtual_hub_security_partner_provider_resource.go b/azurerm/internal/services/network/virtual_hub_security_partner_provider_resource.go new file mode 100644 index 000000000000..6d81271f50b2 --- /dev/null +++ b/azurerm/internal/services/network/virtual_hub_security_partner_provider_resource.go @@ -0,0 +1,210 @@ +package network + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" + networkValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmVirtualHubSecurityPartnerProvider() *schema.Resource { + return &schema.Resource{ + Create: resourceArmVirtualHubSecurityPartnerProviderCreate, + Read: resourceArmVirtualHubSecurityPartnerProviderRead, + Update: resourceArmVirtualHubSecurityPartnerProviderUpdate, + Delete: resourceArmVirtualHubSecurityPartnerProviderDelete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.SecurityPartnerProviderID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "location": azure.SchemaLocation(), + + "security_provider_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringInSlice([]string{ + string(network.ZScaler), + string(network.IBoss), + string(network.Checkpoint), + }, false), + }, + + "virtual_hub_id": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: networkValidate.VirtualHubID, + }, + + "tags": tags.Schema(), + }, + } +} + +func resourceArmVirtualHubSecurityPartnerProviderCreate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Network.SecurityPartnerProviderClient + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + + existing, err := client.Get(ctx, resourceGroup, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for present of existing Security Partner Provider %q 
(Resource Group %q): %+v", name, resourceGroup, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_virtual_hub_security_partner_provider", *existing.ID) + } + + parameters := network.SecurityPartnerProvider{ + Location: utils.String(location.Normalize(d.Get("location").(string))), + SecurityPartnerProviderPropertiesFormat: &network.SecurityPartnerProviderPropertiesFormat{ + SecurityProviderName: network.SecurityProviderName(d.Get("security_provider_name").(string)), + }, + Tags: tags.Expand(d.Get("tags").(map[string]interface{})), + } + + if v, ok := d.GetOk("virtual_hub_id"); ok { + parameters.SecurityPartnerProviderPropertiesFormat.VirtualHub = &network.SubResource{ + ID: utils.String(v.(string)), + } + } + + future, err := client.CreateOrUpdate(ctx, resourceGroup, name, parameters) + if err != nil { + return fmt.Errorf("creating Security Partner Provider %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting on creating future for Security Partner Provider %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + resp, err := client.Get(ctx, resourceGroup, name) + if err != nil { + return fmt.Errorf("retrieving Security Partner Provider %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + if resp.ID == nil || *resp.ID == "" { + return fmt.Errorf("empty or nil ID returned for Security Partner Provider %q (Resource Group %q) ID", name, resourceGroup) + } + + d.SetId(*resp.ID) + + return resourceArmVirtualHubSecurityPartnerProviderRead(d, meta) +} + +func resourceArmVirtualHubSecurityPartnerProviderRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Network.SecurityPartnerProviderClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.SecurityPartnerProviderID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[INFO] security partner provider %q does not exist - removing from state", d.Id()) + d.SetId("") + return nil + } + return fmt.Errorf("retrieving Security Partner Provider %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + d.Set("name", id.Name) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("location", location.NormalizeNilable(resp.Location)) + + if props := resp.SecurityPartnerProviderPropertiesFormat; props != nil { + d.Set("security_provider_name", props.SecurityProviderName) + + if props.VirtualHub != nil && props.VirtualHub.ID != nil { + d.Set("virtual_hub_id", props.VirtualHub.ID) + } + } + + return tags.FlattenAndSet(d, resp.Tags) +} + +func resourceArmVirtualHubSecurityPartnerProviderUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Network.SecurityPartnerProviderClient + ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.SecurityPartnerProviderID(d.Id()) + if err != nil { + return err + } + + parameters := network.TagsObject{} + + if d.HasChange("tags") { + parameters.Tags = tags.Expand(d.Get("tags").(map[string]interface{})) + } + + if _, err := client.UpdateTags(ctx, id.ResourceGroup, id.Name, parameters); err != nil { + return fmt.Errorf("updating Security Partner Provider %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } 
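// Editor's sketch (not part of the diff): the create path of the new
// azurerm_virtual_hub_security_partner_provider resource builds a network.SecurityPartnerProvider
// whose provider name comes from the three enum values the schema allows, while the update path
// only patches tags through network.TagsObject / UpdateTags. Location, hub ID and tag values
// below are invented; strPtr stands in for utils.String.
package main

import (
	"fmt"

	"github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network"
)

func strPtr(s string) *string { return &s }

func main() {
	create := network.SecurityPartnerProvider{
		Location: strPtr("westeurope"),
		SecurityPartnerProviderPropertiesFormat: &network.SecurityPartnerProviderPropertiesFormat{
			SecurityProviderName: network.ZScaler, // or network.IBoss / network.Checkpoint
			VirtualHub: &network.SubResource{
				ID: strPtr("/subscriptions/sub-id/resourceGroups/rg/providers/Microsoft.Network/virtualHubs/example-hub"),
			},
		},
		Tags: map[string]*string{"env": strPtr("test")},
	}

	// Tag-only update: everything else on the resource is left untouched by UpdateTags.
	update := network.TagsObject{Tags: map[string]*string{"env": strPtr("prod")}}

	fmt.Println(*create.Location, create.SecurityPartnerProviderPropertiesFormat.SecurityProviderName, *update.Tags["env"])
}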
+ + return resourceArmVirtualHubSecurityPartnerProviderRead(d, meta) +} + +func resourceArmVirtualHubSecurityPartnerProviderDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Network.SecurityPartnerProviderClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.SecurityPartnerProviderID(d.Id()) + if err != nil { + return err + } + + future, err := client.Delete(ctx, id.ResourceGroup, id.Name) + if err != nil { + return fmt.Errorf("deleting Security Partner Provider %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting on deleting future for Security Partner Provider %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + return nil +} diff --git a/azurerm/internal/services/network/virtual_network_gateway_connection_data_source.go b/azurerm/internal/services/network/virtual_network_gateway_connection_data_source.go index 1852b569f8f2..3b5571064d68 100644 --- a/azurerm/internal/services/network/virtual_network_gateway_connection_data_source.go +++ b/azurerm/internal/services/network/virtual_network_gateway_connection_data_source.go @@ -49,6 +49,11 @@ func dataSourceArmVirtualNetworkGatewayConnection() *schema.Resource { Computed: true, }, + "dpd_timeout_seconds": { + Type: schema.TypeInt, + Computed: true, + }, + "enable_bgp": { Type: schema.TypeBool, Computed: true, @@ -89,6 +94,11 @@ func dataSourceArmVirtualNetworkGatewayConnection() *schema.Resource { Computed: true, }, + "local_azure_ip_address_enabled": { + Type: schema.TypeBool, + Computed: true, + }, + "local_network_gateway_id": { Type: schema.TypeString, Computed: true, @@ -112,7 +122,6 @@ func dataSourceArmVirtualNetworkGatewayConnection() *schema.Resource { "traffic_selector_policy": { Type: schema.TypeList, Computed: true, - MaxItems: 1, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "local_address_cidrs": { @@ -234,6 +243,14 @@ func dataSourceArmVirtualNetworkGatewayConnectionRead(d *schema.ResourceData, me d.Set("express_route_circuit_id", gwc.Peer.ID) } + if gwc.DpdTimeoutSeconds != nil { + d.Set("dpd_timeout_seconds", gwc.DpdTimeoutSeconds) + } + + if gwc.UseLocalAzureIPAddress != nil { + d.Set("local_azure_ip_address_enabled", gwc.UseLocalAzureIPAddress) + } + d.Set("resource_guid", gwc.ResourceGUID) ipsecPoliciesSettingsFlat := flattenArmVirtualNetworkGatewayConnectionDataSourceIpsecPolicies(gwc.IpsecPolicies) diff --git a/azurerm/internal/services/network/virtual_network_gateway_connection_resource.go b/azurerm/internal/services/network/virtual_network_gateway_connection_resource.go index e7b406ea8592..a51fea474966 100644 --- a/azurerm/internal/services/network/virtual_network_gateway_connection_resource.go +++ b/azurerm/internal/services/network/virtual_network_gateway_connection_resource.go @@ -73,6 +73,12 @@ func resourceArmVirtualNetworkGatewayConnection() *schema.Resource { ValidateFunc: validation.StringIsNotEmpty, }, + "dpd_timeout_seconds": { + Type: schema.TypeInt, + Optional: true, + ForceNew: true, + }, + "express_route_circuit_id": { Type: schema.TypeString, Optional: true, @@ -87,6 +93,12 @@ func resourceArmVirtualNetworkGatewayConnection() *schema.Resource { ValidateFunc: azure.ValidateResourceIDOrEmpty, }, + "local_azure_ip_address_enabled": { + Type: schema.TypeBool, + Optional: true, + ForceNew: true, + }, + "local_network_gateway_id": { Type: schema.TypeString, 
Optional: true, @@ -375,6 +387,10 @@ func resourceArmVirtualNetworkGatewayConnectionRead(d *schema.ResourceData, meta d.Set("authorization_key", conn.AuthorizationKey) } + if conn.DpdTimeoutSeconds != nil { + d.Set("dpd_timeout_seconds", conn.DpdTimeoutSeconds) + } + if conn.Peer != nil { d.Set("express_route_circuit_id", conn.Peer.ID) } @@ -383,6 +399,10 @@ func resourceArmVirtualNetworkGatewayConnectionRead(d *schema.ResourceData, meta d.Set("peer_virtual_network_gateway_id", conn.VirtualNetworkGateway2.ID) } + if conn.UseLocalAzureIPAddress != nil { + d.Set("local_azure_ip_address_enabled", conn.UseLocalAzureIPAddress) + } + if conn.LocalNetworkGateway2 != nil { d.Set("local_network_gateway_id", conn.LocalNetworkGateway2.ID) } @@ -479,6 +499,10 @@ func getArmVirtualNetworkGatewayConnectionProperties(d *schema.ResourceData) (*n props.AuthorizationKey = &authorizationKey } + if v, ok := d.GetOk("dpd_timeout_seconds"); ok { + props.DpdTimeoutSeconds = utils.Int32(int32(v.(int))) + } + if v, ok := d.GetOk("express_route_circuit_id"); ok { expressRouteCircuitId := v.(string) props.Peer = &network.SubResource{ @@ -502,6 +526,10 @@ func getArmVirtualNetworkGatewayConnectionProperties(d *schema.ResourceData) (*n } } + if v, ok := d.GetOk("local_azure_ip_address_enabled"); ok { + props.UseLocalAzureIPAddress = utils.Bool(v.(bool)) + } + if v, ok := d.GetOk("local_network_gateway_id"); ok { localNetworkGatewayId := v.(string) _, name, err := resourceGroupAndLocalNetworkGatewayFromId(localNetworkGatewayId) diff --git a/azurerm/internal/services/network/virtual_network_gateway_data_source.go b/azurerm/internal/services/network/virtual_network_gateway_data_source.go index aa7f2d011f44..f24f6bc070b4 100644 --- a/azurerm/internal/services/network/virtual_network_gateway_data_source.go +++ b/azurerm/internal/services/network/virtual_network_gateway_data_source.go @@ -48,6 +48,11 @@ func dataSourceArmVirtualNetworkGateway() *schema.Resource { Computed: true, }, + "private_ip_address_enabled": { + Type: schema.TypeBool, + Computed: true, + }, + "active_active": { Type: schema.TypeBool, Computed: true, @@ -199,6 +204,22 @@ func dataSourceArmVirtualNetworkGateway() *schema.Resource { }, }, + "custom_route": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "address_prefixes": { + Type: schema.TypeSet, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + }, + }, + "default_local_network_gateway_id": { Type: schema.TypeString, Computed: true, @@ -239,6 +260,7 @@ func dataSourceArmVirtualNetworkGatewayRead(d *schema.ResourceData, meta interfa d.Set("type", string(gw.GatewayType)) d.Set("enable_bgp", gw.EnableBgp) + d.Set("private_ip_address_enabled", gw.EnablePrivateIPAddress) d.Set("active_active", gw.ActiveActive) d.Set("generation", string(gw.VpnGatewayGeneration)) @@ -267,6 +289,10 @@ func dataSourceArmVirtualNetworkGatewayRead(d *schema.ResourceData, meta interfa if err := d.Set("bgp_settings", bgpSettingsFlat); err != nil { return fmt.Errorf("Error setting `bgp_settings`: %+v", err) } + + if err := d.Set("custom_route", flattenArmVirtualNetworkGatewayAddressSpace(gw.CustomRoutes)); err != nil { + return fmt.Errorf("setting `custom_route`: %+v", err) + } } return tags.FlattenAndSet(d, resp.Tags) diff --git a/azurerm/internal/services/network/virtual_network_gateway_resource.go b/azurerm/internal/services/network/virtual_network_gateway_resource.go index 71c801124fe2..56b74b4b45e6 100644 --- 
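// Editor's sketch (not part of the diff): the two new gateway connection arguments above only
// land in the request body when actually set, dpd_timeout_seconds as an *int32 and
// local_azure_ip_address_enabled as a *bool. The DpdTimeoutSeconds and UseLocalAzureIPAddress
// fields are the ones read/written in this file; the parent struct name
// (network.VirtualNetworkGatewayConnectionPropertiesFormat) is assumed from the SDK, and
// int32Ptr/boolPtr stand in for utils.Int32/utils.Bool.
package main

import (
	"fmt"

	"github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network"
)

func int32Ptr(i int32) *int32 { return &i }
func boolPtr(b bool) *bool    { return &b }

func main() {
	props := network.VirtualNetworkGatewayConnectionPropertiesFormat{}

	// Mirrors the d.GetOk handling above: unset / zero values leave the pointers nil.
	if dpdTimeoutSeconds := 45; dpdTimeoutSeconds != 0 {
		props.DpdTimeoutSeconds = int32Ptr(int32(dpdTimeoutSeconds))
	}
	if localAzureIPEnabled := true; localAzureIPEnabled {
		props.UseLocalAzureIPAddress = boolPtr(localAzureIPEnabled)
	}

	fmt.Println(*props.DpdTimeoutSeconds, *props.UseLocalAzureIPAddress)
}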
a/azurerm/internal/services/network/virtual_network_gateway_resource.go +++ b/azurerm/internal/services/network/virtual_network_gateway_resource.go @@ -81,6 +81,12 @@ func resourceArmVirtualNetworkGateway() *schema.Resource { Computed: true, }, + "private_ip_address_enabled": { + Type: schema.TypeBool, + Optional: true, + ForceNew: true, + }, + "active_active": { Type: schema.TypeBool, Optional: true, @@ -321,6 +327,25 @@ func resourceArmVirtualNetworkGateway() *schema.Resource { }, }, + "custom_route": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "address_prefixes": { + Type: schema.TypeSet, + Optional: true, + ForceNew: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + }, + }, + "default_local_network_gateway_id": { Type: schema.TypeString, Optional: true, @@ -420,6 +445,7 @@ func resourceArmVirtualNetworkGatewayRead(d *schema.ResourceData, meta interface if gw := resp.VirtualNetworkGatewayPropertiesFormat; gw != nil { d.Set("type", string(gw.GatewayType)) d.Set("enable_bgp", gw.EnableBgp) + d.Set("private_ip_address_enabled", gw.EnablePrivateIPAddress) d.Set("active_active", gw.ActiveActive) d.Set("generation", string(gw.VpnGatewayGeneration)) @@ -446,6 +472,10 @@ func resourceArmVirtualNetworkGatewayRead(d *schema.ResourceData, meta interface if err := d.Set("bgp_settings", flattenArmVirtualNetworkGatewayBgpSettings(gw.BgpSettings)); err != nil { return fmt.Errorf("Error setting `bgp_settings`: %+v", err) } + + if err := d.Set("custom_route", flattenArmVirtualNetworkGatewayAddressSpace(gw.CustomRoutes)); err != nil { + return fmt.Errorf("setting `custom_route`: %+v", err) + } } return tags.FlattenAndSet(d, resp.Tags) @@ -477,17 +507,21 @@ func getArmVirtualNetworkGatewayProperties(d *schema.ResourceData) (*network.Vir gatewayType := network.VirtualNetworkGatewayType(d.Get("type").(string)) vpnType := network.VpnType(d.Get("vpn_type").(string)) enableBgp := d.Get("enable_bgp").(bool) + enablePrivateIpAddress := d.Get("private_ip_address_enabled").(bool) activeActive := d.Get("active_active").(bool) generation := network.VpnGatewayGeneration(d.Get("generation").(string)) + customRoute := d.Get("custom_route").([]interface{}) props := &network.VirtualNetworkGatewayPropertiesFormat{ - GatewayType: gatewayType, - VpnType: vpnType, - EnableBgp: &enableBgp, - ActiveActive: &activeActive, - VpnGatewayGeneration: generation, - Sku: expandArmVirtualNetworkGatewaySku(d), - IPConfigurations: expandArmVirtualNetworkGatewayIPConfigurations(d), + GatewayType: gatewayType, + VpnType: vpnType, + EnableBgp: &enableBgp, + EnablePrivateIPAddress: &enablePrivateIpAddress, + ActiveActive: &activeActive, + VpnGatewayGeneration: generation, + Sku: expandArmVirtualNetworkGatewaySku(d), + IPConfigurations: expandArmVirtualNetworkGatewayIPConfigurations(d), + CustomRoutes: expandArmVirtualNetworkGatewayAddressSpace(customRoute), } if gatewayDefaultSiteID := d.Get("default_local_network_gateway_id").(string); gatewayDefaultSiteID != "" { @@ -666,6 +700,16 @@ func expandArmVirtualNetworkGatewaySku(d *schema.ResourceData) *network.VirtualN } } +func expandArmVirtualNetworkGatewayAddressSpace(input []interface{}) *network.AddressSpace { + if len(input) == 0 { + return nil + } + v := input[0].(map[string]interface{}) + return &network.AddressSpace{ + AddressPrefixes: utils.ExpandStringSlice(v["address_prefixes"].(*schema.Set).List()), + } +} + func 
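// Editor's sketch (not part of the diff): `custom_route` is a single-item list holding a set of
// prefixes, so expanding and flattening it is just moving a []string in and out of
// network.AddressSpace.AddressPrefixes, with nil meaning "no block configured". The prefix below
// is invented; strSlicePtr stands in for utils.ExpandStringSlice.
package main

import (
	"fmt"

	"github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network"
)

func strSlicePtr(in []string) *[]string { return &in }

func expandCustomRoute(prefixes []string) *network.AddressSpace {
	if len(prefixes) == 0 {
		return nil // no custom_route block configured
	}
	return &network.AddressSpace{AddressPrefixes: strSlicePtr(prefixes)}
}

func flattenCustomRoute(input *network.AddressSpace) []string {
	if input == nil || input.AddressPrefixes == nil {
		return nil
	}
	return *input.AddressPrefixes
}

func main() {
	fmt.Println(flattenCustomRoute(expandCustomRoute([]string{"101.168.0.0/24"})))
}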
flattenArmVirtualNetworkGatewayBgpSettings(settings *network.BgpSettings) []interface{} { output := make([]interface{}, 0) @@ -918,3 +962,15 @@ func resourceArmVirtualNetworkGatewayCustomizeDiff(diff *schema.ResourceDiff, _ } return nil } + +func flattenArmVirtualNetworkGatewayAddressSpace(input *network.AddressSpace) []interface{} { + if input == nil { + return make([]interface{}, 0) + } + + return []interface{}{ + map[string]interface{}{ + "address_prefixes": utils.FlattenStringSlice(input.AddressPrefixes), + }, + } +} diff --git a/azurerm/internal/services/network/virtual_network_resource.go b/azurerm/internal/services/network/virtual_network_resource.go index a99e807c94c5..db48af799a47 100644 --- a/azurerm/internal/services/network/virtual_network_resource.go +++ b/azurerm/internal/services/network/virtual_network_resource.go @@ -8,6 +8,8 @@ import ( "net/http" "time" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" @@ -63,6 +65,12 @@ func resourceArmVirtualNetwork() *schema.Resource { }, }, + "bgp_community": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validate.VirtualNetworkBgpCommunity, + }, + "ddos_protection_plan": { Type: schema.TypeList, Optional: true, @@ -92,6 +100,12 @@ func resourceArmVirtualNetwork() *schema.Resource { }, }, + "vm_protection_enabled": { + Type: schema.TypeBool, + Optional: true, + Default: false, + }, + "guid": { Type: schema.TypeString, Computed: true, @@ -260,6 +274,18 @@ func resourceArmVirtualNetworkRead(d *schema.ResourceData, meta interface{}) err if err := d.Set("dns_servers", flattenVirtualNetworkDNSServers(props.DhcpOptions)); err != nil { return fmt.Errorf("Error setting `dns_servers`: %+v", err) } + + bgpCommunity := "" + if p := props.BgpCommunities; p != nil { + if v := p.VirtualNetworkCommunity; v != nil { + bgpCommunity = *v + } + } + if err := d.Set("bgp_community", bgpCommunity); err != nil { + return fmt.Errorf("Error setting `bgp_community`: %+v", err) + } + + d.Set("vm_protection_enabled", props.EnableVMProtection) } return tags.FlattenAndSet(d, resp.Tags) @@ -345,7 +371,8 @@ func expandVirtualNetworkProperties(ctx context.Context, d *schema.ResourceData, DhcpOptions: &network.DhcpOptions{ DNSServers: utils.ExpandStringSlice(d.Get("dns_servers").([]interface{})), }, - Subnets: &subnets, + EnableVMProtection: utils.Bool(d.Get("vm_protection_enabled").(bool)), + Subnets: &subnets, } if v, ok := d.GetOk("ddos_protection_plan"); ok { @@ -369,6 +396,10 @@ func expandVirtualNetworkProperties(ctx context.Context, d *schema.ResourceData, } } + if v, ok := d.GetOk("bgp_community"); ok { + properties.BgpCommunities = &network.VirtualNetworkBgpCommunities{VirtualNetworkCommunity: utils.String(v.(string))} + } + return properties, nil } @@ -456,7 +487,6 @@ func resourceAzureSubnetHash(v interface{}) int { func getExistingSubnet(ctx context.Context, resGroup string, vnetName string, subnetName string, meta interface{}) (*network.Subnet, error) { subnetClient := meta.(*clients.Client).Network.SubnetsClient resp, err := subnetClient.Get(ctx, resGroup, vnetName, subnetName, "") - if err != nil { if resp.StatusCode == http.StatusNotFound { return &network.Subnet{}, nil diff --git 
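// Editor's sketch (not part of the diff): the new `bgp_community` argument ends up as
// network.VirtualNetworkBgpCommunities.VirtualNetworkCommunity. The real input validation is
// validate.VirtualNetworkBgpCommunity in the internal validate package (not shown in this diff);
// the checkShape helper below is only a rough stand-in for an "<asn>:<community value>" format,
// and the example community value is made up. strPtr stands in for utils.String.
package main

import (
	"fmt"
	"regexp"

	"github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network"
)

func strPtr(s string) *string { return &s }

var communityShape = regexp.MustCompile(`^\d+:\d+$`)

func checkShape(v string) error {
	if !communityShape.MatchString(v) {
		return fmt.Errorf("expected bgp_community in the form <asn>:<community value>, got %q", v)
	}
	return nil
}

func main() {
	community := "12076:20000"
	if err := checkShape(community); err != nil {
		panic(err)
	}
	bgp := network.VirtualNetworkBgpCommunities{VirtualNetworkCommunity: strPtr(community)}
	fmt.Println(*bgp.VirtualNetworkCommunity)
}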
a/azurerm/internal/services/network/virtual_wan_data_source.go b/azurerm/internal/services/network/virtual_wan_data_source.go new file mode 100644 index 000000000000..f835679ff660 --- /dev/null +++ b/azurerm/internal/services/network/virtual_wan_data_source.go @@ -0,0 +1,130 @@ +package network + +import ( + "fmt" + "time" + + "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func dataSourceArmVirtualWan() *schema.Resource { + return &schema.Resource{ + Read: dataSourceArmVirtualWanRead, + Timeouts: &schema.ResourceTimeout{ + Read: schema.DefaultTimeout(5 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "resource_group_name": azure.SchemaResourceGroupNameForDataSource(), + + "allow_branch_to_branch_traffic": { + Type: schema.TypeBool, + Computed: true, + }, + "disable_vpn_encryption": { + Type: schema.TypeBool, + Computed: true, + }, + "office365_local_breakout_category": { + Type: schema.TypeString, + Computed: true, + }, + "sku": { + Type: schema.TypeString, + Computed: true, + }, + "virtual_hub_ids": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "vpn_site_ids": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "location": azure.SchemaLocationForDataSource(), + + "tags": tags.SchemaDataSource(), + }, + } +} + +func dataSourceArmVirtualWanRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Network.VirtualWanClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + + resp, err := client.Get(ctx, resourceGroup, name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Error: Virtual Wan %q (Resource Group %q) was not found", name, resourceGroup) + } + return fmt.Errorf("Error reading Virtual Wan %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + if resp.ID == nil || *resp.ID == "" { + return fmt.Errorf("API returns a nil/empty id on Virtual Wan %q (resource group %q): %+v", name, resourceGroup, err) + } + d.SetId(*resp.ID) + + d.Set("name", resp.Name) + d.Set("resource_group_name", resourceGroup) + if location := resp.Location; location != nil { + d.Set("location", azure.NormalizeLocation(*location)) + } + + if props := resp.VirtualWanProperties; props != nil { + d.Set("allow_branch_to_branch_traffic", props.AllowBranchToBranchTraffic) + d.Set("disable_vpn_encryption", props.DisableVpnEncryption) + if err := d.Set("office365_local_breakout_category", props.Office365LocalBreakoutCategory); err != nil { + return fmt.Errorf("error setting `office365_local_breakout_category`: %v", err) + } + d.Set("office365_local_breakout_category", props.Office365LocalBreakoutCategory) + if 
err := d.Set("sku", props.Type); err != nil { + return fmt.Errorf("error setting `sku`: %v", err) + } + d.Set("sku", props.Type) + if err := d.Set("virtual_hub_ids", flattenVirtualWanProperties(props.VirtualHubs)); err != nil { + return fmt.Errorf("error setting `virtual_hubs`: %v", err) + } + if err := d.Set("vpn_site_ids", flattenVirtualWanProperties(props.VpnSites)); err != nil { + return fmt.Errorf("error setting `vpn_sites`: %v", err) + } + } + + return tags.FlattenAndSet(d, resp.Tags) +} + +func flattenVirtualWanProperties(input *[]network.SubResource) []interface{} { + if input == nil { + return []interface{}{} + } + output := make([]interface{}, 0) + for _, v := range *input { + if v.ID != nil { + output = append(output, *v.ID) + } + } + return output +} diff --git a/azurerm/internal/services/network/vpn_gateway.go b/azurerm/internal/services/network/vpn_gateway.go deleted file mode 100644 index 9b8f42a3fa39..000000000000 --- a/azurerm/internal/services/network/vpn_gateway.go +++ /dev/null @@ -1,33 +0,0 @@ -package network - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type VPNGatewayResourceID struct { - ResourceGroup string - Name string -} - -func ParseVPNGatewayID(input string) (*VPNGatewayResourceID, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse VPN Gateway ID %q: %+v", input, err) - } - - gateway := VPNGatewayResourceID{ - ResourceGroup: id.ResourceGroup, - } - - if gateway.Name, err = id.PopSegment("vpnGateways"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &gateway, nil -} diff --git a/azurerm/internal/services/network/vpn_gateway_connection_resource.go b/azurerm/internal/services/network/vpn_gateway_connection_resource.go new file mode 100644 index 000000000000..7f1eb6768daf --- /dev/null +++ b/azurerm/internal/services/network/vpn_gateway_connection_resource.go @@ -0,0 +1,643 @@ +package network + +import ( + "fmt" + "log" + "time" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" + + "github.com/hashicorp/go-azure-helpers/response" + + "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/validate" + + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmVPNGatewayConnection() *schema.Resource { + return &schema.Resource{ + Create: resourceArmVpnGatewayConnectionResourceCreateUpdate, + Read: resourceArmVpnGatewayConnectionResourceRead, + Update: resourceArmVpnGatewayConnectionResourceCreateUpdate, + Delete: resourceArmVpnGatewayConnectionResourceDelete, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.VpnConnectionID(id) + return 
err + }), + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "vpn_gateway_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.VpnGatewayID, + }, + + "remote_vpn_site_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.VpnSiteID, + }, + + "internet_security_enabled": { + Type: schema.TypeBool, + Optional: true, + Default: false, + }, + + // Service will create a route table for the user if this is not specified. + "routing": { + Type: schema.TypeList, + Optional: true, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "associated_route_table": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.HubRouteTableID, + }, + "propagated_route_tables": { + Type: schema.TypeList, + Required: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validate.HubRouteTableID, + }, + }, + }, + }, + }, + + "vpn_link": { + Type: schema.TypeList, + Required: true, + MinItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "vpn_site_link_id": { + Type: schema.TypeString, + Required: true, + // The vpn site link associated with one link connection can not be updated + ForceNew: true, + ValidateFunc: validate.VpnSiteLinkID, + }, + + "route_weight": { + Type: schema.TypeInt, + Optional: true, + ValidateFunc: validation.IntAtLeast(0), + Default: 0, + }, + + "protocol": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{ + string(network.IKEv1), + string(network.IKEv2), + }, false), + Default: string(network.IKEv2), + }, + + "bandwidth_mbps": { + Type: schema.TypeInt, + Optional: true, + ValidateFunc: validation.IntAtLeast(1), + Default: 10, + }, + + "shared_key": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "bgp_enabled": { + Type: schema.TypeBool, + ForceNew: true, + Optional: true, + Default: false, + }, + + "ipsec_policy": { + Type: schema.TypeList, + Optional: true, + MinItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "sa_lifetime_sec": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validation.IntBetween(300, 172799), + }, + "sa_data_size_kb": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validation.IntBetween(1024, 2147483647), + }, + "encryption_algorithm": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{ + string(network.IpsecEncryptionAES128), + string(network.IpsecEncryptionAES192), + string(network.IpsecEncryptionAES256), + string(network.IpsecEncryptionDES), + string(network.IpsecEncryptionDES3), + string(network.IpsecEncryptionGCMAES128), + string(network.IpsecEncryptionGCMAES192), + string(network.IpsecEncryptionGCMAES256), + string(network.IpsecEncryptionNone), + }, false), + }, + "integrity_algorithm": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{ + string(network.IpsecIntegrityMD5), + 
string(network.IpsecIntegritySHA1), + string(network.IpsecIntegritySHA256), + string(network.IpsecIntegrityGCMAES128), + string(network.IpsecIntegrityGCMAES192), + string(network.IpsecIntegrityGCMAES256), + }, false), + }, + + "ike_encryption_algorithm": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{ + string(network.DES), + string(network.DES3), + string(network.AES128), + string(network.AES192), + string(network.AES256), + string(network.GCMAES128), + string(network.GCMAES256), + }, false), + }, + + "ike_integrity_algorithm": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{ + string(network.IkeIntegrityMD5), + string(network.IkeIntegritySHA1), + string(network.IkeIntegritySHA256), + string(network.IkeIntegritySHA384), + string(network.IkeIntegrityGCMAES128), + string(network.IkeIntegrityGCMAES256), + }, false), + }, + + "dh_group": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{ + string(network.None), + string(network.DHGroup1), + string(network.DHGroup2), + string(network.DHGroup14), + string(network.DHGroup24), + string(network.DHGroup2048), + string(network.ECP256), + string(network.ECP384), + }, false), + }, + + "pfs_group": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{ + string(network.PfsGroupNone), + string(network.PfsGroupPFS1), + string(network.PfsGroupPFS2), + string(network.PfsGroupPFS14), + string(network.PfsGroupPFS24), + string(network.PfsGroupPFS2048), + string(network.PfsGroupPFSMM), + string(network.PfsGroupECP256), + string(network.PfsGroupECP384), + }, false), + }, + }, + }, + }, + + "ratelimit_enabled": { + Type: schema.TypeBool, + Optional: true, + Default: false, + }, + + "local_azure_ip_address_enabled": { + Type: schema.TypeBool, + Optional: true, + Default: false, + }, + + "policy_based_traffic_selector_enabled": { + Type: schema.TypeBool, + Optional: true, + Default: false, + }, + }, + }, + }, + }, + } +} + +func resourceArmVpnGatewayConnectionResourceCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Network.VpnConnectionsClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + gatewayId, err := parse.VpnGatewayID(d.Get("vpn_gateway_id").(string)) + if err != nil { + return err + } + + if d.IsNewResource() { + resp, err := client.Get(ctx, gatewayId.ResourceGroup, gatewayId.Name, name) + if err != nil { + if !utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("checking for existing Vpn Gateway Connection Resource %q (Resource Group %q / VPN Gateway %q): %+v", name, gatewayId.ResourceGroup, gatewayId.Name, err) + } + } + + if resp.ID != nil && *resp.ID != "" { + return tf.ImportAsExistsError("azurerm_vpn_gateway_connection", *resp.ID) + } + } + + locks.ByName(gatewayId.Name, VPNGatewayResourceName) + defer locks.UnlockByName(gatewayId.Name, VPNGatewayResourceName) + + param := network.VpnConnection{ + Name: &name, + VpnConnectionProperties: &network.VpnConnectionProperties{ + EnableInternetSecurity: utils.Bool(d.Get("internet_security_enabled").(bool)), + RemoteVpnSite: &network.SubResource{ + ID: utils.String(d.Get("remote_vpn_site_id").(string)), + }, + VpnLinkConnections: expandArmVpnGatewayConnectionVpnSiteLinkConnections(d.Get("vpn_link").([]interface{})), + RoutingConfiguration: 
expandArmVpnGatewayConnectionRoutingConfiguration(d.Get("routing").([]interface{})), + }, + } + + future, err := client.CreateOrUpdate(ctx, gatewayId.ResourceGroup, gatewayId.Name, name, param) + if err != nil { + return fmt.Errorf("creating Vpn Gateway Connection Resource %q (Resource Group %q / VPN Gateway %q): %+v", name, gatewayId.ResourceGroup, gatewayId.Name, err) + } + + if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for creation of Vpn Gateway Connection Resource %q (Resource Group %q / VPN Gateway %q): %+v", name, gatewayId.ResourceGroup, gatewayId.Name, err) + } + + resp, err := client.Get(ctx, gatewayId.ResourceGroup, gatewayId.Name, name) + if err != nil { + return fmt.Errorf("retrieving Vpn Gateway Connection Resource %q (Resource Group %q / VPN Gateway: %q): %+v", name, gatewayId.ResourceGroup, gatewayId.Name, err) + } + if resp.ID == nil || *resp.ID == "" { + return fmt.Errorf("empty or nil ID returned for Vpn Gateway Connection Resource %q (Resource Group %q / VPN Gateway: %q) ID", name, gatewayId.ResourceGroup, gatewayId.Name) + } + + id, err := parse.VpnConnectionID(*resp.ID) + if err != nil { + return err + } + d.SetId(id.ID()) + + return resourceArmVpnGatewayConnectionResourceRead(d, meta) +} + +func resourceArmVpnGatewayConnectionResourceRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Network.VpnConnectionsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.VpnConnectionID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.VpnGatewayName, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[DEBUG] Vpn Gateway Connection Resource %q was not found in VPN Gateway %q in Resource Group %q - removing from state!", id.Name, id.VpnGatewayName, id.ResourceGroup) + d.SetId("") + return nil + } + + return fmt.Errorf("retrieving Vpn Gateway Connection Resource %q (Resource Group %q / VPN Gateway %q): %+v", id.Name, id.ResourceGroup, id.VpnGatewayName, err) + } + + d.Set("name", id.Name) + + gatewayId := parse.NewVpnGatewayID(id.SubscriptionId, id.ResourceGroup, id.VpnGatewayName) + d.Set("vpn_gateway_id", gatewayId.ID()) + + if prop := resp.VpnConnectionProperties; prop != nil { + vpnSiteId := "" + if site := prop.RemoteVpnSite; site != nil { + if id := site.ID; id != nil { + theVpnSiteId, err := parse.VpnSiteID(*id) + if err != nil { + return err + } + vpnSiteId = theVpnSiteId.ID() + } + } + d.Set("remote_vpn_site_id", vpnSiteId) + + enableInternetSecurity := false + if prop.EnableInternetSecurity != nil { + enableInternetSecurity = *prop.EnableInternetSecurity + } + d.Set("internet_security_enabled", enableInternetSecurity) + + if err := d.Set("routing", flattenArmVpnGatewayConnectionRoutingConfiguration(prop.RoutingConfiguration)); err != nil { + return fmt.Errorf(`setting "routing": %v`, err) + } + + if err := d.Set("vpn_link", flattenArmVpnGatewayConnectionVpnSiteLinkConnections(prop.VpnLinkConnections)); err != nil { + return fmt.Errorf(`setting "vpn_link": %v`, err) + } + } + + return nil +} + +func resourceArmVpnGatewayConnectionResourceDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Network.VpnConnectionsClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.VpnConnectionID(d.Id()) + if err != nil { + return err + } + + 
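// Editor's sketch (not part of the diff): the create/update call above sends a
// network.VpnConnection whose properties tie together the remote VPN site, the optional routing
// configuration and one VpnSiteLinkConnection per `vpn_link` block. The IDs and values below are
// invented; strPtr/boolPtr/int32Ptr stand in for the utils helpers, and the protocol is built via
// the same string conversion the expand function uses.
package main

import (
	"fmt"

	"github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network"
)

func strPtr(s string) *string { return &s }
func boolPtr(b bool) *bool    { return &b }
func int32Ptr(i int32) *int32 { return &i }

func main() {
	links := []network.VpnSiteLinkConnection{{
		Name: strPtr("link1"),
		VpnSiteLinkConnectionProperties: &network.VpnSiteLinkConnectionProperties{
			VpnSiteLink:               &network.SubResource{ID: strPtr("/subscriptions/sub-id/resourceGroups/rg/providers/Microsoft.Network/vpnSites/site/vpnSiteLinks/link1")},
			RoutingWeight:             int32Ptr(0),
			VpnConnectionProtocolType: network.VirtualNetworkGatewayConnectionProtocol("IKEv2"),
			ConnectionBandwidth:       int32Ptr(10),
			EnableBgp:                 boolPtr(false),
		},
	}}

	conn := network.VpnConnection{
		Name: strPtr("example-connection"),
		VpnConnectionProperties: &network.VpnConnectionProperties{
			EnableInternetSecurity: boolPtr(false),
			RemoteVpnSite:          &network.SubResource{ID: strPtr("/subscriptions/sub-id/resourceGroups/rg/providers/Microsoft.Network/vpnSites/site")},
			VpnLinkConnections:     &links,
			// RoutingConfiguration may stay nil, in which case the service associates the
			// hub's default route table itself (see the schema comment above).
		},
	}

	fmt.Println(*conn.Name, len(*conn.VpnConnectionProperties.VpnLinkConnections))
}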
locks.ByName(id.VpnGatewayName, VPNGatewayResourceName) + defer locks.UnlockByName(id.VpnGatewayName, VPNGatewayResourceName) + + future, err := client.Delete(ctx, id.ResourceGroup, id.VpnGatewayName, id.Name) + if err != nil { + if response.WasNotFound(future.Response()) { + return nil + } + return fmt.Errorf("deleting Vpn Gateway Connection Resource %q (Resource Group %q / VPN Gateway %q): %+v", id.Name, id.ResourceGroup, id.VpnGatewayName, err) + } + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + if !response.WasNotFound(future.Response()) { + return fmt.Errorf("Error waiting for the deletion of VPN Gateway Connection %q (Resource Group %q / VPN Gateway %q): %+v", id.Name, id.ResourceGroup, id.VpnGatewayName, err) + } + } + + return nil +} + +func expandArmVpnGatewayConnectionVpnSiteLinkConnections(input []interface{}) *[]network.VpnSiteLinkConnection { + if len(input) == 0 { + return nil + } + + result := make([]network.VpnSiteLinkConnection, 0) + + for _, e := range input { + e := e.(map[string]interface{}) + v := network.VpnSiteLinkConnection{ + Name: utils.String(e["name"].(string)), + VpnSiteLinkConnectionProperties: &network.VpnSiteLinkConnectionProperties{ + VpnSiteLink: &network.SubResource{ID: utils.String(e["vpn_site_link_id"].(string))}, + RoutingWeight: utils.Int32(int32(e["route_weight"].(int))), + VpnConnectionProtocolType: network.VirtualNetworkGatewayConnectionProtocol(e["protocol"].(string)), + ConnectionBandwidth: utils.Int32(int32(e["bandwidth_mbps"].(int))), + EnableBgp: utils.Bool(e["bgp_enabled"].(bool)), + IpsecPolicies: expandArmVpnGatewayConnectionIpSecPolicies(e["ipsec_policy"].([]interface{})), + EnableRateLimiting: utils.Bool(e["ratelimit_enabled"].(bool)), + UseLocalAzureIPAddress: utils.Bool(e["local_azure_ip_address_enabled"].(bool)), + UsePolicyBasedTrafficSelectors: utils.Bool(e["policy_based_traffic_selector_enabled"].(bool)), + }, + } + + if sharedKey := e["shared_key"]; sharedKey != "" { + sharedKey := sharedKey.(string) + v.VpnSiteLinkConnectionProperties.SharedKey = &sharedKey + } + result = append(result, v) + } + + return &result +} + +func flattenArmVpnGatewayConnectionVpnSiteLinkConnections(input *[]network.VpnSiteLinkConnection) interface{} { + if input == nil { + return []interface{}{} + } + + output := make([]interface{}, 0) + + for _, e := range *input { + name := "" + if e.Name != nil { + name = *e.Name + } + + vpnSiteLinkId := "" + if e.VpnSiteLink != nil && e.VpnSiteLink.ID != nil { + vpnSiteLinkId = *e.VpnSiteLink.ID + } + + routeWeight := 0 + if e.RoutingWeight != nil { + routeWeight = int(*e.RoutingWeight) + } + + bandwidth := 0 + if e.ConnectionBandwidth != nil { + bandwidth = int(*e.ConnectionBandwidth) + } + + sharedKey := "" + if e.SharedKey != nil { + sharedKey = *e.SharedKey + } + + bgpEnabled := false + if e.EnableBgp != nil { + bgpEnabled = *e.EnableBgp + } + + usePolicyBased := false + if e.UsePolicyBasedTrafficSelectors != nil { + usePolicyBased = *e.UsePolicyBasedTrafficSelectors + } + + rateLimitEnabled := false + if e.EnableRateLimiting != nil { + rateLimitEnabled = *e.EnableRateLimiting + } + + useLocalAzureIpAddress := false + if e.UseLocalAzureIPAddress != nil { + useLocalAzureIpAddress = *e.UseLocalAzureIPAddress + } + + v := map[string]interface{}{ + "name": name, + "vpn_site_link_id": vpnSiteLinkId, + "route_weight": routeWeight, + "protocol": string(e.VpnConnectionProtocolType), + "bandwidth_mbps": bandwidth, + "shared_key": sharedKey, + "bgp_enabled": bgpEnabled, + "ipsec_policy": 
flattenArmVpnGatewayConnectionIpSecPolicies(e.IpsecPolicies), + "ratelimit_enabled": rateLimitEnabled, + "local_azure_ip_address_enabled": useLocalAzureIpAddress, + "policy_based_traffic_selector_enabled": usePolicyBased, + } + + output = append(output, v) + } + + return output +} + +func expandArmVpnGatewayConnectionIpSecPolicies(input []interface{}) *[]network.IpsecPolicy { + if len(input) == 0 { + return nil + } + + result := make([]network.IpsecPolicy, 0) + + for _, e := range input { + e := e.(map[string]interface{}) + v := network.IpsecPolicy{ + SaLifeTimeSeconds: utils.Int32(int32(e["sa_lifetime_sec"].(int))), + SaDataSizeKilobytes: utils.Int32(int32(e["sa_data_size_kb"].(int))), + IpsecEncryption: network.IpsecEncryption(e["encryption_algorithm"].(string)), + IpsecIntegrity: network.IpsecIntegrity(e["integrity_algorithm"].(string)), + IkeEncryption: network.IkeEncryption(e["ike_encryption_algorithm"].(string)), + IkeIntegrity: network.IkeIntegrity(e["ike_integrity_algorithm"].(string)), + DhGroup: network.DhGroup(e["dh_group"].(string)), + PfsGroup: network.PfsGroup(e["pfs_group"].(string)), + } + result = append(result, v) + } + + return &result +} + +func flattenArmVpnGatewayConnectionIpSecPolicies(input *[]network.IpsecPolicy) []interface{} { + if input == nil { + return []interface{}{} + } + + output := make([]interface{}, 0) + + for _, e := range *input { + saLifetimeSec := 0 + if e.SaLifeTimeSeconds != nil { + saLifetimeSec = int(*e.SaLifeTimeSeconds) + } + + saDataSizeKb := 0 + if e.SaDataSizeKilobytes != nil { + saDataSizeKb = int(*e.SaDataSizeKilobytes) + } + + v := map[string]interface{}{ + "sa_lifetime_sec": saLifetimeSec, + "sa_data_size_kb": saDataSizeKb, + "encryption_algorithm": string(e.IpsecEncryption), + "integrity_algorithm": string(e.IpsecIntegrity), + "ike_encryption_algorithm": string(e.IkeEncryption), + "ike_integrity_algorithm": string(e.IkeIntegrity), + "dh_group": string(e.DhGroup), + "pfs_group": string(e.PfsGroup), + } + + output = append(output, v) + } + + return output +} + +func expandArmVpnGatewayConnectionRoutingConfiguration(input []interface{}) *network.RoutingConfiguration { + if len(input) == 0 || input[0] == nil { + return nil + } + raw := input[0].(map[string]interface{}) + output := &network.RoutingConfiguration{ + AssociatedRouteTable: &network.SubResource{ID: utils.String(raw["associated_route_table"].(string))}, + PropagatedRouteTables: &network.PropagatedRouteTable{Ids: expandNetworkSubResourceID(raw["propagated_route_tables"].([]interface{}))}, + } + + return output +} + +func flattenArmVpnGatewayConnectionRoutingConfiguration(input *network.RoutingConfiguration) []interface{} { + if input == nil { + return []interface{}{} + } + + associateRouteTable := "" + if input.AssociatedRouteTable != nil && input.AssociatedRouteTable.ID != nil { + associateRouteTable = *input.AssociatedRouteTable.ID + } + + propagatedRouteTables := []interface{}{} + if input.PropagatedRouteTables != nil && input.PropagatedRouteTables.Ids != nil { + for _, subresource := range *input.PropagatedRouteTables.Ids { + id := "" + if subresource.ID != nil { + id = *subresource.ID + } + propagatedRouteTables = append(propagatedRouteTables, id) + } + } + + return []interface{}{ + map[string]interface{}{ + "associated_route_table": associateRouteTable, + "propagated_route_tables": propagatedRouteTables, + }, + } +} diff --git a/azurerm/internal/services/network/vpn_gateway_resource.go b/azurerm/internal/services/network/vpn_gateway_resource.go index 
4ef33115ebfa..cd374f6cdceb 100644 --- a/azurerm/internal/services/network/vpn_gateway_resource.go +++ b/azurerm/internal/services/network/vpn_gateway_resource.go @@ -6,8 +6,6 @@ import ( "log" "time" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/validate" - "github.com/Azure/azure-sdk-for-go/services/network/mgmt/2020-05-01/network" "github.com/hashicorp/go-azure-helpers/response" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" @@ -15,17 +13,23 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + commonValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) +var VPNGatewayResourceName = "azurerm_vpn_gateway" + func resourceArmVPNGateway() *schema.Resource { return &schema.Resource{ - Create: resourceArmVPNGatewayCreateUpdate, + Create: resourceArmVPNGatewayCreate, Read: resourceArmVPNGatewayRead, - Update: resourceArmVPNGatewayCreateUpdate, + Update: resourceArmVPNGatewayUpdate, Delete: resourceArmVPNGatewayDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, @@ -42,6 +46,7 @@ func resourceArmVPNGateway() *schema.Resource { "name": { Type: schema.TypeString, Required: true, + ForceNew: true, ValidateFunc: validation.StringIsNotEmpty, }, @@ -53,13 +58,14 @@ func resourceArmVPNGateway() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.ValidateVirtualHubID, + ValidateFunc: validate.VirtualHubID, }, "bgp_settings": { Type: schema.TypeList, Optional: true, Computed: true, + MaxItems: 1, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "asn": { @@ -78,6 +84,86 @@ func resourceArmVPNGateway() *schema.Resource { Type: schema.TypeString, Computed: true, }, + + "instance_0_bgp_peering_address": { + Type: schema.TypeList, + Optional: true, + Computed: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "custom_ips": { + Type: schema.TypeSet, + Required: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: commonValidate.IPv4Address, + }, + }, + + "ip_configuration_id": { + Type: schema.TypeString, + Computed: true, + }, + + "default_ips": { + Type: schema.TypeSet, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + + "tunnel_ips": { + Type: schema.TypeSet, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + }, + }, + + "instance_1_bgp_peering_address": { + Type: schema.TypeList, + Optional: true, + Computed: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "custom_ips": { + Type: schema.TypeSet, + Required: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + 
ValidateFunc: commonValidate.IPv4Address, + }, + }, + + "ip_configuration_id": { + Type: schema.TypeString, + Computed: true, + }, + + "default_ips": { + Type: schema.TypeSet, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + + "tunnel_ips": { + Type: schema.TypeSet, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + }, + }, }, }, }, @@ -94,25 +180,23 @@ func resourceArmVPNGateway() *schema.Resource { } } -func resourceArmVPNGatewayCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceArmVPNGatewayCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Network.VpnGatewaysClient - ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() name := d.Get("name").(string) resourceGroup := d.Get("resource_group_name").(string) - if d.IsNewResource() { - existing, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing VPN Gateway %q (Resource Group %q): %+v", name, resourceGroup, err) - } + existing, err := client.Get(ctx, resourceGroup, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("Error checking for presence of existing VPN Gateway %q (Resource Group %q): %+v", name, resourceGroup, err) } + } - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_vpn_gateway", *existing.ID) - } + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_vpn_gateway", *existing.ID) } bgpSettingsRaw := d.Get("bgp_settings").([]interface{}) @@ -138,33 +222,92 @@ func resourceArmVPNGatewayCreateUpdate(d *schema.ResourceData, meta interface{}) if _, err := client.CreateOrUpdate(ctx, resourceGroup, name, parameters); err != nil { return fmt.Errorf("Error creating VPN Gateway %q (Resource Group %q): %+v", name, resourceGroup, err) } - - log.Printf("[DEBUG] Waiting for Virtual Hub %q (Resource Group %q) to become available", name, resourceGroup) - stateConf := &resource.StateChangeConf{ - Pending: []string{"pending"}, - Target: []string{"available"}, - Refresh: vpnGatewayWaitForCreatedRefreshFunc(ctx, client, resourceGroup, name), - Delay: 30 * time.Second, - PollInterval: 10 * time.Second, - ContinuousTargetOccurence: 3, + if err := waitForCompletion(d, ctx, client, resourceGroup, name); err != nil { + return err } - if d.IsNewResource() { - stateConf.Timeout = d.Timeout(schema.TimeoutCreate) - } else { - stateConf.Timeout = d.Timeout(schema.TimeoutUpdate) + resp, err := client.Get(ctx, resourceGroup, name) + if err != nil { + return fmt.Errorf("Error retrieving VPN Gateway %q (Resource Group %q): %+v", name, resourceGroup, err) } - if _, err := stateConf.WaitForState(); err != nil { - return fmt.Errorf("Error waiting for creation of Virtual Hub %q (Resource Group %q): %+v", name, resourceGroup, err) + // `vpnGatewayParameters.Properties.bgpSettings.bgpPeeringAddress` customer cannot provide this field during create. This will be set with default value once gateway is created. 
+ // it could only be updated + if len(bgpSettingsRaw) > 0 && resp.VpnGatewayProperties != nil && resp.VpnGatewayProperties.BgpSettings != nil && resp.VpnGatewayProperties.BgpSettings.BgpPeeringAddresses != nil { + val := bgpSettingsRaw[0].(map[string]interface{}) + input0 := val["instance_0_bgp_peering_address"].([]interface{}) + input1 := val["instance_1_bgp_peering_address"].([]interface{}) + + if len(input0) > 0 || len(input1) > 0 { + if len(input0) > 0 { + val := input0[0].(map[string]interface{}) + (*resp.VpnGatewayProperties.BgpSettings.BgpPeeringAddresses)[0].CustomBgpIPAddresses = utils.ExpandStringSlice(val["custom_ips"].(*schema.Set).List()) + } + if len(input1) > 0 { + val := input1[0].(map[string]interface{}) + (*resp.VpnGatewayProperties.BgpSettings.BgpPeeringAddresses)[1].CustomBgpIPAddresses = utils.ExpandStringSlice(val["custom_ips"].(*schema.Set).List()) + } + if _, err := client.CreateOrUpdate(ctx, resourceGroup, name, resp); err != nil { + return fmt.Errorf("creating VPN Gateway %q (Resource Group %q): %+v", name, resourceGroup, err) + } + if err := waitForCompletion(d, ctx, client, resourceGroup, name); err != nil { + return err + } + } } - resp, err := client.Get(ctx, resourceGroup, name) + d.SetId(*resp.ID) + + return resourceArmVPNGatewayRead(d, meta) +} + +func resourceArmVPNGatewayUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Network.VpnGatewaysClient + ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + + locks.ByName(name, VPNGatewayResourceName) + defer locks.UnlockByName(name, VPNGatewayResourceName) + + existing, err := client.Get(ctx, resourceGroup, name) if err != nil { - return fmt.Errorf("Error retrieving VPN Gateway %q (Resource Group %q): %+v", name, resourceGroup, err) + return fmt.Errorf("retrieving for presence of existing VPN Gateway %q (Resource Group %q): %+v", name, resourceGroup, err) } - d.SetId(*resp.ID) + if d.HasChange("scale_unit") { + existing.VpnGatewayScaleUnit = utils.Int32(int32(d.Get("scale_unit").(int))) + } + if d.HasChange("tags") { + existing.Tags = tags.Expand(d.Get("tags").(map[string]interface{})) + } + + bgpSettingsRaw := d.Get("bgp_settings").([]interface{}) + if len(bgpSettingsRaw) > 0 { + val := bgpSettingsRaw[0].(map[string]interface{}) + + if d.HasChange("bgp_settings.0.instance_0_bgp_peering_address") { + if input := val["instance_0_bgp_peering_address"].([]interface{}); len(input) > 0 { + val := input[0].(map[string]interface{}) + (*existing.VpnGatewayProperties.BgpSettings.BgpPeeringAddresses)[0].CustomBgpIPAddresses = utils.ExpandStringSlice(val["custom_ips"].(*schema.Set).List()) + } + } + if d.HasChange("bgp_settings.0.instance_1_bgp_peering_address") { + if input := val["instance_1_bgp_peering_address"].([]interface{}); len(input) > 0 { + val := input[0].(map[string]interface{}) + (*existing.VpnGatewayProperties.BgpSettings.BgpPeeringAddresses)[1].CustomBgpIPAddresses = utils.ExpandStringSlice(val["custom_ips"].(*schema.Set).List()) + } + } + } + + if _, err := client.CreateOrUpdate(ctx, resourceGroup, name, existing); err != nil { + return fmt.Errorf("creating VPN Gateway %q (Resource Group %q): %+v", name, resourceGroup, err) + } + if err := waitForCompletion(d, ctx, client, resourceGroup, name); err != nil { + return err + } return resourceArmVPNGatewayRead(d, meta) } @@ -174,7 +317,7 @@ func resourceArmVPNGatewayRead(d 
*schema.ResourceData, meta interface{}) error { ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := ParseVPNGatewayID(d.Id()) + id, err := parse.VpnGatewayID(d.Id()) if err != nil { return err } @@ -222,7 +365,7 @@ func resourceArmVPNGatewayDelete(d *schema.ResourceData, meta interface{}) error ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := ParseVPNGatewayID(d.Id()) + id, err := parse.VpnGatewayID(d.Id()) if err != nil { return err } @@ -248,19 +391,39 @@ func resourceArmVPNGatewayDelete(d *schema.ResourceData, meta interface{}) error return nil } +func waitForCompletion(d *schema.ResourceData, ctx context.Context, client *network.VpnGatewaysClient, resourceGroup, name string) error { + log.Printf("[DEBUG] Waiting for Virtual Hub %q (Resource Group %q) to become available", name, resourceGroup) + stateConf := &resource.StateChangeConf{ + Pending: []string{"pending"}, + Target: []string{"available"}, + Refresh: vpnGatewayWaitForCreatedRefreshFunc(ctx, client, resourceGroup, name), + Delay: 30 * time.Second, + PollInterval: 10 * time.Second, + ContinuousTargetOccurence: 3, + } + + if d.IsNewResource() { + stateConf.Timeout = d.Timeout(schema.TimeoutCreate) + } else { + stateConf.Timeout = d.Timeout(schema.TimeoutUpdate) + } + + if _, err := stateConf.WaitForState(); err != nil { + return fmt.Errorf("waiting for creation of Virtual Hub %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + return nil +} + func expandVPNGatewayBGPSettings(input []interface{}) *network.BgpSettings { if len(input) == 0 { return nil } val := input[0].(map[string]interface{}) - - asn := val["asn"].(int) - peerWeight := val["peer_weight"].(int) - return &network.BgpSettings{ - Asn: utils.Int64(int64(asn)), - PeerWeight: utils.Int32(int32(peerWeight)), + Asn: utils.Int64(int64(val["asn"].(int))), + PeerWeight: utils.Int32(int32(val["peer_weight"].(int))), } } @@ -284,11 +447,37 @@ func flattenVPNGatewayBGPSettings(input *network.BgpSettings) []interface{} { peerWeight = int(*input.PeerWeight) } + var instance0BgpPeeringAddress, instance1BgpPeeringAddress []interface{} + if input.BgpPeeringAddresses != nil && len(*input.BgpPeeringAddresses) > 0 { + instance0BgpPeeringAddress = flattenVPNGatewayIPConfigurationBgpPeeringAddress((*input.BgpPeeringAddresses)[0]) + } + if input.BgpPeeringAddresses != nil && len(*input.BgpPeeringAddresses) > 1 { + instance1BgpPeeringAddress = flattenVPNGatewayIPConfigurationBgpPeeringAddress((*input.BgpPeeringAddresses)[1]) + } + + return []interface{}{ + map[string]interface{}{ + "asn": asn, + "bgp_peering_address": bgpPeeringAddress, + "instance_0_bgp_peering_address": instance0BgpPeeringAddress, + "instance_1_bgp_peering_address": instance1BgpPeeringAddress, + "peer_weight": peerWeight, + }, + } +} + +func flattenVPNGatewayIPConfigurationBgpPeeringAddress(input network.IPConfigurationBgpPeeringAddress) []interface{} { + ipConfigurationID := "" + if input.IpconfigurationID != nil { + ipConfigurationID = *input.IpconfigurationID + } + return []interface{}{ map[string]interface{}{ - "asn": asn, - "bgp_peering_address": bgpPeeringAddress, - "peer_weight": peerWeight, + "ip_configuration_id": ipConfigurationID, + "custom_ips": utils.FlattenStringSlice(input.CustomBgpIPAddresses), + "default_ips": utils.FlattenStringSlice(input.DefaultBgpIPAddresses), + "tunnel_ips": utils.FlattenStringSlice(input.TunnelIPAddresses), }, } } diff --git 
a/azurerm/internal/services/network/vpn_gateway_test.go b/azurerm/internal/services/network/vpn_gateway_test.go deleted file mode 100644 index 870712a546ef..000000000000 --- a/azurerm/internal/services/network/vpn_gateway_test.go +++ /dev/null @@ -1,58 +0,0 @@ -package network - -import ( - "testing" -) - -func TestParseVPNGateway(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *VPNGatewayResourceID - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No VPN Gateways Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo", - Expected: nil, - }, - { - Name: "No VPN Gateways Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/vpnGateways/", - Expected: nil, - }, - { - Name: "Completed", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/vpnGateways/example", - Expected: &VPNGatewayResourceID{ - Name: "example", - ResourceGroup: "foo", - }, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := ParseVPNGatewayID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/network/vpn_server_configuration_resource.go b/azurerm/internal/services/network/vpn_server_configuration_resource.go index bb404d7c9b36..5ac2105faf7d 100644 --- a/azurerm/internal/services/network/vpn_server_configuration_resource.go +++ b/azurerm/internal/services/network/vpn_server_configuration_resource.go @@ -230,21 +230,99 @@ func resourceArmVPNServerConfiguration() *schema.Resource { }, }, - "radius_server": { + "radius": { Type: schema.TypeList, Optional: true, MaxItems: 1, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ - "address": { - Type: schema.TypeString, + "server": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "address": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "secret": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + Sensitive: true, + }, + + "score": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validation.IntBetween(1, 30), + }, + }, + }, + }, + + "client_root_certificate": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + }, + + "thumbprint": { + Type: schema.TypeString, + Required: true, + }, + }, + }, + }, + + "server_root_certificate": { + Type: schema.TypeSet, Required: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + }, + + "public_cert_data": { + Type: schema.TypeString, + Required: true, + }, + }, + }, + }, + }, + }, + ConflictsWith: []string{ + "radius_server", + }, + }, + + "radius_server": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Deprecated: "Deprecated in favour of `radius`", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "address": { + Type: schema.TypeString, + Required: true, + 
ValidateFunc: validation.StringIsNotEmpty, }, "secret": { - Type: schema.TypeString, - Required: true, - Sensitive: true, + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + Sensitive: true, }, "client_root_certificate": { @@ -284,6 +362,9 @@ func resourceArmVPNServerConfiguration() *schema.Resource { }, }, }, + ConflictsWith: []string{ + "radius", + }, }, "vpn_protocols": { @@ -337,8 +418,11 @@ func resourceArmVPNServerConfigurationCreateUpdate(d *schema.ResourceData, meta ipSecPoliciesRaw := d.Get("ipsec_policy").([]interface{}) ipSecPolicies := expandVpnServerConfigurationIPSecPolicies(ipSecPoliciesRaw) - radiusServerRaw := d.Get("radius_server").([]interface{}) - radiusServer := expandVpnServerConfigurationRadiusServer(radiusServerRaw) + radiusRaw := d.Get("radius").([]interface{}) + if len(radiusRaw) == 0 { + radiusRaw = d.Get("radius_server").([]interface{}) + } + radius := expandVpnServerConfigurationRadius(radiusRaw) vpnProtocolsRaw := d.Get("vpn_protocols").(*schema.Set).List() vpnProtocols := expandVpnServerConfigurationVPNProtocols(vpnProtocolsRaw) @@ -388,14 +472,19 @@ func resourceArmVPNServerConfigurationCreateUpdate(d *schema.ResourceData, meta } if supportsRadius { - if radiusServer == nil { - return fmt.Errorf("`radius_server` must be specified when `vpn_authentication_type` is set to `Radius`") + if radius == nil { + return fmt.Errorf("`radius` must be specified when `vpn_authentication_type` is set to `Radius`") + } + + if radius.servers != nil && len(*radius.servers) != 0 { + props.RadiusServers = radius.servers } - props.RadiusServerAddress = utils.String(radiusServer.address) - props.RadiusServerSecret = utils.String(radiusServer.secret) - props.RadiusClientRootCertificates = radiusServer.clientRootCertificates - props.RadiusServerRootCertificates = radiusServer.serverRootCertificates + props.RadiusServerAddress = utils.String(radius.address) + props.RadiusServerSecret = utils.String(radius.secret) + + props.RadiusClientRootCertificates = radius.clientRootCertificates + props.RadiusServerRootCertificates = radius.serverRootCertificates } location := azure.NormalizeLocation(d.Get("location").(string)) @@ -474,9 +563,17 @@ func resourceArmVPNServerConfigurationRead(d *schema.ResourceData, meta interfac return fmt.Errorf("Error setting `ipsec_policy`: %+v", err) } - flattenedRadiusServer := flattenVpnServerConfigurationRadiusServer(props) - if err := d.Set("radius_server", flattenedRadiusServer); err != nil { - return fmt.Errorf("Error setting `radius_server`: %+v", err) + flattenedRadius := flattenVpnServerConfigurationRadius(props) + if len(flattenedRadius) > 0 { + if flattenedRadius[0].(map[string]interface{})["server"] != nil { + if err := d.Set("radius", flattenedRadius); err != nil { + return fmt.Errorf("Error setting `radius`: %+v", err) + } + } else { + if err := d.Set("radius_server", flattenedRadius); err != nil { + return fmt.Errorf("Error setting `radius_server`: %+v", err) + } + } } vpnAuthenticationTypes := make([]interface{}, 0) @@ -693,14 +790,15 @@ func flattenVpnServerConfigurationIPSecPolicies(input *[]network.IpsecPolicy) [] return output } -type vpnServerConfigurationRadiusServer struct { +type vpnServerConfigurationRadius struct { address string secret string + servers *[]network.RadiusServer clientRootCertificates *[]network.VpnServerConfigRadiusClientRootCertificate serverRootCertificates *[]network.VpnServerConfigRadiusServerRootCertificate } -func expandVpnServerConfigurationRadiusServer(input 
[]interface{}) *vpnServerConfigurationRadiusServer { +func expandVpnServerConfigurationRadius(input []interface{}) *vpnServerConfigurationRadius { if len(input) == 0 { return nil } @@ -727,16 +825,36 @@ func expandVpnServerConfigurationRadiusServer(input []interface{}) *vpnServerCon }) } - return &vpnServerConfigurationRadiusServer{ - address: val["address"].(string), - secret: val["secret"].(string), + radiusServers := make([]network.RadiusServer, 0) + address := "" + secret := "" + + if val["server"] != nil { + radiusServersRaw := val["server"].([]interface{}) + for _, raw := range radiusServersRaw { + v := raw.(map[string]interface{}) + radiusServers = append(radiusServers, network.RadiusServer{ + RadiusServerAddress: utils.String(v["address"].(string)), + RadiusServerSecret: utils.String(v["secret"].(string)), + RadiusServerScore: utils.Int64(int64(v["score"].(int))), + }) + } + } else { + address = val["address"].(string) + secret = val["secret"].(string) + } + + return &vpnServerConfigurationRadius{ + address: address, + secret: secret, + servers: &radiusServers, clientRootCertificates: &clientRootCertificates, serverRootCertificates: &serverRootCertificates, } } -func flattenVpnServerConfigurationRadiusServer(input *network.VpnServerConfigurationProperties) []interface{} { - if input == nil || input.RadiusServerAddress == nil || input.RadiusServerRootCertificates == nil || len(*input.RadiusServerRootCertificates) == 0 { +func flattenVpnServerConfigurationRadius(input *network.VpnServerConfigurationProperties) []interface{} { + if input == nil || (input.RadiusServerAddress == nil && input.RadiusServers == nil) || input.RadiusServerRootCertificates == nil || len(*input.RadiusServerRootCertificates) == 0 { return []interface{}{} } @@ -760,16 +878,6 @@ func flattenVpnServerConfigurationRadiusServer(input *network.VpnServerConfigura } } - radiusAddress := "" - if input.RadiusServerAddress != nil { - radiusAddress = *input.RadiusServerAddress - } - - radiusSecret := "" - if input.RadiusServerSecret != nil { - radiusSecret = *input.RadiusServerSecret - } - serverRootCertificates := make([]interface{}, 0) if input.RadiusServerRootCertificates != nil { for _, v := range *input.RadiusServerRootCertificates { @@ -790,13 +898,50 @@ func flattenVpnServerConfigurationRadiusServer(input *network.VpnServerConfigura } } + schema := map[string]interface{}{ + "client_root_certificate": clientRootCertificates, + "server_root_certificate": serverRootCertificates, + } + + if input.RadiusServerAddress != nil && *input.RadiusServerAddress != "" { + schema["address"] = *input.RadiusServerAddress + } + + if input.RadiusServerSecret != nil && *input.RadiusServerSecret != "" { + schema["secret"] = *input.RadiusServerSecret + } + + if input.RadiusServers != nil && len(*input.RadiusServers) > 0 { + servers := make([]interface{}, 0) + + for _, v := range *input.RadiusServers { + address := "" + if v.RadiusServerAddress != nil { + address = *v.RadiusServerAddress + } + + secret := "" + if v.RadiusServerSecret != nil { + secret = *v.RadiusServerSecret + } + + score := 0 + if v.RadiusServerScore != nil { + score = int(*v.RadiusServerScore) + } + + servers = append(servers, map[string]interface{}{ + "address": address, + "secret": secret, + "score": score, + }) + } + + schema["server"] = servers + } + return []interface{}{ - map[string]interface{}{ - "address": radiusAddress, - "client_root_certificate": clientRootCertificates, - "secret": radiusSecret, - "server_root_certificate": serverRootCertificates, - }, + 
schema, } } diff --git a/azurerm/internal/services/network/vpn_site_resource.go b/azurerm/internal/services/network/vpn_site_resource.go index 44a697840ea5..057d15cee0c9 100644 --- a/azurerm/internal/services/network/vpn_site_resource.go +++ b/azurerm/internal/services/network/vpn_site_resource.go @@ -123,7 +123,7 @@ func resourceArmVpnSite() *schema.Resource { "asn": { Type: schema.TypeInt, Required: true, - ValidateFunc: validation.IntBetween(1, 4294967295), + ValidateFunc: validation.IntAtLeast(1), }, "peering_address": { Type: schema.TypeString, @@ -148,7 +148,6 @@ func resourceArmVpnSite() *schema.Resource { func resourceArmVpnSiteCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Network.VpnSitesClient - subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -202,7 +201,7 @@ func resourceArmVpnSiteCreateUpdate(d *schema.ResourceData, meta interface{}) er if err != nil { return err } - d.SetId(id.ID(subscriptionId)) + d.SetId(id.ID()) return resourceArmVpnSiteRead(d, meta) } diff --git a/azurerm/internal/services/notificationhub/notification_hub_authorization_rule_resource.go b/azurerm/internal/services/notificationhub/notification_hub_authorization_rule_resource.go index 77f0218a6f2b..cd1a632d150e 100644 --- a/azurerm/internal/services/notificationhub/notification_hub_authorization_rule_resource.go +++ b/azurerm/internal/services/notificationhub/notification_hub_authorization_rule_resource.go @@ -11,19 +11,22 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/notificationhub/parse" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmNotificationHubAuthorizationRule() *schema.Resource { +func resourceNotificationHubAuthorizationRule() *schema.Resource { return &schema.Resource{ - Create: resourceArmNotificationHubAuthorizationRuleCreateUpdate, - Read: resourceArmNotificationHubAuthorizationRuleRead, - Update: resourceArmNotificationHubAuthorizationRuleCreateUpdate, - Delete: resourceArmNotificationHubAuthorizationRuleDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, + Create: resourceNotificationHubAuthorizationRuleCreateUpdate, + Read: resourceNotificationHubAuthorizationRuleRead, + Update: resourceNotificationHubAuthorizationRuleCreateUpdate, + Delete: resourceNotificationHubAuthorizationRuleDelete, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.NotificationHubAuthorizationRuleID(id) + return err + }), // TODO: customizeDiff for send+listen when manage selected Timeouts: &schema.ResourceTimeout{ @@ -85,7 +88,7 @@ func resourceArmNotificationHubAuthorizationRule() *schema.Resource { } } -func resourceArmNotificationHubAuthorizationRuleCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceNotificationHubAuthorizationRuleCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := 
meta.(*clients.Client).NotificationHubs.HubsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -138,43 +141,39 @@ func resourceArmNotificationHubAuthorizationRuleCreateUpdate(d *schema.ResourceD d.SetId(*read.ID) - return resourceArmNotificationHubAuthorizationRuleRead(d, meta) + return resourceNotificationHubAuthorizationRuleRead(d, meta) } -func resourceArmNotificationHubAuthorizationRuleRead(d *schema.ResourceData, meta interface{}) error { +func resourceNotificationHubAuthorizationRuleRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).NotificationHubs.HubsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.NotificationHubAuthorizationRuleID(d.Id()) if err != nil { return err } - resourceGroup := id.ResourceGroup - namespaceName := id.Path["namespaces"] - notificationHubName := id.Path["notificationHubs"] - name := id.Path["AuthorizationRules"] - resp, err := client.GetAuthorizationRule(ctx, resourceGroup, namespaceName, notificationHubName, name) + resp, err := client.GetAuthorizationRule(ctx, id.ResourceGroup, id.NamespaceName, id.NotificationHubName, id.AuthorizationRuleName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[DEBUG] Authorization Rule %q was not found in Notification Hub %q / Namespace %q / Resource Group %q", name, notificationHubName, namespaceName, resourceGroup) + log.Printf("[DEBUG] Authorization Rule %q was not found in Notification Hub %q / Namespace %q / Resource Group %q", id.AuthorizationRuleName, id.NotificationHubName, id.NamespaceName, id.ResourceGroup) d.SetId("") return nil } - return fmt.Errorf("Error making Read request on Authorization Rule %q (Notification Hub %q / Namespace %q / Resource Group %q): %+v", name, notificationHubName, namespaceName, resourceGroup, err) + return fmt.Errorf("Error making Read request on Authorization Rule %q (Notification Hub %q / Namespace %q / Resource Group %q): %+v", id.AuthorizationRuleName, id.NotificationHubName, id.NamespaceName, id.ResourceGroup, err) } - keysResp, err := client.ListKeys(ctx, resourceGroup, namespaceName, notificationHubName, name) + keysResp, err := client.ListKeys(ctx, id.ResourceGroup, id.NamespaceName, id.NotificationHubName, id.AuthorizationRuleName) if err != nil { - return fmt.Errorf("Error Listing Access Keys for Authorization Rule %q (Notification Hub %q / Namespace %q / Resource Group %q): %+v", name, notificationHubName, namespaceName, resourceGroup, err) + return fmt.Errorf("Error Listing Access Keys for Authorization Rule %q (Notification Hub %q / Namespace %q / Resource Group %q): %+v", id.AuthorizationRuleName, id.NotificationHubName, id.NamespaceName, id.ResourceGroup, err) } - d.Set("name", resp.Name) - d.Set("notification_hub_name", notificationHubName) - d.Set("namespace_name", namespaceName) - d.Set("resource_group_name", resourceGroup) + d.Set("name", id.AuthorizationRuleName) + d.Set("notification_hub_name", id.NotificationHubName) + d.Set("namespace_name", id.NamespaceName) + d.Set("resource_group_name", id.ResourceGroup) if props := resp.SharedAccessAuthorizationRuleProperties; props != nil { manage, send, listen := flattenNotificationHubAuthorizationRuleRights(props.Rights) @@ -189,30 +188,26 @@ func resourceArmNotificationHubAuthorizationRuleRead(d *schema.ResourceData, met return nil } -func resourceArmNotificationHubAuthorizationRuleDelete(d 
*schema.ResourceData, meta interface{}) error { +func resourceNotificationHubAuthorizationRuleDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).NotificationHubs.HubsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.NotificationHubAuthorizationRuleID(d.Id()) if err != nil { return err } - resourceGroup := id.ResourceGroup - namespaceName := id.Path["namespaces"] - notificationHubName := id.Path["notificationHubs"] - name := id.Path["AuthorizationRules"] - locks.ByName(notificationHubName, notificationHubResourceName) - defer locks.UnlockByName(notificationHubName, notificationHubResourceName) + locks.ByName(id.NotificationHubName, notificationHubResourceName) + defer locks.UnlockByName(id.NotificationHubName, notificationHubResourceName) - locks.ByName(namespaceName, notificationHubNamespaceResourceName) - defer locks.UnlockByName(namespaceName, notificationHubNamespaceResourceName) + locks.ByName(id.NamespaceName, notificationHubNamespaceResourceName) + defer locks.UnlockByName(id.NamespaceName, notificationHubNamespaceResourceName) - resp, err := client.DeleteAuthorizationRule(ctx, resourceGroup, namespaceName, notificationHubName, name) + resp, err := client.DeleteAuthorizationRule(ctx, id.ResourceGroup, id.NamespaceName, id.NotificationHubName, id.AuthorizationRuleName) if err != nil { if !utils.ResponseWasNotFound(resp) { - return fmt.Errorf("Error deleting Authorization Rule %q (Notification Hub %q / Namespace %q / Resource Group %q): %+v", name, notificationHubName, namespaceName, resourceGroup, err) + return fmt.Errorf("Error deleting Authorization Rule %q (Notification Hub %q / Namespace %q / Resource Group %q): %+v", id.AuthorizationRuleName, id.NotificationHubName, id.NamespaceName, id.ResourceGroup, err) } } diff --git a/azurerm/internal/services/notificationhub/notification_hub_authorization_rule_resource_test.go b/azurerm/internal/services/notificationhub/notification_hub_authorization_rule_resource_test.go new file mode 100644 index 000000000000..d11a12fc7890 --- /dev/null +++ b/azurerm/internal/services/notificationhub/notification_hub_authorization_rule_resource_test.go @@ -0,0 +1,298 @@ +package notificationhub_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/notificationhub/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type NotificationHubAuthorizationRuleResource struct { +} + +func TestAccNotificationHubAuthorizationRule_listen(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_notification_hub_authorization_rule", "test") + r := NotificationHubAuthorizationRuleResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.listen(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("manage").HasValue("false"), + check.That(data.ResourceName).Key("send").HasValue("false"), + 
check.That(data.ResourceName).Key("listen").HasValue("true"), + check.That(data.ResourceName).Key("primary_access_key").Exists(), + check.That(data.ResourceName).Key("secondary_access_key").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccNotificationHubAuthorizationRule_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_notification_hub_authorization_rule", "test") + r := NotificationHubAuthorizationRuleResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.listen(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("manage").HasValue("false"), + check.That(data.ResourceName).Key("send").HasValue("false"), + check.That(data.ResourceName).Key("listen").HasValue("true"), + check.That(data.ResourceName).Key("primary_access_key").Exists(), + check.That(data.ResourceName).Key("secondary_access_key").Exists(), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccNotificationHubAuthorizationRule_manage(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_notification_hub_authorization_rule", "test") + r := NotificationHubAuthorizationRuleResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.manage(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("manage").HasValue("true"), + check.That(data.ResourceName).Key("send").HasValue("true"), + check.That(data.ResourceName).Key("listen").HasValue("true"), + check.That(data.ResourceName).Key("primary_access_key").Exists(), + check.That(data.ResourceName).Key("secondary_access_key").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccNotificationHubAuthorizationRule_send(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_notification_hub_authorization_rule", "test") + r := NotificationHubAuthorizationRuleResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.send(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("manage").HasValue("false"), + check.That(data.ResourceName).Key("send").HasValue("true"), + check.That(data.ResourceName).Key("listen").HasValue("true"), + check.That(data.ResourceName).Key("primary_access_key").Exists(), + check.That(data.ResourceName).Key("secondary_access_key").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccNotificationHubAuthorizationRule_multi(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_notification_hub_authorization_rule", "test1") + r := NotificationHubAuthorizationRuleResource{} + resourceTwoName := "azurerm_notification_hub_authorization_rule.test2" + resourceThreeName := "azurerm_notification_hub_authorization_rule.test3" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.multi(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("manage").HasValue("false"), + check.That(data.ResourceName).Key("send").HasValue("true"), + check.That(data.ResourceName).Key("listen").HasValue("true"), + check.That(data.ResourceName).Key("primary_access_key").Exists(), + check.That(data.ResourceName).Key("secondary_access_key").Exists(), + check.That("azurerm_notification_hub_authorization_rule.test2").ExistsInAzure(r), + check.That(resourceTwoName).Key("manage").HasValue("false"), + 
check.That(resourceTwoName).Key("send").HasValue("true"), + check.That(resourceTwoName).Key("listen").HasValue("true"), + check.That(resourceTwoName).Key("primary_access_key").Exists(), + check.That(resourceTwoName).Key("secondary_access_key").Exists(), + check.That("azurerm_notification_hub_authorization_rule.test3").ExistsInAzure(r), + check.That(resourceThreeName).Key("manage").HasValue("false"), + check.That(resourceThreeName).Key("send").HasValue("true"), + check.That(resourceThreeName).Key("listen").HasValue("true"), + check.That(resourceThreeName).Key("primary_access_key").Exists(), + check.That(resourceThreeName).Key("secondary_access_key").Exists(), + ), + }, + data.ImportStep(), + data.ImportStepFor(resourceTwoName), + data.ImportStepFor(resourceThreeName), + }) +} + +func TestAccNotificationHubAuthorizationRule_updated(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_notification_hub_authorization_rule", "test") + r := NotificationHubAuthorizationRuleResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.listen(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("manage").HasValue("false"), + check.That(data.ResourceName).Key("send").HasValue("false"), + check.That(data.ResourceName).Key("listen").HasValue("true"), + check.That(data.ResourceName).Key("primary_access_key").Exists(), + check.That(data.ResourceName).Key("secondary_access_key").Exists(), + ), + }, + { + Config: r.manage(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("manage").HasValue("true"), + check.That(data.ResourceName).Key("send").HasValue("true"), + check.That(data.ResourceName).Key("listen").HasValue("true"), + check.That(data.ResourceName).Key("primary_access_key").Exists(), + check.That(data.ResourceName).Key("secondary_access_key").Exists(), + ), + }, + }) +} + +func (NotificationHubAuthorizationRuleResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.NotificationHubAuthorizationRuleID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.NotificationHubs.HubsClient.GetAuthorizationRule(ctx, id.ResourceGroup, id.NamespaceName, id.NotificationHubName, id.AuthorizationRuleName) + if err != nil { + return nil, fmt.Errorf("retrieving %s: %v", id.String(), err) + } + + return utils.Bool(resp.SharedAccessAuthorizationRuleProperties != nil), nil +} + +func (NotificationHubAuthorizationRuleResource) listen(data acceptance.TestData) string { + template := NotificationHubAuthorizationRuleResource{}.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_notification_hub_authorization_rule" "test" { + name = "acctestrule-%d" + notification_hub_name = azurerm_notification_hub.test.name + namespace_name = azurerm_notification_hub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + listen = true +} +`, template, data.RandomInteger) +} + +func (NotificationHubAuthorizationRuleResource) requiresImport(data acceptance.TestData) string { + template := NotificationHubAuthorizationRuleResource{}.listen(data) + return fmt.Sprintf(` +%s + +resource "azurerm_notification_hub_authorization_rule" "import" { + name = azurerm_notification_hub_authorization_rule.test.name + notification_hub_name = azurerm_notification_hub_authorization_rule.test.notification_hub_name + namespace_name = 
azurerm_notification_hub_authorization_rule.test.namespace_name + resource_group_name = azurerm_notification_hub_authorization_rule.test.resource_group_name + listen = azurerm_notification_hub_authorization_rule.test.listen +} +`, template) +} + +func (NotificationHubAuthorizationRuleResource) send(data acceptance.TestData) string { + template := NotificationHubAuthorizationRuleResource{}.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_notification_hub_authorization_rule" "test" { + name = "acctestrule-%d" + notification_hub_name = azurerm_notification_hub.test.name + namespace_name = azurerm_notification_hub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + send = true + listen = true +} +`, template, data.RandomInteger) +} + +func (NotificationHubAuthorizationRuleResource) multi(data acceptance.TestData) string { + template := NotificationHubAuthorizationRuleResource{}.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_notification_hub_authorization_rule" "test1" { + name = "acctestruleone-%d" + notification_hub_name = azurerm_notification_hub.test.name + namespace_name = azurerm_notification_hub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + send = true + listen = true +} + +resource "azurerm_notification_hub_authorization_rule" "test2" { + name = "acctestruletwo-%d" + notification_hub_name = azurerm_notification_hub.test.name + namespace_name = azurerm_notification_hub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + send = true + listen = true +} + +resource "azurerm_notification_hub_authorization_rule" "test3" { + name = "acctestrulethree-%d" + notification_hub_name = azurerm_notification_hub.test.name + namespace_name = azurerm_notification_hub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + send = true + listen = true +} +`, template, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (NotificationHubAuthorizationRuleResource) manage(data acceptance.TestData) string { + template := NotificationHubAuthorizationRuleResource{}.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_notification_hub_authorization_rule" "test" { + name = "acctestrule-%d" + notification_hub_name = azurerm_notification_hub.test.name + namespace_name = azurerm_notification_hub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + manage = true + send = true + listen = true +} +`, template, data.RandomInteger) +} + +func (NotificationHubAuthorizationRuleResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_notification_hub_namespace" "test" { + name = "acctestnhn-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + namespace_type = "NotificationHub" + sku_name = "Free" +} + +resource "azurerm_notification_hub" "test" { + name = "acctestnh-%d" + namespace_name = azurerm_notification_hub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/notificationhub/notification_hub_data_source_test.go b/azurerm/internal/services/notificationhub/notification_hub_data_source_test.go new file 
mode 100644 index 000000000000..aa1a441cc99f --- /dev/null +++ b/azurerm/internal/services/notificationhub/notification_hub_data_source_test.go @@ -0,0 +1,39 @@ +package notificationhub_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" +) + +type NotificationHubDataSource struct{} + +func TestAccNotificationHubDataSource_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_notification_hub", "test") + d := NotificationHubDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: d.basic(data), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(data.ResourceName, "apns_credential.#", "0"), + resource.TestCheckResourceAttr(data.ResourceName, "gcm_credential.#", "0"), + resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), + ), + }, + }) +} + +func (d NotificationHubDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_notification_hub" "test" { + name = azurerm_notification_hub.test.name + namespace_name = azurerm_notification_hub_namespace.test.name + resource_group_name = azurerm_notification_hub_namespace.test.resource_group_name +} +`, NotificationHubResource{}.basic(data)) +} diff --git a/azurerm/internal/services/notificationhub/notification_hub_namespace_data_source_test.go b/azurerm/internal/services/notificationhub/notification_hub_namespace_data_source_test.go new file mode 100644 index 000000000000..e2e1448cf4ad --- /dev/null +++ b/azurerm/internal/services/notificationhub/notification_hub_namespace_data_source_test.go @@ -0,0 +1,38 @@ +package notificationhub_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" +) + +type NotificationHubNamespaceDataSource struct{} + +func TestAccNotificationHubNamespaceDataSource_free(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_notification_hub_namespace", "test") + d := NotificationHubNamespaceDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: d.free(data), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(data.ResourceName, "namespace_type", "NotificationHub"), + resource.TestCheckResourceAttr(data.ResourceName, "sku.0.name", "Free"), + resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), + ), + }, + }) +} + +func (d NotificationHubNamespaceDataSource) free(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_notification_hub_namespace" "test" { + name = azurerm_notification_hub_namespace.test.name + resource_group_name = azurerm_notification_hub_namespace.test.resource_group_name +} +`, NotificationHubNamespaceResource{}.free(data)) +} diff --git a/azurerm/internal/services/notificationhub/notification_hub_namespace_resource.go b/azurerm/internal/services/notificationhub/notification_hub_namespace_resource.go index b4ac1a222ab5..455d6c08ce32 100644 --- a/azurerm/internal/services/notificationhub/notification_hub_namespace_resource.go +++ b/azurerm/internal/services/notificationhub/notification_hub_namespace_resource.go @@ -16,23 +16,25 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/notificationhub/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) var notificationHubNamespaceResourceName = "azurerm_notification_hub_namespace" -func resourceArmNotificationHubNamespace() *schema.Resource { +func resourceNotificationHubNamespace() *schema.Resource { return &schema.Resource{ - Create: resourceArmNotificationHubNamespaceCreateUpdate, - Read: resourceArmNotificationHubNamespaceRead, - Update: resourceArmNotificationHubNamespaceCreateUpdate, - Delete: resourceArmNotificationHubNamespaceDelete, - - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, + Create: resourceNotificationHubNamespaceCreateUpdate, + Read: resourceNotificationHubNamespaceRead, + Update: resourceNotificationHubNamespaceCreateUpdate, + Delete: resourceNotificationHubNamespaceDelete, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.NamespaceID(id) + return err + }), Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -88,7 +90,7 @@ func resourceArmNotificationHubNamespace() *schema.Resource { } } -func resourceArmNotificationHubNamespaceCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceNotificationHubNamespaceCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).NotificationHubs.NamespacesClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -159,34 +161,32 @@ func resourceArmNotificationHubNamespaceCreateUpdate(d *schema.ResourceData, met d.SetId(*read.ID) - return resourceArmNotificationHubNamespaceRead(d, meta) + return resourceNotificationHubNamespaceRead(d, meta) } -func resourceArmNotificationHubNamespaceRead(d *schema.ResourceData, meta interface{}) error { +func resourceNotificationHubNamespaceRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).NotificationHubs.NamespacesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.NamespaceID(d.Id()) if err != nil { return err } - resourceGroup := id.ResourceGroup - name := id.Path["namespaces"] - resp, err := client.Get(ctx, resourceGroup, name) + resp, err := client.Get(ctx, id.ResourceGroup, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[DEBUG] Notification Hub Namespace %q (Resource Group %q) was not found - removing from state!", name, resourceGroup) + log.Printf("[DEBUG] Notification Hub Namespace %q (Resource Group %q) was not found - removing from state!", id.Name, id.ResourceGroup) d.SetId("") return nil } - return fmt.Errorf("Error making Read request on Notification Hub Namespace %q (Resource Group %q): %+v", name, resourceGroup, err) + return fmt.Errorf("Error making Read request on Notification Hub Namespace %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } - d.Set("name", resp.Name) - d.Set("resource_group_name", resourceGroup) + d.Set("name", id.Name) + 
d.Set("resource_group_name", id.ResourceGroup) if location := resp.Location; location != nil { d.Set("location", azure.NormalizeLocation(*location)) } @@ -196,7 +196,7 @@ func resourceArmNotificationHubNamespaceRead(d *schema.ResourceData, meta interf return fmt.Errorf("Error setting 'sku_name': %+v", err) } } else { - return fmt.Errorf("Error making Read request on Notification Hub Namespace %q (Resource Group %q): Unable to retrieve 'sku' value", name, resourceGroup) + return fmt.Errorf("Error making Read request on Notification Hub Namespace %q (Resource Group %q): Unable to retrieve 'sku' value", id.Name, id.ResourceGroup) } if props := resp.NamespaceProperties; props != nil { @@ -208,36 +208,34 @@ func resourceArmNotificationHubNamespaceRead(d *schema.ResourceData, meta interf return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmNotificationHubNamespaceDelete(d *schema.ResourceData, meta interface{}) error { +func resourceNotificationHubNamespaceDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).NotificationHubs.NamespacesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.NamespaceID(d.Id()) if err != nil { return err } - resourceGroup := id.ResourceGroup - name := id.Path["namespaces"] - future, err := client.Delete(ctx, resourceGroup, name) + future, err := client.Delete(ctx, id.ResourceGroup, id.Name) if err != nil { if !response.WasNotFound(future.Response()) { - return fmt.Errorf("Error deleting Notification Hub Namespace %q (Resource Group %q): %+v", name, resourceGroup, err) + return fmt.Errorf("Error deleting Notification Hub Namespace %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } } // the future returned from the Delete method is broken 50% of the time - let's poll ourselves for now // Related Bug: https://github.com/Azure/azure-sdk-for-go/issues/2254 - log.Printf("[DEBUG] Waiting for Notification Hub Namespace %q (Resource Group %q) to be deleted", name, resourceGroup) + log.Printf("[DEBUG] Waiting for Notification Hub Namespace %q (Resource Group %q) to be deleted", id.Name, id.ResourceGroup) stateConf := &resource.StateChangeConf{ Pending: []string{"200", "202"}, Target: []string{"404"}, - Refresh: notificationHubNamespaceDeleteStateRefreshFunc(ctx, client, resourceGroup, name), + Refresh: notificationHubNamespaceDeleteStateRefreshFunc(ctx, client, id.ResourceGroup, id.Name), Timeout: d.Timeout(schema.TimeoutDelete), } if _, err := stateConf.WaitForState(); err != nil { - return fmt.Errorf("Error waiting for Notification Hub %q (Resource Group %q) to be deleted: %s", name, resourceGroup, err) + return fmt.Errorf("Error waiting for Notification Hub %q (Resource Group %q) to be deleted: %s", id.Name, id.ResourceGroup, err) } return nil diff --git a/azurerm/internal/services/notificationhub/notification_hub_namespace_resource_test.go b/azurerm/internal/services/notificationhub/notification_hub_namespace_resource_test.go new file mode 100644 index 000000000000..2fb5e1aee7d2 --- /dev/null +++ b/azurerm/internal/services/notificationhub/notification_hub_namespace_resource_test.go @@ -0,0 +1,155 @@ +package notificationhub_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/notificationhub/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type NotificationHubNamespaceResource struct { +} + +func TestAccNotificationHubNamespace_free(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_notification_hub_namespace", "test") + r := NotificationHubNamespaceResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.free(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccNotificationHubNamespace_updateTag(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_notification_hub_namespace", "test") + r := NotificationHubNamespaceResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.free(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + ), + }, + data.ImportStep(), + { + Config: r.withoutTag(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), + }, + data.ImportStep(), + { + Config: r.free(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccNotificationHubNamespace_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_notification_hub_namespace", "test") + r := NotificationHubNamespaceResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.free(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (NotificationHubNamespaceResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.NamespaceID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.NotificationHubs.NamespacesClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving %s: %v", id.String(), err) + } + + return utils.Bool(resp.NamespaceProperties != nil), nil +} + +func (NotificationHubNamespaceResource) free(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_notification_hub_namespace" "test" { + name = "acctestnhn-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + namespace_type = "NotificationHub" + + sku_name = "Free" + + tags = { + env = "Test" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (NotificationHubNamespaceResource) withoutTag(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_notification_hub_namespace" "test" { + name = "acctestnhn-%d" + resource_group_name = 
azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + namespace_type = "NotificationHub" + + sku_name = "Free" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (NotificationHubNamespaceResource) requiresImport(data acceptance.TestData) string { + template := NotificationHubNamespaceResource{}.free(data) + return fmt.Sprintf(` +%s + +resource "azurerm_notification_hub_namespace" "import" { + name = azurerm_notification_hub_namespace.test.name + resource_group_name = azurerm_notification_hub_namespace.test.resource_group_name + location = azurerm_notification_hub_namespace.test.location + namespace_type = azurerm_notification_hub_namespace.test.namespace_type + + sku_name = "Free" +} +`, template) +} diff --git a/azurerm/internal/services/notificationhub/notification_hub_resource.go b/azurerm/internal/services/notificationhub/notification_hub_resource.go index a0a4f661e3fe..b60321565bbf 100644 --- a/azurerm/internal/services/notificationhub/notification_hub_resource.go +++ b/azurerm/internal/services/notificationhub/notification_hub_resource.go @@ -23,17 +23,19 @@ import ( var notificationHubResourceName = "azurerm_notification_hub" -const apnsProductionName = "Production" -const apnsProductionEndpoint = "https://api.push.apple.com:443/3/device" -const apnsSandboxName = "Sandbox" -const apnsSandboxEndpoint = "https://api.development.push.apple.com:443/3/device" +const ( + apnsProductionName = "Production" + apnsProductionEndpoint = "https://api.push.apple.com:443/3/device" + apnsSandboxName = "Sandbox" + apnsSandboxEndpoint = "https://api.development.push.apple.com:443/3/device" +) -func resourceArmNotificationHub() *schema.Resource { +func resourceNotificationHub() *schema.Resource { return &schema.Resource{ - Create: resourceArmNotificationHubCreateUpdate, - Read: resourceArmNotificationHubRead, - Update: resourceArmNotificationHubCreateUpdate, - Delete: resourceArmNotificationHubDelete, + Create: resourceNotificationHubCreateUpdate, + Read: resourceNotificationHubRead, + Update: resourceNotificationHubCreateUpdate, + Delete: resourceNotificationHubDelete, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { _, err := parse.NotificationHubID(id) @@ -144,7 +146,7 @@ func resourceArmNotificationHub() *schema.Resource { } } -func resourceArmNotificationHubCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourceNotificationHubCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).NotificationHubs.HubsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -211,7 +213,7 @@ func resourceArmNotificationHubCreateUpdate(d *schema.ResourceData, meta interfa d.SetId(*read.ID) - return resourceArmNotificationHubRead(d, meta) + return resourceNotificationHubRead(d, meta) } func notificationHubStateRefreshFunc(ctx context.Context, client *notificationhubs.Client, resourceGroup, namespaceName, name string) resource.StateRefreshFunc { @@ -229,7 +231,7 @@ func notificationHubStateRefreshFunc(ctx context.Context, client *notificationhu } } -func resourceArmNotificationHubRead(d *schema.ResourceData, meta interface{}) error { +func resourceNotificationHubRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).NotificationHubs.HubsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() @@ -277,7 +279,7 @@ func resourceArmNotificationHubRead(d 
*schema.ResourceData, meta interface{}) er return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmNotificationHubDelete(d *schema.ResourceData, meta interface{}) error { +func resourceNotificationHubDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).NotificationHubs.HubsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/notificationhub/notification_hub_resource_test.go b/azurerm/internal/services/notificationhub/notification_hub_resource_test.go new file mode 100644 index 000000000000..ab69413ccc17 --- /dev/null +++ b/azurerm/internal/services/notificationhub/notification_hub_resource_test.go @@ -0,0 +1,169 @@ +package notificationhub_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/notificationhub/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type NotificationHubResource struct { +} + +func TestAccNotificationHub_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_notification_hub", "test") + r := NotificationHubResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("apns_credential.#").HasValue("0"), + check.That(data.ResourceName).Key("gcm_credential.#").HasValue("0"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccNotificationHub_updateTag(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_notification_hub", "test") + r := NotificationHubResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + ), + }, + data.ImportStep(), + { + Config: r.withoutTag(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccNotificationHub_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_notification_hub", "test") + r := NotificationHubResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("apns_credential.#").HasValue("0"), + check.That(data.ResourceName).Key("gcm_credential.#").HasValue("0"), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (NotificationHubResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.NotificationHubID(state.ID) + if err != nil 
{ + return nil, err + } + + resp, err := clients.NotificationHubs.HubsClient.Get(ctx, id.ResourceGroup, id.NamespaceName, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving %s: %v", id.String(), err) + } + + return utils.Bool(resp.Properties != nil), nil +} + +func (NotificationHubResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRGpol-%d" + location = "%s" +} + +resource "azurerm_notification_hub_namespace" "test" { + name = "acctestnhn-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + namespace_type = "NotificationHub" + sku_name = "Free" +} + +resource "azurerm_notification_hub" "test" { + name = "acctestnh-%d" + namespace_name = azurerm_notification_hub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + + tags = { + env = "Test" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (NotificationHubResource) withoutTag(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRGpol-%d" + location = "%s" +} + +resource "azurerm_notification_hub_namespace" "test" { + name = "acctestnhn-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + namespace_type = "NotificationHub" + sku_name = "Free" +} + +resource "azurerm_notification_hub" "test" { + name = "acctestnh-%d" + namespace_name = azurerm_notification_hub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (NotificationHubResource) requiresImport(data acceptance.TestData) string { + template := NotificationHubResource{}.basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_notification_hub" "import" { + name = azurerm_notification_hub.test.name + namespace_name = azurerm_notification_hub.test.namespace_name + resource_group_name = azurerm_notification_hub.test.resource_group_name + location = azurerm_notification_hub.test.location +} +`, template) +} diff --git a/azurerm/internal/services/notificationhub/parse/namespace.go b/azurerm/internal/services/notificationhub/parse/namespace.go new file mode 100644 index 000000000000..395b7c279e81 --- /dev/null +++ b/azurerm/internal/services/notificationhub/parse/namespace.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type NamespaceId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewNamespaceID(subscriptionId, resourceGroup, name string) NamespaceId { + return NamespaceId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id NamespaceId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Namespace", segmentsStr) +} + +func (id NamespaceId) ID() string { + fmtString := 
"/subscriptions/%s/resourceGroups/%s/providers/Microsoft.NotificationHubs/namespaces/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// NamespaceID parses a Namespace ID into an NamespaceId struct +func NamespaceID(input string) (*NamespaceId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := NamespaceId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("namespaces"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/notificationhub/parse/namespace_test.go b/azurerm/internal/services/notificationhub/parse/namespace_test.go new file mode 100644 index 000000000000..f4ed3885cea7 --- /dev/null +++ b/azurerm/internal/services/notificationhub/parse/namespace_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = NamespaceId{} + +func TestNamespaceIDFormatter(t *testing.T) { + actual := NewNamespaceID("12345678-1234-9876-4563-123456789012", "resGroup1", "namespace1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/namespace1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestNamespaceID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *NamespaceId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/namespace1", + Expected: &NamespaceId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "namespace1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NOTIFICATIONHUBS/NAMESPACES/NAMESPACE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := NamespaceID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an 
error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/notificationhub/parse/notification_hub.go b/azurerm/internal/services/notificationhub/parse/notification_hub.go index 6b7b78f6e5c8..c1ba6cc097f8 100644 --- a/azurerm/internal/services/notificationhub/parse/notification_hub.go +++ b/azurerm/internal/services/notificationhub/parse/notification_hub.go @@ -1,32 +1,69 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "fmt" + "strings" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" ) type NotificationHubId struct { - ResourceGroup string - NamespaceName string - Name string + SubscriptionId string + ResourceGroup string + NamespaceName string + Name string +} + +func NewNotificationHubID(subscriptionId, resourceGroup, namespaceName, name string) NotificationHubId { + return NotificationHubId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + NamespaceName: namespaceName, + Name: name, + } } +func (id NotificationHubId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Namespace Name %q", id.NamespaceName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Notification Hub", segmentsStr) +} + +func (id NotificationHubId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.NotificationHubs/namespaces/%s/notificationHubs/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.NamespaceName, id.Name) +} + +// NotificationHubID parses a NotificationHub ID into an NotificationHubId struct func NotificationHubID(input string) (*NotificationHubId, error) { id, err := azure.ParseAzureResourceID(input) if err != nil { - return nil, fmt.Errorf("parsing Notification Hub ID %q: %+v", input, err) + return nil, err } - app := NotificationHubId{ - ResourceGroup: id.ResourceGroup, + resourceId := NotificationHubId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, } - if app.NamespaceName, err = id.PopSegment("namespaces"); err != nil { - return nil, err + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - if app.Name, err = id.PopSegment("notificationHubs"); err != nil { + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.NamespaceName, err = id.PopSegment("namespaces"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("notificationHubs"); err != nil { return nil, err } @@ -34,5 +71,5 @@ func NotificationHubID(input string) (*NotificationHubId, error) { return nil, err } - return &app, nil + return &resourceId, nil } diff --git a/azurerm/internal/services/notificationhub/parse/notification_hub_authorization_rule.go b/azurerm/internal/services/notificationhub/parse/notification_hub_authorization_rule.go 
new file mode 100644 index 000000000000..067b62341f81 --- /dev/null +++ b/azurerm/internal/services/notificationhub/parse/notification_hub_authorization_rule.go @@ -0,0 +1,81 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type NotificationHubAuthorizationRuleId struct { + SubscriptionId string + ResourceGroup string + NamespaceName string + NotificationHubName string + AuthorizationRuleName string +} + +func NewNotificationHubAuthorizationRuleID(subscriptionId, resourceGroup, namespaceName, notificationHubName, authorizationRuleName string) NotificationHubAuthorizationRuleId { + return NotificationHubAuthorizationRuleId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + NamespaceName: namespaceName, + NotificationHubName: notificationHubName, + AuthorizationRuleName: authorizationRuleName, + } +} + +func (id NotificationHubAuthorizationRuleId) String() string { + segments := []string{ + fmt.Sprintf("Authorization Rule Name %q", id.AuthorizationRuleName), + fmt.Sprintf("Notification Hub Name %q", id.NotificationHubName), + fmt.Sprintf("Namespace Name %q", id.NamespaceName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Notification Hub Authorization Rule", segmentsStr) +} + +func (id NotificationHubAuthorizationRuleId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.NotificationHubs/namespaces/%s/notificationHubs/%s/AuthorizationRules/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.NamespaceName, id.NotificationHubName, id.AuthorizationRuleName) +} + +// NotificationHubAuthorizationRuleID parses a NotificationHubAuthorizationRule ID into an NotificationHubAuthorizationRuleId struct +func NotificationHubAuthorizationRuleID(input string) (*NotificationHubAuthorizationRuleId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := NotificationHubAuthorizationRuleId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.NamespaceName, err = id.PopSegment("namespaces"); err != nil { + return nil, err + } + if resourceId.NotificationHubName, err = id.PopSegment("notificationHubs"); err != nil { + return nil, err + } + if resourceId.AuthorizationRuleName, err = id.PopSegment("AuthorizationRules"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/notificationhub/parse/notification_hub_authorization_rule_test.go b/azurerm/internal/services/notificationhub/parse/notification_hub_authorization_rule_test.go new file mode 100644 index 000000000000..0372460a3808 --- /dev/null +++ b/azurerm/internal/services/notificationhub/parse/notification_hub_authorization_rule_test.go @@ -0,0 +1,144 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = NotificationHubAuthorizationRuleId{} + +func TestNotificationHubAuthorizationRuleIDFormatter(t *testing.T) { + actual := NewNotificationHubAuthorizationRuleID("12345678-1234-9876-4563-123456789012", "resGroup1", "namespace1", "hub1", "authorizationRule1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/namespace1/notificationHubs/hub1/AuthorizationRules/authorizationRule1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestNotificationHubAuthorizationRuleID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *NotificationHubAuthorizationRuleId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing NamespaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/", + Error: true, + }, + + { + // missing value for NamespaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/", + Error: true, + }, + + { + // missing NotificationHubName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/namespace1/", + Error: true, + }, + + { + // missing value for NotificationHubName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/namespace1/notificationHubs/", + Error: true, + }, + + { + // missing AuthorizationRuleName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/namespace1/notificationHubs/hub1/", + Error: true, + }, + + { + // missing value for AuthorizationRuleName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/namespace1/notificationHubs/hub1/AuthorizationRules/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/namespace1/notificationHubs/hub1/AuthorizationRules/authorizationRule1", + Expected: &NotificationHubAuthorizationRuleId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + NamespaceName: "namespace1", + NotificationHubName: "hub1", + AuthorizationRuleName: "authorizationRule1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NOTIFICATIONHUBS/NAMESPACES/NAMESPACE1/NOTIFICATIONHUBS/HUB1/AUTHORIZATIONRULES/AUTHORIZATIONRULE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := NotificationHubAuthorizationRuleID(v.Input) + if err != nil { + if v.Error { + 
continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.NamespaceName != v.Expected.NamespaceName { + t.Fatalf("Expected %q but got %q for NamespaceName", v.Expected.NamespaceName, actual.NamespaceName) + } + if actual.NotificationHubName != v.Expected.NotificationHubName { + t.Fatalf("Expected %q but got %q for NotificationHubName", v.Expected.NotificationHubName, actual.NotificationHubName) + } + if actual.AuthorizationRuleName != v.Expected.AuthorizationRuleName { + t.Fatalf("Expected %q but got %q for AuthorizationRuleName", v.Expected.AuthorizationRuleName, actual.AuthorizationRuleName) + } + } +} diff --git a/azurerm/internal/services/notificationhub/parse/notification_hub_test.go b/azurerm/internal/services/notificationhub/parse/notification_hub_test.go index d75a70f2379d..de6bb51b05b5 100644 --- a/azurerm/internal/services/notificationhub/parse/notification_hub_test.go +++ b/azurerm/internal/services/notificationhub/parse/notification_hub_test.go @@ -1,86 +1,126 @@ package parse +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + import ( "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" ) +var _ resourceid.Formatter = NotificationHubId{} + +func TestNotificationHubIDFormatter(t *testing.T) { + actual := NewNotificationHubID("12345678-1234-9876-4563-123456789012", "resGroup1", "namespace1", "hub1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/namespace1/notificationHubs/hub1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + func TestNotificationHubID(t *testing.T) { testData := []struct { - Name string Input string + Error bool Expected *NotificationHubId }{ + { - Name: "Empty", - Input: "", - Expected: nil, + // empty + Input: "", + Error: true, }, + { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, + // missing SubscriptionId + Input: "/", + Error: true, }, + { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, }, + { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/", - Expected: nil, + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, }, + { - Name: "No namespace Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces", - Expected: nil, + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, }, + { - Name: "Missing notification Hub Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/namespace1", - Expected: nil, + // missing 
NamespaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/", + Error: true, }, + { - Name: "Missing notification Hub Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/namespace1/notificationHubs", - Expected: nil, + // missing value for NamespaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/", + Error: true, }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/namespace1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/namespace1/notificationHubs/", + Error: true, + }, + { - Name: "notification Hub ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/namespace1/notificationHubs/hub1", + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/namespace1/notificationHubs/hub1", Expected: &NotificationHubId{ - ResourceGroup: "resGroup1", - NamespaceName: "namespace1", - Name: "hub1", + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + NamespaceName: "namespace1", + Name: "hub1", }, }, + { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/namespace1/NotificationHubs/hub1", - Expected: nil, + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NOTIFICATIONHUBS/NAMESPACES/NAMESPACE1/NOTIFICATIONHUBS/HUB1", + Error: true, }, } for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) + t.Logf("[DEBUG] Testing %q", v.Input) actual, err := NotificationHubID(v.Input) if err != nil { - if v.Expected == nil { + if v.Error { continue } - t.Fatalf("Expected a value but got an error: %s", err) + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") } + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) } - if actual.NamespaceName != v.Expected.NamespaceName { t.Fatalf("Expected %q but got %q for NamespaceName", v.Expected.NamespaceName, actual.NamespaceName) } - if actual.Name != v.Expected.Name { t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) } diff --git a/azurerm/internal/services/notificationhub/registration.go b/azurerm/internal/services/notificationhub/registration.go index 0859e9c37df2..22846cfef56a 100644 --- a/azurerm/internal/services/notificationhub/registration.go +++ b/azurerm/internal/services/notificationhub/registration.go @@ -29,8 +29,8 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource { // 
SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_notification_hub_authorization_rule": resourceArmNotificationHubAuthorizationRule(), - "azurerm_notification_hub_namespace": resourceArmNotificationHubNamespace(), - "azurerm_notification_hub": resourceArmNotificationHub(), + "azurerm_notification_hub_authorization_rule": resourceNotificationHubAuthorizationRule(), + "azurerm_notification_hub_namespace": resourceNotificationHubNamespace(), + "azurerm_notification_hub": resourceNotificationHub(), } } diff --git a/azurerm/internal/services/notificationhub/resourceids.go b/azurerm/internal/services/notificationhub/resourceids.go new file mode 100644 index 000000000000..ccc52624d8a5 --- /dev/null +++ b/azurerm/internal/services/notificationhub/resourceids.go @@ -0,0 +1,5 @@ +package notificationhub + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Namespace -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/namespace1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=NotificationHub -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/namespace1/notificationHubs/hub1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=NotificationHubAuthorizationRule -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/namespace1/notificationHubs/hub1/AuthorizationRules/authorizationRule1 diff --git a/azurerm/internal/services/notificationhub/tests/notification_hub_authorization_rule_resource_test.go b/azurerm/internal/services/notificationhub/tests/notification_hub_authorization_rule_resource_test.go deleted file mode 100644 index 0db7fe750fdb..000000000000 --- a/azurerm/internal/services/notificationhub/tests/notification_hub_authorization_rule_resource_test.go +++ /dev/null @@ -1,365 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMNotificationHubAuthorizationRule_listen(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_notification_hub_authorization_rule", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNotificationHubAuthorizationRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAzureRMNotificationHubAuthorizationRule_listen(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNotificationHubAuthorizationRuleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "manage", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "send", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "listen", "true"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_access_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_access_key"), - ), - }, - data.ImportStep(), - }, - }) -} - -func 
TestAccAzureRMNotificationHubAuthorizationRule_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_notification_hub_authorization_rule", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNotificationHubAuthorizationRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAzureRMNotificationHubAuthorizationRule_listen(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNotificationHubAuthorizationRuleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "manage", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "send", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "listen", "true"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_access_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_access_key"), - ), - }, - data.RequiresImportErrorStep(testAzureRMNotificationHubAuthorizationRule_requiresImport), - }, - }) -} - -func TestAccAzureRMNotificationHubAuthorizationRule_manage(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_notification_hub_authorization_rule", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNotificationHubAuthorizationRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAzureRMNotificationHubAuthorizationRule_manage(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNotificationHubAuthorizationRuleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "manage", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "send", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "listen", "true"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_access_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_access_key"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMNotificationHubAuthorizationRule_send(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_notification_hub_authorization_rule", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNotificationHubAuthorizationRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAzureRMNotificationHubAuthorizationRule_send(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNotificationHubAuthorizationRuleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "manage", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "send", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "listen", "true"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_access_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_access_key"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMNotificationHubAuthorizationRule_multi(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_notification_hub_authorization_rule", "test1") - resourceTwoName := "azurerm_notification_hub_authorization_rule.test2" - resourceThreeName := "azurerm_notification_hub_authorization_rule.test3" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: 
acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNotificationHubAuthorizationRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAzureRMNotificationHubAuthorizationRule_multi(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNotificationHubAuthorizationRuleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "manage", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "send", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "listen", "true"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_access_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_access_key"), - testCheckAzureRMNotificationHubAuthorizationRuleExists(resourceTwoName), - resource.TestCheckResourceAttr(resourceTwoName, "manage", "false"), - resource.TestCheckResourceAttr(resourceTwoName, "send", "true"), - resource.TestCheckResourceAttr(resourceTwoName, "listen", "true"), - resource.TestCheckResourceAttrSet(resourceTwoName, "primary_access_key"), - resource.TestCheckResourceAttrSet(resourceTwoName, "secondary_access_key"), - testCheckAzureRMNotificationHubAuthorizationRuleExists(resourceThreeName), - resource.TestCheckResourceAttr(resourceThreeName, "manage", "false"), - resource.TestCheckResourceAttr(resourceThreeName, "send", "true"), - resource.TestCheckResourceAttr(resourceThreeName, "listen", "true"), - resource.TestCheckResourceAttrSet(resourceThreeName, "primary_access_key"), - resource.TestCheckResourceAttrSet(resourceThreeName, "secondary_access_key"), - ), - }, - data.ImportStep(), - { - ResourceName: resourceTwoName, - ImportState: true, - ImportStateVerify: true, - }, - { - ResourceName: resourceThreeName, - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func TestAccAzureRMNotificationHubAuthorizationRule_updated(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_notification_hub_authorization_rule", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNotificationHubAuthorizationRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAzureRMNotificationHubAuthorizationRule_listen(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNotificationHubAuthorizationRuleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "manage", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "send", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "listen", "true"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_access_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_access_key"), - ), - }, - { - Config: testAzureRMNotificationHubAuthorizationRule_manage(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNotificationHubAuthorizationRuleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "manage", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "send", "true"), - resource.TestCheckResourceAttr(data.ResourceName, "listen", "true"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_access_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_access_key"), - ), - }, - }, - }) -} - -func testCheckAzureRMNotificationHubAuthorizationRuleExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := 
acceptance.AzureProvider.Meta().(*clients.Client).NotificationHubs.HubsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("not found: %s", resourceName) - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - notificationHubName := rs.Primary.Attributes["notification_hub_name"] - namespaceName := rs.Primary.Attributes["namespace_name"] - ruleName := rs.Primary.Attributes["name"] - - resp, err := client.GetAuthorizationRule(ctx, resourceGroup, namespaceName, notificationHubName, ruleName) - if err != nil { - return fmt.Errorf("Bad: Get on notificationHubsClient: %s", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Notification Hub Authorization Rule does not exist: %s", ruleName) - } - - return nil - } -} - -func testCheckAzureRMNotificationHubAuthorizationRuleDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).NotificationHubs.HubsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_notification_hub_authorization_rule" { - continue - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - notificationHubName := rs.Primary.Attributes["notification_hub_name"] - namespaceName := rs.Primary.Attributes["namespace_name"] - ruleName := rs.Primary.Attributes["name"] - resp, err := client.GetAuthorizationRule(ctx, resourceGroup, namespaceName, notificationHubName, ruleName) - - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Notification Hub Authorization Rule still exists:%s", *resp.Name) - } - } - - return nil -} - -func testAzureRMNotificationHubAuthorizationRule_listen(data acceptance.TestData) string { - template := testAzureRMNotificationHubAuthorizationRule_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_notification_hub_authorization_rule" "test" { - name = "acctestrule-%d" - notification_hub_name = azurerm_notification_hub.test.name - namespace_name = azurerm_notification_hub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - listen = true -} -`, template, data.RandomInteger) -} - -func testAzureRMNotificationHubAuthorizationRule_requiresImport(data acceptance.TestData) string { - template := testAzureRMNotificationHubAuthorizationRule_listen(data) - return fmt.Sprintf(` -%s - -resource "azurerm_notification_hub_authorization_rule" "import" { - name = azurerm_notification_hub_authorization_rule.test.name - notification_hub_name = azurerm_notification_hub_authorization_rule.test.notification_hub_name - namespace_name = azurerm_notification_hub_authorization_rule.test.namespace_name - resource_group_name = azurerm_notification_hub_authorization_rule.test.resource_group_name - listen = azurerm_notification_hub_authorization_rule.test.listen -} -`, template) -} - -func testAzureRMNotificationHubAuthorizationRule_send(data acceptance.TestData) string { - template := testAzureRMNotificationHubAuthorizationRule_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_notification_hub_authorization_rule" "test" { - name = "acctestrule-%d" - notification_hub_name = azurerm_notification_hub.test.name - namespace_name = azurerm_notification_hub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - send = true - listen = true -} -`, template, data.RandomInteger) -} 
- -func testAzureRMNotificationHubAuthorizationRule_multi(data acceptance.TestData) string { - template := testAzureRMNotificationHubAuthorizationRule_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_notification_hub_authorization_rule" "test1" { - name = "acctestruleone-%d" - notification_hub_name = azurerm_notification_hub.test.name - namespace_name = azurerm_notification_hub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - send = true - listen = true -} - -resource "azurerm_notification_hub_authorization_rule" "test2" { - name = "acctestruletwo-%d" - notification_hub_name = azurerm_notification_hub.test.name - namespace_name = azurerm_notification_hub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - send = true - listen = true -} - -resource "azurerm_notification_hub_authorization_rule" "test3" { - name = "acctestrulethree-%d" - notification_hub_name = azurerm_notification_hub.test.name - namespace_name = azurerm_notification_hub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - send = true - listen = true -} -`, template, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAzureRMNotificationHubAuthorizationRule_manage(data acceptance.TestData) string { - template := testAzureRMNotificationHubAuthorizationRule_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_notification_hub_authorization_rule" "test" { - name = "acctestrule-%d" - notification_hub_name = azurerm_notification_hub.test.name - namespace_name = azurerm_notification_hub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - manage = true - send = true - listen = true -} -`, template, data.RandomInteger) -} - -func testAzureRMNotificationHubAuthorizationRule_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_notification_hub_namespace" "test" { - name = "acctestnhn-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - namespace_type = "NotificationHub" - sku_name = "Free" -} - -resource "azurerm_notification_hub" "test" { - name = "acctestnh-%d" - namespace_name = azurerm_notification_hub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/notificationhub/tests/notification_hub_data_source_test.go b/azurerm/internal/services/notificationhub/tests/notification_hub_data_source_test.go deleted file mode 100644 index 8e9403ba12f0..000000000000 --- a/azurerm/internal/services/notificationhub/tests/notification_hub_data_source_test.go +++ /dev/null @@ -1,41 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMNotificationHub_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_notification_hub", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNotificationHubDestroy, - Steps: []resource.TestStep{ - { - Config: 
testAccDataSourceAzureRMNotificationHubBasic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "apns_credential.#", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "gcm_credential.#", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - }, - }) -} - -func testAccDataSourceAzureRMNotificationHubBasic(data acceptance.TestData) string { - template := testAccAzureRMNotificationHub_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_notification_hub" "test" { - name = azurerm_notification_hub.test.name - namespace_name = azurerm_notification_hub_namespace.test.name - resource_group_name = azurerm_notification_hub_namespace.test.resource_group_name -} -`, template) -} diff --git a/azurerm/internal/services/notificationhub/tests/notification_hub_namespace_data_source_test.go b/azurerm/internal/services/notificationhub/tests/notification_hub_namespace_data_source_test.go deleted file mode 100644 index e3226e876986..000000000000 --- a/azurerm/internal/services/notificationhub/tests/notification_hub_namespace_data_source_test.go +++ /dev/null @@ -1,40 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMNotificationHubNamespace_free(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_notification_hub_namespace", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNotificationHubNamespaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMNotificationHubNamespaceFree(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "namespace_type", "NotificationHub"), - resource.TestCheckResourceAttr(data.ResourceName, "sku.0.name", "Free"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - }, - }) -} - -func testAccDataSourceAzureRMNotificationHubNamespaceFree(data acceptance.TestData) string { - template := testAccAzureRMNotificationHubNamespace_free(data) - return fmt.Sprintf(` -%s - -data "azurerm_notification_hub_namespace" "test" { - name = azurerm_notification_hub_namespace.test.name - resource_group_name = azurerm_notification_hub_namespace.test.resource_group_name -} -`, template) -} diff --git a/azurerm/internal/services/notificationhub/tests/notification_hub_namespace_resource_test.go b/azurerm/internal/services/notificationhub/tests/notification_hub_namespace_resource_test.go deleted file mode 100644 index a1a18248c21f..000000000000 --- a/azurerm/internal/services/notificationhub/tests/notification_hub_namespace_resource_test.go +++ /dev/null @@ -1,198 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMNotificationHubNamespace_free(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_notification_hub_namespace", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: 
acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNotificationHubNamespaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMNotificationHubNamespace_free(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNotificationHubNamespaceExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMNotificationHubNamespace_updateTag(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_notification_hub_namespace", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNotificationHubNamespaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMNotificationHubNamespace_free(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNotificationHubNamespaceExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMNotificationHubNamespace_withoutTag(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNotificationHubNamespaceExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMNotificationHubNamespace_free(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNotificationHubNamespaceExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMNotificationHubNamespace_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_notification_hub_namespace", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNotificationHubNamespaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMNotificationHubNamespace_free(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNotificationHubNamespaceExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMNotificationHubNamespace_requiresImport), - }, - }) -} - -func testCheckAzureRMNotificationHubNamespaceExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).NotificationHubs.NamespacesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("not found: %s", resourceName) - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - namespaceName := rs.Primary.Attributes["name"] - - resp, err := client.Get(ctx, resourceGroup, namespaceName) - if err != nil { - return fmt.Errorf("Bad: Get on notificationNamespacesClient: %s", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Notification Hub Namespace does not exist: %s", namespaceName) - } - - return nil - } -} - -func testCheckAzureRMNotificationHubNamespaceDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).NotificationHubs.NamespacesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_notification_hub_namespace" { - continue - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - 
namespaceName := rs.Primary.Attributes["name"] - resp, err := client.Get(ctx, resourceGroup, namespaceName) - - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Notification Hub Namespace still exists:%s", *resp.Name) - } - } - - return nil -} - -func testAccAzureRMNotificationHubNamespace_free(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_notification_hub_namespace" "test" { - name = "acctestnhn-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - namespace_type = "NotificationHub" - - sku_name = "Free" - - tags = { - env = "Test" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMNotificationHubNamespace_withoutTag(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_notification_hub_namespace" "test" { - name = "acctestnhn-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - namespace_type = "NotificationHub" - - sku_name = "Free" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMNotificationHubNamespace_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMNotificationHubNamespace_free(data) - return fmt.Sprintf(` -%s - -resource "azurerm_notification_hub_namespace" "import" { - name = azurerm_notification_hub_namespace.test.name - resource_group_name = azurerm_notification_hub_namespace.test.resource_group_name - location = azurerm_notification_hub_namespace.test.location - namespace_type = azurerm_notification_hub_namespace.test.namespace_type - - sku_name = "Free" -} -`, template) -} diff --git a/azurerm/internal/services/notificationhub/tests/notification_hub_resource_test.go b/azurerm/internal/services/notificationhub/tests/notification_hub_resource_test.go deleted file mode 100644 index 084056973972..000000000000 --- a/azurerm/internal/services/notificationhub/tests/notification_hub_resource_test.go +++ /dev/null @@ -1,215 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMNotificationHub_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_notification_hub", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNotificationHubDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMNotificationHub_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNotificationHubExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "apns_credential.#", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "gcm_credential.#", "0"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMNotificationHub_updateTag(t *testing.T) { - data := 
acceptance.BuildTestData(t, "azurerm_notification_hub", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNotificationHubDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMNotificationHub_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNotificationHubExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMNotificationHub_withoutTag(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNotificationHubExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMNotificationHub_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNotificationHubExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMNotificationHub_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_notification_hub", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMNotificationHubDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMNotificationHub_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMNotificationHubExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "apns_credential.#", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "gcm_credential.#", "0"), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMNotificationHub_requiresImport), - }, - }) -} - -func testCheckAzureRMNotificationHubExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).NotificationHubs.HubsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("not found: %s", resourceName) - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - namespaceName := rs.Primary.Attributes["namespace_name"] - hubName := rs.Primary.Attributes["name"] - - resp, err := client.Get(ctx, resourceGroup, namespaceName, hubName) - if err != nil { - return fmt.Errorf("Bad: Get on notificationHubsClient: %s", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Notification Hub does not exist: %s", hubName) - } - - return nil - } -} - -func testCheckAzureRMNotificationHubDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).NotificationHubs.HubsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_notification_hub" { - continue - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - namespaceName := rs.Primary.Attributes["namespace_name"] - hubName := rs.Primary.Attributes["name"] - - resp, err := client.Get(ctx, resourceGroup, namespaceName, hubName) - - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Notification Hub still exists:%s", *resp.Name) - } - } - - return nil -} - -func testAccAzureRMNotificationHub_basic(data 
acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRGpol-%d" - location = "%s" -} - -resource "azurerm_notification_hub_namespace" "test" { - name = "acctestnhn-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - namespace_type = "NotificationHub" - sku_name = "Free" -} - -resource "azurerm_notification_hub" "test" { - name = "acctestnh-%d" - namespace_name = azurerm_notification_hub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - - tags = { - env = "Test" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMNotificationHub_withoutTag(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRGpol-%d" - location = "%s" -} - -resource "azurerm_notification_hub_namespace" "test" { - name = "acctestnhn-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - namespace_type = "NotificationHub" - sku_name = "Free" -} - -resource "azurerm_notification_hub" "test" { - name = "acctestnh-%d" - namespace_name = azurerm_notification_hub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMNotificationHub_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMNotificationHub_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_notification_hub" "import" { - name = azurerm_notification_hub.test.name - namespace_name = azurerm_notification_hub.test.namespace_name - resource_group_name = azurerm_notification_hub.test.resource_group_name - location = azurerm_notification_hub.test.location -} -`, template) -} diff --git a/azurerm/internal/services/notificationhub/validate/namespace_id.go b/azurerm/internal/services/notificationhub/validate/namespace_id.go new file mode 100644 index 000000000000..c8a2d27874e5 --- /dev/null +++ b/azurerm/internal/services/notificationhub/validate/namespace_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/notificationhub/parse" +) + +func NamespaceID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.NamespaceID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/notificationhub/validate/namespace_id_test.go b/azurerm/internal/services/notificationhub/validate/namespace_id_test.go new file mode 100644 index 000000000000..6f7b4c25b8ad --- /dev/null +++ b/azurerm/internal/services/notificationhub/validate/namespace_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestNamespaceID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: 
false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/namespace1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NOTIFICATIONHUBS/NAMESPACES/NAMESPACE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := NamespaceID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/notificationhub/validate/notification_hub_authorization_rule_id.go b/azurerm/internal/services/notificationhub/validate/notification_hub_authorization_rule_id.go new file mode 100644 index 000000000000..0d483fda35b8 --- /dev/null +++ b/azurerm/internal/services/notificationhub/validate/notification_hub_authorization_rule_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/notificationhub/parse" +) + +func NotificationHubAuthorizationRuleID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.NotificationHubAuthorizationRuleID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/notificationhub/validate/notification_hub_authorization_rule_id_test.go b/azurerm/internal/services/notificationhub/validate/notification_hub_authorization_rule_id_test.go new file mode 100644 index 000000000000..e31255d8fe03 --- /dev/null +++ b/azurerm/internal/services/notificationhub/validate/notification_hub_authorization_rule_id_test.go @@ -0,0 +1,100 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestNotificationHubAuthorizationRuleID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing 
NamespaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/", + Valid: false, + }, + + { + // missing value for NamespaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/", + Valid: false, + }, + + { + // missing NotificationHubName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/namespace1/", + Valid: false, + }, + + { + // missing value for NotificationHubName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/namespace1/notificationHubs/", + Valid: false, + }, + + { + // missing AuthorizationRuleName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/namespace1/notificationHubs/hub1/", + Valid: false, + }, + + { + // missing value for AuthorizationRuleName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/namespace1/notificationHubs/hub1/AuthorizationRules/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/namespace1/notificationHubs/hub1/AuthorizationRules/authorizationRule1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NOTIFICATIONHUBS/NAMESPACES/NAMESPACE1/NOTIFICATIONHUBS/HUB1/AUTHORIZATIONRULES/AUTHORIZATIONRULE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := NotificationHubAuthorizationRuleID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/notificationhub/validate/notification_hub_id.go b/azurerm/internal/services/notificationhub/validate/notification_hub_id.go new file mode 100644 index 000000000000..dbd0358ec2c3 --- /dev/null +++ b/azurerm/internal/services/notificationhub/validate/notification_hub_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/notificationhub/parse" +) + +func NotificationHubID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.NotificationHubID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/notificationhub/validate/notification_hub_id_test.go b/azurerm/internal/services/notificationhub/validate/notification_hub_id_test.go new file mode 100644 index 000000000000..a4f622a170f5 --- /dev/null +++ b/azurerm/internal/services/notificationhub/validate/notification_hub_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestNotificationHubID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty 
+ Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing NamespaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/", + Valid: false, + }, + + { + // missing value for NamespaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/namespace1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/namespace1/notificationHubs/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.NotificationHubs/namespaces/namespace1/notificationHubs/hub1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NOTIFICATIONHUBS/NAMESPACES/NAMESPACE1/NOTIFICATIONHUBS/HUB1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := NotificationHubID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/policy/client/client.go b/azurerm/internal/services/policy/client/client.go index 97ca75b8ea18..8ef38c632fe1 100644 --- a/azurerm/internal/services/policy/client/client.go +++ b/azurerm/internal/services/policy/client/client.go @@ -1,7 +1,7 @@ package client import ( - "github.com/Azure/azure-sdk-for-go/services/policyinsights/mgmt/2019-10-01/policyinsights" + "github.com/Azure/azure-sdk-for-go/services/preview/policyinsights/mgmt/2019-10-01-preview/policyinsights" "github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2019-09-01/policy" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/common" ) diff --git a/azurerm/internal/services/policy/parse/scope.go b/azurerm/internal/services/policy/parse/scope.go index fe2ac4d4411e..5835309f61a9 100644 --- a/azurerm/internal/services/policy/parse/scope.go +++ b/azurerm/internal/services/policy/parse/scope.go @@ -46,6 +46,7 @@ type ScopeAtManagementGroup struct { func (id ScopeAtManagementGroup) ScopeId() string { return id.scopeId } + func PolicyScopeID(input string) (PolicyScopeId, error) { if input == "" { return nil, fmt.Errorf("unable to parse Remediation Scope ID: ID is empty") diff --git a/azurerm/internal/services/policy/policy.go b/azurerm/internal/services/policy/policy.go index 40fcc0ced200..e65bb0f59043 100644 --- a/azurerm/internal/services/policy/policy.go +++ b/azurerm/internal/services/policy/policy.go @@ -7,6 +7,7 @@ import ( "fmt" "github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2019-09-01/policy" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) func getPolicyDefinitionByDisplayName(ctx context.Context, client *policy.DefinitionsClient, displayName, managementGroupName string) (policy.Definition, error) { @@ -47,9 +48,12 @@ func getPolicyDefinitionByDisplayName(ctx context.Context, client *policy.Defini return results[0], nil } -func getPolicyDefinitionByName(ctx context.Context, client *policy.DefinitionsClient, name string, managementGroupName string) (res policy.Definition, err error) { +func getPolicyDefinitionByName(ctx context.Context, client *policy.DefinitionsClient, name, managementGroupName string) (res policy.Definition, err error) { if managementGroupName == "" { res, err = client.Get(ctx, name) + if utils.ResponseWasNotFound(res.Response) { + res, err = client.GetBuiltIn(ctx, name) + } } else { res, err = client.GetAtManagementGroup(ctx, name, managementGroupName) } @@ -57,9 +61,12 @@ func getPolicyDefinitionByName(ctx context.Context, client *policy.DefinitionsCl return res, err } -func getPolicySetDefinitionByName(ctx context.Context, client *policy.SetDefinitionsClient, name string, managementGroupID string) (res policy.SetDefinition, err error) { +func getPolicySetDefinitionByName(ctx context.Context, client *policy.SetDefinitionsClient, name, managementGroupID string) (res policy.SetDefinition, err error) { if managementGroupID == "" { res, err = client.Get(ctx, name) + if utils.ResponseWasNotFound(res.Response) { + res, err = client.GetBuiltIn(ctx, name) + } } else { res, err = client.GetAtManagementGroup(ctx, name, managementGroupID) } @@ -113,7 +120,7 @@ func expandParameterDefinitionsValueFromString(jsonString string) (map[string]*p return result, err } -func flattenParameterDefintionsValueToString(input map[string]*policy.ParameterDefinitionsValue) (string, error) { +func flattenParameterDefinitionsValueToString(input map[string]*policy.ParameterDefinitionsValue) (string, error) { if len(input) == 0 { return "", nil } diff --git a/azurerm/internal/services/policy/policy_assignment_resource.go b/azurerm/internal/services/policy/policy_assignment_resource.go index ef0d5d13ec87..ae9fa8de589d 100644 --- a/azurerm/internal/services/policy/policy_assignment_resource.go +++ b/azurerm/internal/services/policy/policy_assignment_resource.go @@ -2,14 +2,13 @@ package policy import ( "context" + "encoding/json" "fmt" "log" "reflect" "strconv" "time" - "encoding/json" - "github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2019-09-01/policy" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" diff --git a/azurerm/internal/services/policy/policy_assignment_resource_test.go b/azurerm/internal/services/policy/policy_assignment_resource_test.go new file mode 100644 index 000000000000..20c7d1ce45c3 --- /dev/null +++ b/azurerm/internal/services/policy/policy_assignment_resource_test.go @@ -0,0 +1,556 @@ +package policy_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type PolicyAssignmentResource struct{} + +func 
TestAccAzureRMPolicyAssignment_basicCustom(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_policy_assignment", "test") + r := PolicyAssignmentResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicCustom(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMPolicyAssignment_basicBuiltin(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_policy_assignment", "test") + r := PolicyAssignmentResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicBuiltin(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMPolicyAssignment_basicBuiltInSet(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_policy_assignment", "test") + r := PolicyAssignmentResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicBuiltInSet(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMPolicyAssignment_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_policy_assignment", "test") + r := PolicyAssignmentResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicCustom(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccAzureRMPolicyAssignment_deployIfNotExists_policy(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_policy_assignment", "test") + r := PolicyAssignmentResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.deployIfNotExistsPolicy(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMPolicyAssignment_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_policy_assignment", "test") + r := PolicyAssignmentResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMPolicyAssignment_not_scopes(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_policy_assignment", "test") + r := PolicyAssignmentResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.notScopes(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccAzureRMPolicyAssignment_enforcement_mode(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_policy_assignment", "test") + r := PolicyAssignmentResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.enforcementMode(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (r PolicyAssignmentResource) Exists(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error) { + assignmentsClient := client.Policy.AssignmentsClient + resp, err := assignmentsClient.GetByID(ctx, state.ID) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return utils.Bool(false), nil + } + return nil, 
fmt.Errorf("retrieving Policy Assignment %q: %+v", state.ID, err) + } + return utils.Bool(resp.AssignmentProperties != nil), nil +} + +func (r PolicyAssignmentResource) basicCustom(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_policy_definition" "test" { + name = "acctestpol-%[1]d" + policy_type = "Custom" + mode = "All" + display_name = "acctestpol-%[1]d" + + policy_rule = < 260 { - errors = append(errors, fmt.Errorf("%s cannot be empty and must not exceed '260' characters", k)) - return - } - const invalidCharacters = `%^#/\&?` - if strings.ContainsAny(v, invalidCharacters) { - errors = append(errors, fmt.Errorf("%s cannot contain the following characters: %s", k, invalidCharacters)) - } - // Despite the service accepts remediation name with capitalized characters, but in the response, - // all upper case characters will be converted to lower cases. Therefore we forbid user to use upper case letters here - if v != strings.ToLower(v) { - errors = append(errors, fmt.Errorf("%s cannot contain upper case letters", k)) - } - - return warnings, errors -} - -func RemediationID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := parse.PolicyRemediationID(v); err != nil { - errors = append(errors, fmt.Errorf("cannot parse %q as a Policy Remediation ID: %+v", k, err)) - return - } - - return warnings, errors -} diff --git a/azurerm/internal/services/policy/validate/remediation_name.go b/azurerm/internal/services/policy/validate/remediation_name.go new file mode 100644 index 000000000000..e9fffbb98864 --- /dev/null +++ b/azurerm/internal/services/policy/validate/remediation_name.go @@ -0,0 +1,33 @@ +package validate + +import ( + "fmt" + "strings" +) + +func RemediationName(i interface{}, k string) (warnings []string, errors []error) { + v, ok := i.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) + return + } + + // The service returns error when name of remediation is too long + // error: The remediation name cannot be empty and must not exceed '260' characters. + // By my additional test, the name of remediation cannot contain the following characters: %^#/\&?. + if len(v) == 0 || len(v) > 260 { + errors = append(errors, fmt.Errorf("%s cannot be empty and must not exceed '260' characters", k)) + return + } + const invalidCharacters = `%^#/\&?` + if strings.ContainsAny(v, invalidCharacters) { + errors = append(errors, fmt.Errorf("%s cannot contain the following characters: %s", k, invalidCharacters)) + } + // Despite the service accepts remediation name with capitalized characters, but in the response, + // all upper case characters will be converted to lower cases. 
Therefore we forbid user to use upper case letters here + if v != strings.ToLower(v) { + errors = append(errors, fmt.Errorf("%s cannot contain upper case letters", k)) + } + + return warnings, errors +} diff --git a/azurerm/internal/services/policy/validate/remediation_test.go b/azurerm/internal/services/policy/validate/remediation_name_test.go similarity index 100% rename from azurerm/internal/services/policy/validate/remediation_test.go rename to azurerm/internal/services/policy/validate/remediation_name_test.go diff --git a/azurerm/internal/services/policy/validate/scope.go b/azurerm/internal/services/policy/validate/scope_id.go similarity index 100% rename from azurerm/internal/services/policy/validate/scope.go rename to azurerm/internal/services/policy/validate/scope_id.go diff --git a/azurerm/internal/services/policy/validate/set_definition.go b/azurerm/internal/services/policy/validate/set_definition_id.go similarity index 100% rename from azurerm/internal/services/policy/validate/set_definition.go rename to azurerm/internal/services/policy/validate/set_definition_id.go diff --git a/azurerm/internal/services/portal/dashboard_resource.go b/azurerm/internal/services/portal/dashboard_resource.go deleted file mode 100644 index 7d45d40d55af..000000000000 --- a/azurerm/internal/services/portal/dashboard_resource.go +++ /dev/null @@ -1,166 +0,0 @@ -package portal - -import ( - "encoding/json" - "fmt" - "regexp" - "time" - - "github.com/Azure/azure-sdk-for-go/services/preview/portal/mgmt/2019-01-01-preview/portal" - "github.com/hashicorp/go-azure-helpers/response" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmDashboard() *schema.Resource { - return &schema.Resource{ - Create: resourceArmDashboardCreateUpdate, - Read: resourceArmDashboardRead, - Update: resourceArmDashboardCreateUpdate, - Delete: resourceArmDashboardDelete, - - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validateDashboardName, - }, - "resource_group_name": azure.SchemaResourceGroupName(), - "location": azure.SchemaLocation(), - "tags": tags.Schema(), - "dashboard_properties": { - Type: schema.TypeString, - Optional: true, - Computed: true, - StateFunc: utils.NormalizeJson, - }, - }, - } -} - -func resourceArmDashboardCreateUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Portal.DashboardsClient - ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - - t := d.Get("tags").(map[string]interface{}) - name := d.Get("name").(string) - resourceGroup := d.Get("resource_group_name").(string) - location := azure.NormalizeLocation(d.Get("location").(string)) - dashboardProps := d.Get("dashboard_properties").(string) - 
- dashboard := portal.Dashboard{ - Location: &location, - Tags: tags.Expand(t), - } - - var dashboardProperties portal.DashboardProperties - - if err := json.Unmarshal([]byte(dashboardProps), &dashboardProperties); err != nil { - return fmt.Errorf("Error parsing JSON: %+v", err) - } - dashboard.DashboardProperties = &dashboardProperties - - _, err := client.CreateOrUpdate(ctx, resourceGroup, name, dashboard) - if err != nil { - return fmt.Errorf("Error creating/updating Dashboard %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - // get it back again to set the props - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - return fmt.Errorf("Error making Read request for Dashboard %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - d.SetId(*resp.ID) - - return resourceArmDashboardRead(d, meta) -} - -func resourceArmDashboardRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Portal.DashboardsClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, parseErr := azure.ParseAzureResourceID(d.Id()) - if parseErr != nil { - return parseErr - } - resourceGroup := id.ResourceGroup - name := id.Path["dashboards"] - - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - d.SetId("") - return nil - } - return fmt.Errorf("Error making Read request for Dashboard %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - d.Set("name", resp.Name) - d.Set("resource_group_name", resourceGroup) - if location := resp.Location; location != nil { - d.Set("location", azure.NormalizeLocation(*resp.Location)) - } - - props, jsonErr := json.Marshal(resp.DashboardProperties) - if jsonErr != nil { - return fmt.Errorf("Error parsing DashboardProperties JSON: %+v", jsonErr) - } - d.Set("dashboard_properties", string(props)) - - return tags.FlattenAndSet(d, resp.Tags) -} - -func resourceArmDashboardDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Portal.DashboardsClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, parseErr := azure.ParseAzureResourceID(d.Id()) - if parseErr != nil { - return parseErr - } - resourceGroup := id.ResourceGroup - name := id.Path["dashboards"] - - resp, err := client.Delete(ctx, resourceGroup, name) - if err != nil { - if !response.WasNotFound(resp.Response) { - return fmt.Errorf("Error retrieving Key Vault %q (Resource Group %q): %+v", name, resourceGroup, err) - } - } - - return nil -} - -func validateDashboardName(v interface{}, k string) (warnings []string, errors []error) { - value := v.(string) - - if len(value) > 64 { - errors = append(errors, fmt.Errorf("%q may not exceed 64 characters in length", k)) - } - - // only alpanumeric and hyphens - if matched := regexp.MustCompile(`^[-\w]+$`).Match([]byte(value)); !matched { - errors = append(errors, fmt.Errorf("%q may only contain alphanumeric and hyphen characters", k)) - } - - return warnings, errors -} diff --git a/azurerm/internal/services/portal/parse/dashboard.go b/azurerm/internal/services/portal/parse/dashboard.go new file mode 100644 index 000000000000..2514b709d55d --- /dev/null +++ b/azurerm/internal/services/portal/parse/dashboard.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type DashboardId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewDashboardID(subscriptionId, resourceGroup, name string) DashboardId { + return DashboardId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id DashboardId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Dashboard", segmentsStr) +} + +func (id DashboardId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Portal/dashboards/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// DashboardID parses a Dashboard ID into an DashboardId struct +func DashboardID(input string) (*DashboardId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := DashboardId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("dashboards"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/portal/parse/dashboard_test.go b/azurerm/internal/services/portal/parse/dashboard_test.go new file mode 100644 index 000000000000..1952580709ec --- /dev/null +++ b/azurerm/internal/services/portal/parse/dashboard_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = DashboardId{} + +func TestDashboardIDFormatter(t *testing.T) { + actual := NewDashboardID("12345678-1234-9876-4563-123456789012", "group1", "dashboard1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Portal/dashboards/dashboard1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestDashboardID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *DashboardId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Portal/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Portal/dashboards/", + Error: true, + }, + + { + // valid + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Portal/dashboards/dashboard1", + Expected: &DashboardId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "group1", + Name: "dashboard1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.PORTAL/DASHBOARDS/DASHBOARD1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := DashboardID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/portal/portal_dashboard_resource.go b/azurerm/internal/services/portal/portal_dashboard_resource.go new file mode 100644 index 000000000000..b76768916496 --- /dev/null +++ b/azurerm/internal/services/portal/portal_dashboard_resource.go @@ -0,0 +1,147 @@ +package portal + +import ( + "encoding/json" + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/preview/portal/mgmt/2019-01-01-preview/portal" + "github.com/hashicorp/go-azure-helpers/response" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/portal/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/portal/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceDashboard() *schema.Resource { + return &schema.Resource{ + Create: resourceDashboardCreateUpdate, + Read: resourceDashboardRead, + Update: resourceDashboardCreateUpdate, + Delete: resourceDashboardDelete, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.DashboardID(id) + return err + }), + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.DashboardName, + }, + "resource_group_name": azure.SchemaResourceGroupName(), + "location": azure.SchemaLocation(), + "tags": tags.Schema(), + "dashboard_properties": { + Type: schema.TypeString, + Optional: true, + Computed: true, + StateFunc: utils.NormalizeJson, + }, + }, + } +} + +func resourceDashboardCreateUpdate(d 
*schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Portal.DashboardsClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + t := d.Get("tags").(map[string]interface{}) + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + location := azure.NormalizeLocation(d.Get("location").(string)) + dashboardProps := d.Get("dashboard_properties").(string) + + // TODO: requires import support + + dashboard := portal.Dashboard{ + Location: &location, + Tags: tags.Expand(t), + } + + var dashboardProperties portal.DashboardProperties + + if err := json.Unmarshal([]byte(dashboardProps), &dashboardProperties); err != nil { + return fmt.Errorf("Error parsing JSON: %+v", err) + } + dashboard.DashboardProperties = &dashboardProperties + + if _, err := client.CreateOrUpdate(ctx, resourceGroup, name, dashboard); err != nil { + return fmt.Errorf("creating/updating Dashboard %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + d.SetId(parse.NewDashboardID(subscriptionId, resourceGroup, name).ID()) + return resourceDashboardRead(d, meta) +} + +func resourceDashboardRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Portal.DashboardsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DashboardID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[DEBUG] Dashboard %q was not found in Resource Group %q - removing from state", id.Name, id.ResourceGroup) + d.SetId("") + return nil + } + return fmt.Errorf("retrieving Dashboard %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + d.Set("name", id.Name) + d.Set("resource_group_name", id.ResourceGroup) + if location := resp.Location; location != nil { + d.Set("location", azure.NormalizeLocation(*resp.Location)) + } + + props, jsonErr := json.Marshal(resp.DashboardProperties) + if jsonErr != nil { + return fmt.Errorf("parsing JSON for Dashboard Properties: %+v", jsonErr) + } + d.Set("dashboard_properties", string(props)) + + return tags.FlattenAndSet(d, resp.Tags) +} + +func resourceDashboardDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Portal.DashboardsClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DashboardID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Delete(ctx, id.ResourceGroup, id.Name) + if err != nil { + if !response.WasNotFound(resp.Response) { + return fmt.Errorf("deleting Dashboard %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + } + + return nil +} diff --git a/azurerm/internal/services/portal/portal_dashboard_resource_test.go b/azurerm/internal/services/portal/portal_dashboard_resource_test.go new file mode 100644 index 000000000000..185cbb7787cd --- /dev/null +++ b/azurerm/internal/services/portal/portal_dashboard_resource_test.go @@ -0,0 +1,97 @@ +package portal_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/portal/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type PortalDashboardResource struct { +} + +func TestAccPortalDashboard_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_dashboard", "test") + r := PortalDashboardResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (PortalDashboardResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.DashboardID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Portal.DashboardsClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("retrieving %s: %v", id.String(), err) + } + + return utils.Bool(resp.DashboardProperties != nil), nil +} + +func (PortalDashboardResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_dashboard" "test" { + name = "my-test-dashboard" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + dashboard_properties = < 64 { + errors = append(errors, fmt.Errorf("%q may not exceed 64 characters in length", k)) + } + + // only alpanumeric and hyphens + if matched := regexp.MustCompile(`^[-\w]+$`).Match([]byte(value)); !matched { + errors = append(errors, fmt.Errorf("%q may only contain alphanumeric and hyphen characters", k)) + } + + return warnings, errors +} diff --git a/azurerm/internal/services/postgres/parse/azure_active_directory_administrator.go b/azurerm/internal/services/postgres/parse/azure_active_directory_administrator.go new file mode 100644 index 000000000000..e96bf9f540da --- /dev/null +++ b/azurerm/internal/services/postgres/parse/azure_active_directory_administrator.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type AzureActiveDirectoryAdministratorId struct { + SubscriptionId string + ResourceGroup string + ServerName string + AdministratorName string +} + +func NewAzureActiveDirectoryAdministratorID(subscriptionId, resourceGroup, serverName, administratorName string) AzureActiveDirectoryAdministratorId { + return AzureActiveDirectoryAdministratorId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ServerName: serverName, + AdministratorName: administratorName, + } +} + +func (id AzureActiveDirectoryAdministratorId) String() string { + segments := []string{ + fmt.Sprintf("Administrator Name %q", id.AdministratorName), + fmt.Sprintf("Server Name %q", id.ServerName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Azure Active Directory Administrator", segmentsStr) +} + +func (id AzureActiveDirectoryAdministratorId) ID() string { + fmtString := 
"/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DBforPostgreSQL/servers/%s/administrators/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ServerName, id.AdministratorName) +} + +// AzureActiveDirectoryAdministratorID parses a AzureActiveDirectoryAdministrator ID into an AzureActiveDirectoryAdministratorId struct +func AzureActiveDirectoryAdministratorID(input string) (*AzureActiveDirectoryAdministratorId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := AzureActiveDirectoryAdministratorId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ServerName, err = id.PopSegment("servers"); err != nil { + return nil, err + } + if resourceId.AdministratorName, err = id.PopSegment("administrators"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/postgres/parse/azure_active_directory_administrator_test.go b/azurerm/internal/services/postgres/parse/azure_active_directory_administrator_test.go new file mode 100644 index 000000000000..b87aa7d1424a --- /dev/null +++ b/azurerm/internal/services/postgres/parse/azure_active_directory_administrator_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = AzureActiveDirectoryAdministratorId{} + +func TestAzureActiveDirectoryAdministratorIDFormatter(t *testing.T) { + actual := NewAzureActiveDirectoryAdministratorID("12345678-1234-9876-4563-123456789012", "resGroup1", "server1", "activeDirectory").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/administrators/activeDirectory" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestAzureActiveDirectoryAdministratorID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *AzureActiveDirectoryAdministratorId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/", + Error: true, + }, + + { + // missing value for ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/", + Error: true, + }, + + { + // missing AdministratorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/", + 
Error: true, + }, + + { + // missing value for AdministratorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/administrators/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/administrators/activeDirectory", + Expected: &AzureActiveDirectoryAdministratorId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + ServerName: "server1", + AdministratorName: "activeDirectory", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DBFORPOSTGRESQL/SERVERS/SERVER1/ADMINISTRATORS/ACTIVEDIRECTORY", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := AzureActiveDirectoryAdministratorID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ServerName != v.Expected.ServerName { + t.Fatalf("Expected %q but got %q for ServerName", v.Expected.ServerName, actual.ServerName) + } + if actual.AdministratorName != v.Expected.AdministratorName { + t.Fatalf("Expected %q but got %q for AdministratorName", v.Expected.AdministratorName, actual.AdministratorName) + } + } +} diff --git a/azurerm/internal/services/postgres/parse/configuration.go b/azurerm/internal/services/postgres/parse/configuration.go new file mode 100644 index 000000000000..54ed4a6a1d65 --- /dev/null +++ b/azurerm/internal/services/postgres/parse/configuration.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ConfigurationId struct { + SubscriptionId string + ResourceGroup string + ServerName string + Name string +} + +func NewConfigurationID(subscriptionId, resourceGroup, serverName, name string) ConfigurationId { + return ConfigurationId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ServerName: serverName, + Name: name, + } +} + +func (id ConfigurationId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Server Name %q", id.ServerName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Configuration", segmentsStr) +} + +func (id ConfigurationId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DBforPostgreSQL/servers/%s/configurations/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ServerName, id.Name) +} + +// ConfigurationID parses a Configuration ID into an ConfigurationId struct +func ConfigurationID(input string) (*ConfigurationId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := 
ConfigurationId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ServerName, err = id.PopSegment("servers"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("configurations"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/postgres/parse/configuration_test.go b/azurerm/internal/services/postgres/parse/configuration_test.go new file mode 100644 index 000000000000..48b14a63c330 --- /dev/null +++ b/azurerm/internal/services/postgres/parse/configuration_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ConfigurationId{} + +func TestConfigurationIDFormatter(t *testing.T) { + actual := NewConfigurationID("12345678-1234-9876-4563-123456789012", "resGroup1", "server1", "configuration1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/configurations/configuration1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestConfigurationID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ConfigurationId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/", + Error: true, + }, + + { + // missing value for ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/configurations/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/configurations/configuration1", + Expected: &ConfigurationId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + ServerName: "server1", + Name: "configuration1", + }, + }, + + { + // upper-cased + Input: 
"/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DBFORPOSTGRESQL/SERVERS/SERVER1/CONFIGURATIONS/CONFIGURATION1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ConfigurationID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ServerName != v.Expected.ServerName { + t.Fatalf("Expected %q but got %q for ServerName", v.Expected.ServerName, actual.ServerName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/postgres/parse/database.go b/azurerm/internal/services/postgres/parse/database.go new file mode 100644 index 000000000000..21c6ab086908 --- /dev/null +++ b/azurerm/internal/services/postgres/parse/database.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type DatabaseId struct { + SubscriptionId string + ResourceGroup string + ServerName string + Name string +} + +func NewDatabaseID(subscriptionId, resourceGroup, serverName, name string) DatabaseId { + return DatabaseId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ServerName: serverName, + Name: name, + } +} + +func (id DatabaseId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Server Name %q", id.ServerName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Database", segmentsStr) +} + +func (id DatabaseId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DBforPostgreSQL/servers/%s/databases/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ServerName, id.Name) +} + +// DatabaseID parses a Database ID into an DatabaseId struct +func DatabaseID(input string) (*DatabaseId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := DatabaseId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ServerName, err = id.PopSegment("servers"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("databases"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/postgres/parse/database_test.go b/azurerm/internal/services/postgres/parse/database_test.go new file mode 100644 index 000000000000..57ec3330770d --- /dev/null +++ 
b/azurerm/internal/services/postgres/parse/database_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = DatabaseId{} + +func TestDatabaseIDFormatter(t *testing.T) { + actual := NewDatabaseID("12345678-1234-9876-4563-123456789012", "resGroup1", "server1", "database1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/databases/database1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestDatabaseID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *DatabaseId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/", + Error: true, + }, + + { + // missing value for ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/databases/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/databases/database1", + Expected: &DatabaseId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + ServerName: "server1", + Name: "database1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DBFORPOSTGRESQL/SERVERS/SERVER1/DATABASES/DATABASE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := DatabaseID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ServerName != v.Expected.ServerName { + t.Fatalf("Expected %q but got %q for ServerName", v.Expected.ServerName, actual.ServerName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } 
+} diff --git a/azurerm/internal/services/postgres/parse/firewall_rule.go b/azurerm/internal/services/postgres/parse/firewall_rule.go new file mode 100644 index 000000000000..2a874b065361 --- /dev/null +++ b/azurerm/internal/services/postgres/parse/firewall_rule.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type FirewallRuleId struct { + SubscriptionId string + ResourceGroup string + ServerName string + Name string +} + +func NewFirewallRuleID(subscriptionId, resourceGroup, serverName, name string) FirewallRuleId { + return FirewallRuleId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ServerName: serverName, + Name: name, + } +} + +func (id FirewallRuleId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Server Name %q", id.ServerName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Firewall Rule", segmentsStr) +} + +func (id FirewallRuleId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DBforPostgreSQL/servers/%s/firewallRules/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ServerName, id.Name) +} + +// FirewallRuleID parses a FirewallRule ID into an FirewallRuleId struct +func FirewallRuleID(input string) (*FirewallRuleId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := FirewallRuleId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ServerName, err = id.PopSegment("servers"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("firewallRules"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/postgres/parse/firewall_rule_test.go b/azurerm/internal/services/postgres/parse/firewall_rule_test.go new file mode 100644 index 000000000000..40479a06f926 --- /dev/null +++ b/azurerm/internal/services/postgres/parse/firewall_rule_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = FirewallRuleId{} + +func TestFirewallRuleIDFormatter(t *testing.T) { + actual := NewFirewallRuleID("12345678-1234-9876-4563-123456789012", "resGroup1", "server1", "firewallRule1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/firewallRules/firewallRule1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestFirewallRuleID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *FirewallRuleId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, 
+ }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/", + Error: true, + }, + + { + // missing value for ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/firewallRules/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/firewallRules/firewallRule1", + Expected: &FirewallRuleId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + ServerName: "server1", + Name: "firewallRule1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DBFORPOSTGRESQL/SERVERS/SERVER1/FIREWALLRULES/FIREWALLRULE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := FirewallRuleID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ServerName != v.Expected.ServerName { + t.Fatalf("Expected %q but got %q for ServerName", v.Expected.ServerName, actual.ServerName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/postgres/parse/postgresql_server.go b/azurerm/internal/services/postgres/parse/postgresql_server.go deleted file mode 100644 index 96a48457abc5..000000000000 --- a/azurerm/internal/services/postgres/parse/postgresql_server.go +++ /dev/null @@ -1,33 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type PostgreSQLServerId struct { - ResourceGroup string - Name string -} - -func PostgreSQLServerID(input string) (*PostgreSQLServerId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("unable to parse PostgreSQL Server ID %q: %+v", input, err) - } - - server := PostgreSQLServerId{ - ResourceGroup: id.ResourceGroup, - } - - if server.Name, err = id.PopSegment("servers"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return 
nil, err - } - - return &server, nil -} diff --git a/azurerm/internal/services/postgres/parse/postgresql_server_key.go b/azurerm/internal/services/postgres/parse/postgresql_server_key.go deleted file mode 100644 index d0590a6fdf33..000000000000 --- a/azurerm/internal/services/postgres/parse/postgresql_server_key.go +++ /dev/null @@ -1,38 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type PostgreSQLServerKeyId struct { - Name string - ServerName string - ResourceGroup string -} - -func PostgreSQLServerKeyID(input string) (*PostgreSQLServerKeyId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("unable to parse Postgres Server Key ID %q: %+v", input, err) - } - - server := PostgreSQLServerKeyId{ - ResourceGroup: id.ResourceGroup, - } - - if server.ServerName, err = id.PopSegment("servers"); err != nil { - return nil, err - } - - if server.Name, err = id.PopSegment("keys"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &server, nil -} diff --git a/azurerm/internal/services/postgres/parse/postgresql_server_key_test.go b/azurerm/internal/services/postgres/parse/postgresql_server_key_test.go deleted file mode 100644 index 7e4bed78b251..000000000000 --- a/azurerm/internal/services/postgres/parse/postgresql_server_key_test.go +++ /dev/null @@ -1,86 +0,0 @@ -package parse - -import "testing" - -func TestPostgreSQLServerKeyID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *PostgreSQLServerKeyId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Servers Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/", - Expected: nil, - }, - { - Name: "Postgres Server ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/Server1/", - Expected: nil, - }, - { - Name: "Missing Key Name", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/Server1/keys/", - Expected: nil, - }, - { - Name: "PostgreSQL Server Key ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/Server1/keys/key1", - Expected: &PostgreSQLServerKeyId{ - Name: "key1", - ServerName: "Server1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/Servers/", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := PostgreSQLServerKeyID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", 
v.Expected.Name, actual.Name) - } - - if actual.ServerName != v.Expected.ServerName { - t.Fatalf("Expected %q but got %q for Name", v.Expected.ServerName, actual.ServerName) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/postgres/parse/postgresql_server_test.go b/azurerm/internal/services/postgres/parse/postgresql_server_test.go deleted file mode 100644 index 61bb7e234f54..000000000000 --- a/azurerm/internal/services/postgres/parse/postgresql_server_test.go +++ /dev/null @@ -1,73 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestAnalysisServicesServerId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *PostgreSQLServerId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing Servers Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/", - Expected: nil, - }, - { - Name: "Postgres Server ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/Server1", - Expected: &PostgreSQLServerId{ - Name: "Server1", - ResourceGroup: "resGroup1", - }, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/Servers/", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := PostgreSQLServerID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/postgres/parse/server.go b/azurerm/internal/services/postgres/parse/server.go new file mode 100644 index 000000000000..601f456835e4 --- /dev/null +++ b/azurerm/internal/services/postgres/parse/server.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ServerId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewServerID(subscriptionId, resourceGroup, name string) ServerId { + return ServerId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id ServerId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Server", segmentsStr) +} + +func (id ServerId) ID() string { + 
fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DBforPostgreSQL/servers/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// ServerID parses a Server ID into an ServerId struct +func ServerID(input string) (*ServerId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ServerId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("servers"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/postgres/parse/server_key.go b/azurerm/internal/services/postgres/parse/server_key.go new file mode 100644 index 000000000000..3a53da94e930 --- /dev/null +++ b/azurerm/internal/services/postgres/parse/server_key.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ServerKeyId struct { + SubscriptionId string + ResourceGroup string + ServerName string + KeyName string +} + +func NewServerKeyID(subscriptionId, resourceGroup, serverName, keyName string) ServerKeyId { + return ServerKeyId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ServerName: serverName, + KeyName: keyName, + } +} + +func (id ServerKeyId) String() string { + segments := []string{ + fmt.Sprintf("Key Name %q", id.KeyName), + fmt.Sprintf("Server Name %q", id.ServerName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Server Key", segmentsStr) +} + +func (id ServerKeyId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DBforPostgreSQL/servers/%s/keys/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ServerName, id.KeyName) +} + +// ServerKeyID parses a ServerKey ID into an ServerKeyId struct +func ServerKeyID(input string) (*ServerKeyId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ServerKeyId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ServerName, err = id.PopSegment("servers"); err != nil { + return nil, err + } + if resourceId.KeyName, err = id.PopSegment("keys"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/postgres/parse/server_key_test.go b/azurerm/internal/services/postgres/parse/server_key_test.go new file mode 100644 index 000000000000..d616fc513d59 --- /dev/null +++ b/azurerm/internal/services/postgres/parse/server_key_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual 
changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ServerKeyId{} + +func TestServerKeyIDFormatter(t *testing.T) { + actual := NewServerKeyID("12345678-1234-9876-4563-123456789012", "resGroup1", "server1", "key1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/keys/key1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestServerKeyID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ServerKeyId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/", + Error: true, + }, + + { + // missing value for ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/", + Error: true, + }, + + { + // missing KeyName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/", + Error: true, + }, + + { + // missing value for KeyName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/keys/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/keys/key1", + Expected: &ServerKeyId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + ServerName: "server1", + KeyName: "key1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DBFORPOSTGRESQL/SERVERS/SERVER1/KEYS/KEY1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ServerKeyID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ServerName != v.Expected.ServerName { + t.Fatalf("Expected %q but got %q for ServerName", v.Expected.ServerName, actual.ServerName) + } + if actual.KeyName != v.Expected.KeyName { + t.Fatalf("Expected %q but got %q for KeyName", v.Expected.KeyName, actual.KeyName) + } + } +} diff --git a/azurerm/internal/services/postgres/parse/server_test.go b/azurerm/internal/services/postgres/parse/server_test.go new file mode 100644 index 
000000000000..b6bbbde3c961 --- /dev/null +++ b/azurerm/internal/services/postgres/parse/server_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ServerId{} + +func TestServerIDFormatter(t *testing.T) { + actual := NewServerID("12345678-1234-9876-4563-123456789012", "resGroup1", "server1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestServerID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ServerId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1", + Expected: &ServerId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "server1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DBFORPOSTGRESQL/SERVERS/SERVER1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ServerID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/postgres/parse/virtual_network_rule.go b/azurerm/internal/services/postgres/parse/virtual_network_rule.go new file mode 100644 index 000000000000..bdc2879625a0 --- /dev/null +++ b/azurerm/internal/services/postgres/parse/virtual_network_rule.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type VirtualNetworkRuleId struct { + SubscriptionId string + ResourceGroup string + 
ServerName string + Name string +} + +func NewVirtualNetworkRuleID(subscriptionId, resourceGroup, serverName, name string) VirtualNetworkRuleId { + return VirtualNetworkRuleId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + ServerName: serverName, + Name: name, + } +} + +func (id VirtualNetworkRuleId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Server Name %q", id.ServerName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Virtual Network Rule", segmentsStr) +} + +func (id VirtualNetworkRuleId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DBforPostgreSQL/servers/%s/virtualNetworkRules/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ServerName, id.Name) +} + +// VirtualNetworkRuleID parses a VirtualNetworkRule ID into an VirtualNetworkRuleId struct +func VirtualNetworkRuleID(input string) (*VirtualNetworkRuleId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := VirtualNetworkRuleId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.ServerName, err = id.PopSegment("servers"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("virtualNetworkRules"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/postgres/parse/virtual_network_rule_test.go b/azurerm/internal/services/postgres/parse/virtual_network_rule_test.go new file mode 100644 index 000000000000..7ec10ec36dbf --- /dev/null +++ b/azurerm/internal/services/postgres/parse/virtual_network_rule_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = VirtualNetworkRuleId{} + +func TestVirtualNetworkRuleIDFormatter(t *testing.T) { + actual := NewVirtualNetworkRuleID("12345678-1234-9876-4563-123456789012", "resGroup1", "server1", "virtualNetworkRule1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/virtualNetworkRules/virtualNetworkRule1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestVirtualNetworkRuleID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *VirtualNetworkRuleId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing ServerName + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/", + Error: true, + }, + + { + // missing value for ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/virtualNetworkRules/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/virtualNetworkRules/virtualNetworkRule1", + Expected: &VirtualNetworkRuleId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + ServerName: "server1", + Name: "virtualNetworkRule1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DBFORPOSTGRESQL/SERVERS/SERVER1/VIRTUALNETWORKRULES/VIRTUALNETWORKRULE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := VirtualNetworkRuleID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.ServerName != v.Expected.ServerName { + t.Fatalf("Expected %q but got %q for ServerName", v.Expected.ServerName, actual.ServerName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/postgres/postgresql_aad_administrator_resource.go b/azurerm/internal/services/postgres/postgresql_aad_administrator_resource.go index 39633f2ca409..c51c02ac5e7a 100644 --- a/azurerm/internal/services/postgres/postgresql_aad_administrator_resource.go +++ b/azurerm/internal/services/postgres/postgresql_aad_administrator_resource.go @@ -12,19 +12,22 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/postgres/parse" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmPostgreSQLAdministrator() *schema.Resource { +func resourcePostgreSQLAdministrator() *schema.Resource { return &schema.Resource{ - Create: resourceArmPostgreSQLAdministratorCreateUpdate, - Read: resourceArmPostgreSQLAdministratorRead, - Update: 
resourceArmPostgreSQLAdministratorCreateUpdate, - Delete: resourceArmPostgreSQLAdministratorDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, + Create: resourcePostgreSQLAdministratorCreateUpdate, + Read: resourcePostgreSQLAdministratorRead, + Update: resourcePostgreSQLAdministratorCreateUpdate, + Delete: resourcePostgreSQLAdministratorDelete, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.AzureActiveDirectoryAdministratorID(id) + return err + }), Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -62,7 +65,7 @@ func resourceArmPostgreSQLAdministrator() *schema.Resource { } } -func resourceArmPostgreSQLAdministratorCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourcePostgreSQLAdministratorCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Postgres.ServerAdministratorsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -114,20 +117,17 @@ func resourceArmPostgreSQLAdministratorCreateUpdate(d *schema.ResourceData, meta return nil } -func resourceArmPostgreSQLAdministratorRead(d *schema.ResourceData, meta interface{}) error { +func resourcePostgreSQLAdministratorRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Postgres.ServerAdministratorsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.AzureActiveDirectoryAdministratorID(d.Id()) if err != nil { return err } - resourceGroup := id.ResourceGroup - serverName := id.Path["servers"] - - resp, err := client.Get(ctx, resourceGroup, serverName) + resp, err := client.Get(ctx, id.ResourceGroup, id.ServerName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { log.Printf("[INFO] Error reading PostgreSQL AD administrator %q - removing from state", d.Id()) @@ -138,31 +138,35 @@ func resourceArmPostgreSQLAdministratorRead(d *schema.ResourceData, meta interfa return fmt.Errorf("Error reading PostgreSQL AD administrator: %+v", err) } - d.Set("resource_group_name", resourceGroup) - d.Set("server_name", serverName) - d.Set("login", resp.Login) - d.Set("object_id", resp.Sid.String()) - d.Set("tenant_id", resp.TenantID.String()) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("server_name", id.ServerName) + + if props := resp.ServerAdministratorProperties; props != nil { + d.Set("login", props.Login) + d.Set("object_id", props.Sid.String()) + d.Set("tenant_id", props.TenantID.String()) + } return nil } -func resourceArmPostgreSQLAdministratorDelete(d *schema.ResourceData, meta interface{}) error { +func resourcePostgreSQLAdministratorDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Postgres.ServerAdministratorsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.AzureActiveDirectoryAdministratorID(d.Id()) if err != nil { return err } - resourceGroup := id.ResourceGroup - serverName := id.Path["servers"] - - _, err = client.Delete(ctx, resourceGroup, serverName) + future, err := client.Delete(ctx, id.ResourceGroup, id.ServerName) if err != nil { - return fmt.Errorf("Error deleting PostgreSQL AD Administrator: %+v", err) + return fmt.Errorf("deleting AD Administrator (PostgreSQL Server %q / Resource Group 
%q): %+v", id.ServerName, id.ResourceGroup, err) + } + + if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for deletion of AD Administrator (PostgreSQL Server %q / Resource Group %q): %+v", id.ServerName, id.ResourceGroup, err) } return nil diff --git a/azurerm/internal/services/postgres/postgresql_aad_administrator_resource_test.go b/azurerm/internal/services/postgres/postgresql_aad_administrator_resource_test.go new file mode 100644 index 000000000000..b0a49533d1d2 --- /dev/null +++ b/azurerm/internal/services/postgres/postgresql_aad_administrator_resource_test.go @@ -0,0 +1,211 @@ +package postgres_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/postgres/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type PostgreSqlAdministratorResource struct { +} + +func TestAccPostgreSqlAdministrator_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_active_directory_administrator", "test") + r := PostgreSqlAdministratorResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("login").HasValue("sqladmin"), + ), + }, + data.ImportStep(), + { + Config: r.withUpdates(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("login").HasValue("sqladmin2"), + ), + }, + }) +} + +func TestAccPostgreSqlAdministrator_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_active_directory_administrator", "test") + r := PostgreSqlAdministratorResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("login").HasValue("sqladmin"), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_postgresql_active_directory_administrator"), + }, + }) +} + +func TestAccPostgreSqlAdministrator_disappears(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_active_directory_administrator", "test") + r := PostgreSqlAdministratorResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + testCheckAzurePostgreSqlAdministratorDisappears(data.ResourceName), + ), + ExpectNonEmptyPlan: true, + }, + }) +} + +func (t PostgreSqlAdministratorResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.AzureActiveDirectoryAdministratorID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Postgres.ServerAdministratorsClient.Get(ctx, id.ResourceGroup, id.ServerName) + if err != nil { + return nil, fmt.Errorf("reading Postgresql AAD Admnistrator (%s): 
%+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func testCheckAzurePostgreSqlAdministratorDisappears(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).Postgres.ServerAdministratorsClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + + resourceGroup := rs.Primary.Attributes["resource_group_name"] + serverName := rs.Primary.Attributes["server_name"] + + if _, err := client.Delete(ctx, resourceGroup, serverName); err != nil { + return fmt.Errorf("Bad: Delete on postgresAdministratorClient: %+v", err) + } + + return nil + } +} + +func (PostgreSqlAdministratorResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-psql-%d" + location = "%s" +} + +resource "azurerm_postgresql_server" "test" { + name = "acctest-psql-server-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + version = "9.6" + ssl_enforcement_enabled = true +} + +resource "azurerm_postgresql_active_directory_administrator" "test" { + server_name = azurerm_postgresql_server.test.name + resource_group_name = azurerm_resource_group.test.name + login = "sqladmin" + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.client_id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r PostgreSqlAdministratorResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_postgresql_active_directory_administrator" "import" { + server_name = azurerm_postgresql_active_directory_administrator.test.server_name + resource_group_name = azurerm_postgresql_active_directory_administrator.test.resource_group_name + login = azurerm_postgresql_active_directory_administrator.test.login + tenant_id = azurerm_postgresql_active_directory_administrator.test.tenant_id + object_id = azurerm_postgresql_active_directory_administrator.test.object_id +} +`, r.basic(data)) +} + +func (PostgreSqlAdministratorResource) withUpdates(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" { +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-psql-%d" + location = "%s" +} + +resource "azurerm_postgresql_server" "test" { + name = "acctest-psql-server-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" 
+ version = "9.6" + ssl_enforcement_enabled = true +} + +resource "azurerm_postgresql_active_directory_administrator" "test" { + server_name = azurerm_postgresql_server.test.name + resource_group_name = azurerm_resource_group.test.name + login = "sqladmin2" + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.client_id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/postgres/postgresql_configuration_resource.go b/azurerm/internal/services/postgres/postgresql_configuration_resource.go index 720fdc4cd7c9..ca6b0e9a1ac3 100644 --- a/azurerm/internal/services/postgres/postgresql_configuration_resource.go +++ b/azurerm/internal/services/postgres/postgresql_configuration_resource.go @@ -10,19 +10,22 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/postgres/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/postgres/validate" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmPostgreSQLConfiguration() *schema.Resource { +func resourcePostgreSQLConfiguration() *schema.Resource { return &schema.Resource{ - Create: resourceArmPostgreSQLConfigurationCreateUpdate, - Read: resourceArmPostgreSQLConfigurationRead, - Delete: resourceArmPostgreSQLConfigurationDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, + Create: resourcePostgreSQLConfigurationCreateUpdate, + Read: resourcePostgreSQLConfigurationRead, + Delete: resourcePostgreSQLConfigurationDelete, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.ConfigurationID(id) + return err + }), Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -44,7 +47,7 @@ func resourceArmPostgreSQLConfiguration() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.PostgreSQLServerName, + ValidateFunc: validate.ServerName, }, "value": { @@ -56,7 +59,7 @@ func resourceArmPostgreSQLConfiguration() *schema.Resource { } } -func resourceArmPostgreSQLConfigurationCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourcePostgreSQLConfigurationCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Postgres.ConfigurationsClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -96,58 +99,55 @@ func resourceArmPostgreSQLConfigurationCreateUpdate(d *schema.ResourceData, meta d.SetId(*read.ID) - return resourceArmPostgreSQLConfigurationRead(d, meta) + return resourcePostgreSQLConfigurationRead(d, meta) } -func resourceArmPostgreSQLConfigurationRead(d *schema.ResourceData, meta interface{}) error { +func resourcePostgreSQLConfigurationRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Postgres.ConfigurationsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := 
azure.ParseAzureResourceID(d.Id()) + id, err := parse.ConfigurationID(d.Id()) if err != nil { return err } - resGroup := id.ResourceGroup - serverName := id.Path["servers"] - name := id.Path["configurations"] - resp, err := client.Get(ctx, resGroup, serverName, name) + resp, err := client.Get(ctx, id.ResourceGroup, id.ServerName, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[WARN] PostgreSQL Configuration '%s' was not found (resource group '%s')", name, resGroup) + log.Printf("[WARN] PostgreSQL Configuration '%s' was not found (resource group '%s')", id.Name, id.ResourceGroup) d.SetId("") return nil } - return fmt.Errorf("Error making Read request on Azure PostgreSQL Configuration %s: %+v", name, err) + return fmt.Errorf("Error making Read request on Azure PostgreSQL Configuration %s: %+v", id.Name, err) } - d.Set("name", resp.Name) - d.Set("server_name", serverName) - d.Set("resource_group_name", resGroup) - d.Set("value", resp.ConfigurationProperties.Value) + d.Set("name", id.Name) + d.Set("server_name", id.ServerName) + d.Set("resource_group_name", id.ResourceGroup) + + if props := resp.ConfigurationProperties; props != nil { + d.Set("value", props.Value) + } return nil } -func resourceArmPostgreSQLConfigurationDelete(d *schema.ResourceData, meta interface{}) error { +func resourcePostgreSQLConfigurationDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Postgres.ConfigurationsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.ConfigurationID(d.Id()) if err != nil { return err } - resGroup := id.ResourceGroup - serverName := id.Path["servers"] - name := id.Path["configurations"] // "delete" = resetting this to the default value - resp, err := client.Get(ctx, resGroup, serverName, name) + resp, err := client.Get(ctx, id.ResourceGroup, id.ServerName, id.Name) if err != nil { - return fmt.Errorf("Error retrieving Postgresql Configuration '%s': %+v", name, err) + return fmt.Errorf("Error retrieving Postgresql Configuration '%s': %+v", id.Name, err) } properties := postgresql.Configuration{ @@ -157,7 +157,7 @@ func resourceArmPostgreSQLConfigurationDelete(d *schema.ResourceData, meta inter }, } - future, err := client.CreateOrUpdate(ctx, resGroup, serverName, name, properties) + future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.ServerName, id.Name, properties) if err != nil { if response.WasNotFound(future.Response()) { return nil diff --git a/azurerm/internal/services/postgres/postgresql_configuration_resource_test.go b/azurerm/internal/services/postgres/postgresql_configuration_resource_test.go new file mode 100644 index 000000000000..3cc985a60c42 --- /dev/null +++ b/azurerm/internal/services/postgres/postgresql_configuration_resource_test.go @@ -0,0 +1,213 @@ +package postgres_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/postgres/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type PostgreSQLConfigurationResource struct { +} + +func 
TestAccPostgreSQLConfiguration_backslashQuote(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_configuration", "test") + r := PostgreSQLConfigurationResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.backslashQuote(data), + Check: resource.ComposeTestCheckFunc( + testCheckPostgreSQLConfigurationValue(data.ResourceName, "on"), + ), + }, + data.ImportStep(), + { + Config: r.empty(data), + Check: resource.ComposeTestCheckFunc( + // "delete" resets back to the default value + testCheckPostgreSQLConfigurationValueReset(data.RandomInteger, "backslash_quote"), + ), + }, + }) +} + +func TestAccPostgreSQLConfiguration_clientMinMessages(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_configuration", "test") + r := PostgreSQLConfigurationResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.clientMinMessages(data), + Check: resource.ComposeTestCheckFunc( + testCheckPostgreSQLConfigurationValue(data.ResourceName, "DEBUG5"), + ), + }, + data.ImportStep(), + { + Config: r.empty(data), + Check: resource.ComposeTestCheckFunc( + // "delete" resets back to the default value + testCheckPostgreSQLConfigurationValueReset(data.RandomInteger, "client_min_messages"), + ), + }, + }) +} + +func TestAccPostgreSQLConfiguration_deadlockTimeout(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_configuration", "test") + r := PostgreSQLConfigurationResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.deadlockTimeout(data), + Check: resource.ComposeTestCheckFunc( + testCheckPostgreSQLConfigurationValue(data.ResourceName, "5000"), + ), + }, + data.ImportStep(), + { + Config: r.empty(data), + Check: resource.ComposeTestCheckFunc( + // "delete" resets back to the default value + testCheckPostgreSQLConfigurationValueReset(data.RandomInteger, "deadlock_timeout"), + ), + }, + }) +} + +func testCheckPostgreSQLConfigurationValue(resourceName string, value string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).Postgres.ConfigurationsClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + // Ensure we have enough information in state to look up in API + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + + name := rs.Primary.Attributes["name"] + serverName := rs.Primary.Attributes["server_name"] + resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] + if !hasResourceGroup { + return fmt.Errorf("Bad: no resource group found in state for PostgreSQL Configuration: %s", name) + } + + resp, err := client.Get(ctx, resourceGroup, serverName, name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Bad: PostgreSQL Configuration %q (server %q resource group: %q) does not exist", name, serverName, resourceGroup) + } + + return fmt.Errorf("Bad: Get on postgresqlConfigurationsClient: %+v", err) + } + + if *resp.Value != value { + return fmt.Errorf("PostgreSQL Configuration wasn't set. 
Expected '%s' - got '%s': \n%+v", value, *resp.Value, resp) + } + + return nil + } +} + +func testCheckPostgreSQLConfigurationValueReset(rInt int, configurationName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).Postgres.ConfigurationsClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + resourceGroup := fmt.Sprintf("acctestRG-psql-%d", rInt) + serverName := fmt.Sprintf("acctest-psql-server-%d", rInt) + + resp, err := client.Get(ctx, resourceGroup, serverName, configurationName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Bad: PostgreSQL Configuration %q (server %q resource group: %q) does not exist", configurationName, serverName, resourceGroup) + } + return fmt.Errorf("Bad: Get on postgresqlConfigurationsClient: %+v", err) + } + + actualValue := *resp.Value + defaultValue := *resp.DefaultValue + + if defaultValue != actualValue { + return fmt.Errorf("PostgreSQL Configuration wasn't set to the default value. Expected '%s' - got '%s': \n%+v", defaultValue, actualValue, resp) + } + + return nil + } +} + +func (t PostgreSQLConfigurationResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ConfigurationID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Postgres.ConfigurationsClient.Get(ctx, id.ResourceGroup, id.ServerName, id.Name) + if err != nil { + return nil, fmt.Errorf("reading Postgresql Configuration (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (r PostgreSQLConfigurationResource) backslashQuote(data acceptance.TestData) string { + return r.template(data, "backslash_quote", "on") +} + +func (r PostgreSQLConfigurationResource) clientMinMessages(data acceptance.TestData) string { + return r.template(data, "client_min_messages", "DEBUG5") +} + +func (r PostgreSQLConfigurationResource) deadlockTimeout(data acceptance.TestData) string { + return r.template(data, "deadlock_timeout", "5000") +} + +func (r PostgreSQLConfigurationResource) template(data acceptance.TestData, name string, value string) string { + return fmt.Sprintf(` +%s + +resource "azurerm_postgresql_configuration" "test" { + name = "%s" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_postgresql_server.test.name + value = "%s" +} +`, r.empty(data), name, value) +} + +func (PostgreSQLConfigurationResource) empty(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-psql-%d" + location = "%s" +} + +resource "azurerm_postgresql_server" "test" { + name = "acctest-psql-server-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" 
+ version = "9.6" + ssl_enforcement_enabled = true +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/postgres/postgresql_database_resource.go b/azurerm/internal/services/postgres/postgresql_database_resource.go index 5ff90b15b5b9..4f25a948f4e9 100644 --- a/azurerm/internal/services/postgres/postgresql_database_resource.go +++ b/azurerm/internal/services/postgres/postgresql_database_resource.go @@ -12,19 +12,22 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/postgres/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/postgres/validate" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmPostgreSQLDatabase() *schema.Resource { +func resourcePostgreSQLDatabase() *schema.Resource { return &schema.Resource{ - Create: resourceArmPostgreSQLDatabaseCreate, - Read: resourceArmPostgreSQLDatabaseRead, - Delete: resourceArmPostgreSQLDatabaseDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, + Create: resourcePostgreSQLDatabaseCreate, + Read: resourcePostgreSQLDatabaseRead, + Delete: resourcePostgreSQLDatabaseDelete, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.DatabaseID(id) + return err + }), Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(60 * time.Minute), @@ -46,7 +49,7 @@ func resourceArmPostgreSQLDatabase() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.PostgreSQLServerName, + ValidateFunc: validate.ServerName, }, "charset": { @@ -60,13 +63,13 @@ func resourceArmPostgreSQLDatabase() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.PostgresDatabaseCollation, + ValidateFunc: validate.DatabaseCollation, }, }, } } -func resourceArmPostgreSQLDatabaseCreate(d *schema.ResourceData, meta interface{}) error { +func resourcePostgreSQLDatabaseCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Postgres.DatabasesClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -117,36 +120,33 @@ func resourceArmPostgreSQLDatabaseCreate(d *schema.ResourceData, meta interface{ d.SetId(*read.ID) - return resourceArmPostgreSQLDatabaseRead(d, meta) + return resourcePostgreSQLDatabaseRead(d, meta) } -func resourceArmPostgreSQLDatabaseRead(d *schema.ResourceData, meta interface{}) error { +func resourcePostgreSQLDatabaseRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Postgres.DatabasesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.DatabaseID(d.Id()) if err != nil { return err } - resGroup := id.ResourceGroup - serverName := id.Path["servers"] - name := id.Path["databases"] - resp, err := client.Get(ctx, resGroup, serverName, name) + resp, err := client.Get(ctx, 
id.ResourceGroup, id.ServerName, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[WARN] PostgreSQL Database '%s' was not found (resource group '%s')", name, resGroup) + log.Printf("[WARN] PostgreSQL Database %q was not found (Server %q / Resource Group %q)", id.Name, id.ServerName, id.ResourceGroup) d.SetId("") return nil } - return fmt.Errorf("Error making Read request on Azure PostgreSQL Database %s: %+v", name, err) + return fmt.Errorf("retrieving PostgreSQL Database %q (Server %q / Resource Group %q): %+v", id.Name, id.ServerName, id.ResourceGroup, err) } - d.Set("name", resp.Name) - d.Set("resource_group_name", resGroup) - d.Set("server_name", serverName) + d.Set("name", id.Name) + d.Set("server_name", id.ServerName) + d.Set("resource_group_name", id.ResourceGroup) if props := resp.DatabaseProperties; props != nil { d.Set("charset", props.Charset) @@ -156,20 +156,17 @@ func resourceArmPostgreSQLDatabaseRead(d *schema.ResourceData, meta interface{}) return nil } -func resourceArmPostgreSQLDatabaseDelete(d *schema.ResourceData, meta interface{}) error { +func resourcePostgreSQLDatabaseDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Postgres.DatabasesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.DatabaseID(d.Id()) if err != nil { return err } - resGroup := id.ResourceGroup - serverName := id.Path["servers"] - name := id.Path["databases"] - future, err := client.Delete(ctx, resGroup, serverName, name) + future, err := client.Delete(ctx, id.ResourceGroup, id.ServerName, id.Name) if err != nil { if response.WasNotFound(future.Response()) { return nil diff --git a/azurerm/internal/services/postgres/postgresql_database_resource_test.go b/azurerm/internal/services/postgres/postgresql_database_resource_test.go new file mode 100644 index 000000000000..c3945f92a595 --- /dev/null +++ b/azurerm/internal/services/postgres/postgresql_database_resource_test.go @@ -0,0 +1,282 @@ +package postgres_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/postgres/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type PostgreSQLDatabaseResource struct { +} + +func TestAccPostgreSQLDatabase_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_database", "test") + r := PostgreSQLDatabaseResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("charset").HasValue("UTF8"), + check.That(data.ResourceName).Key("collation").HasValue("English_United States.1252"), + ), + }, + }) +} + +func TestAccPostgreSQLDatabase_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_database", "test") + r := PostgreSQLDatabaseResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: 
resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("charset").HasValue("UTF8"), + check.That(data.ResourceName).Key("collation").HasValue("English_United States.1252"), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccPostgreSQLDatabase_collationWithHyphen(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_database", "test") + r := PostgreSQLDatabaseResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.collationWithHyphen(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("charset").HasValue("UTF8"), + check.That(data.ResourceName).Key("collation").HasValue("En-US"), + ), + }, + }) +} + +func TestAccPostgreSQLDatabase_charsetLowercase(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_database", "test") + r := PostgreSQLDatabaseResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.charsetLowercase(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("charset").HasValue("UTF8"), + check.That(data.ResourceName).Key("collation").HasValue("English_United States.1252"), + ), + }, + }) +} + +func TestAccPostgreSQLDatabase_charsetMixedcase(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_database", "test") + r := PostgreSQLDatabaseResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.charsetMixedcase(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("charset").HasValue("UTF8"), + check.That(data.ResourceName).Key("collation").HasValue("English_United States.1252"), + ), + }, + }) +} + +func (t PostgreSQLDatabaseResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.DatabaseID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Postgres.DatabasesClient.Get(ctx, id.ResourceGroup, id.ServerName, id.Name) + if err != nil { + return nil, fmt.Errorf("reading Postgresql Database (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (PostgreSQLDatabaseResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-psql-%d" + location = "%s" +} + +resource "azurerm_postgresql_server" "test" { + name = "acctest-psql-server-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" 
+ version = "9.6" + ssl_enforcement_enabled = true +} + +resource "azurerm_postgresql_database" "test" { + name = "acctest_PSQL_db_%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_postgresql_server.test.name + charset = "UTF8" + collation = "English_United States.1252" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r PostgreSQLDatabaseResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_postgresql_database" "import" { + name = azurerm_postgresql_database.test.name + resource_group_name = azurerm_postgresql_database.test.resource_group_name + server_name = azurerm_postgresql_database.test.server_name + charset = azurerm_postgresql_database.test.charset + collation = azurerm_postgresql_database.test.collation +} +`, r.basic(data)) +} + +func (PostgreSQLDatabaseResource) collationWithHyphen(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-psql-%d" + location = "%s" +} + +resource "azurerm_postgresql_server" "test" { + name = "acctest-psql-server-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + version = "9.6" + ssl_enforcement_enabled = true +} + +resource "azurerm_postgresql_database" "test" { + name = "acctest_PSQL_db_%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_postgresql_server.test.name + charset = "UTF8" + collation = "En-US" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (PostgreSQLDatabaseResource) charsetLowercase(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-psql-%d" + location = "%s" +} + +resource "azurerm_postgresql_server" "test" { + name = "acctest-psql-server-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" 
+ version = "9.6" + ssl_enforcement_enabled = true +} + +resource "azurerm_postgresql_database" "test" { + name = "acctest_PSQL_db_%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_postgresql_server.test.name + charset = "utf8" + collation = "English_United States.1252" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (PostgreSQLDatabaseResource) charsetMixedcase(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-psql-%d" + location = "%s" +} + +resource "azurerm_postgresql_server" "test" { + name = "acctest-psql-server-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + version = "9.6" + ssl_enforcement_enabled = true +} + +resource "azurerm_postgresql_database" "test" { + name = "acctest_PSQL_db_%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_postgresql_server.test.name + charset = "Utf8" + collation = "English_United States.1252" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/postgres/postgresql_firewall_rule_resource.go b/azurerm/internal/services/postgres/postgresql_firewall_rule_resource.go index 93fd150c1720..a450a67cc0f5 100644 --- a/azurerm/internal/services/postgres/postgresql_firewall_rule_resource.go +++ b/azurerm/internal/services/postgres/postgresql_firewall_rule_resource.go @@ -3,27 +3,32 @@ package postgres import ( "fmt" "log" + "regexp" "time" "github.com/Azure/azure-sdk-for-go/services/postgresql/mgmt/2020-01-01/postgresql" "github.com/hashicorp/go-azure-helpers/response" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/postgres/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/postgres/validate" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmPostgreSQLFirewallRule() *schema.Resource { +func resourcePostgreSQLFirewallRule() *schema.Resource { return &schema.Resource{ - Create: resourceArmPostgreSQLFirewallRuleCreate, - Read: resourceArmPostgreSQLFirewallRuleRead, - Delete: resourceArmPostgreSQLFirewallRuleDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, + Create: resourcePostgreSQLFirewallRuleCreate, + Read: resourcePostgreSQLFirewallRuleRead, + Delete: resourcePostgreSQLFirewallRuleDelete, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.FirewallRuleID(id) + return err + }), Timeouts: &schema.ResourceTimeout{ Create: 
schema.DefaultTimeout(30 * time.Minute), @@ -37,6 +42,10 @@ func resourceArmPostgreSQLFirewallRule() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, + ValidateFunc: validation.StringMatch( + regexp.MustCompile("^[-a-zA-Z0-9(_)]{1,128}$"), + "Rule name must be 1 - 128 characters long, can contain letters, numbers, underscores, and hyphens (but the first and last character must be a letter or number).", + ), }, "resource_group_name": azure.SchemaResourceGroupName(), @@ -45,25 +54,27 @@ func resourceArmPostgreSQLFirewallRule() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.PostgreSQLServerName, + ValidateFunc: validate.ServerName, }, "start_ip_address": { - Type: schema.TypeString, - Required: true, - ForceNew: true, + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.IsIPv4Address, }, "end_ip_address": { - Type: schema.TypeString, - Required: true, - ForceNew: true, + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.IsIPv4Address, }, }, } } -func resourceArmPostgreSQLFirewallRuleCreate(d *schema.ResourceData, meta interface{}) error { +func resourcePostgreSQLFirewallRuleCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Postgres.FirewallRulesClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -113,56 +124,53 @@ func resourceArmPostgreSQLFirewallRuleCreate(d *schema.ResourceData, meta interf d.SetId(*read.ID) - return resourceArmPostgreSQLFirewallRuleRead(d, meta) + return resourcePostgreSQLFirewallRuleRead(d, meta) } -func resourceArmPostgreSQLFirewallRuleRead(d *schema.ResourceData, meta interface{}) error { +func resourcePostgreSQLFirewallRuleRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Postgres.FirewallRulesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.FirewallRuleID(d.Id()) if err != nil { return err } - resGroup := id.ResourceGroup - serverName := id.Path["servers"] - name := id.Path["firewallRules"] - resp, err := client.Get(ctx, resGroup, serverName, name) + resp, err := client.Get(ctx, id.ResourceGroup, id.ServerName, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[WARN] PostgreSQL Firewall Rule '%s' was not found (resource group '%s')", name, resGroup) + log.Printf("[WARN] PostgreSQL Firewall Rule %q was not found (Resource Group %q)", id.Name, id.ResourceGroup) d.SetId("") return nil } - return fmt.Errorf("Error making Read request on Azure PostgreSQL Firewall Rule %s: %+v", name, err) + return fmt.Errorf("retrieving PostgreSQL Firewall Rule %q: %+v", id.Name, err) } - d.Set("name", resp.Name) - d.Set("resource_group_name", resGroup) - d.Set("server_name", serverName) - d.Set("start_ip_address", resp.StartIPAddress) - d.Set("end_ip_address", resp.EndIPAddress) + d.Set("name", id.Name) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("server_name", id.ServerName) + + if props := resp.FirewallRuleProperties; props != nil { + d.Set("start_ip_address", props.StartIPAddress) + d.Set("end_ip_address", props.EndIPAddress) + } return nil } -func resourceArmPostgreSQLFirewallRuleDelete(d *schema.ResourceData, meta interface{}) error { +func resourcePostgreSQLFirewallRuleDelete(d *schema.ResourceData, meta interface{}) error { client := 
meta.(*clients.Client).Postgres.FirewallRulesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.FirewallRuleID(d.Id()) if err != nil { return err } - resGroup := id.ResourceGroup - serverName := id.Path["servers"] - name := id.Path["firewallRules"] - future, err := client.Delete(ctx, resGroup, serverName, name) + future, err := client.Delete(ctx, id.ResourceGroup, id.ServerName, id.Name) if err != nil { if response.WasNotFound(future.Response()) { return nil diff --git a/azurerm/internal/services/postgres/postgresql_firewall_rule_resource_test.go b/azurerm/internal/services/postgres/postgresql_firewall_rule_resource_test.go new file mode 100644 index 000000000000..9a3c28983052 --- /dev/null +++ b/azurerm/internal/services/postgres/postgresql_firewall_rule_resource_test.go @@ -0,0 +1,118 @@ +package postgres_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/postgres/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type PostgreSQLFirewallRuleResource struct { +} + +func TestAccPostgreSQLFirewallRule_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_firewall_rule", "test") + r := PostgreSQLFirewallRuleResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("start_ip_address").HasValue("0.0.0.0"), + check.That(data.ResourceName).Key("end_ip_address").HasValue("255.255.255.255"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccPostgreSQLFirewallRule_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_firewall_rule", "test") + r := PostgreSQLFirewallRuleResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("start_ip_address").HasValue("0.0.0.0"), + check.That(data.ResourceName).Key("end_ip_address").HasValue("255.255.255.255"), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (t PostgreSQLFirewallRuleResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.FirewallRuleID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Postgres.FirewallRulesClient.Get(ctx, id.ResourceGroup, id.ServerName, id.Name) + if err != nil { + return nil, fmt.Errorf("reading Postgresql Firewall Rule (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (PostgreSQLFirewallRuleResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-psql-%d" + location = "%s" +} + +resource "azurerm_postgresql_server" "test" { + name = "acctest-psql-server-%d" + 
location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + version = "9.6" + ssl_enforcement_enabled = true +} + +resource "azurerm_postgresql_firewall_rule" "test" { + name = "acctest-PSQL-fwrule-%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_postgresql_server.test.name + start_ip_address = "0.0.0.0" + end_ip_address = "255.255.255.255" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r PostgreSQLFirewallRuleResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_postgresql_firewall_rule" "import" { + name = azurerm_postgresql_firewall_rule.test.name + resource_group_name = azurerm_postgresql_firewall_rule.test.resource_group_name + server_name = azurerm_postgresql_firewall_rule.test.server_name + start_ip_address = azurerm_postgresql_firewall_rule.test.start_ip_address + end_ip_address = azurerm_postgresql_firewall_rule.test.end_ip_address +} +`, r.basic(data)) +} diff --git a/azurerm/internal/services/postgres/postgresql_server_data_source.go b/azurerm/internal/services/postgres/postgresql_server_data_source.go index b0c38299f9bc..ed13dd594f7a 100644 --- a/azurerm/internal/services/postgres/postgresql_server_data_source.go +++ b/azurerm/internal/services/postgres/postgresql_server_data_source.go @@ -14,7 +14,7 @@ import ( func dataSourcePostgreSqlServer() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmPostgreSqlServerRead, + Read: dataSourcePostgreSqlServerRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -81,7 +81,7 @@ func dataSourcePostgreSqlServer() *schema.Resource { } } -func dataSourceArmPostgreSqlServerRead(d *schema.ResourceData, meta interface{}) error { +func dataSourcePostgreSqlServerRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Postgres.ServersClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/azurerm/internal/services/postgres/postgresql_server_data_source_test.go b/azurerm/internal/services/postgres/postgresql_server_data_source_test.go new file mode 100644 index 000000000000..2f2cd9815732 --- /dev/null +++ b/azurerm/internal/services/postgres/postgresql_server_data_source_test.go @@ -0,0 +1,44 @@ +package postgres_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type PostgreSQLServerDataSource struct { +} + +func TestAccDataSourcePostgreSqlServer_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_postgresql_server", "test") + r := PostgreSQLServerDataSource{} + version := "9.5" + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data, version), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("location").Exists(), + check.That(data.ResourceName).Key("fqdn").Exists(), + check.That(data.ResourceName).Key("version").Exists(), + check.That(data.ResourceName).Key("administrator_login").Exists(), + 
check.That(data.ResourceName).Key("sku_name").Exists(), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), + }, + }) +} + +func (r PostgreSQLServerDataSource) basic(data acceptance.TestData, version string) string { + return fmt.Sprintf(` +%s + +data "azurerm_postgresql_server" "test" { + name = azurerm_postgresql_server.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, PostgreSQLServerResource{}.basic(data, version)) +} diff --git a/azurerm/internal/services/postgres/postgresql_server_key_resource.go b/azurerm/internal/services/postgres/postgresql_server_key_resource.go index 0e97bf7939f1..c9bee6591b00 100644 --- a/azurerm/internal/services/postgres/postgresql_server_key_resource.go +++ b/azurerm/internal/services/postgres/postgresql_server_key_resource.go @@ -21,15 +21,15 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmPostgreSQLServerKey() *schema.Resource { +func resourcePostgreSQLServerKey() *schema.Resource { return &schema.Resource{ - Create: resourceArmPostgreSQLServerKeyCreateUpdate, - Read: resourceArmPostgreSQLServerKeyRead, - Update: resourceArmPostgreSQLServerKeyCreateUpdate, - Delete: resourceArmPostgreSQLServerKeyDelete, + Create: resourcePostgreSQLServerKeyCreateUpdate, + Read: resourcePostgreSQLServerKeyRead, + Update: resourcePostgreSQLServerKeyCreateUpdate, + Delete: resourcePostgreSQLServerKeyDelete, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.PostgreSQLServerKeyID(id) + _, err := parse.ServerKeyID(id) return err }), @@ -45,7 +45,7 @@ func resourceArmPostgreSQLServerKey() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.PostgreSQLServerID, + ValidateFunc: validate.ServerID, }, "key_vault_key_id": { @@ -70,20 +70,20 @@ func getPostgreSQLServerKeyName(ctx context.Context, vaultsClient *keyvault.Vaul if keyVaultIDRaw == nil { return nil, fmt.Errorf("cannot get the keyvault ID from keyvault URL %q", keyVaultKeyID.KeyVaultBaseUrl) } - keyVaultID, err := keyVaultParse.KeyVaultID(*keyVaultIDRaw) + keyVaultID, err := keyVaultParse.VaultID(*keyVaultIDRaw) if err != nil { return nil, err } return utils.String(fmt.Sprintf("%s_%s_%s", keyVaultID.Name, keyVaultKeyID.Name, keyVaultKeyID.Version)), nil } -func resourceArmPostgreSQLServerKeyCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourcePostgreSQLServerKeyCreateUpdate(d *schema.ResourceData, meta interface{}) error { keysClient := meta.(*clients.Client).Postgres.ServerKeysClient vaultsClient := meta.(*clients.Client).KeyVault.VaultsClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() - serverID, err := parse.PostgreSQLServerID(d.Get("server_id").(string)) + serverID, err := parse.ServerID(d.Get("server_id").(string)) if err != nil { return err } @@ -137,29 +137,29 @@ func resourceArmPostgreSQLServerKeyCreateUpdate(d *schema.ResourceData, meta int } d.SetId(*resp.ID) - return resourceArmPostgreSQLServerKeyRead(d, meta) + return resourcePostgreSQLServerKeyRead(d, meta) } -func resourceArmPostgreSQLServerKeyRead(d *schema.ResourceData, meta interface{}) error { +func resourcePostgreSQLServerKeyRead(d *schema.ResourceData, meta interface{}) error { serversClient := meta.(*clients.Client).Postgres.ServersClient keysClient := meta.(*clients.Client).Postgres.ServerKeysClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := 
parse.PostgreSQLServerKeyID(d.Id()) + id, err := parse.ServerKeyID(d.Id()) if err != nil { return err } - resp, err := keysClient.Get(ctx, id.ResourceGroup, id.ServerName, id.Name) + resp, err := keysClient.Get(ctx, id.ResourceGroup, id.ServerName, id.KeyName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[WARN] PostgreSQL Server Key %q was not found (Resource Group %q / Server %q)", id.Name, id.ResourceGroup, id.ServerName) + log.Printf("[WARN] PostgreSQL Server Key %q was not found (Resource Group %q / Server %q)", id.KeyName, id.ResourceGroup, id.ServerName) d.SetId("") return nil } - return fmt.Errorf("retrieving PostgreSQL Server Key %q (Resource Group %q / Server %q): %+v", id.Name, id.ResourceGroup, id.ServerName, err) + return fmt.Errorf("retrieving PostgreSQL Server Key %q (Resource Group %q / Server %q): %+v", id.KeyName, id.ResourceGroup, id.ServerName, err) } respServer, err := serversClient.Get(ctx, id.ResourceGroup, id.ServerName) @@ -175,12 +175,12 @@ func resourceArmPostgreSQLServerKeyRead(d *schema.ResourceData, meta interface{} return nil } -func resourceArmPostgreSQLServerKeyDelete(d *schema.ResourceData, meta interface{}) error { +func resourcePostgreSQLServerKeyDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Postgres.ServerKeysClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.PostgreSQLServerKeyID(d.Id()) + id, err := parse.ServerKeyID(d.Id()) if err != nil { return err } @@ -188,12 +188,12 @@ func resourceArmPostgreSQLServerKeyDelete(d *schema.ResourceData, meta interface locks.ByName(id.ServerName, postgreSQLServerResourceName) defer locks.UnlockByName(id.ServerName, postgreSQLServerResourceName) - future, err := client.Delete(ctx, id.ServerName, id.Name, id.ResourceGroup) + future, err := client.Delete(ctx, id.ServerName, id.KeyName, id.ResourceGroup) if err != nil { - return fmt.Errorf("deleting PostgreSQL Server Key %q (Resource Group %q / Server %q): %+v", id.Name, id.ResourceGroup, id.ServerName, err) + return fmt.Errorf("deleting PostgreSQL Server Key %q (Resource Group %q / Server %q): %+v", id.KeyName, id.ResourceGroup, id.ServerName, err) } if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("waiting for deletion of PostgreSQL Server Key %q (Resource Group %q / Server %q): %+v", id.Name, id.ResourceGroup, id.ServerName, err) + return fmt.Errorf("waiting for deletion of PostgreSQL Server Key %q (Resource Group %q / Server %q): %+v", id.KeyName, id.ResourceGroup, id.ServerName, err) } return nil diff --git a/azurerm/internal/services/postgres/postgresql_server_key_resource_test.go b/azurerm/internal/services/postgres/postgresql_server_key_resource_test.go new file mode 100644 index 000000000000..2bccce9fa2be --- /dev/null +++ b/azurerm/internal/services/postgres/postgresql_server_key_resource_test.go @@ -0,0 +1,209 @@ +package postgres_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/postgres/parse" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type PostgreSQLServerKeyResource struct { +} + +func TestAccPostgreSQLServerKey_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_server_key", "test") + r := PostgreSQLServerKeyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccPostgreSQLServerKey_updateKey(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_server_key", "test") + r := PostgreSQLServerKeyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.updated(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccPostgreSQLServerKey_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_server_key", "test") + r := PostgreSQLServerKeyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (t PostgreSQLServerKeyResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ServerKeyID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Postgres.ServerKeysClient.Get(ctx, id.ResourceGroup, id.ServerName, id.KeyName) + if err != nil { + return nil, fmt.Errorf("reading Postgresql Server Key (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (PostgreSQLServerKeyResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features { + key_vault { + purge_soft_delete_on_destroy = false + } + } +} + +data "azurerm_client_config" "current" {} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_key_vault" "test" { + name = "acctestkv%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "standard" + soft_delete_enabled = true + purge_protection_enabled = true +} + +resource "azurerm_key_vault_access_policy" "server" { + key_vault_id = azurerm_key_vault.test.id + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = azurerm_postgresql_server.test.identity.0.principal_id + + key_permissions = ["get", "unwrapkey", "wrapkey"] + secret_permissions = ["get"] +} + +resource "azurerm_key_vault_access_policy" "client" { + key_vault_id = azurerm_key_vault.test.id + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + + key_permissions = ["get", "create", "delete", "list", "restore", "recover", "unwrapkey", "wrapkey", "purge", "encrypt", "decrypt", "sign", "verify"] + secret_permissions = ["get"] +} + +resource "azurerm_key_vault_key" "first" { + name = "first" + key_vault_id = azurerm_key_vault.test.id + key_type = "RSA" + key_size = 2048 + key_opts = ["decrypt", "encrypt", "sign", "unwrapKey", "verify", "wrapKey"] + + 
depends_on = [ + azurerm_key_vault_access_policy.client, + azurerm_key_vault_access_policy.server, + ] +} + +resource "azurerm_postgresql_server" "test" { + name = "acctest-postgre-server-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + + sku_name = "GP_Gen5_2" + version = "11" + storage_mb = 51200 + + ssl_enforcement_enabled = true + + identity { + type = "SystemAssigned" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) +} + +func (r PostgreSQLServerKeyResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_postgresql_server_key" "test" { + server_id = azurerm_postgresql_server.test.id + key_vault_key_id = azurerm_key_vault_key.first.id +} +`, r.template(data)) +} + +func (r PostgreSQLServerKeyResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_postgresql_server_key" "import" { + server_id = azurerm_postgresql_server_key.test.server_id + key_vault_key_id = azurerm_postgresql_server_key.test.key_vault_key_id +} +`, r.basic(data)) +} + +func (r PostgreSQLServerKeyResource) updated(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_key_vault_key" "second" { + name = "second" + key_vault_id = azurerm_key_vault.test.id + key_type = "RSA" + key_size = 2048 + key_opts = ["decrypt", "encrypt", "sign", "unwrapKey", "verify", "wrapKey"] + + depends_on = [ + azurerm_key_vault_access_policy.client, + azurerm_key_vault_access_policy.server, + ] +} + +resource "azurerm_postgresql_server_key" "test" { + server_id = azurerm_postgresql_server.test.id + key_vault_key_id = azurerm_key_vault_key.second.id +} +`, r.template(data)) +} diff --git a/azurerm/internal/services/postgres/postgresql_server_resource.go b/azurerm/internal/services/postgres/postgresql_server_resource.go index f67bbebf5c00..a92c45fe6e29 100644 --- a/azurerm/internal/services/postgres/postgresql_server_resource.go +++ b/azurerm/internal/services/postgres/postgresql_server_resource.go @@ -28,16 +28,16 @@ const ( postgreSQLServerResourceName = "azurerm_postgresql_server" ) -func resourceArmPostgreSQLServer() *schema.Resource { +func resourcePostgreSQLServer() *schema.Resource { return &schema.Resource{ - Create: resourceArmPostgreSQLServerCreate, - Read: resourceArmPostgreSQLServerRead, - Update: resourceArmPostgreSQLServerUpdate, - Delete: resourceArmPostgreSQLServerDelete, + Create: resourcePostgreSQLServerCreate, + Read: resourcePostgreSQLServerRead, + Update: resourcePostgreSQLServerUpdate, + Delete: resourcePostgreSQLServerDelete, Importer: &schema.ResourceImporter{ State: func(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { - if _, err := parse.PostgreSQLServerID(d.Id()); err != nil { + if _, err := parse.ServerID(d.Id()); err != nil { return []*schema.ResourceData{d}, err } @@ -62,7 +62,7 @@ func resourceArmPostgreSQLServer() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.PostgreSQLServerName, + ValidateFunc: validate.ServerName, }, "location": azure.SchemaLocation(), @@ -154,6 +154,7 @@ func resourceArmPostgreSQLServer() *schema.Resource { Type: schema.TypeString, Optional: true, Computed: true, + ForceNew: true, ConflictsWith: []string{"geo_redundant_backup_enabled"}, Deprecated: "this has been moved to the top level and will be 
removed in version 3.0 of the provider.", ValidateFunc: validation.StringInSlice([]string{ @@ -198,6 +199,7 @@ func resourceArmPostgreSQLServer() *schema.Resource { "geo_redundant_backup_enabled": { Type: schema.TypeBool, Optional: true, + ForceNew: true, Computed: true, // TODO: remove in 2.0 and default to false ConflictsWith: []string{"storage_profile", "storage_profile.0.geo_redundant_backup"}, }, @@ -217,7 +219,7 @@ func resourceArmPostgreSQLServer() *schema.Resource { "creation_source_server_id": { Type: schema.TypeString, Optional: true, - ValidateFunc: validate.PostgreSQLServerID, + ValidateFunc: validate.ServerID, }, "identity": { @@ -271,7 +273,7 @@ func resourceArmPostgreSQLServer() *schema.Resource { Computed: true, ConflictsWith: []string{"storage_profile", "storage_profile.0.storage_mb"}, ValidateFunc: validation.All( - validation.IntBetween(5120, 4194304), + validation.IntBetween(5120, 16777216), validation.IntDivisibleBy(1024), ), }, @@ -397,7 +399,7 @@ func resourceArmPostgreSQLServer() *schema.Resource { } } -func resourceArmPostgreSQLServerCreate(d *schema.ResourceData, meta interface{}) error { +func resourcePostgreSQLServerCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Postgres.ServersClient securityClient := meta.(*clients.Client).Postgres.ServerSecurityAlertPoliciesClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) @@ -561,10 +563,10 @@ func resourceArmPostgreSQLServerCreate(d *schema.ResourceData, meta interface{}) } } - return resourceArmPostgreSQLServerRead(d, meta) + return resourcePostgreSQLServerRead(d, meta) } -func resourceArmPostgreSQLServerUpdate(d *schema.ResourceData, meta interface{}) error { +func resourcePostgreSQLServerUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Postgres.ServersClient securityClient := meta.(*clients.Client).Postgres.ServerSecurityAlertPoliciesClient ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) @@ -574,7 +576,7 @@ func resourceArmPostgreSQLServerUpdate(d *schema.ResourceData, meta interface{}) log.Printf("[INFO] preparing arguments for AzureRM PostgreSQL Server update.") - id, err := parse.PostgreSQLServerID(d.Id()) + id, err := parse.ServerID(d.Id()) if err != nil { return fmt.Errorf("parsing Postgres Server ID : %v", err) } @@ -630,16 +632,16 @@ func resourceArmPostgreSQLServerUpdate(d *schema.ResourceData, meta interface{}) } } - return resourceArmPostgreSQLServerRead(d, meta) + return resourcePostgreSQLServerRead(d, meta) } -func resourceArmPostgreSQLServerRead(d *schema.ResourceData, meta interface{}) error { +func resourcePostgreSQLServerRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Postgres.ServersClient securityClient := meta.(*clients.Client).Postgres.ServerSecurityAlertPoliciesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.PostgreSQLServerID(d.Id()) + id, err := parse.ServerID(d.Id()) if err != nil { return fmt.Errorf("parsing Postgres Server ID : %v", err) } @@ -715,12 +717,12 @@ func resourceArmPostgreSQLServerRead(d *schema.ResourceData, meta interface{}) e return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmPostgreSQLServerDelete(d *schema.ResourceData, meta interface{}) error { +func resourcePostgreSQLServerDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Postgres.ServersClient ctx, cancel := 
timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.PostgreSQLServerID(d.Id()) + id, err := parse.ServerID(d.Id()) if err != nil { return fmt.Errorf("parsing Postgres Server ID : %v", err) } @@ -898,8 +900,8 @@ func flattenSecurityAlertPolicy(props *postgresql.SecurityAlertPolicyProperties, block["enabled"] = props.State == postgresql.ServerSecurityAlertPolicyStateEnabled - block["disabled_alerts"] = utils.FlattenStringSlice(props.DisabledAlerts) - block["email_addresses"] = utils.FlattenStringSlice(props.EmailAddresses) + block["disabled_alerts"] = flattenSecurityAlertPolicySet(props.DisabledAlerts) + block["email_addresses"] = flattenSecurityAlertPolicySet(props.EmailAddresses) if v := props.EmailAccountAdmins; v != nil { block["email_account_admins"] = *v @@ -950,3 +952,18 @@ func flattenServerIdentity(input *postgresql.ResourceIdentity) []interface{} { }, } } + +func flattenSecurityAlertPolicySet(input *[]string) []interface{} { + if input == nil { + return make([]interface{}, 0) + } + + // When empty, `disabledAlerts` and `emailAddresses` are returned as `[""]` by the api. We'll catch that here and return + // an empty interface to set. + attr := *input + if len(attr) == 1 && attr[0] == "" { + return make([]interface{}, 0) + } + + return utils.FlattenStringSlice(input) +} diff --git a/azurerm/internal/services/postgres/postgresql_server_resource_test.go b/azurerm/internal/services/postgres/postgresql_server_resource_test.go new file mode 100644 index 000000000000..91480cac1aa8 --- /dev/null +++ b/azurerm/internal/services/postgres/postgresql_server_resource_test.go @@ -0,0 +1,769 @@ +package postgres_test + +import ( + "context" + "fmt" + "testing" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/postgres/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type PostgreSQLServerResource struct { +} + +func TestAccPostgreSQLServer_basicNinePointFive(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") + r := PostgreSQLServerResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "9.5"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + }) +} + +func TestAccPostgreSQLServer_basicNinePointFiveDeprecated(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") + r := PostgreSQLServerResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicDeprecated(data, "9.5"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + { + Config: r.gp(data, "9.5"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + }) +} + +func TestAccPostgreSQLServer_basicNinePointSix(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") + r := 
PostgreSQLServerResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "9.6"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + }) +} + +func TestAccPostgreSQLServer_basicTenPointZero(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") + r := PostgreSQLServerResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "10.0"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + }) +} + +func TestAccPostgreSQLServer_gpTenPointZero(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") + r := PostgreSQLServerResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.gp(data, "10.0"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + }) +} + +func TestAccPostgreSQLServer_moTenPointZero(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") + r := PostgreSQLServerResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.mo(data, "10.0"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + }) +} + +func TestAccPostgreSQLServer_basicEleven(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") + r := PostgreSQLServerResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "11"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + }) +} + +func TestAccPostgreSQLServer_basicWithIdentity(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") + r := PostgreSQLServerResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicWithIdentity(data, "11"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + }) +} + +func TestAccPostgreSQLServer_autogrowOnly(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") + r := PostgreSQLServerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.autogrow(data, "11"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + { + Config: r.gp(data, "11"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + }) +} + +func TestAccPostgreSQLServer_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") + r := PostgreSQLServerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, "10.0"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccPostgreSQLServer_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") + r := PostgreSQLServerResource{} + data.ResourceTest(t, 
r, []resource.TestStep{ + { + Config: r.complete(data, "9.6"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + }) +} + +func TestAccPostgreSQLServer_updatedDeprecated(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") + r := PostgreSQLServerResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicDeprecated(data, "9.6"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + { + Config: r.completeDeprecated(data, "9.6"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + { + Config: r.basicDeprecated(data, "9.6"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + }) +} + +func TestAccPostgreSQLServer_updated(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") + r := PostgreSQLServerResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.gp(data, "9.6"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + { + Config: r.complete(data, "9.6"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + { + Config: r.complete2(data, "9.6"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + { + Config: r.gp(data, "9.6"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + }) +} + +func TestAccPostgreSQLServer_completeDeprecatedUpdate(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") + r := PostgreSQLServerResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.completeDeprecated(data, "9.6"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + { + Config: r.complete(data, "9.6"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + }) +} + +func TestAccPostgreSQLServer_updateSKU(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") + r := PostgreSQLServerResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.sku(data, "10.0", "B_Gen5_2"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + { + Config: r.sku(data, "10.0", "GP_Gen5_2"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + { + Config: r.sku(data, "10.0", "MO_Gen5_16"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + }) +} + +func TestAccPostgreSQLServer_createReplica(t *testing.T) { + data := 
acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") + r := PostgreSQLServerResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.gp(data, "11"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + { + Config: r.createReplica(data, "11"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + }) +} + +func TestAccPostgreSQLServer_createPointInTimeRestore(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") + r := PostgreSQLServerResource{} + restoreTime := time.Now().Add(11 * time.Minute) + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.gp(data, "11"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + { + PreConfig: func() { time.Sleep(restoreTime.Sub(time.Now().Add(-7 * time.Minute))) }, + Config: r.createPointInTimeRestore(data, "11", restoreTime.Format(time.RFC3339)), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + }) +} + +func TestAccPostgreSQLServer_threatDetectionEmptyAttrs(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") + r := PostgreSQLServerResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.emptyAttrs(data, "9.5"), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("administrator_login_password"), + }) +} + +func (t PostgreSQLServerResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ServerID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Postgres.ServersClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("reading Postgresql Server (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (PostgreSQLServerResource) template(data acceptance.TestData, sku, version string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-psql-%d" + location = "%s" +} + +resource "azurerm_postgresql_server" "test" { + name = "acctest-psql-server-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" 
+ + sku_name = "%s" + version = "%s" + storage_mb = 51200 + + ssl_enforcement_enabled = true +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, sku, version) +} + +func (r PostgreSQLServerResource) basic(data acceptance.TestData, version string) string { + return r.template(data, "B_Gen5_1", version) +} + +func (PostgreSQLServerResource) basicWithIdentity(data acceptance.TestData, version string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-psql-%d" + location = "%s" +} + +resource "azurerm_postgresql_server" "test" { + name = "acctest-psql-server-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + + sku_name = "B_Gen5_1" + version = "%s" + storage_mb = 51200 + + ssl_enforcement_enabled = true + + identity { + type = "SystemAssigned" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, version) +} + +func (r PostgreSQLServerResource) mo(data acceptance.TestData, version string) string { + return r.template(data, "MO_Gen5_2", version) +} + +func (r PostgreSQLServerResource) gp(data acceptance.TestData, version string) string { + return r.template(data, "GP_Gen5_2", version) +} + +func (PostgreSQLServerResource) autogrow(data acceptance.TestData, version string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-psql-%d" + location = "%s" +} + +resource "azurerm_postgresql_server" "test" { + name = "acctest-psql-server-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + + sku_name = "GP_Gen5_2" + version = "%s" + auto_grow_enabled = true + + ssl_enforcement_enabled = true +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, version) +} + +func (PostgreSQLServerResource) basicDeprecated(data acceptance.TestData, version string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-psql-%d" + location = "%s" +} + +resource "azurerm_postgresql_server" "test" { + name = "acctest-psql-server-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" 
+ + sku_name = "GP_Gen5_2" + version = "%s" + + storage_profile { + storage_mb = 51200 + } + + ssl_enforcement_enabled = true +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, version) +} + +func (r PostgreSQLServerResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_postgresql_server" "import" { + name = azurerm_postgresql_server.test.name + location = azurerm_postgresql_server.test.location + resource_group_name = azurerm_postgresql_server.test.resource_group_name + + administrator_login = azurerm_postgresql_server.test.administrator_login + administrator_login_password = azurerm_postgresql_server.test.administrator_login_password + + sku_name = azurerm_postgresql_server.test.sku_name + version = azurerm_postgresql_server.test.version + storage_mb = azurerm_postgresql_server.test.storage_mb + + ssl_enforcement_enabled = azurerm_postgresql_server.test.ssl_enforcement_enabled +} +`, r.basic(data, "10.0")) +} + +func (PostgreSQLServerResource) completeDeprecated(data acceptance.TestData, version string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-psql-%d" + location = "%s" +} + +resource "azurerm_postgresql_server" "test" { + name = "acctest-psql-server-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + version = "%s" + sku_name = "GP_Gen5_2" + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + + infrastructure_encryption_enabled = true + public_network_access_enabled = false + ssl_minimal_tls_version_enforced = "TLS1_2" + + ssl_enforcement_enabled = true + + storage_profile { + storage_mb = 640000 + backup_retention_days = 7 + geo_redundant_backup = "Enabled" + auto_grow = "Enabled" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, version) +} + +func (PostgreSQLServerResource) complete(data acceptance.TestData, version string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-psql-%[1]d" + location = "%[2]s" +} + +resource "azurerm_postgresql_server" "test" { + name = "acctest-psql-server-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!updated" + + sku_name = "GP_Gen5_4" + version = "%[3]s" + storage_mb = 640000 + + backup_retention_days = 7 + geo_redundant_backup_enabled = true + auto_grow_enabled = true + + infrastructure_encryption_enabled = true + public_network_access_enabled = false + ssl_enforcement_enabled = true + ssl_minimal_tls_version_enforced = "TLS1_2" + + threat_detection_policy { + enabled = true + disabled_alerts = ["Sql_Injection", "Data_Exfiltration"] + email_account_admins = true + email_addresses = ["kt@example.com", "admin@example.com"] + + retention_days = 7 + } +} +`, data.RandomInteger, data.Locations.Primary, version) +} + +func (PostgreSQLServerResource) complete2(data acceptance.TestData, version string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-psql-%[1]d" + location = "%[2]s" +} + +resource "azurerm_storage_account" "test" { + name = "accsa%[1]d" + resource_group_name = azurerm_resource_group.test.name + location = 
azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "GRS" +} + +resource "azurerm_postgresql_server" "test" { + name = "acctest-psql-server-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!updated" + + sku_name = "GP_Gen5_4" + version = "%[3]s" + storage_mb = 640000 + + backup_retention_days = 14 + geo_redundant_backup_enabled = false + auto_grow_enabled = false + + infrastructure_encryption_enabled = false + public_network_access_enabled = true + ssl_enforcement_enabled = false + ssl_minimal_tls_version_enforced = "TLS1_1" + + threat_detection_policy { + enabled = true + disabled_alerts = ["Sql_Injection"] + email_account_admins = true + email_addresses = ["kt@example.com"] + + retention_days = 7 + + storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint + storage_account_access_key = azurerm_storage_account.test.primary_access_key + } +} +`, data.RandomInteger, data.Locations.Primary, version) +} + +func (PostgreSQLServerResource) sku(data acceptance.TestData, version, sku string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-psql-%d" + location = "%s" +} + +resource "azurerm_postgresql_server" "test" { + name = "acctest-psql-server-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + + sku_name = "%s" + storage_mb = 51200 + version = "%s" + + ssl_enforcement_enabled = true +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, sku, version) +} + +func (r PostgreSQLServerResource) createReplica(data acceptance.TestData, version string) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_postgresql_server" "replica" { + name = "acctest-psql-server-%[2]d-replica" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku_name = "GP_Gen5_2" + version = "%[3]s" + + create_mode = "Replica" + creation_source_server_id = azurerm_postgresql_server.test.id + + ssl_enforcement_enabled = true +} +`, r.basic(data, version), data.RandomInteger, version) +} + +func (r PostgreSQLServerResource) createPointInTimeRestore(data acceptance.TestData, version, restoreTime string) string { + return fmt.Sprintf(` +%[1]s + +resource "azurerm_postgresql_server" "restore" { + name = "acctest-psql-server-%[2]d-restore" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku_name = "GP_Gen5_2" + version = "%[4]s" + storage_mb = 51200 + + create_mode = "PointInTimeRestore" + creation_source_server_id = azurerm_postgresql_server.test.id + restore_point_in_time = "%[3]s" + + ssl_enforcement_enabled = true +} +`, r.basic(data, version), data.RandomInteger, restoreTime, version) +} + +func (PostgreSQLServerResource) emptyAttrs(data acceptance.TestData, version string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-psql-%[1]d" + location = "%[2]s" +} + +resource "azurerm_postgresql_server" "test" { + name = "acctest-psql-server-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + administrator_login = 
"acctestun" + administrator_login_password = "H@Sh1CoR3!updated" + + sku_name = "GP_Gen5_4" + version = "%[3]s" + storage_mb = 640000 + + ssl_enforcement_enabled = false + ssl_minimal_tls_version_enforced = "TLSEnforcementDisabled" + + threat_detection_policy { + enabled = true + email_account_admins = true + + retention_days = 7 + } +} +`, data.RandomInteger, data.Locations.Primary, version) +} diff --git a/azurerm/internal/services/postgres/postgresql_virtual_network_rule_resource.go b/azurerm/internal/services/postgres/postgresql_virtual_network_rule_resource.go index 95c05c93b0a2..eca9f9005269 100644 --- a/azurerm/internal/services/postgres/postgresql_virtual_network_rule_resource.go +++ b/azurerm/internal/services/postgres/postgresql_virtual_network_rule_resource.go @@ -14,20 +14,23 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" azValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/postgres/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/postgres/validate" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmPostgreSQLVirtualNetworkRule() *schema.Resource { +func resourcePostgreSQLVirtualNetworkRule() *schema.Resource { return &schema.Resource{ - Create: resourceArmPostgreSQLVirtualNetworkRuleCreateUpdate, - Read: resourceArmPostgreSQLVirtualNetworkRuleRead, - Update: resourceArmPostgreSQLVirtualNetworkRuleCreateUpdate, - Delete: resourceArmPostgreSQLVirtualNetworkRuleDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, + Create: resourcePostgreSQLVirtualNetworkRuleCreateUpdate, + Read: resourcePostgreSQLVirtualNetworkRuleRead, + Update: resourcePostgreSQLVirtualNetworkRuleCreateUpdate, + Delete: resourcePostgreSQLVirtualNetworkRuleDelete, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.VirtualNetworkRuleID(id) + return err + }), Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -50,7 +53,7 @@ func resourceArmPostgreSQLVirtualNetworkRule() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.PostgreSQLServerName, + ValidateFunc: validate.ServerName, }, "subnet_id": { @@ -67,7 +70,7 @@ func resourceArmPostgreSQLVirtualNetworkRule() *schema.Resource { } } -func resourceArmPostgreSQLVirtualNetworkRuleCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourcePostgreSQLVirtualNetworkRuleCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Postgres.VirtualNetworkRulesClient ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -100,11 +103,11 @@ func resourceArmPostgreSQLVirtualNetworkRuleCreateUpdate(d *schema.ResourceData, future, err := client.CreateOrUpdate(ctx, resourceGroup, serverName, name, parameters) if err != nil { - return fmt.Errorf("Error submitting PostgreSQL Virtual Network Rule %q (PostgreSQL Server: %q, Resource Group: %q): %+v", name, serverName, resourceGroup, err) + return 
fmt.Errorf("creating Virtual Network Rule %q (PostgreSQL Server: %q, Resource Group: %q): %+v", name, serverName, resourceGroup, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error creating PostgreSQL Virtual Network Rule %q (PostgreSQL Server: %q, Resource Group: %q): %+v", name, serverName, resourceGroup, err) + return fmt.Errorf("waiting for creation of Virtual Network Rule %q (PostgreSQL Server: %q, Resource Group: %q): %+v", name, serverName, resourceGroup, err) } // Wait for the provisioning state to become ready @@ -134,24 +137,20 @@ func resourceArmPostgreSQLVirtualNetworkRuleCreateUpdate(d *schema.ResourceData, d.SetId(*resp.ID) - return resourceArmPostgreSQLVirtualNetworkRuleRead(d, meta) + return resourcePostgreSQLVirtualNetworkRuleRead(d, meta) } -func resourceArmPostgreSQLVirtualNetworkRuleRead(d *schema.ResourceData, meta interface{}) error { +func resourcePostgreSQLVirtualNetworkRuleRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Postgres.VirtualNetworkRulesClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.VirtualNetworkRuleID(d.Id()) if err != nil { return err } - resourceGroup := id.ResourceGroup - serverName := id.Path["servers"] - name := id.Path["virtualNetworkRules"] - - resp, err := client.Get(ctx, resourceGroup, serverName, name) + resp, err := client.Get(ctx, id.ResourceGroup, id.ServerName, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { log.Printf("[INFO] Error reading PostgreSQL Virtual Network Rule %q - removing from state", d.Id()) @@ -159,12 +158,12 @@ func resourceArmPostgreSQLVirtualNetworkRuleRead(d *schema.ResourceData, meta in return nil } - return fmt.Errorf("Error reading PostgreSQL Virtual Network Rule: %q (PostgreSQL Server: %q, Resource Group: %q): %+v", name, serverName, resourceGroup, err) + return fmt.Errorf("retrieving Virtual Network Rule %q (PostgreSQL Server %q / Resource Group %q): %+v", id.Name, id.ServerName, id.ResourceGroup, err) } - d.Set("name", resp.Name) - d.Set("resource_group_name", resourceGroup) - d.Set("server_name", serverName) + d.Set("name", id.Name) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("server_name", id.ServerName) if props := resp.VirtualNetworkRuleProperties; props != nil { d.Set("subnet_id", props.VirtualNetworkSubnetID) @@ -174,27 +173,23 @@ func resourceArmPostgreSQLVirtualNetworkRuleRead(d *schema.ResourceData, meta in return nil } -func resourceArmPostgreSQLVirtualNetworkRuleDelete(d *schema.ResourceData, meta interface{}) error { +func resourcePostgreSQLVirtualNetworkRuleDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).Postgres.VirtualNetworkRulesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.VirtualNetworkRuleID(d.Id()) if err != nil { return err } - resourceGroup := id.ResourceGroup - serverName := id.Path["servers"] - name := id.Path["virtualNetworkRules"] - - future, err := client.Delete(ctx, resourceGroup, serverName, name) + future, err := client.Delete(ctx, id.ResourceGroup, id.ServerName, id.Name) if err != nil { if response.WasNotFound(future.Response()) { return nil } - return fmt.Errorf("Error deleting PostgreSQL Virtual Network Rule %q (PostgreSQL Server: %q, Resource Group: %q): %+v", name, serverName, 
resourceGroup, err) + return fmt.Errorf("deleting PostgreSQL Virtual Network Rule %q (PostgreSQL Server: %q, Resource Group: %q): %+v", id.Name, id.ServerName, id.ResourceGroup, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { @@ -202,7 +197,7 @@ func resourceArmPostgreSQLVirtualNetworkRuleDelete(d *schema.ResourceData, meta return nil } - return fmt.Errorf("Error waiting for deletion of PostgreSQL Virtual Network Rule %q (PostgreSQL Server: %q, Resource Group: %q): %+v", name, serverName, resourceGroup, err) + return fmt.Errorf("waiting for deletion of Virtual Network Rule %q (PostgreSQL Server %q / Resource Group %q): %+v", id.Name, id.ServerName, id.ResourceGroup, err) } return nil @@ -211,7 +206,6 @@ func resourceArmPostgreSQLVirtualNetworkRuleDelete(d *schema.ResourceData, meta func postgreSQLVirtualNetworkStateStatusCodeRefreshFunc(ctx context.Context, client *postgresql.VirtualNetworkRulesClient, resourceGroup string, serverName string, name string) resource.StateRefreshFunc { return func() (interface{}, string, error) { resp, err := client.Get(ctx, resourceGroup, serverName, name) - if err != nil { if utils.ResponseWasNotFound(resp.Response) { log.Printf("[DEBUG] Retrieving PostgreSQL Virtual Network Rule %q (PostgreSQL Server: %q, Resource Group: %q) returned 404.", resourceGroup, serverName, name) diff --git a/azurerm/internal/services/postgres/postgresql_virtual_network_rule_resource_test.go b/azurerm/internal/services/postgres/postgresql_virtual_network_rule_resource_test.go new file mode 100644 index 000000000000..e0c4a171eaa0 --- /dev/null +++ b/azurerm/internal/services/postgres/postgresql_virtual_network_rule_resource_test.go @@ -0,0 +1,505 @@ +package postgres_test + +import ( + "context" + "fmt" + "regexp" + "testing" + + "github.com/hashicorp/go-azure-helpers/response" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/postgres/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type PostgreSQLVirtualNetworkRuleResource struct { +} + +func TestAccPostgreSQLVirtualNetworkRule_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_virtual_network_rule", "test") + r := PostgreSQLVirtualNetworkRuleResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccPostgreSQLVirtualNetworkRule_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_virtual_network_rule", "test") + r := PostgreSQLVirtualNetworkRuleResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccPostgreSQLVirtualNetworkRule_switchSubnets(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_virtual_network_rule", "test") + r := PostgreSQLVirtualNetworkRuleResource{} + + // Create 
regex strings that will ensure that one subnet name exists, but not the other + preConfigRegex := regexp.MustCompile(fmt.Sprintf("(acctest-SN1-%d)$|(acctest-SN[^2]-%d)$", data.RandomInteger, data.RandomInteger)) // subnet 1 but not 2 + postConfigRegex := regexp.MustCompile(fmt.Sprintf("(acctest-SN2-%d)$|(acctest-SN-[^1]%d)$", data.RandomInteger, data.RandomInteger)) // subnet 2 but not 1 + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.subnetSwitchPre(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + resource.TestMatchResourceAttr(data.ResourceName, "subnet_id", preConfigRegex), + ), + }, + { + Config: r.subnetSwitchPost(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + resource.TestMatchResourceAttr(data.ResourceName, "subnet_id", postConfigRegex), + ), + }, + }) +} + +func TestAccPostgreSQLVirtualNetworkRule_disappears(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_virtual_network_rule", "test") + r := PostgreSQLVirtualNetworkRuleResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + testCheckPostgreSQLVirtualNetworkRuleDisappears(data.ResourceName), + ), + ExpectNonEmptyPlan: true, + }, + }) +} + +func TestAccPostgreSQLVirtualNetworkRule_multipleSubnets(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_virtual_network_rule", "rule1") + r := PostgreSQLVirtualNetworkRuleResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.multipleSubnets(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + }) +} + +func TestAccPostgreSQLVirtualNetworkRule_IgnoreEndpointValid(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_postgresql_virtual_network_rule", "test") + r := PostgreSQLVirtualNetworkRuleResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.ignoreEndpointValid(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("ignore_missing_vnet_service_endpoint").HasValue("true"), + ), + }, + }) +} + +func (t PostgreSQLVirtualNetworkRuleResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.VirtualNetworkRuleID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Postgres.VirtualNetworkRulesClient.Get(ctx, id.ResourceGroup, id.ServerName, id.Name) + if err != nil { + return nil, fmt.Errorf("reading Postgresql Virtual Network Rule (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func testCheckPostgreSQLVirtualNetworkRuleDisappears(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).Postgres.VirtualNetworkRulesClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + // Ensure we have enough information in state to look up in API + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + + resourceGroup := rs.Primary.Attributes["resource_group_name"] + serverName := rs.Primary.Attributes["server_name"] + ruleName := rs.Primary.Attributes["name"] + + future, err := client.Delete(ctx, resourceGroup, serverName, ruleName) + if err 
!= nil { + // If the error is that the resource we want to delete does not exist in the first + // place (404), then just return with no error. + if response.WasNotFound(future.Response()) { + return nil + } + + return fmt.Errorf("Error deleting PostgreSQL Virtual Network Rule: %+v", err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + // Same deal as before. Just in case. + if response.WasNotFound(future.Response()) { + return nil + } + + return fmt.Errorf("Error deleting PostgreSQL Virtual Network Rule: %+v", err) + } + + return nil + } +} + +func (PostgreSQLVirtualNetworkRuleResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-psql-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctest-VNET-%d" + address_space = ["10.7.29.0/29"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctest-SN-%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.7.29.0/29" + service_endpoints = ["Microsoft.Sql"] +} + +resource "azurerm_postgresql_server" "test" { + name = "acctest-psql-server-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + version = "9.5" + ssl_enforcement_enabled = true + + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } +} + +resource "azurerm_postgresql_virtual_network_rule" "test" { + name = "acctest-PSQL-VNET-rule-%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_postgresql_server.test.name + subnet_id = azurerm_subnet.test.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r PostgreSQLVirtualNetworkRuleResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_postgresql_virtual_network_rule" "import" { + name = azurerm_postgresql_virtual_network_rule.test.name + resource_group_name = azurerm_postgresql_virtual_network_rule.test.resource_group_name + server_name = azurerm_postgresql_virtual_network_rule.test.server_name + subnet_id = azurerm_postgresql_virtual_network_rule.test.subnet_id +} +`, r.basic(data)) +} + +func (PostgreSQLVirtualNetworkRuleResource) subnetSwitchPre(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-psql-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctest-VNET-%d" + address_space = ["10.7.29.0/24"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test1" { + name = "acctest-SN1-%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.7.29.0/25" + service_endpoints = ["Microsoft.Sql"] +} + +resource "azurerm_subnet" "test2" { + name = "acctest-SN2-%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = 
azurerm_virtual_network.test.name + address_prefix = "10.7.29.128/25" + service_endpoints = ["Microsoft.Sql"] +} + +resource "azurerm_postgresql_server" "test" { + name = "acctest-psql-server-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" + version = "9.5" + ssl_enforcement_enabled = true + + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } +} + +resource "azurerm_postgresql_virtual_network_rule" "test" { + name = "acctest-PSQL-VNET-rule-%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_postgresql_server.test.name + subnet_id = azurerm_subnet.test1.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (PostgreSQLVirtualNetworkRuleResource) subnetSwitchPost(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-psql-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctest-VNET-%d" + address_space = ["10.7.29.0/24"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test1" { + name = "acctest-SN1-%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.7.29.0/25" + service_endpoints = ["Microsoft.Sql"] +} + +resource "azurerm_subnet" "test2" { + name = "acctest-SN2-%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.7.29.128/25" + service_endpoints = ["Microsoft.Sql"] +} + +resource "azurerm_postgresql_server" "test" { + name = "acctest-psql-server-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" 
+ version = "9.5" + ssl_enforcement_enabled = true + + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } +} + +resource "azurerm_postgresql_virtual_network_rule" "test" { + name = "acctest-PSQL-VNET-rule-%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_postgresql_server.test.name + subnet_id = azurerm_subnet.test2.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (PostgreSQLVirtualNetworkRuleResource) multipleSubnets(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-psql-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "vnet1" { + name = "acctestvnet1%d" + address_space = ["10.7.29.0/24"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_virtual_network" "vnet2" { + name = "acctestvnet2%d" + address_space = ["10.1.29.0/29"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "vnet1_subnet1" { + name = "acctestsubnet1%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.vnet1.name + address_prefix = "10.7.29.0/29" + service_endpoints = ["Microsoft.Sql"] +} + +resource "azurerm_subnet" "vnet1_subnet2" { + name = "acctestsubnet2%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.vnet1.name + address_prefix = "10.7.29.128/29" + service_endpoints = ["Microsoft.Sql"] +} + +resource "azurerm_subnet" "vnet2_subnet1" { + name = "acctestsubnet3%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.vnet2.name + address_prefix = "10.1.29.0/29" + service_endpoints = ["Microsoft.Sql"] +} + +resource "azurerm_postgresql_server" "test" { + name = "acctest-psql-server-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" 
+ version = "9.5" + ssl_enforcement_enabled = true + + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } +} + +resource "azurerm_postgresql_virtual_network_rule" "rule1" { + name = "acctestpostgresqlvnetrule1%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_postgresql_server.test.name + subnet_id = azurerm_subnet.vnet1_subnet1.id + ignore_missing_vnet_service_endpoint = false +} + +resource "azurerm_postgresql_virtual_network_rule" "rule2" { + name = "acctestpostgresqlvnetrule2%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_postgresql_server.test.name + subnet_id = azurerm_subnet.vnet1_subnet2.id + ignore_missing_vnet_service_endpoint = false +} + +resource "azurerm_postgresql_virtual_network_rule" "rule3" { + name = "acctestpostgresqlvnetrule3%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_postgresql_server.test.name + subnet_id = azurerm_subnet.vnet2_subnet1.id + ignore_missing_vnet_service_endpoint = false +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (PostgreSQLVirtualNetworkRuleResource) ignoreEndpointValid(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-psql-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctest-VNET-%d" + address_space = ["10.7.29.0/29"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctest-SN-%d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.7.29.0/29" + service_endpoints = ["Microsoft.Storage"] +} + +resource "azurerm_postgresql_server" "test" { + name = "acctestpostgresqlsvr-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku_name = "GP_Gen5_2" + + storage_profile { + storage_mb = 51200 + backup_retention_days = 7 + geo_redundant_backup = "Disabled" + } + + administrator_login = "acctestun" + administrator_login_password = "H@Sh1CoR3!" 
+ version = "9.5" + ssl_enforcement_enabled = true +} + +resource "azurerm_postgresql_virtual_network_rule" "test" { + name = "acctestpostgresqlvnetrule%d" + resource_group_name = azurerm_resource_group.test.name + server_name = azurerm_postgresql_server.test.name + subnet_id = azurerm_subnet.test.id + ignore_missing_vnet_service_endpoint = true +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/postgres/registration.go b/azurerm/internal/services/postgres/registration.go index 05d43c85bd64..7ba5d0bf0b89 100644 --- a/azurerm/internal/services/postgres/registration.go +++ b/azurerm/internal/services/postgres/registration.go @@ -28,12 +28,12 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource { // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_postgresql_configuration": resourceArmPostgreSQLConfiguration(), - "azurerm_postgresql_database": resourceArmPostgreSQLDatabase(), - "azurerm_postgresql_firewall_rule": resourceArmPostgreSQLFirewallRule(), - "azurerm_postgresql_server": resourceArmPostgreSQLServer(), - "azurerm_postgresql_server_key": resourceArmPostgreSQLServerKey(), - "azurerm_postgresql_virtual_network_rule": resourceArmPostgreSQLVirtualNetworkRule(), - "azurerm_postgresql_active_directory_administrator": resourceArmPostgreSQLAdministrator(), + "azurerm_postgresql_configuration": resourcePostgreSQLConfiguration(), + "azurerm_postgresql_database": resourcePostgreSQLDatabase(), + "azurerm_postgresql_firewall_rule": resourcePostgreSQLFirewallRule(), + "azurerm_postgresql_server": resourcePostgreSQLServer(), + "azurerm_postgresql_server_key": resourcePostgreSQLServerKey(), + "azurerm_postgresql_virtual_network_rule": resourcePostgreSQLVirtualNetworkRule(), + "azurerm_postgresql_active_directory_administrator": resourcePostgreSQLAdministrator(), } } diff --git a/azurerm/internal/services/postgres/resourceids.go b/azurerm/internal/services/postgres/resourceids.go new file mode 100644 index 000000000000..7e927f010e92 --- /dev/null +++ b/azurerm/internal/services/postgres/resourceids.go @@ -0,0 +1,9 @@ +package postgres + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=AzureActiveDirectoryAdministrator -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/administrators/activeDirectory +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Configuration -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/configurations/configuration1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Database -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/databases/database1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=FirewallRule -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/firewallRules/firewallRule1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Server 
-id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=ServerKey -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/keys/key1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=VirtualNetworkRule -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/virtualNetworkRules/virtualNetworkRule1 diff --git a/azurerm/internal/services/postgres/tests/postgresql_administrator_resource_test.go b/azurerm/internal/services/postgres/tests/postgresql_administrator_resource_test.go deleted file mode 100644 index e2877f874972..000000000000 --- a/azurerm/internal/services/postgres/tests/postgresql_administrator_resource_test.go +++ /dev/null @@ -1,247 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzurePostgreSqlAdministrator_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_active_directory_administrator", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzurePostgreSqlAdministratorDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzurePostgreSqlAdministrator_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzurePostgreSqlAdministratorExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "login", "sqladmin"), - ), - }, - data.ImportStep(), - { - Config: testAccAzurePostgreSqlAdministrator_withUpdates(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzurePostgreSqlAdministratorExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "login", "sqladmin2"), - ), - }, - }, - }) -} -func TestAccAzurePostgreSqlAdministrator_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_active_directory_administrator", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzurePostgreSqlAdministratorDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzurePostgreSqlAdministrator_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzurePostgreSqlAdministratorExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "login", "sqladmin"), - ), - }, - { - Config: testAccAzurePostgreSqlAdministrator_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_postgresql_active_directory_administrator"), - }, - }, - }) -} - -func TestAccAzurePostgreSqlAdministrator_disappears(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_active_directory_administrator", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: 
testCheckAzurePostgreSqlAdministratorDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzurePostgreSqlAdministrator_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzurePostgreSqlAdministratorExists(data.ResourceName), - testCheckAzurePostgreSqlAdministratorDisappears(data.ResourceName), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func testCheckAzurePostgreSqlAdministratorExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Postgres.ServerAdministratorsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serverName := rs.Primary.Attributes["server_name"] - - _, err := client.Get(ctx, resourceGroup, serverName) - return err - } -} - -func testCheckAzurePostgreSqlAdministratorDisappears(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Postgres.ServerAdministratorsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serverName := rs.Primary.Attributes["server_name"] - - if _, err := client.Delete(ctx, resourceGroup, serverName); err != nil { - return fmt.Errorf("Bad: Delete on postgresAdministratorClient: %+v", err) - } - - return nil - } -} - -func testCheckAzurePostgreSqlAdministratorDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Postgres.ServerAdministratorsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_postgresql_active_directory_administrator" { - continue - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serverName := rs.Primary.Attributes["server_name"] - - resp, err := client.Get(ctx, resourceGroup, serverName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - - return fmt.Errorf("PostgreSQL AD Administrator (server %q / resource group %q) still exists: %+v", serverName, resourceGroup, resp) - } - - return nil -} - -func testAccAzurePostgreSqlAdministrator_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-psql-%d" - location = "%s" -} - -resource "azurerm_postgresql_server" "test" { - name = "acctest-psql-server-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- version = "9.6" - ssl_enforcement_enabled = true -} - -resource "azurerm_postgresql_active_directory_administrator" "test" { - server_name = azurerm_postgresql_server.test.name - resource_group_name = azurerm_resource_group.test.name - login = "sqladmin" - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.client_id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzurePostgreSqlAdministrator_requiresImport(data acceptance.TestData) string { - return fmt.Sprintf(` -%s - -resource "azurerm_postgresql_active_directory_administrator" "import" { - server_name = azurerm_postgresql_active_directory_administrator.test.server_name - resource_group_name = azurerm_postgresql_active_directory_administrator.test.resource_group_name - login = azurerm_postgresql_active_directory_administrator.test.login - tenant_id = azurerm_postgresql_active_directory_administrator.test.tenant_id - object_id = azurerm_postgresql_active_directory_administrator.test.object_id -} -`, testAccAzurePostgreSqlAdministrator_basic(data)) -} - -func testAccAzurePostgreSqlAdministrator_withUpdates(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -data "azurerm_client_config" "current" { -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-psql-%d" - location = "%s" -} - -resource "azurerm_postgresql_server" "test" { - name = "acctest-psql-server-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- version = "9.6" - ssl_enforcement_enabled = true -} - -resource "azurerm_postgresql_active_directory_administrator" "test" { - server_name = azurerm_postgresql_server.test.name - resource_group_name = azurerm_resource_group.test.name - login = "sqladmin2" - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.client_id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/postgres/tests/postgresql_configuration_resource_test.go b/azurerm/internal/services/postgres/tests/postgresql_configuration_resource_test.go deleted file mode 100644 index 7fb7c5530ec3..000000000000 --- a/azurerm/internal/services/postgres/tests/postgresql_configuration_resource_test.go +++ /dev/null @@ -1,233 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMPostgreSQLConfiguration_backslashQuote(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_configuration", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLConfigurationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLConfiguration_backslashQuote(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLConfigurationValue(data.ResourceName, "on"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMPostgreSQLConfiguration_empty(data), - Check: resource.ComposeTestCheckFunc( - // "delete" resets back to the default value - testCheckAzureRMPostgreSQLConfigurationValueReset(data.RandomInteger, "backslash_quote"), - ), - }, - }, - }) -} - -func TestAccAzureRMPostgreSQLConfiguration_clientMinMessages(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_configuration", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLConfigurationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLConfiguration_clientMinMessages(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLConfigurationValue(data.ResourceName, "DEBUG5"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMPostgreSQLConfiguration_empty(data), - Check: resource.ComposeTestCheckFunc( - // "delete" resets back to the default value - testCheckAzureRMPostgreSQLConfigurationValueReset(data.RandomInteger, "client_min_messages"), - ), - }, - }, - }) -} - -func TestAccAzureRMPostgreSQLConfiguration_deadlockTimeout(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_configuration", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLConfigurationDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLConfiguration_deadlockTimeout(data), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMPostgreSQLConfigurationValue(data.ResourceName, "5000"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMPostgreSQLConfiguration_empty(data), - Check: resource.ComposeTestCheckFunc( - // "delete" resets back to the default value - testCheckAzureRMPostgreSQLConfigurationValueReset(data.RandomInteger, "deadlock_timeout"), - ), - }, - }, - }) -} - -func testCheckAzureRMPostgreSQLConfigurationValue(resourceName string, value string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Postgres.ConfigurationsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - serverName := rs.Primary.Attributes["server_name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for PostgreSQL Configuration: %s", name) - } - - resp, err := client.Get(ctx, resourceGroup, serverName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: PostgreSQL Configuration %q (server %q resource group: %q) does not exist", name, serverName, resourceGroup) - } - - return fmt.Errorf("Bad: Get on postgresqlConfigurationsClient: %+v", err) - } - - if *resp.Value != value { - return fmt.Errorf("PostgreSQL Configuration wasn't set. Expected '%s' - got '%s': \n%+v", value, *resp.Value, resp) - } - - return nil - } -} - -func testCheckAzureRMPostgreSQLConfigurationValueReset(rInt int, configurationName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Postgres.ConfigurationsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - resourceGroup := fmt.Sprintf("acctestRG-psql-%d", rInt) - serverName := fmt.Sprintf("acctest-psql-server-%d", rInt) - - resp, err := client.Get(ctx, resourceGroup, serverName, configurationName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: PostgreSQL Configuration %q (server %q resource group: %q) does not exist", configurationName, serverName, resourceGroup) - } - return fmt.Errorf("Bad: Get on postgresqlConfigurationsClient: %+v", err) - } - - actualValue := *resp.Value - defaultValue := *resp.DefaultValue - - if defaultValue != actualValue { - return fmt.Errorf("PostgreSQL Configuration wasn't set to the default value. 
Expected '%s' - got '%s': \n%+v", defaultValue, actualValue, resp) - } - - return nil - } -} - -func testCheckAzureRMPostgreSQLConfigurationDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Postgres.ConfigurationsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_postgresql_configuration" { - continue - } - - name := rs.Primary.Attributes["name"] - serverName := rs.Primary.Attributes["server_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, serverName, name) - - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - } - - return nil -} - -func testAccAzureRMPostgreSQLConfiguration_backslashQuote(data acceptance.TestData) string { - return testAccAzureRMPostgreSQLConfiguration_template(data, "backslash_quote", "on") -} - -func testAccAzureRMPostgreSQLConfiguration_clientMinMessages(data acceptance.TestData) string { - return testAccAzureRMPostgreSQLConfiguration_template(data, "client_min_messages", "DEBUG5") -} - -func testAccAzureRMPostgreSQLConfiguration_deadlockTimeout(data acceptance.TestData) string { - return testAccAzureRMPostgreSQLConfiguration_template(data, "deadlock_timeout", "5000") -} - -func testAccAzureRMPostgreSQLConfiguration_template(data acceptance.TestData, name string, value string) string { - return fmt.Sprintf(` -%s - -resource "azurerm_postgresql_configuration" "test" { - name = "%s" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_postgresql_server.test.name - value = "%s" -} -`, testAccAzureRMPostgreSQLConfiguration_empty(data), name, value) -} - -func testAccAzureRMPostgreSQLConfiguration_empty(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-psql-%d" - location = "%s" -} - -resource "azurerm_postgresql_server" "test" { - name = "acctest-psql-server-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- version = "9.6" - ssl_enforcement_enabled = true -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/postgres/tests/postgresql_database_resource_test.go b/azurerm/internal/services/postgres/tests/postgresql_database_resource_test.go deleted file mode 100644 index efe6a43264de..000000000000 --- a/azurerm/internal/services/postgres/tests/postgresql_database_resource_test.go +++ /dev/null @@ -1,342 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMPostgreSQLDatabase_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_database", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLDatabase_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "charset", "UTF8"), - resource.TestCheckResourceAttr(data.ResourceName, "collation", "English_United States.1252"), - ), - }, - }, - }) -} - -func TestAccAzureRMPostgreSQLDatabase_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_database", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLDatabase_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "charset", "UTF8"), - resource.TestCheckResourceAttr(data.ResourceName, "collation", "English_United States.1252"), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMPostgreSQLDatabase_requiresImport), - }, - }) -} - -func TestAccAzureRMPostgreSQLDatabase_collationWithHyphen(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_database", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLDatabase_collationWithHyphen(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "charset", "UTF8"), - resource.TestCheckResourceAttr(data.ResourceName, "collation", "En-US"), - ), - }, - }, - }) -} - -func TestAccAzureRMPostgreSQLDatabase_charsetLowercase(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_database", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: 
testAccAzureRMPostgreSQLDatabase_charsetLowercase(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "charset", "UTF8"), - resource.TestCheckResourceAttr(data.ResourceName, "collation", "English_United States.1252"), - ), - }, - }, - }) -} - -func TestAccAzureRMPostgreSQLDatabase_charsetMixedcase(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_database", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLDatabaseDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLDatabase_charsetMixedcase(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLDatabaseExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "charset", "UTF8"), - resource.TestCheckResourceAttr(data.ResourceName, "collation", "English_United States.1252"), - ), - }, - }, - }) -} - -func testCheckAzureRMPostgreSQLDatabaseExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Postgres.DatabasesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - serverName := rs.Primary.Attributes["server_name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for PostgreSQL Database: %s", name) - } - - resp, err := client.Get(ctx, resourceGroup, serverName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: PostgreSQL Database %q (server %q resource group: %q) does not exist", name, serverName, resourceGroup) - } - return fmt.Errorf("Bad: Get on postgresqlDatabasesClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMPostgreSQLDatabaseDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Postgres.DatabasesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_postgresql_database" { - continue - } - - name := rs.Primary.Attributes["name"] - serverName := rs.Primary.Attributes["server_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, serverName, name) - - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - - return fmt.Errorf("PostgreSQL Database still exists:\n%#v", resp) - } - - return nil -} - -func testAccAzureRMPostgreSQLDatabase_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-psql-%d" - location = "%s" -} - -resource "azurerm_postgresql_server" "test" { - name = "acctest-psql-server-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - 
geo_redundant_backup = "Disabled" - } - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" - version = "9.6" - ssl_enforcement_enabled = true -} - -resource "azurerm_postgresql_database" "test" { - name = "acctest_PSQL_db_%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_postgresql_server.test.name - charset = "UTF8" - collation = "English_United States.1252" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPostgreSQLDatabase_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMPostgreSQLDatabase_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_postgresql_database" "import" { - name = azurerm_postgresql_database.test.name - resource_group_name = azurerm_postgresql_database.test.resource_group_name - server_name = azurerm_postgresql_database.test.server_name - charset = azurerm_postgresql_database.test.charset - collation = azurerm_postgresql_database.test.collation -} -`, template) -} - -func testAccAzureRMPostgreSQLDatabase_collationWithHyphen(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-psql-%d" - location = "%s" -} - -resource "azurerm_postgresql_server" "test" { - name = "acctest-psql-server-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" - version = "9.6" - ssl_enforcement_enabled = true -} - -resource "azurerm_postgresql_database" "test" { - name = "acctest_PSQL_db_%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_postgresql_server.test.name - charset = "UTF8" - collation = "En-US" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPostgreSQLDatabase_charsetLowercase(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-psql-%d" - location = "%s" -} - -resource "azurerm_postgresql_server" "test" { - name = "acctest-psql-server-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- version = "9.6" - ssl_enforcement_enabled = true -} - -resource "azurerm_postgresql_database" "test" { - name = "acctest_PSQL_db_%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_postgresql_server.test.name - charset = "utf8" - collation = "English_United States.1252" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPostgreSQLDatabase_charsetMixedcase(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-psql-%d" - location = "%s" -} - -resource "azurerm_postgresql_server" "test" { - name = "acctest-psql-server-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" - version = "9.6" - ssl_enforcement_enabled = true -} - -resource "azurerm_postgresql_database" "test" { - name = "acctest_PSQL_db_%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_postgresql_server.test.name - charset = "Utf8" - collation = "English_United States.1252" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/postgres/tests/postgresql_firewall_rule_resource_test.go b/azurerm/internal/services/postgres/tests/postgresql_firewall_rule_resource_test.go deleted file mode 100644 index cba4acef9b56..000000000000 --- a/azurerm/internal/services/postgres/tests/postgresql_firewall_rule_resource_test.go +++ /dev/null @@ -1,167 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMPostgreSQLFirewallRule_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_firewall_rule", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLFirewallRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLFirewallRule_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLFirewallRuleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "start_ip_address", "0.0.0.0"), - resource.TestCheckResourceAttr(data.ResourceName, "end_ip_address", "255.255.255.255"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMPostgreSQLFirewallRule_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_firewall_rule", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLFirewallRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLFirewallRule_basic(data), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMPostgreSQLFirewallRuleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "start_ip_address", "0.0.0.0"), - resource.TestCheckResourceAttr(data.ResourceName, "end_ip_address", "255.255.255.255"), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMPostgreSQLFirewallRule_requiresImport), - }, - }) -} - -func testCheckAzureRMPostgreSQLFirewallRuleExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Postgres.FirewallRulesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - serverName := rs.Primary.Attributes["server_name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for PostgreSQL Firewall Rule: %s", name) - } - - resp, err := client.Get(ctx, resourceGroup, serverName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: PostgreSQL Firewall Rule %q (server %q resource group: %q) does not exist", name, serverName, resourceGroup) - } - - return fmt.Errorf("Bad: Get on postgresqlFirewallRulesClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMPostgreSQLFirewallRuleDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Postgres.DatabasesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_postgresql_firewall_rule" { - continue - } - - name := rs.Primary.Attributes["name"] - serverName := rs.Primary.Attributes["server_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, serverName, name) - - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - - return fmt.Errorf("PostgreSQL Firewall Rule still exists:\n%#v", resp) - } - - return nil -} - -func testAccAzureRMPostgreSQLFirewallRule_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-psql-%d" - location = "%s" -} - -resource "azurerm_postgresql_server" "test" { - name = "acctest-psql-server-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- version = "9.6" - ssl_enforcement_enabled = true -} - -resource "azurerm_postgresql_firewall_rule" "test" { - name = "acctest-PSQL-fwrule-%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_postgresql_server.test.name - start_ip_address = "0.0.0.0" - end_ip_address = "255.255.255.255" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPostgreSQLFirewallRule_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMPostgreSQLFirewallRule_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_postgresql_firewall_rule" "import" { - name = azurerm_postgresql_firewall_rule.test.name - resource_group_name = azurerm_postgresql_firewall_rule.test.resource_group_name - server_name = azurerm_postgresql_firewall_rule.test.server_name - start_ip_address = azurerm_postgresql_firewall_rule.test.start_ip_address - end_ip_address = azurerm_postgresql_firewall_rule.test.end_ip_address -} -`, template) -} diff --git a/azurerm/internal/services/postgres/tests/postgresql_server_data_source_test.go b/azurerm/internal/services/postgres/tests/postgresql_server_data_source_test.go deleted file mode 100644 index ec6551586530..000000000000 --- a/azurerm/internal/services/postgres/tests/postgresql_server_data_source_test.go +++ /dev/null @@ -1,45 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMPostgreSqlServer_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_postgresql_server", "test") - version := "9.5" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMPostgreSqlServer_basic(data, version), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "location"), - resource.TestCheckResourceAttrSet(data.ResourceName, "fqdn"), - resource.TestCheckResourceAttrSet(data.ResourceName, "version"), - resource.TestCheckResourceAttrSet(data.ResourceName, "administrator_login"), - resource.TestCheckResourceAttrSet(data.ResourceName, "sku_name"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - }, - }) -} - -func testAccDataSourceAzureRMPostgreSqlServer_basic(data acceptance.TestData, version string) string { - return fmt.Sprintf(` -%s - -data "azurerm_postgresql_server" "test" { - name = azurerm_postgresql_server.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, testAccAzureRMPostgreSQLServer_basic(data, version)) -} diff --git a/azurerm/internal/services/postgres/tests/postgresql_server_key_resource_test.go b/azurerm/internal/services/postgres/tests/postgresql_server_key_resource_test.go deleted file mode 100644 index 6f6f3470a9fb..000000000000 --- a/azurerm/internal/services/postgres/tests/postgresql_server_key_resource_test.go +++ /dev/null @@ -1,260 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/postgres/parse" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMPostgreSQLServerKey_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_server_key", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLServerKeyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLServerKey_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerKeyExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMPostgreSQLServerKey_updateKey(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_server_key", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLServerKeyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLServerKey_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerKeyExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMPostgreSQLServerKey_updated(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerKeyExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMPostgreSQLServerKey_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_server_key", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLServerKeyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLServerKey_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerKeyExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMPostgreSQLServerKey_requiresImport), - }, - }) -} - -func testCheckAzureRMPostgreSQLServerKeyDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Postgres.ServerKeysClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_postgresql_server_key" { - continue - } - - id, err := parse.PostgreSQLServerKeyID(rs.Primary.ID) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.ServerName, id.Name) - if err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("retrieving PostgreSQL Server Key: %+v", err) - } - return nil - } - - return fmt.Errorf("PostgreSQL Server Key still exists:\n%#v", resp) - } - - return nil -} - -func testCheckAzureRMPostgreSQLServerKeyExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Postgres.ServerKeysClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - id, err := parse.PostgreSQLServerKeyID(rs.Primary.ID) - if err != nil { - return 
err - } - - resp, err := client.Get(ctx, id.ResourceGroup, id.ServerName, id.Name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: PostgreSQL Server Key %q (Resource Group %q / Server %q) does not exist", id.Name, id.ResourceGroup, id.ServerName) - } - return fmt.Errorf("Bad: Get on PostgreSQLServerKeysClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMPostgreSQLServerKey_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features { - key_vault { - purge_soft_delete_on_destroy = false - } - } -} - -data "azurerm_client_config" "current" {} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_key_vault" "test" { - name = "acctestkv%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - tenant_id = data.azurerm_client_config.current.tenant_id - sku_name = "standard" - soft_delete_enabled = true - purge_protection_enabled = true -} - -resource "azurerm_key_vault_access_policy" "server" { - key_vault_id = azurerm_key_vault.test.id - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = azurerm_postgresql_server.test.identity.0.principal_id - - key_permissions = ["get", "unwrapkey", "wrapkey"] - secret_permissions = ["get"] -} - -resource "azurerm_key_vault_access_policy" "client" { - key_vault_id = azurerm_key_vault.test.id - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id - - key_permissions = ["get", "create", "delete", "list", "restore", "recover", "unwrapkey", "wrapkey", "purge", "encrypt", "decrypt", "sign", "verify"] - secret_permissions = ["get"] -} - -resource "azurerm_key_vault_key" "first" { - name = "first" - key_vault_id = azurerm_key_vault.test.id - key_type = "RSA" - key_size = 2048 - key_opts = ["decrypt", "encrypt", "sign", "unwrapKey", "verify", "wrapKey"] - - depends_on = [ - azurerm_key_vault_access_policy.client, - azurerm_key_vault_access_policy.server, - ] -} - -resource "azurerm_postgresql_server" "test" { - name = "acctest-postgre-server-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- - sku_name = "GP_Gen5_2" - version = "11" - storage_mb = 51200 - - ssl_enforcement_enabled = true - - identity { - type = "SystemAssigned" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) -} - -func testAccAzureRMPostgreSQLServerKey_basic(data acceptance.TestData) string { - template := testAccAzureRMPostgreSQLServerKey_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_postgresql_server_key" "test" { - server_id = azurerm_postgresql_server.test.id - key_vault_key_id = azurerm_key_vault_key.first.id -} -`, template) -} - -func testAccAzureRMPostgreSQLServerKey_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMPostgreSQLServerKey_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_postgresql_server_key" "import" { - server_id = azurerm_postgresql_server_key.test.server_id - key_vault_key_id = azurerm_postgresql_server_key.test.key_vault_key_id -} -`, template) -} - -func testAccAzureRMPostgreSQLServerKey_updated(data acceptance.TestData) string { - template := testAccAzureRMPostgreSQLServerKey_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_key_vault_key" "second" { - name = "second" - key_vault_id = azurerm_key_vault.test.id - key_type = "RSA" - key_size = 2048 - key_opts = ["decrypt", "encrypt", "sign", "unwrapKey", "verify", "wrapKey"] - - depends_on = [ - azurerm_key_vault_access_policy.client, - azurerm_key_vault_access_policy.server, - ] -} - -resource "azurerm_postgresql_server_key" "test" { - server_id = azurerm_postgresql_server.test.id - key_vault_key_id = azurerm_key_vault_key.second.id -} -`, template) -} diff --git a/azurerm/internal/services/postgres/tests/postgresql_server_resource_test.go b/azurerm/internal/services/postgres/tests/postgresql_server_resource_test.go deleted file mode 100644 index 9baa0c789502..000000000000 --- a/azurerm/internal/services/postgres/tests/postgresql_server_resource_test.go +++ /dev/null @@ -1,828 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - "time" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMPostgreSQLServer_basicNinePointFive(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLServer_basic(data, "9.5"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - }, - }) -} - -func TestAccAzureRMPostgreSQLServer_basicNinePointFiveDeprecated(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLServer_basicDeprecated(data, "9.5"), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - { - Config: testAccAzureRMPostgreSQLServer_gp(data, "9.5"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - }, - }) -} - -func TestAccAzureRMPostgreSQLServer_basicNinePointSix(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLServer_basic(data, "9.6"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - }, - }) -} - -func TestAccAzureRMPostgreSQLServer_basicTenPointZero(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLServer_basic(data, "10.0"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - }, - }) -} - -func TestAccAzureRMPostgreSQLServer_gpTenPointZero(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLServer_gp(data, "10.0"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - }, - }) -} - -func TestAccAzureRMPostgreSQLServer_moTenPointZero(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLServer_mo(data, "10.0"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - }, - }) -} - -func TestAccAzureRMPostgreSQLServer_basicEleven(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLServer_basic(data, "11"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - }, - }) -} - -func TestAccAzureRMPostgreSQLServer_basicWithIdentity(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") - 
resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLServer_basicWithIdentity(data, "11"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - }, - }) -} - -func TestAccAzureRMPostgreSQLServer_autogrowOnly(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLServer_autogrow(data, "11"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - { - Config: testAccAzureRMPostgreSQLServer_gp(data, "11"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - }, - }) -} - -func TestAccAzureRMPostgreSQLServer_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLServer_basic(data, "10.0"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMPostgreSQLServer_requiresImport), - }, - }) -} - -func TestAccAzureRMPostgreSQLServer_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLServer_complete(data, "9.6"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - }, - }) -} - -func TestAccAzureRMPostgreSQLServer_updatedDeprecated(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLServer_basicDeprecated(data, "9.6"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - { - Config: testAccAzureRMPostgreSQLServer_completeDeprecated(data, "9.6"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - { - Config: testAccAzureRMPostgreSQLServer_basicDeprecated(data, "9.6"), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - }, - }) -} - -func TestAccAzureRMPostgreSQLServer_updated(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLServer_gp(data, "9.6"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - { - Config: testAccAzureRMPostgreSQLServer_complete(data, "9.6"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - { - Config: testAccAzureRMPostgreSQLServer_complete2(data, "9.6"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - { - Config: testAccAzureRMPostgreSQLServer_gp(data, "9.6"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - }, - }) -} - -func TestAccAzureRMPostgreSQLServer_completeDeprecatedUpdate(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLServer_completeDeprecated(data, "9.6"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - { - Config: testAccAzureRMPostgreSQLServer_complete(data, "9.6"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - }, - }) -} - -func TestAccAzureRMPostgreSQLServer_updateSKU(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLServer_sku(data, "10.0", "B_Gen5_2"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - { - Config: testAccAzureRMPostgreSQLServer_sku(data, "10.0", "GP_Gen5_2"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - { - Config: testAccAzureRMPostgreSQLServer_sku(data, "10.0", "MO_Gen5_16"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - }, - }) -} - -func TestAccAzureRMPostgreSQLServer_createReplica(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") - resource.ParallelTest(t, 
resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLServer_gp(data, "11"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - { - Config: testAccAzureRMPostgreSQLServer_createReplica(data, "11"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - testCheckAzureRMPostgreSQLServerExists("azurerm_postgresql_server.replica"), - ), - }, - data.ImportStep("administrator_login_password"), - }, - }) -} - -func TestAccAzureRMPostgreSQLServer_createPointInTimeRestore(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_server", "test") - restoreTime := time.Now().Add(11 * time.Minute) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLServer_gp(data, "11"), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - ), - }, - data.ImportStep("administrator_login_password"), - { - PreConfig: func() { time.Sleep(restoreTime.Sub(time.Now().Add(-7 * time.Minute))) }, - Config: testAccAzureRMPostgreSQLServer_createPointInTimeRestore(data, "11", restoreTime.Format(time.RFC3339)), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLServerExists(data.ResourceName), - testCheckAzureRMPostgreSQLServerExists("azurerm_postgresql_server.restore"), - ), - }, - data.ImportStep("administrator_login_password"), - }, - }) -} - -func testCheckAzureRMPostgreSQLServerExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Postgres.ServersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for PostgreSQL Server: %s", name) - } - - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: PostgreSQL Server %q (resource group: %q) does not exist", name, resourceGroup) - } - - return fmt.Errorf("Bad: Get on postgresqlServersClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMPostgreSQLServerDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Postgres.ServersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_postgresql_server" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, name) - - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - - return fmt.Errorf("PostgreSQL Server 
still exists:\n%#v", resp) - } - - return nil -} - -func testAccAzureRMPostgreSQLServer_template(data acceptance.TestData, sku, version string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-psql-%d" - location = "%s" -} - -resource "azurerm_postgresql_server" "test" { - name = "acctest-psql-server-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" - - sku_name = "%s" - version = "%s" - storage_mb = 51200 - - ssl_enforcement_enabled = true -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, sku, version) -} - -func testAccAzureRMPostgreSQLServer_basic(data acceptance.TestData, version string) string { - return testAccAzureRMPostgreSQLServer_template(data, "B_Gen5_1", version) -} - -func testAccAzureRMPostgreSQLServer_basicWithIdentity(data acceptance.TestData, version string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-psql-%d" - location = "%s" -} - -resource "azurerm_postgresql_server" "test" { - name = "acctest-psql-server-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" - - sku_name = "B_Gen5_1" - version = "%s" - storage_mb = 51200 - - ssl_enforcement_enabled = true - - identity { - type = "SystemAssigned" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, version) -} - -func testAccAzureRMPostgreSQLServer_mo(data acceptance.TestData, version string) string { - return testAccAzureRMPostgreSQLServer_template(data, "MO_Gen5_2", version) -} - -func testAccAzureRMPostgreSQLServer_gp(data acceptance.TestData, version string) string { - return testAccAzureRMPostgreSQLServer_template(data, "GP_Gen5_2", version) -} - -func testAccAzureRMPostgreSQLServer_autogrow(data acceptance.TestData, version string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-psql-%d" - location = "%s" -} - -resource "azurerm_postgresql_server" "test" { - name = "acctest-psql-server-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" - - sku_name = "GP_Gen5_2" - version = "%s" - auto_grow_enabled = true - - ssl_enforcement_enabled = true -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, version) -} - -func testAccAzureRMPostgreSQLServer_basicDeprecated(data acceptance.TestData, version string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-psql-%d" - location = "%s" -} - -resource "azurerm_postgresql_server" "test" { - name = "acctest-psql-server-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- - sku_name = "GP_Gen5_2" - version = "%s" - - storage_profile { - storage_mb = 51200 - } - - ssl_enforcement_enabled = true -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, version) -} - -func testAccAzureRMPostgreSQLServer_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMPostgreSQLServer_basic(data, "10.0") - return fmt.Sprintf(` -%s - -resource "azurerm_postgresql_server" "import" { - name = azurerm_postgresql_server.test.name - location = azurerm_postgresql_server.test.location - resource_group_name = azurerm_postgresql_server.test.resource_group_name - - administrator_login = azurerm_postgresql_server.test.administrator_login - administrator_login_password = azurerm_postgresql_server.test.administrator_login_password - - sku_name = azurerm_postgresql_server.test.sku_name - version = azurerm_postgresql_server.test.version - storage_mb = azurerm_postgresql_server.test.storage_mb - - ssl_enforcement_enabled = azurerm_postgresql_server.test.ssl_enforcement_enabled -} -`, template) -} - -func testAccAzureRMPostgreSQLServer_completeDeprecated(data acceptance.TestData, version string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-psql-%d" - location = "%s" -} - -resource "azurerm_postgresql_server" "test" { - name = "acctest-psql-server-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - version = "%s" - sku_name = "GP_Gen5_2" - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" - - infrastructure_encryption_enabled = true - public_network_access_enabled = false - ssl_minimal_tls_version_enforced = "TLS1_2" - - ssl_enforcement_enabled = true - - storage_profile { - storage_mb = 640000 - backup_retention_days = 7 - geo_redundant_backup = "Enabled" - auto_grow = "Enabled" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, version) -} - -func testAccAzureRMPostgreSQLServer_complete(data acceptance.TestData, version string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-psql-%[1]d" - location = "%[2]s" -} - -resource "azurerm_postgresql_server" "test" { - name = "acctest-psql-server-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!updated" - - sku_name = "GP_Gen5_4" - version = "%[3]s" - storage_mb = 640000 - - backup_retention_days = 7 - geo_redundant_backup_enabled = true - auto_grow_enabled = true - - infrastructure_encryption_enabled = true - public_network_access_enabled = false - ssl_enforcement_enabled = true - ssl_minimal_tls_version_enforced = "TLS1_2" - - threat_detection_policy { - enabled = true - disabled_alerts = ["Sql_Injection", "Data_Exfiltration"] - email_account_admins = true - email_addresses = ["kt@example.com", "admin@example.com"] - - retention_days = 7 - } -} -`, data.RandomInteger, data.Locations.Primary, version) -} - -func testAccAzureRMPostgreSQLServer_complete2(data acceptance.TestData, version string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-psql-%[1]d" - location = "%[2]s" -} - -resource "azurerm_storage_account" "test" { - name = "accsa%[1]d" - resource_group_name = 
azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "GRS" -} - -resource "azurerm_postgresql_server" "test" { - name = "acctest-psql-server-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!updated" - - sku_name = "GP_Gen5_4" - version = "%[3]s" - storage_mb = 640000 - - backup_retention_days = 14 - geo_redundant_backup_enabled = false - auto_grow_enabled = false - - infrastructure_encryption_enabled = false - public_network_access_enabled = true - ssl_enforcement_enabled = false - ssl_minimal_tls_version_enforced = "TLS1_1" - - threat_detection_policy { - enabled = true - disabled_alerts = ["Sql_Injection"] - email_account_admins = true - email_addresses = ["kt@example.com"] - - retention_days = 7 - - storage_endpoint = azurerm_storage_account.test.primary_blob_endpoint - storage_account_access_key = azurerm_storage_account.test.primary_access_key - } -} -`, data.RandomInteger, data.Locations.Primary, version) -} - -func testAccAzureRMPostgreSQLServer_sku(data acceptance.TestData, version, sku string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-psql-%d" - location = "%s" -} - -resource "azurerm_postgresql_server" "test" { - name = "acctest-psql-server-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" - - sku_name = "%s" - storage_mb = 51200 - version = "%s" - - ssl_enforcement_enabled = true -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, sku, version) -} - -func testAccAzureRMPostgreSQLServer_createReplica(data acceptance.TestData, version string) string { - return fmt.Sprintf(` -%[1]s - -resource "azurerm_postgresql_server" "replica" { - name = "acctest-psql-server-%[2]d-replica" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku_name = "GP_Gen5_2" - version = "%[3]s" - - create_mode = "Replica" - creation_source_server_id = azurerm_postgresql_server.test.id - - ssl_enforcement_enabled = true -} -`, testAccAzureRMPostgreSQLServer_basic(data, version), data.RandomInteger, version) -} - -func testAccAzureRMPostgreSQLServer_createPointInTimeRestore(data acceptance.TestData, version, restoreTime string) string { - return fmt.Sprintf(` -%[1]s - -resource "azurerm_postgresql_server" "restore" { - name = "acctest-psql-server-%[2]d-restore" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku_name = "GP_Gen5_2" - version = "%[4]s" - storage_mb = 51200 - - create_mode = "PointInTimeRestore" - creation_source_server_id = azurerm_postgresql_server.test.id - restore_point_in_time = "%[3]s" - - ssl_enforcement_enabled = true -} -`, testAccAzureRMPostgreSQLServer_basic(data, version), data.RandomInteger, restoreTime, version) -} diff --git a/azurerm/internal/services/postgres/tests/postgresql_virtual_network_rule_resource_test.go b/azurerm/internal/services/postgres/tests/postgresql_virtual_network_rule_resource_test.go deleted file mode 100644 index f4d865acb9e8..000000000000 --- a/azurerm/internal/services/postgres/tests/postgresql_virtual_network_rule_resource_test.go +++ /dev/null @@ 
-1,569 +0,0 @@ -package tests - -import ( - "fmt" - "regexp" - "testing" - - "github.com/hashicorp/go-azure-helpers/response" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMPostgreSQLVirtualNetworkRule_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_virtual_network_rule", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLVirtualNetworkRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLVirtualNetworkRule_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLVirtualNetworkRuleExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMPostgreSQLVirtualNetworkRule_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_virtual_network_rule", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLVirtualNetworkRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLVirtualNetworkRule_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLVirtualNetworkRuleExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMPostgreSQLVirtualNetworkRule_requiresImport), - }, - }) -} - -func TestAccAzureRMPostgreSQLVirtualNetworkRule_switchSubnets(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_virtual_network_rule", "test") - - // Create regex strings that will ensure that one subnet name exists, but not the other - preConfigRegex := regexp.MustCompile(fmt.Sprintf("(acctest-SN1-%d)$|(acctest-SN[^2]-%d)$", data.RandomInteger, data.RandomInteger)) // subnet 1 but not 2 - postConfigRegex := regexp.MustCompile(fmt.Sprintf("(acctest-SN2-%d)$|(acctest-SN-[^1]%d)$", data.RandomInteger, data.RandomInteger)) // subnet 2 but not 1 - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLVirtualNetworkRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLVirtualNetworkRule_subnetSwitchPre(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLVirtualNetworkRuleExists(data.ResourceName), - resource.TestMatchResourceAttr(data.ResourceName, "subnet_id", preConfigRegex), - ), - }, - { - Config: testAccAzureRMPostgreSQLVirtualNetworkRule_subnetSwitchPost(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLVirtualNetworkRuleExists(data.ResourceName), - resource.TestMatchResourceAttr(data.ResourceName, "subnet_id", postConfigRegex), - ), - }, - }, - }) -} - -func TestAccAzureRMPostgreSQLVirtualNetworkRule_disappears(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_virtual_network_rule", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: 
testCheckAzureRMPostgreSQLVirtualNetworkRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLVirtualNetworkRule_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLVirtualNetworkRuleExists(data.ResourceName), - testCheckAzureRMPostgreSQLVirtualNetworkRuleDisappears(data.ResourceName), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func TestAccAzureRMPostgreSQLVirtualNetworkRule_multipleSubnets(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_virtual_network_rule", "rule1") - resourceName2 := "azurerm_postgresql_virtual_network_rule.rule2" - resourceName3 := "azurerm_postgresql_virtual_network_rule.rule3" - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLVirtualNetworkRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLVirtualNetworkRule_multipleSubnets(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLVirtualNetworkRuleExists(data.ResourceName), - testCheckAzureRMPostgreSQLVirtualNetworkRuleExists(resourceName2), - testCheckAzureRMPostgreSQLVirtualNetworkRuleExists(resourceName3), - ), - }, - }, - }) -} - -func TestAccAzureRMPostgreSQLVirtualNetworkRule_IgnoreEndpointValid(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_postgresql_virtual_network_rule", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPostgreSQLVirtualNetworkRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPostgreSQLVirtualNetworkRule_ignoreEndpointValid(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPostgreSQLVirtualNetworkRuleExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "ignore_missing_vnet_service_endpoint", "true"), - ), - }, - }, - }) -} - -func testCheckAzureRMPostgreSQLVirtualNetworkRuleExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Postgres.VirtualNetworkRulesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serverName := rs.Primary.Attributes["server_name"] - ruleName := rs.Primary.Attributes["name"] - - resp, err := client.Get(ctx, resourceGroup, serverName, ruleName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: PostgreSQL Virtual Network Rule %q (server %q / resource group %q) was not found", ruleName, serverName, resourceGroup) - } - - return err - } - - return nil - } -} - -func testCheckAzureRMPostgreSQLVirtualNetworkRuleDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Postgres.VirtualNetworkRulesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_postgresql_virtual_network_rule" { - continue - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serverName := rs.Primary.Attributes["server_name"] - ruleName := rs.Primary.Attributes["name"] - - resp, err := client.Get(ctx, resourceGroup, serverName, 
ruleName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - - return fmt.Errorf("Bad: PostgreSQL Virtual Network Rule %q (server %q / resource group %q) still exists: %+v", ruleName, serverName, resourceGroup, resp) - } - - return nil -} - -func testCheckAzureRMPostgreSQLVirtualNetworkRuleDisappears(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Postgres.VirtualNetworkRulesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - serverName := rs.Primary.Attributes["server_name"] - ruleName := rs.Primary.Attributes["name"] - - future, err := client.Delete(ctx, resourceGroup, serverName, ruleName) - if err != nil { - // If the error is that the resource we want to delete does not exist in the first - // place (404), then just return with no error. - if response.WasNotFound(future.Response()) { - return nil - } - - return fmt.Errorf("Error deleting PostgreSQL Virtual Network Rule: %+v", err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - // Same deal as before. Just in case. - if response.WasNotFound(future.Response()) { - return nil - } - - return fmt.Errorf("Error deleting PostgreSQL Virtual Network Rule: %+v", err) - } - - return nil - } -} - -func testAccAzureRMPostgreSQLVirtualNetworkRule_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-psql-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctest-VNET-%d" - address_space = ["10.7.29.0/29"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctest-SN-%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.7.29.0/29" - service_endpoints = ["Microsoft.Sql"] -} - -resource "azurerm_postgresql_server" "test" { - name = "acctest-psql-server-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- version = "9.5" - ssl_enforcement_enabled = true - - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } -} - -resource "azurerm_postgresql_virtual_network_rule" "test" { - name = "acctest-PSQL-VNET-rule-%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_postgresql_server.test.name - subnet_id = azurerm_subnet.test.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPostgreSQLVirtualNetworkRule_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMPostgreSQLVirtualNetworkRule_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_postgresql_virtual_network_rule" "import" { - name = azurerm_postgresql_virtual_network_rule.test.name - resource_group_name = azurerm_postgresql_virtual_network_rule.test.resource_group_name - server_name = azurerm_postgresql_virtual_network_rule.test.server_name - subnet_id = azurerm_postgresql_virtual_network_rule.test.subnet_id -} -`, template) -} - -func testAccAzureRMPostgreSQLVirtualNetworkRule_subnetSwitchPre(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-psql-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctest-VNET-%d" - address_space = ["10.7.29.0/24"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test1" { - name = "acctest-SN1-%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.7.29.0/25" - service_endpoints = ["Microsoft.Sql"] -} - -resource "azurerm_subnet" "test2" { - name = "acctest-SN2-%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.7.29.128/25" - service_endpoints = ["Microsoft.Sql"] -} - -resource "azurerm_postgresql_server" "test" { - name = "acctest-psql-server-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- version = "9.5" - ssl_enforcement_enabled = true - - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } -} - -resource "azurerm_postgresql_virtual_network_rule" "test" { - name = "acctest-PSQL-VNET-rule-%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_postgresql_server.test.name - subnet_id = azurerm_subnet.test1.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPostgreSQLVirtualNetworkRule_subnetSwitchPost(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-psql-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctest-VNET-%d" - address_space = ["10.7.29.0/24"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test1" { - name = "acctest-SN1-%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.7.29.0/25" - service_endpoints = ["Microsoft.Sql"] -} - -resource "azurerm_subnet" "test2" { - name = "acctest-SN2-%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.7.29.128/25" - service_endpoints = ["Microsoft.Sql"] -} - -resource "azurerm_postgresql_server" "test" { - name = "acctest-psql-server-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- version = "9.5" - ssl_enforcement_enabled = true - - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } -} - -resource "azurerm_postgresql_virtual_network_rule" "test" { - name = "acctest-PSQL-VNET-rule-%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_postgresql_server.test.name - subnet_id = azurerm_subnet.test2.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPostgreSQLVirtualNetworkRule_multipleSubnets(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-psql-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "vnet1" { - name = "acctestvnet1%d" - address_space = ["10.7.29.0/24"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_virtual_network" "vnet2" { - name = "acctestvnet2%d" - address_space = ["10.1.29.0/29"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "vnet1_subnet1" { - name = "acctestsubnet1%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.vnet1.name - address_prefix = "10.7.29.0/29" - service_endpoints = ["Microsoft.Sql"] -} - -resource "azurerm_subnet" "vnet1_subnet2" { - name = "acctestsubnet2%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.vnet1.name - address_prefix = "10.7.29.128/29" - service_endpoints = ["Microsoft.Sql"] -} - -resource "azurerm_subnet" "vnet2_subnet1" { - name = "acctestsubnet3%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.vnet2.name - address_prefix = "10.1.29.0/29" - service_endpoints = ["Microsoft.Sql"] -} - -resource "azurerm_postgresql_server" "test" { - name = "acctest-psql-server-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- version = "9.5" - ssl_enforcement_enabled = true - - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } -} - -resource "azurerm_postgresql_virtual_network_rule" "rule1" { - name = "acctestpostgresqlvnetrule1%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_postgresql_server.test.name - subnet_id = azurerm_subnet.vnet1_subnet1.id - ignore_missing_vnet_service_endpoint = false -} - -resource "azurerm_postgresql_virtual_network_rule" "rule2" { - name = "acctestpostgresqlvnetrule2%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_postgresql_server.test.name - subnet_id = azurerm_subnet.vnet1_subnet2.id - ignore_missing_vnet_service_endpoint = false -} - -resource "azurerm_postgresql_virtual_network_rule" "rule3" { - name = "acctestpostgresqlvnetrule3%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_postgresql_server.test.name - subnet_id = azurerm_subnet.vnet2_subnet1.id - ignore_missing_vnet_service_endpoint = false -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPostgreSQLVirtualNetworkRule_ignoreEndpointValid(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-psql-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctest-VNET-%d" - address_space = ["10.7.29.0/29"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctest-SN-%d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.7.29.0/29" - service_endpoints = ["Microsoft.Storage"] -} - -resource "azurerm_postgresql_server" "test" { - name = "acctestpostgresqlsvr-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku_name = "GP_Gen5_2" - - storage_profile { - storage_mb = 51200 - backup_retention_days = 7 - geo_redundant_backup = "Disabled" - } - - administrator_login = "acctestun" - administrator_login_password = "H@Sh1CoR3!" 
- version = "9.5" - ssl_enforcement_enabled = true -} - -resource "azurerm_postgresql_virtual_network_rule" "test" { - name = "acctestpostgresqlvnetrule%d" - resource_group_name = azurerm_resource_group.test.name - server_name = azurerm_postgresql_server.test.name - subnet_id = azurerm_subnet.test.id - ignore_missing_vnet_service_endpoint = true -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/postgres/validate/azure_active_directory_administrator_id.go b/azurerm/internal/services/postgres/validate/azure_active_directory_administrator_id.go new file mode 100644 index 000000000000..b88051be98d6 --- /dev/null +++ b/azurerm/internal/services/postgres/validate/azure_active_directory_administrator_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/postgres/parse" +) + +func AzureActiveDirectoryAdministratorID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.AzureActiveDirectoryAdministratorID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/postgres/validate/azure_active_directory_administrator_id_test.go b/azurerm/internal/services/postgres/validate/azure_active_directory_administrator_id_test.go new file mode 100644 index 000000000000..9cfb6667f702 --- /dev/null +++ b/azurerm/internal/services/postgres/validate/azure_active_directory_administrator_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestAzureActiveDirectoryAdministratorID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/", + Valid: false, + }, + + { + // missing value for ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/", + Valid: false, + }, + + { + // missing AdministratorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/", + Valid: false, + }, + + { + // missing value for AdministratorName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/administrators/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/administrators/activeDirectory", + Valid: true, + 
}, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DBFORPOSTGRESQL/SERVERS/SERVER1/ADMINISTRATORS/ACTIVEDIRECTORY", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := AzureActiveDirectoryAdministratorID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/postgres/validate/configuration_id.go b/azurerm/internal/services/postgres/validate/configuration_id.go new file mode 100644 index 000000000000..b8d4947859cc --- /dev/null +++ b/azurerm/internal/services/postgres/validate/configuration_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/postgres/parse" +) + +func ConfigurationID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ConfigurationID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/postgres/validate/configuration_id_test.go b/azurerm/internal/services/postgres/validate/configuration_id_test.go new file mode 100644 index 000000000000..098402a2fa38 --- /dev/null +++ b/azurerm/internal/services/postgres/validate/configuration_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestConfigurationID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/", + Valid: false, + }, + + { + // missing value for ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/configurations/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/configurations/configuration1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DBFORPOSTGRESQL/SERVERS/SERVER1/CONFIGURATIONS/CONFIGURATION1", + Valid: false, + }, + 
} + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ConfigurationID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/postgres/validate/database_collation.go b/azurerm/internal/services/postgres/validate/database_collation.go new file mode 100644 index 000000000000..d79aa3abb502 --- /dev/null +++ b/azurerm/internal/services/postgres/validate/database_collation.go @@ -0,0 +1,23 @@ +package validate + +import ( + "fmt" + "regexp" +) + +func DatabaseCollation(i interface{}, k string) (warnings []string, errors []error) { + v, ok := i.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected type of %s to be string", k)) + return + } + + matched, _ := regexp.MatchString(`^[-A-Za-z0-9_. ]+$`, v) + + if !matched { + errors = append(errors, fmt.Errorf("%s contains invalid characters, only alphanumeric, underscore, space or hyphen characters are supported, got %s", k, v)) + return + } + + return warnings, errors +} diff --git a/azurerm/internal/services/postgres/validate/database_id.go b/azurerm/internal/services/postgres/validate/database_id.go new file mode 100644 index 000000000000..e7a173af86b2 --- /dev/null +++ b/azurerm/internal/services/postgres/validate/database_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/postgres/parse" +) + +func DatabaseID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.DatabaseID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/postgres/validate/database_id_test.go b/azurerm/internal/services/postgres/validate/database_id_test.go new file mode 100644 index 000000000000..8744c371280c --- /dev/null +++ b/azurerm/internal/services/postgres/validate/database_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestDatabaseID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/", + Valid: false, + }, + + { + // missing value for ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/", + Valid: false, + }, + + { + // missing value for Name + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/databases/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/databases/database1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DBFORPOSTGRESQL/SERVERS/SERVER1/DATABASES/DATABASE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := DatabaseID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/postgres/validate/firewall_rule_id.go b/azurerm/internal/services/postgres/validate/firewall_rule_id.go new file mode 100644 index 000000000000..d09183f3408b --- /dev/null +++ b/azurerm/internal/services/postgres/validate/firewall_rule_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/postgres/parse" +) + +func FirewallRuleID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.FirewallRuleID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/postgres/validate/firewall_rule_id_test.go b/azurerm/internal/services/postgres/validate/firewall_rule_id_test.go new file mode 100644 index 000000000000..77777ead15bc --- /dev/null +++ b/azurerm/internal/services/postgres/validate/firewall_rule_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestFirewallRuleID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/", + Valid: false, + }, + + { + // missing value for ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/firewallRules/", + Valid: false, + }, + + { + // valid + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/firewallRules/firewallRule1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DBFORPOSTGRESQL/SERVERS/SERVER1/FIREWALLRULES/FIREWALLRULE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := FirewallRuleID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/postgres/validate/postgresql_server.go b/azurerm/internal/services/postgres/validate/postgresql_server.go deleted file mode 100644 index 50180a6ce530..000000000000 --- a/azurerm/internal/services/postgres/validate/postgresql_server.go +++ /dev/null @@ -1,48 +0,0 @@ -package validate - -import ( - "fmt" - "regexp" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/postgres/parse" -) - -func PostgreSQLServerName(i interface{}, k string) (_ []string, errors []error) { - if m, regexErrs := validate.RegExHelper(i, k, `^[0-9a-z][-0-9a-z]{1,61}[0-9a-z]$`); !m { - return nil, append(regexErrs, fmt.Errorf("%q can contain only lowercase letters, numbers, and '-', but can't start or end with '-'. And must be at least 3 characters and at most 63 characters", k)) - } - - return nil, nil -} - -func PostgresDatabaseCollation(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %s to be string", k)) - return - } - - matched, _ := regexp.MatchString(`^[-A-Za-z0-9_. 
]+$`, v) - - if !matched { - errors = append(errors, fmt.Errorf("%s contains invalid characters, only alphanumeric, underscore, space or hyphen characters are supported, got %s", k, v)) - return - } - - return warnings, errors -} - -func PostgreSQLServerID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return warnings, errors - } - - if _, err := parse.PostgreSQLServerID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a Postgres Server resource id: %v", k, err)) - } - - return warnings, errors -} diff --git a/azurerm/internal/services/postgres/validate/postgresql_server_test.go b/azurerm/internal/services/postgres/validate/postgresql_server_test.go deleted file mode 100644 index 01140d04deb3..000000000000 --- a/azurerm/internal/services/postgres/validate/postgresql_server_test.go +++ /dev/null @@ -1,68 +0,0 @@ -package validate - -import ( - "testing" -) - -func TestValidatePostgresServerServerName(t *testing.T) { - testData := []struct { - input string - expected bool - }{ - { - // empty - input: "", - expected: false, - }, - { - // basic example - input: "ab-c", - expected: true, - }, - { - // can't contain upper case letter - input: "AbcD", - expected: false, - }, - { - // can't start with a hyphen - input: "-abc", - expected: false, - }, - { - // can't contain underscore - input: "ab_c", - expected: false, - }, - { - // can't end with hyphen - input: "abc-", - expected: false, - }, - { - // can not short than 3 characters - input: "ab", - expected: false, - }, - { - // 63 chars - input: "abcdefghijklmnopqrstuvwxyzabcdefabcdefghijklmnopqrstuvwxyzabcde", - expected: true, - }, - { - // 64 chars - input: "abcdefghijklmnopqrstuvwxyzabcdefabcdefghijklmnopqrstuvwxyzabcdef", - expected: false, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q..", v.input) - - _, errors := PostgreSQLServerName(v.input, "name") - actual := len(errors) == 0 - if v.expected != actual { - t.Fatalf("Expected %t but got %t", v.expected, actual) - } - } -} diff --git a/azurerm/internal/services/postgres/validate/server_id.go b/azurerm/internal/services/postgres/validate/server_id.go new file mode 100644 index 000000000000..e56c83ddd6bc --- /dev/null +++ b/azurerm/internal/services/postgres/validate/server_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/postgres/parse" +) + +func ServerID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ServerID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/postgres/validate/server_id_test.go b/azurerm/internal/services/postgres/validate/server_id_test.go new file mode 100644 index 000000000000..1fcf76082f48 --- /dev/null +++ b/azurerm/internal/services/postgres/validate/server_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestServerID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing 
SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DBFORPOSTGRESQL/SERVERS/SERVER1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ServerID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/postgres/validate/server_key_id.go b/azurerm/internal/services/postgres/validate/server_key_id.go new file mode 100644 index 000000000000..d5de7b47260d --- /dev/null +++ b/azurerm/internal/services/postgres/validate/server_key_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/postgres/parse" +) + +func ServerKeyID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ServerKeyID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/postgres/validate/server_key_id_test.go b/azurerm/internal/services/postgres/validate/server_key_id_test.go new file mode 100644 index 000000000000..c63b0d673ad5 --- /dev/null +++ b/azurerm/internal/services/postgres/validate/server_key_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestServerKeyID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/", + Valid: false, + }, + + { + // missing value for ServerName + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/", + Valid: false, + }, + + { + // missing KeyName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/", + Valid: false, + }, + + { + // missing value for KeyName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/keys/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/keys/key1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DBFORPOSTGRESQL/SERVERS/SERVER1/KEYS/KEY1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ServerKeyID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/postgres/validate/server_name.go b/azurerm/internal/services/postgres/validate/server_name.go new file mode 100644 index 000000000000..5cbc667e8162 --- /dev/null +++ b/azurerm/internal/services/postgres/validate/server_name.go @@ -0,0 +1,15 @@ +package validate + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" +) + +func ServerName(i interface{}, k string) (_ []string, errors []error) { + if m, regexErrs := validate.RegExHelper(i, k, `^[0-9a-z][-0-9a-z]{1,61}[0-9a-z]$`); !m { + return nil, append(regexErrs, fmt.Errorf("%q can contain only lowercase letters, numbers, and '-', but can't start or end with '-'. 
And must be at least 3 characters and at most 63 characters", k)) + } + + return nil, nil +} diff --git a/azurerm/internal/services/postgres/validate/server_name_test.go b/azurerm/internal/services/postgres/validate/server_name_test.go new file mode 100644 index 000000000000..d9ab5d2a4e30 --- /dev/null +++ b/azurerm/internal/services/postgres/validate/server_name_test.go @@ -0,0 +1,68 @@ +package validate + +import ( + "testing" +) + +func TestValidateServerName(t *testing.T) { + testData := []struct { + input string + expected bool + }{ + { + // empty + input: "", + expected: false, + }, + { + // basic example + input: "ab-c", + expected: true, + }, + { + // can't contain upper case letter + input: "AbcD", + expected: false, + }, + { + // can't start with a hyphen + input: "-abc", + expected: false, + }, + { + // can't contain underscore + input: "ab_c", + expected: false, + }, + { + // can't end with hyphen + input: "abc-", + expected: false, + }, + { + // can not short than 3 characters + input: "ab", + expected: false, + }, + { + // 63 chars + input: "abcdefghijklmnopqrstuvwxyzabcdefabcdefghijklmnopqrstuvwxyzabcde", + expected: true, + }, + { + // 64 chars + input: "abcdefghijklmnopqrstuvwxyzabcdefabcdefghijklmnopqrstuvwxyzabcdef", + expected: false, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q..", v.input) + + _, errors := ServerName(v.input, "name") + actual := len(errors) == 0 + if v.expected != actual { + t.Fatalf("Expected %t but got %t", v.expected, actual) + } + } +} diff --git a/azurerm/internal/services/postgres/validate/virtual_network_rule_id.go b/azurerm/internal/services/postgres/validate/virtual_network_rule_id.go new file mode 100644 index 000000000000..13ef58bb34e7 --- /dev/null +++ b/azurerm/internal/services/postgres/validate/virtual_network_rule_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/postgres/parse" +) + +func VirtualNetworkRuleID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.VirtualNetworkRuleID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/postgres/validate/virtual_network_rule_id_test.go b/azurerm/internal/services/postgres/validate/virtual_network_rule_id_test.go new file mode 100644 index 000000000000..bb3af88774cd --- /dev/null +++ b/azurerm/internal/services/postgres/validate/virtual_network_rule_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestVirtualNetworkRuleID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing ServerName + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/", + Valid: false, + }, + + { + // missing value for ServerName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/virtualNetworkRules/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.DBforPostgreSQL/servers/server1/virtualNetworkRules/virtualNetworkRule1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.DBFORPOSTGRESQL/SERVERS/SERVER1/VIRTUALNETWORKRULES/VIRTUALNETWORKRULE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := VirtualNetworkRuleID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/powerbi/parse/embedded.go b/azurerm/internal/services/powerbi/parse/embedded.go new file mode 100644 index 000000000000..3b0e6caf9fce --- /dev/null +++ b/azurerm/internal/services/powerbi/parse/embedded.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type EmbeddedId struct { + SubscriptionId string + ResourceGroup string + CapacityName string +} + +func NewEmbeddedID(subscriptionId, resourceGroup, capacityName string) EmbeddedId { + return EmbeddedId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + CapacityName: capacityName, + } +} + +func (id EmbeddedId) String() string { + segments := []string{ + fmt.Sprintf("Capacity Name %q", id.CapacityName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Embedded", segmentsStr) +} + +func (id EmbeddedId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.PowerBIDedicated/capacities/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.CapacityName) +} + +// EmbeddedID parses a Embedded ID into an EmbeddedId struct +func EmbeddedID(input string) (*EmbeddedId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := EmbeddedId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.CapacityName, err = id.PopSegment("capacities"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git 
a/azurerm/internal/services/powerbi/parse/embedded_test.go b/azurerm/internal/services/powerbi/parse/embedded_test.go new file mode 100644 index 000000000000..86106ecb72cf --- /dev/null +++ b/azurerm/internal/services/powerbi/parse/embedded_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = EmbeddedId{} + +func TestEmbeddedIDFormatter(t *testing.T) { + actual := NewEmbeddedID("12345678-1234-9876-4563-123456789012", "resGroup1", "capacity1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.PowerBIDedicated/capacities/capacity1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestEmbeddedID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *EmbeddedId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing CapacityName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.PowerBIDedicated/", + Error: true, + }, + + { + // missing value for CapacityName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.PowerBIDedicated/capacities/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.PowerBIDedicated/capacities/capacity1", + Expected: &EmbeddedId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + CapacityName: "capacity1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.POWERBIDEDICATED/CAPACITIES/CAPACITY1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := EmbeddedID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.CapacityName != v.Expected.CapacityName { + t.Fatalf("Expected %q but got %q for CapacityName", v.Expected.CapacityName, actual.CapacityName) + } + } +} diff --git a/azurerm/internal/services/powerbi/parse/powerbi_embedded.go b/azurerm/internal/services/powerbi/parse/powerbi_embedded.go deleted file mode 100644 index 1ab5bbf655be..000000000000 --- a/azurerm/internal/services/powerbi/parse/powerbi_embedded.go +++ /dev/null @@ -1,33 +0,0 @@ -package parse - -import ( - "fmt" - - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type PowerBIEmbeddedId struct { - ResourceGroup string - Name string -} - -func PowerBIEmbeddedID(input string) (*PowerBIEmbeddedId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse PowerBI Embedded ID %q: %+v", input, err) - } - - powerbiEmbedded := PowerBIEmbeddedId{ - ResourceGroup: id.ResourceGroup, - } - - if powerbiEmbedded.Name, err = id.PopSegment("capacities"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &powerbiEmbedded, nil -} diff --git a/azurerm/internal/services/powerbi/parse/powerbi_embedded_test.go b/azurerm/internal/services/powerbi/parse/powerbi_embedded_test.go deleted file mode 100644 index 1398643bf9ca..000000000000 --- a/azurerm/internal/services/powerbi/parse/powerbi_embedded_test.go +++ /dev/null @@ -1,67 +0,0 @@ -package parse - -import ( - "testing" -) - -func TestPowerBIEmbeddedId(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *PowerBIEmbeddedId - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing PowerBI Embedded value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.PowerBIDedicated/capacities", - Expected: nil, - }, - { - Name: "PowerBI Embedded ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.PowerBIDedicated/capacities/capacity1", - Expected: &PowerBIEmbeddedId{ - ResourceGroup: "resGroup1", - Name: "capacity1", - }, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := PowerBIEmbeddedID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} diff --git a/azurerm/internal/services/powerbi/powerbi_embedded_resource.go b/azurerm/internal/services/powerbi/powerbi_embedded_resource.go index 28ab8b2f5e41..4881ef35a712 100644 --- a/azurerm/internal/services/powerbi/powerbi_embedded_resource.go +++ b/azurerm/internal/services/powerbi/powerbi_embedded_resource.go @@ -13,18 +13,19 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/powerbi/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/powerbi/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmPowerBIEmbedded() *schema.Resource { +func resourcePowerBIEmbedded() *schema.Resource { return &schema.Resource{ - Create: resourceArmPowerBIEmbeddedCreate, - Read: resourceArmPowerBIEmbeddedRead, - Update: resourceArmPowerBIEmbeddedUpdate, - Delete: resourceArmPowerBIEmbeddedDelete, + Create: resourcePowerBIEmbeddedCreate, + Read: resourcePowerBIEmbeddedRead, + Update: resourcePowerBIEmbeddedUpdate, + Delete: resourcePowerBIEmbeddedDelete, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -34,7 +35,7 @@ func resourceArmPowerBIEmbedded() *schema.Resource { }, Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := parse.PowerBIEmbeddedID(id) + _, err := parse.EmbeddedID(id) return err }), @@ -43,7 +44,7 @@ func resourceArmPowerBIEmbedded() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: ValidatePowerBIEmbeddedName, + ValidateFunc: validate.EmbeddedName, }, "location": azure.SchemaLocation(), @@ -68,7 +69,7 @@ func resourceArmPowerBIEmbedded() *schema.Resource { Required: true, Elem: &schema.Schema{ Type: schema.TypeString, - ValidateFunc: ValidatePowerBIEmbeddedAdministratorName, + ValidateFunc: validate.EmbeddedAdministratorName, }, }, @@ -77,7 +78,7 @@ func resourceArmPowerBIEmbedded() *schema.Resource { } } -func resourceArmPowerBIEmbeddedCreate(d *schema.ResourceData, meta interface{}) error { +func resourcePowerBIEmbeddedCreate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).PowerBI.CapacityClient ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -130,30 +131,30 @@ func resourceArmPowerBIEmbeddedCreate(d *schema.ResourceData, meta interface{}) } d.SetId(*resp.ID) - return resourceArmPowerBIEmbeddedRead(d, meta) + return resourcePowerBIEmbeddedRead(d, meta) } -func resourceArmPowerBIEmbeddedRead(d *schema.ResourceData, meta interface{}) error { +func resourcePowerBIEmbeddedRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).PowerBI.CapacityClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.PowerBIEmbeddedID(d.Id()) + id, err := parse.EmbeddedID(d.Id()) if err != nil { return err } - resp, err := client.GetDetails(ctx, id.ResourceGroup, id.Name) + resp, err := client.GetDetails(ctx, id.ResourceGroup, id.CapacityName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { log.Printf("[INFO] PowerBI Embedded %q does not exist - removing from state", d.Id()) d.SetId("") return nil } - return fmt.Errorf("Error reading PowerBI Embedded %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("Error reading PowerBI Embedded %q (Resource Group %q): %+v", id.CapacityName, id.ResourceGroup, err) } - d.Set("name", id.Name) + d.Set("name", id.CapacityName) d.Set("resource_group_name", id.ResourceGroup) if location := resp.Location; location != nil { d.Set("location", azure.NormalizeLocation(*location)) @@ -173,7 +174,7 @@ func resourceArmPowerBIEmbeddedRead(d *schema.ResourceData, meta interface{}) er return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmPowerBIEmbeddedUpdate(d *schema.ResourceData, meta interface{}) error { +func resourcePowerBIEmbeddedUpdate(d *schema.ResourceData, meta interface{}) 
error { client := meta.(*clients.Client).PowerBI.CapacityClient ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) defer cancel() @@ -204,27 +205,27 @@ func resourceArmPowerBIEmbeddedUpdate(d *schema.ResourceData, meta interface{}) return fmt.Errorf("Error waiting for update of PowerBI Embedded %q (Resource Group %q): %+v", name, resourceGroup, err) } - return resourceArmPowerBIEmbeddedRead(d, meta) + return resourcePowerBIEmbeddedRead(d, meta) } -func resourceArmPowerBIEmbeddedDelete(d *schema.ResourceData, meta interface{}) error { +func resourcePowerBIEmbeddedDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).PowerBI.CapacityClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.PowerBIEmbeddedID(d.Id()) + id, err := parse.EmbeddedID(d.Id()) if err != nil { return err } - future, err := client.Delete(ctx, id.ResourceGroup, id.Name) + future, err := client.Delete(ctx, id.ResourceGroup, id.CapacityName) if err != nil { - return fmt.Errorf("Error deleting PowerBI Embedded %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("Error deleting PowerBI Embedded %q (Resource Group %q): %+v", id.CapacityName, id.ResourceGroup, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { if !response.WasNotFound(future.Response()) { - return fmt.Errorf("Error waiting for deleting PowerBI Embedded %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + return fmt.Errorf("Error waiting for deleting PowerBI Embedded %q (Resource Group %q): %+v", id.CapacityName, id.ResourceGroup, err) } } diff --git a/azurerm/internal/services/powerbi/powerbi_embedded_resource_test.go b/azurerm/internal/services/powerbi/powerbi_embedded_resource_test.go new file mode 100644 index 000000000000..b9d1ca9fa384 --- /dev/null +++ b/azurerm/internal/services/powerbi/powerbi_embedded_resource_test.go @@ -0,0 +1,177 @@ +package powerbi_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/powerbi/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type PowerBIEmbeddedResource struct { +} + +func TestAccPowerBIEmbedded_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_powerbi_embedded", "test") + r := PowerBIEmbeddedResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccPowerBIEmbedded_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_powerbi_embedded", "test") + r := PowerBIEmbeddedResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.requiresImport(data), + ExpectError: acceptance.RequiresImportError("azurerm_powerbi_embedded"), + }, + }) +} + +func TestAccPowerBIEmbedded_complete(t 
*testing.T) { + data := acceptance.BuildTestData(t, "azurerm_powerbi_embedded", "test") + r := PowerBIEmbeddedResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("sku_name").HasValue("A2"), + check.That(data.ResourceName).Key("tags.ENV").HasValue("Test"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccPowerBIEmbedded_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_powerbi_embedded", "test") + r := PowerBIEmbeddedResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("sku_name").HasValue("A1"), + ), + }, + data.ImportStep(), + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("sku_name").HasValue("A2"), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("sku_name").HasValue("A1"), + ), + }, + data.ImportStep(), + }) +} + +func (PowerBIEmbeddedResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.EmbeddedID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.PowerBI.CapacityClient.GetDetails(ctx, id.ResourceGroup, id.CapacityName) + if err != nil { + return nil, fmt.Errorf("retrieving %s: %v", id.String(), err) + } + + return utils.Bool(resp.DedicatedCapacityProperties != nil), nil +} + +func (PowerBIEmbeddedResource) basic(data acceptance.TestData) string { + template := PowerBIEmbeddedResource{}.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_powerbi_embedded" "test" { + name = "acctestpowerbi%d" + location = "${azurerm_resource_group.test.location}" + resource_group_name = "${azurerm_resource_group.test.name}" + sku_name = "A1" + administrators = ["${data.azurerm_client_config.test.object_id}"] +} +`, template, data.RandomInteger) +} + +func (r PowerBIEmbeddedResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_powerbi_embedded" "import" { + name = "${azurerm_powerbi_embedded.test.name}" + location = "${azurerm_powerbi_embedded.test.location}" + resource_group_name = "${azurerm_resource_group.test.name}" + sku_name = "A1" + administrators = ["${data.azurerm_client_config.test.object_id}"] +} +`, r.basic(data)) +} + +func (PowerBIEmbeddedResource) complete(data acceptance.TestData) string { + template := PowerBIEmbeddedResource{}.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_powerbi_embedded" "test" { + name = "acctestpowerbi%d" + location = "${azurerm_resource_group.test.location}" + resource_group_name = "${azurerm_resource_group.test.name}" + sku_name = "A2" + administrators = ["${data.azurerm_client_config.test.object_id}"] + + tags = { + ENV = "Test" + } +} +`, template, data.RandomInteger) +} + +func (PowerBIEmbeddedResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-powerbi-%d" + location = "%s" +} + +data "azurerm_client_config" "test" {} +`, data.RandomInteger, data.Locations.Primary) 
+} diff --git a/azurerm/internal/services/powerbi/registration.go b/azurerm/internal/services/powerbi/registration.go index cbe82d00dbe0..5b9b48f70814 100644 --- a/azurerm/internal/services/powerbi/registration.go +++ b/azurerm/internal/services/powerbi/registration.go @@ -26,5 +26,6 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource { // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_powerbi_embedded": resourceArmPowerBIEmbedded()} + "azurerm_powerbi_embedded": resourcePowerBIEmbedded(), + } } diff --git a/azurerm/internal/services/powerbi/resourceids.go b/azurerm/internal/services/powerbi/resourceids.go new file mode 100644 index 000000000000..8903eff3dee0 --- /dev/null +++ b/azurerm/internal/services/powerbi/resourceids.go @@ -0,0 +1,3 @@ +package powerbi + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Embedded -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.PowerBIDedicated/capacities/capacity1 diff --git a/azurerm/internal/services/powerbi/tests/powerbi_embedded_resource_test.go b/azurerm/internal/services/powerbi/tests/powerbi_embedded_resource_test.go deleted file mode 100644 index c92a9415ac75..000000000000 --- a/azurerm/internal/services/powerbi/tests/powerbi_embedded_resource_test.go +++ /dev/null @@ -1,221 +0,0 @@ -package azurerm - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMPowerBIEmbedded_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_powerbi_embedded", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPowerBIEmbeddedDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPowerBIEmbedded_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPowerBIEmbeddedExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMPowerBIEmbedded_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_powerbi_embedded", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPowerBIEmbeddedDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPowerBIEmbedded_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPowerBIEmbeddedExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMPowerBIEmbedded_requiresImport(data), - ExpectError: acceptance.RequiresImportError("azurerm_powerbi_embedded"), - }, - }, - }) -} - -func TestAccAzureRMPowerBIEmbedded_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_powerbi_embedded", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPowerBIEmbeddedDestroy, - Steps: []resource.TestStep{ - { - 
Config: testAccAzureRMPowerBIEmbedded_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPowerBIEmbeddedExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "A2"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.ENV", "Test"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMPowerBIEmbedded_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_powerbi_embedded", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPowerBIEmbeddedDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPowerBIEmbedded_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPowerBIEmbeddedExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "A1"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMPowerBIEmbedded_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPowerBIEmbeddedExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "A2"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMPowerBIEmbedded_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPowerBIEmbeddedExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "A1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMPowerBIEmbeddedExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("PowerBI Embedded not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - client := acceptance.AzureProvider.Meta().(*clients.Client).PowerBI.CapacityClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - if resp, err := client.GetDetails(ctx, resourceGroup, name); err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: PowerBI Embedded (PowerBI Embedded Name %q / Resource Group %q) does not exist", name, resourceGroup) - } - return fmt.Errorf("Bad: Get on PowerBI.CapacityClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMPowerBIEmbeddedDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).PowerBI.CapacityClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_powerbi_embedded" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - if resp, err := client.GetDetails(ctx, resourceGroup, name); err != nil { - if !utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Get on CapacityClient: %+v", err) - } - } - - return nil - } - - return nil -} - -func testAccAzureRMPowerBIEmbedded_basic(data acceptance.TestData) string { - template := testAccAzureRMPowerBIEmbedded_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_powerbi_embedded" "test" { - name = "acctestpowerbi%d" - location = "${azurerm_resource_group.test.location}" - resource_group_name = "${azurerm_resource_group.test.name}" - sku_name = "A1" - administrators = ["${data.azurerm_client_config.test.object_id}"] -} -`, template, data.RandomInteger) -} - -func 
testAccAzureRMPowerBIEmbedded_requiresImport(data acceptance.TestData) string { - return fmt.Sprintf(` -%s - -resource "azurerm_powerbi_embedded" "import" { - name = "${azurerm_powerbi_embedded.test.name}" - location = "${azurerm_powerbi_embedded.test.location}" - resource_group_name = "${azurerm_resource_group.test.name}" - sku_name = "A1" - administrators = ["${data.azurerm_client_config.test.object_id}"] -} -`, testAccAzureRMPowerBIEmbedded_basic(data)) -} - -func testAccAzureRMPowerBIEmbedded_complete(data acceptance.TestData) string { - template := testAccAzureRMPowerBIEmbedded_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_powerbi_embedded" "test" { - name = "acctestpowerbi%d" - location = "${azurerm_resource_group.test.location}" - resource_group_name = "${azurerm_resource_group.test.name}" - sku_name = "A2" - administrators = ["${data.azurerm_client_config.test.object_id}"] - - tags = { - ENV = "Test" - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMPowerBIEmbedded_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-powerbi-%d" - location = "%s" -} - -data "azurerm_client_config" "test" {} -`, data.RandomInteger, data.Locations.Primary) -} diff --git a/azurerm/internal/services/powerbi/validate/embedded_administrator_name.go b/azurerm/internal/services/powerbi/validate/embedded_administrator_name.go new file mode 100644 index 000000000000..78b87ee18ff7 --- /dev/null +++ b/azurerm/internal/services/powerbi/validate/embedded_administrator_name.go @@ -0,0 +1,21 @@ +package validate + +import ( + "fmt" + "regexp" + + "github.com/hashicorp/go-uuid" +) + +func EmbeddedAdministratorName(v interface{}, k string) (warnings []string, errors []error) { + value := v.(string) + + if !regexp.MustCompile(`^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$`).MatchString(value) { + if _, err := uuid.ParseUUID(value); err != nil { + errors = append(errors, fmt.Errorf("%q isn't a valid email address.", k)) + errors = append(errors, fmt.Errorf("%q isn't a valid UUID (%q): %+v", k, v, err)) + } + } + + return warnings, errors +} diff --git a/azurerm/internal/services/powerbi/validate/embedded_administrator_name_test.go b/azurerm/internal/services/powerbi/validate/embedded_administrator_name_test.go new file mode 100644 index 000000000000..933183a1ed02 --- /dev/null +++ b/azurerm/internal/services/powerbi/validate/embedded_administrator_name_test.go @@ -0,0 +1,51 @@ +package validate + +import "testing" + +func TestValidateEmbeddedAdministratorName(t *testing.T) { + testData := []struct { + input string + expected bool + }{ + { + // empty + input: "", + expected: false, + }, + { + // basic example + input: "hello", + expected: false, + }, + { + // valid email address + input: "hello@microsoft.com", + expected: true, + }, + { + // invalid email address + input: "#@%^%#$@#$@#.com", + expected: false, + }, + { + // valid uuid + input: "1cf9c591-172b-4654-8ab8-81964aa5335e", + expected: true, + }, + { + // invalid uuid + input: "1cf9c591-172b-4654-8ab8-81964aa5335e-0000", + expected: false, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q..", v.input) + + _, errors := EmbeddedAdministratorName(v.input, "administrators") + actual := len(errors) == 0 + if v.expected != actual { + t.Fatalf("Expected %t but got %t", v.expected, actual) + } + } +} diff --git a/azurerm/internal/services/powerbi/validate/embedded_id.go 
b/azurerm/internal/services/powerbi/validate/embedded_id.go new file mode 100644 index 000000000000..66a74d54c036 --- /dev/null +++ b/azurerm/internal/services/powerbi/validate/embedded_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/powerbi/parse" +) + +func EmbeddedID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.EmbeddedID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/powerbi/validate/embedded_id_test.go b/azurerm/internal/services/powerbi/validate/embedded_id_test.go new file mode 100644 index 000000000000..800efd6eaeca --- /dev/null +++ b/azurerm/internal/services/powerbi/validate/embedded_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestEmbeddedID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing CapacityName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.PowerBIDedicated/", + Valid: false, + }, + + { + // missing value for CapacityName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.PowerBIDedicated/capacities/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.PowerBIDedicated/capacities/capacity1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.POWERBIDEDICATED/CAPACITIES/CAPACITY1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := EmbeddedID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/powerbi/validate/embedded_name.go b/azurerm/internal/services/powerbi/validate/embedded_name.go new file mode 100644 index 000000000000..9f343cb0524e --- /dev/null +++ b/azurerm/internal/services/powerbi/validate/embedded_name.go @@ -0,0 +1,16 @@ +package validate + +import ( + "fmt" + "regexp" +) + +func EmbeddedName(v interface{}, k string) (warnings []string, errors []error) { + value := v.(string) + + if !regexp.MustCompile(`^[a-z][a-z0-9]{3,63}$`).MatchString(value) { + errors = append(errors, fmt.Errorf("%q must be between 4 and 64 characters in length and contains only lowercase letters or numbers.", k)) + } + + return warnings, errors +} diff --git a/azurerm/internal/services/powerbi/validate/embedded_name_test.go 
b/azurerm/internal/services/powerbi/validate/embedded_name_test.go new file mode 100644 index 000000000000..dea068f26396 --- /dev/null +++ b/azurerm/internal/services/powerbi/validate/embedded_name_test.go @@ -0,0 +1,71 @@ +package validate + +import "testing" + +func TestValidatePowerBIEmbeddedName(t *testing.T) { + testData := []struct { + input string + expected bool + }{ + { + // empty + input: "", + expected: false, + }, + { + // basic example + input: "hello", + expected: true, + }, + { + // can't start with an underscore + input: "_hello", + expected: false, + }, + { + // can't end with a dash + input: "hello-", + expected: false, + }, + { + // can't contain an exclamation mark + input: "hello!", + expected: false, + }, + { + // can't contain dash in the middle + input: "malcolm-in-the-middle", + expected: false, + }, + { + // can't end with a period + input: "hello.", + expected: false, + }, + { + // 63 chars + input: "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk", + expected: true, + }, + { + // 64 chars + input: "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijkj", + expected: true, + }, + { + // 65 chars + input: "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijkja", + expected: false, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q..", v.input) + + _, errors := EmbeddedName(v.input, "name") + actual := len(errors) == 0 + if v.expected != actual { + t.Fatalf("Expected %t but got %t", v.expected, actual) + } + } +} diff --git a/azurerm/internal/services/powerbi/validation.go b/azurerm/internal/services/powerbi/validation.go deleted file mode 100644 index 8bf210398493..000000000000 --- a/azurerm/internal/services/powerbi/validation.go +++ /dev/null @@ -1,31 +0,0 @@ -package powerbi - -import ( - "fmt" - "regexp" - - "github.com/hashicorp/go-uuid" -) - -func ValidatePowerBIEmbeddedName(v interface{}, k string) (warnings []string, errors []error) { - value := v.(string) - - if !regexp.MustCompile(`^[a-z][a-z0-9]{3,63}$`).MatchString(value) { - errors = append(errors, fmt.Errorf("%q must be between 4 and 64 characters in length and contains only lowercase letters or numbers.", k)) - } - - return warnings, errors -} - -func ValidatePowerBIEmbeddedAdministratorName(v interface{}, k string) (warnings []string, errors []error) { - value := v.(string) - - if !regexp.MustCompile(`^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$`).MatchString(value) { - if _, err := uuid.ParseUUID(value); err != nil { - errors = append(errors, fmt.Errorf("%q isn't a valid email address.", k)) - errors = append(errors, fmt.Errorf("%q isn't a valid UUID (%q): %+v", k, v, err)) - } - } - - return warnings, errors -} diff --git a/azurerm/internal/services/powerbi/validation_test.go b/azurerm/internal/services/powerbi/validation_test.go deleted file mode 100644 index 47129bb98da4..000000000000 --- a/azurerm/internal/services/powerbi/validation_test.go +++ /dev/null @@ -1,119 +0,0 @@ -package powerbi - -import "testing" - -func TestValidatePowerBIEmbeddedName(t *testing.T) { - testData := []struct { - input string - expected bool - }{ - { - // empty - input: "", - expected: false, - }, - { - // basic example - input: "hello", - expected: true, - }, - { - // can't start with an underscore - input: "_hello", - expected: false, - }, - { - // can't end with a dash - input: "hello-", - expected: false, - }, - { - // can't contain an exclamation mark - input: "hello!", - expected: false, - }, - { - // can't contain dash in the middle - input: 
"malcolm-in-the-middle", - expected: false, - }, - { - // can't end with a period - input: "hello.", - expected: false, - }, - { - // 63 chars - input: "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk", - expected: true, - }, - { - // 64 chars - input: "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijkj", - expected: true, - }, - { - // 65 chars - input: "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijkja", - expected: false, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q..", v.input) - - _, errors := ValidatePowerBIEmbeddedName(v.input, "name") - actual := len(errors) == 0 - if v.expected != actual { - t.Fatalf("Expected %t but got %t", v.expected, actual) - } - } -} - -func TestValidatePowerBIEmbeddedAdministratorName(t *testing.T) { - testData := []struct { - input string - expected bool - }{ - { - // empty - input: "", - expected: false, - }, - { - // basic example - input: "hello", - expected: false, - }, - { - // valid email address - input: "hello@microsoft.com", - expected: true, - }, - { - // invalid email address - input: "#@%^%#$@#$@#.com", - expected: false, - }, - { - // valid uuid - input: "1cf9c591-172b-4654-8ab8-81964aa5335e", - expected: true, - }, - { - // invalid uuid - input: "1cf9c591-172b-4654-8ab8-81964aa5335e-0000", - expected: false, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q..", v.input) - - _, errors := ValidatePowerBIEmbeddedAdministratorName(v.input, "administrators") - actual := len(errors) == 0 - if v.expected != actual { - t.Fatalf("Expected %t but got %t", v.expected, actual) - } - } -} diff --git a/azurerm/internal/services/privatedns/parse/a_record.go b/azurerm/internal/services/privatedns/parse/a_record.go new file mode 100644 index 000000000000..f79b7da016be --- /dev/null +++ b/azurerm/internal/services/privatedns/parse/a_record.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ARecordId struct { + SubscriptionId string + ResourceGroup string + PrivateDnsZoneName string + AName string +} + +func NewARecordID(subscriptionId, resourceGroup, privateDnsZoneName, aName string) ARecordId { + return ARecordId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + PrivateDnsZoneName: privateDnsZoneName, + AName: aName, + } +} + +func (id ARecordId) String() string { + segments := []string{ + fmt.Sprintf("A Name %q", id.AName), + fmt.Sprintf("Private Dns Zone Name %q", id.PrivateDnsZoneName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "A Record", segmentsStr) +} + +func (id ARecordId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/privateDnsZones/%s/A/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.PrivateDnsZoneName, id.AName) +} + +// ARecordID parses a ARecord ID into an ARecordId struct +func ARecordID(input string) (*ARecordId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ARecordId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + 
return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.PrivateDnsZoneName, err = id.PopSegment("privateDnsZones"); err != nil { + return nil, err + } + if resourceId.AName, err = id.PopSegment("A"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/privatedns/parse/a_record_test.go b/azurerm/internal/services/privatedns/parse/a_record_test.go new file mode 100644 index 000000000000..911ca307fde5 --- /dev/null +++ b/azurerm/internal/services/privatedns/parse/a_record_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ARecordId{} + +func TestARecordIDFormatter(t *testing.T) { + actual := NewARecordID("12345678-1234-9876-4563-123456789012", "resGroup1", "privateDnsZone1", "eh1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/A/eh1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestARecordID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ARecordId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/", + Error: true, + }, + + { + // missing AName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/", + Error: true, + }, + + { + // missing value for AName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/A/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/A/eh1", + Expected: &ARecordId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + PrivateDnsZoneName: "privateDnsZone1", + AName: "eh1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/PRIVATEDNSZONES/PRIVATEDNSZONE1/A/EH1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ARecordID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + 
+ if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.PrivateDnsZoneName != v.Expected.PrivateDnsZoneName { + t.Fatalf("Expected %q but got %q for PrivateDnsZoneName", v.Expected.PrivateDnsZoneName, actual.PrivateDnsZoneName) + } + if actual.AName != v.Expected.AName { + t.Fatalf("Expected %q but got %q for AName", v.Expected.AName, actual.AName) + } + } +} diff --git a/azurerm/internal/services/privatedns/parse/aaaa_record.go b/azurerm/internal/services/privatedns/parse/aaaa_record.go new file mode 100644 index 000000000000..4a73fa057cf3 --- /dev/null +++ b/azurerm/internal/services/privatedns/parse/aaaa_record.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type AaaaRecordId struct { + SubscriptionId string + ResourceGroup string + PrivateDnsZoneName string + AAAAName string +} + +func NewAaaaRecordID(subscriptionId, resourceGroup, privateDnsZoneName, aAAAName string) AaaaRecordId { + return AaaaRecordId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + PrivateDnsZoneName: privateDnsZoneName, + AAAAName: aAAAName, + } +} + +func (id AaaaRecordId) String() string { + segments := []string{ + fmt.Sprintf("A A A A Name %q", id.AAAAName), + fmt.Sprintf("Private Dns Zone Name %q", id.PrivateDnsZoneName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Aaaa Record", segmentsStr) +} + +func (id AaaaRecordId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/privateDnsZones/%s/AAAA/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.PrivateDnsZoneName, id.AAAAName) +} + +// AaaaRecordID parses a AaaaRecord ID into an AaaaRecordId struct +func AaaaRecordID(input string) (*AaaaRecordId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := AaaaRecordId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.PrivateDnsZoneName, err = id.PopSegment("privateDnsZones"); err != nil { + return nil, err + } + if resourceId.AAAAName, err = id.PopSegment("AAAA"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/privatedns/parse/aaaa_record_test.go b/azurerm/internal/services/privatedns/parse/aaaa_record_test.go new file mode 100644 index 000000000000..79b624388c18 --- /dev/null +++ b/azurerm/internal/services/privatedns/parse/aaaa_record_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var 
_ resourceid.Formatter = AaaaRecordId{} + +func TestAaaaRecordIDFormatter(t *testing.T) { + actual := NewAaaaRecordID("12345678-1234-9876-4563-123456789012", "resGroup1", "privateDnsZone1", "eheh1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/AAAA/eheh1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestAaaaRecordID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *AaaaRecordId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/", + Error: true, + }, + + { + // missing AAAAName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/", + Error: true, + }, + + { + // missing value for AAAAName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/AAAA/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/AAAA/eheh1", + Expected: &AaaaRecordId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + PrivateDnsZoneName: "privateDnsZone1", + AAAAName: "eheh1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/PRIVATEDNSZONES/PRIVATEDNSZONE1/AAAA/EHEH1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := AaaaRecordID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.PrivateDnsZoneName != v.Expected.PrivateDnsZoneName { + t.Fatalf("Expected %q but got %q for PrivateDnsZoneName", v.Expected.PrivateDnsZoneName, actual.PrivateDnsZoneName) + } + if actual.AAAAName != v.Expected.AAAAName { + t.Fatalf("Expected %q but got %q for AAAAName", v.Expected.AAAAName, actual.AAAAName) + } + } +} diff --git a/azurerm/internal/services/privatedns/parse/cname_record.go b/azurerm/internal/services/privatedns/parse/cname_record.go new file mode 100644 index 000000000000..78320651e34f 
--- /dev/null +++ b/azurerm/internal/services/privatedns/parse/cname_record.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type CnameRecordId struct { + SubscriptionId string + ResourceGroup string + PrivateDnsZoneName string + CNAMEName string +} + +func NewCnameRecordID(subscriptionId, resourceGroup, privateDnsZoneName, cNAMEName string) CnameRecordId { + return CnameRecordId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + PrivateDnsZoneName: privateDnsZoneName, + CNAMEName: cNAMEName, + } +} + +func (id CnameRecordId) String() string { + segments := []string{ + fmt.Sprintf("C N A M E Name %q", id.CNAMEName), + fmt.Sprintf("Private Dns Zone Name %q", id.PrivateDnsZoneName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Cname Record", segmentsStr) +} + +func (id CnameRecordId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/privateDnsZones/%s/CNAME/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.PrivateDnsZoneName, id.CNAMEName) +} + +// CnameRecordID parses a CnameRecord ID into an CnameRecordId struct +func CnameRecordID(input string) (*CnameRecordId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := CnameRecordId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.PrivateDnsZoneName, err = id.PopSegment("privateDnsZones"); err != nil { + return nil, err + } + if resourceId.CNAMEName, err = id.PopSegment("CNAME"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/privatedns/parse/cname_record_test.go b/azurerm/internal/services/privatedns/parse/cname_record_test.go new file mode 100644 index 000000000000..6f5507ef04bd --- /dev/null +++ b/azurerm/internal/services/privatedns/parse/cname_record_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = CnameRecordId{} + +func TestCnameRecordIDFormatter(t *testing.T) { + actual := NewCnameRecordID("12345678-1234-9876-4563-123456789012", "resGroup1", "privateDnsZone1", "name1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/CNAME/name1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestCnameRecordID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *CnameRecordId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // 
missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/", + Error: true, + }, + + { + // missing CNAMEName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/", + Error: true, + }, + + { + // missing value for CNAMEName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/CNAME/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/CNAME/name1", + Expected: &CnameRecordId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + PrivateDnsZoneName: "privateDnsZone1", + CNAMEName: "name1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/PRIVATEDNSZONES/PRIVATEDNSZONE1/CNAME/NAME1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := CnameRecordID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.PrivateDnsZoneName != v.Expected.PrivateDnsZoneName { + t.Fatalf("Expected %q but got %q for PrivateDnsZoneName", v.Expected.PrivateDnsZoneName, actual.PrivateDnsZoneName) + } + if actual.CNAMEName != v.Expected.CNAMEName { + t.Fatalf("Expected %q but got %q for CNAMEName", v.Expected.CNAMEName, actual.CNAMEName) + } + } +} diff --git a/azurerm/internal/services/privatedns/parse/mx_record.go b/azurerm/internal/services/privatedns/parse/mx_record.go new file mode 100644 index 000000000000..97d436721c9a --- /dev/null +++ b/azurerm/internal/services/privatedns/parse/mx_record.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type MxRecordId struct { + SubscriptionId string + ResourceGroup string + PrivateDnsZoneName string + MXName string +} + +func NewMxRecordID(subscriptionId, resourceGroup, privateDnsZoneName, mXName string) MxRecordId { + return MxRecordId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + PrivateDnsZoneName: privateDnsZoneName, + MXName: mXName, + } +} + +func (id MxRecordId) String() string { + segments := []string{ + fmt.Sprintf("M X Name %q", 
id.MXName), + fmt.Sprintf("Private Dns Zone Name %q", id.PrivateDnsZoneName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Mx Record", segmentsStr) +} + +func (id MxRecordId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/privateDnsZones/%s/MX/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.PrivateDnsZoneName, id.MXName) +} + +// MxRecordID parses a MxRecord ID into an MxRecordId struct +func MxRecordID(input string) (*MxRecordId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := MxRecordId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.PrivateDnsZoneName, err = id.PopSegment("privateDnsZones"); err != nil { + return nil, err + } + if resourceId.MXName, err = id.PopSegment("MX"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/privatedns/parse/mx_record_test.go b/azurerm/internal/services/privatedns/parse/mx_record_test.go new file mode 100644 index 000000000000..aa620ccc9dcd --- /dev/null +++ b/azurerm/internal/services/privatedns/parse/mx_record_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = MxRecordId{} + +func TestMxRecordIDFormatter(t *testing.T) { + actual := NewMxRecordID("12345678-1234-9876-4563-123456789012", "resGroup1", "privateDnsZone1", "mx1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/MX/mx1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestMxRecordID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *MxRecordId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/", + Error: true, + }, + + { + // missing MXName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/", + Error: true, + }, + + { + // missing value for MXName + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/MX/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/MX/mx1", + Expected: &MxRecordId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + PrivateDnsZoneName: "privateDnsZone1", + MXName: "mx1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/PRIVATEDNSZONES/PRIVATEDNSZONE1/MX/MX1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := MxRecordID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.PrivateDnsZoneName != v.Expected.PrivateDnsZoneName { + t.Fatalf("Expected %q but got %q for PrivateDnsZoneName", v.Expected.PrivateDnsZoneName, actual.PrivateDnsZoneName) + } + if actual.MXName != v.Expected.MXName { + t.Fatalf("Expected %q but got %q for MXName", v.Expected.MXName, actual.MXName) + } + } +} diff --git a/azurerm/internal/services/privatedns/parse/private_dns_zone.go b/azurerm/internal/services/privatedns/parse/private_dns_zone.go new file mode 100644 index 000000000000..5fc3cfa26c3e --- /dev/null +++ b/azurerm/internal/services/privatedns/parse/private_dns_zone.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type PrivateDnsZoneId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewPrivateDnsZoneID(subscriptionId, resourceGroup, name string) PrivateDnsZoneId { + return PrivateDnsZoneId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id PrivateDnsZoneId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Private Dns Zone", segmentsStr) +} + +func (id PrivateDnsZoneId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/privateDnsZones/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// PrivateDnsZoneID parses a PrivateDnsZone ID into an PrivateDnsZoneId struct +func PrivateDnsZoneID(input string) (*PrivateDnsZoneId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := PrivateDnsZoneId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 
'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("privateDnsZones"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/privatedns/parse/private_dns_zone_test.go b/azurerm/internal/services/privatedns/parse/private_dns_zone_test.go new file mode 100644 index 000000000000..d2508cf8b80a --- /dev/null +++ b/azurerm/internal/services/privatedns/parse/private_dns_zone_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = PrivateDnsZoneId{} + +func TestPrivateDnsZoneIDFormatter(t *testing.T) { + actual := NewPrivateDnsZoneID("12345678-1234-9876-4563-123456789012", "resGroup1", "privateDnsZone1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestPrivateDnsZoneID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *PrivateDnsZoneId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1", + Expected: &PrivateDnsZoneId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "privateDnsZone1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/PRIVATEDNSZONES/PRIVATEDNSZONE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := PrivateDnsZoneID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/privatedns/parse/ptr_record.go 
b/azurerm/internal/services/privatedns/parse/ptr_record.go new file mode 100644 index 000000000000..db1f4f4542b6 --- /dev/null +++ b/azurerm/internal/services/privatedns/parse/ptr_record.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type PtrRecordId struct { + SubscriptionId string + ResourceGroup string + PrivateDnsZoneName string + PTRName string +} + +func NewPtrRecordID(subscriptionId, resourceGroup, privateDnsZoneName, pTRName string) PtrRecordId { + return PtrRecordId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + PrivateDnsZoneName: privateDnsZoneName, + PTRName: pTRName, + } +} + +func (id PtrRecordId) String() string { + segments := []string{ + fmt.Sprintf("P T R Name %q", id.PTRName), + fmt.Sprintf("Private Dns Zone Name %q", id.PrivateDnsZoneName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Ptr Record", segmentsStr) +} + +func (id PtrRecordId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/privateDnsZones/%s/PTR/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.PrivateDnsZoneName, id.PTRName) +} + +// PtrRecordID parses a PtrRecord ID into an PtrRecordId struct +func PtrRecordID(input string) (*PtrRecordId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := PtrRecordId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.PrivateDnsZoneName, err = id.PopSegment("privateDnsZones"); err != nil { + return nil, err + } + if resourceId.PTRName, err = id.PopSegment("PTR"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/privatedns/parse/ptr_record_test.go b/azurerm/internal/services/privatedns/parse/ptr_record_test.go new file mode 100644 index 000000000000..50f292bfc0ab --- /dev/null +++ b/azurerm/internal/services/privatedns/parse/ptr_record_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = PtrRecordId{} + +func TestPtrRecordIDFormatter(t *testing.T) { + actual := NewPtrRecordID("12345678-1234-9876-4563-123456789012", "resGroup1", "privateDnsZone1", "ptr1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/PTR/ptr1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestPtrRecordID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *PtrRecordId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: 
"/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/", + Error: true, + }, + + { + // missing PTRName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/", + Error: true, + }, + + { + // missing value for PTRName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/PTR/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/PTR/ptr1", + Expected: &PtrRecordId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + PrivateDnsZoneName: "privateDnsZone1", + PTRName: "ptr1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/PRIVATEDNSZONES/PRIVATEDNSZONE1/PTR/PTR1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := PtrRecordID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.PrivateDnsZoneName != v.Expected.PrivateDnsZoneName { + t.Fatalf("Expected %q but got %q for PrivateDnsZoneName", v.Expected.PrivateDnsZoneName, actual.PrivateDnsZoneName) + } + if actual.PTRName != v.Expected.PTRName { + t.Fatalf("Expected %q but got %q for PTRName", v.Expected.PTRName, actual.PTRName) + } + } +} diff --git a/azurerm/internal/services/privatedns/parse/srv_record.go b/azurerm/internal/services/privatedns/parse/srv_record.go new file mode 100644 index 000000000000..d639a7f1ce7b --- /dev/null +++ b/azurerm/internal/services/privatedns/parse/srv_record.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type SrvRecordId struct { + SubscriptionId string + ResourceGroup string + PrivateDnsZoneName string + SRVName string +} + +func NewSrvRecordID(subscriptionId, resourceGroup, privateDnsZoneName, sRVName string) SrvRecordId { + return SrvRecordId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + PrivateDnsZoneName: privateDnsZoneName, + SRVName: sRVName, + } +} + +func (id SrvRecordId) String() string { + segments := []string{ 
+ fmt.Sprintf("S R V Name %q", id.SRVName), + fmt.Sprintf("Private Dns Zone Name %q", id.PrivateDnsZoneName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Srv Record", segmentsStr) +} + +func (id SrvRecordId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/privateDnsZones/%s/SRV/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.PrivateDnsZoneName, id.SRVName) +} + +// SrvRecordID parses a SrvRecord ID into an SrvRecordId struct +func SrvRecordID(input string) (*SrvRecordId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := SrvRecordId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.PrivateDnsZoneName, err = id.PopSegment("privateDnsZones"); err != nil { + return nil, err + } + if resourceId.SRVName, err = id.PopSegment("SRV"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/privatedns/parse/srv_record_test.go b/azurerm/internal/services/privatedns/parse/srv_record_test.go new file mode 100644 index 000000000000..11213501d40d --- /dev/null +++ b/azurerm/internal/services/privatedns/parse/srv_record_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = SrvRecordId{} + +func TestSrvRecordIDFormatter(t *testing.T) { + actual := NewSrvRecordID("12345678-1234-9876-4563-123456789012", "resGroup1", "privateDnsZone1", "srv1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/SRV/srv1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestSrvRecordID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *SrvRecordId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/", + Error: true, + }, + + { + // missing SRVName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/", + Error: true, + }, + + { + // 
missing value for SRVName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/SRV/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/SRV/srv1", + Expected: &SrvRecordId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + PrivateDnsZoneName: "privateDnsZone1", + SRVName: "srv1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/PRIVATEDNSZONES/PRIVATEDNSZONE1/SRV/SRV1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := SrvRecordID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.PrivateDnsZoneName != v.Expected.PrivateDnsZoneName { + t.Fatalf("Expected %q but got %q for PrivateDnsZoneName", v.Expected.PrivateDnsZoneName, actual.PrivateDnsZoneName) + } + if actual.SRVName != v.Expected.SRVName { + t.Fatalf("Expected %q but got %q for SRVName", v.Expected.SRVName, actual.SRVName) + } + } +} diff --git a/azurerm/internal/services/privatedns/parse/txt_record.go b/azurerm/internal/services/privatedns/parse/txt_record.go new file mode 100644 index 000000000000..4fec4128c567 --- /dev/null +++ b/azurerm/internal/services/privatedns/parse/txt_record.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type TxtRecordId struct { + SubscriptionId string + ResourceGroup string + PrivateDnsZoneName string + TXTName string +} + +func NewTxtRecordID(subscriptionId, resourceGroup, privateDnsZoneName, tXTName string) TxtRecordId { + return TxtRecordId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + PrivateDnsZoneName: privateDnsZoneName, + TXTName: tXTName, + } +} + +func (id TxtRecordId) String() string { + segments := []string{ + fmt.Sprintf("T X T Name %q", id.TXTName), + fmt.Sprintf("Private Dns Zone Name %q", id.PrivateDnsZoneName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Txt Record", segmentsStr) +} + +func (id TxtRecordId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/privateDnsZones/%s/TXT/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.PrivateDnsZoneName, id.TXTName) +} + +// TxtRecordID parses a TxtRecord ID into an TxtRecordId struct +func TxtRecordID(input string) (*TxtRecordId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := TxtRecordId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == 
"" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.PrivateDnsZoneName, err = id.PopSegment("privateDnsZones"); err != nil { + return nil, err + } + if resourceId.TXTName, err = id.PopSegment("TXT"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/privatedns/parse/txt_record_test.go b/azurerm/internal/services/privatedns/parse/txt_record_test.go new file mode 100644 index 000000000000..9cd6557885e6 --- /dev/null +++ b/azurerm/internal/services/privatedns/parse/txt_record_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = TxtRecordId{} + +func TestTxtRecordIDFormatter(t *testing.T) { + actual := NewTxtRecordID("12345678-1234-9876-4563-123456789012", "resGroup1", "privateDnsZone1", "txt1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/TXT/txt1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestTxtRecordID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *TxtRecordId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/", + Error: true, + }, + + { + // missing TXTName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/", + Error: true, + }, + + { + // missing value for TXTName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/TXT/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/TXT/txt1", + Expected: &TxtRecordId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + PrivateDnsZoneName: "privateDnsZone1", + TXTName: "txt1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/PRIVATEDNSZONES/PRIVATEDNSZONE1/TXT/TXT1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := TxtRecordID(v.Input) + if err != 
nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.PrivateDnsZoneName != v.Expected.PrivateDnsZoneName { + t.Fatalf("Expected %q but got %q for PrivateDnsZoneName", v.Expected.PrivateDnsZoneName, actual.PrivateDnsZoneName) + } + if actual.TXTName != v.Expected.TXTName { + t.Fatalf("Expected %q but got %q for TXTName", v.Expected.TXTName, actual.TXTName) + } + } +} diff --git a/azurerm/internal/services/privatedns/parse/virtual_network_link.go b/azurerm/internal/services/privatedns/parse/virtual_network_link.go new file mode 100644 index 000000000000..43f541a2dc88 --- /dev/null +++ b/azurerm/internal/services/privatedns/parse/virtual_network_link.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type VirtualNetworkLinkId struct { + SubscriptionId string + ResourceGroup string + PrivateDnsZoneName string + Name string +} + +func NewVirtualNetworkLinkID(subscriptionId, resourceGroup, privateDnsZoneName, name string) VirtualNetworkLinkId { + return VirtualNetworkLinkId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + PrivateDnsZoneName: privateDnsZoneName, + Name: name, + } +} + +func (id VirtualNetworkLinkId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Private Dns Zone Name %q", id.PrivateDnsZoneName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Virtual Network Link", segmentsStr) +} + +func (id VirtualNetworkLinkId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/privateDnsZones/%s/virtualNetworkLinks/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.PrivateDnsZoneName, id.Name) +} + +// VirtualNetworkLinkID parses a VirtualNetworkLink ID into an VirtualNetworkLinkId struct +func VirtualNetworkLinkID(input string) (*VirtualNetworkLinkId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := VirtualNetworkLinkId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.PrivateDnsZoneName, err = id.PopSegment("privateDnsZones"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("virtualNetworkLinks"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/privatedns/parse/virtual_network_link_test.go b/azurerm/internal/services/privatedns/parse/virtual_network_link_test.go new file mode 100644 index 000000000000..5055e2f287f6 --- 
/dev/null +++ b/azurerm/internal/services/privatedns/parse/virtual_network_link_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = VirtualNetworkLinkId{} + +func TestVirtualNetworkLinkIDFormatter(t *testing.T) { + actual := NewVirtualNetworkLinkID("12345678-1234-9876-4563-123456789012", "resGroup1", "privateDnsZone1", "virtualNetworkLink1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/virtualNetworkLinks/virtualNetworkLink1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestVirtualNetworkLinkID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *VirtualNetworkLinkId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Error: true, + }, + + { + // missing value for PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/virtualNetworkLinks/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/virtualNetworkLinks/virtualNetworkLink1", + Expected: &VirtualNetworkLinkId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + PrivateDnsZoneName: "privateDnsZone1", + Name: "virtualNetworkLink1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/PRIVATEDNSZONES/PRIVATEDNSZONE1/VIRTUALNETWORKLINKS/VIRTUALNETWORKLINK1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := VirtualNetworkLinkID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if 
actual.PrivateDnsZoneName != v.Expected.PrivateDnsZoneName { + t.Fatalf("Expected %q but got %q for PrivateDnsZoneName", v.Expected.PrivateDnsZoneName, actual.PrivateDnsZoneName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/privatedns/private_dns_a_record_resource.go b/azurerm/internal/services/privatedns/private_dns_a_record_resource.go index 5ffc7a1650bc..ccc1d2678721 100644 --- a/azurerm/internal/services/privatedns/private_dns_a_record_resource.go +++ b/azurerm/internal/services/privatedns/private_dns_a_record_resource.go @@ -2,7 +2,6 @@ package privatedns import ( "fmt" - "net/http" "time" "github.com/Azure/azure-sdk-for-go/services/privatedns/mgmt/2018-09-01/privatedns" @@ -11,20 +10,23 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmPrivateDnsARecord() *schema.Resource { +func resourcePrivateDnsARecord() *schema.Resource { return &schema.Resource{ - Create: resourceArmPrivateDnsARecordCreateUpdate, - Read: resourceArmPrivateDnsARecordRead, - Update: resourceArmPrivateDnsARecordCreateUpdate, - Delete: resourceArmPrivateDnsARecordDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, + Create: resourcePrivateDnsARecordCreateUpdate, + Read: resourcePrivateDnsARecordRead, + Update: resourcePrivateDnsARecordCreateUpdate, + Delete: resourcePrivateDnsARecordDelete, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.ARecordID(id) + return err + }), Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -72,24 +74,22 @@ func resourceArmPrivateDnsARecord() *schema.Resource { } } -func resourceArmPrivateDnsARecordCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourcePrivateDnsARecordCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).PrivateDns.RecordSetsClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - name := d.Get("name").(string) - resGroup := d.Get("resource_group_name").(string) - zoneName := d.Get("zone_name").(string) - + resourceId := parse.NewARecordID(subscriptionId, d.Get("resource_group_name").(string), d.Get("zone_name").(string), d.Get("name").(string)) if d.IsNewResource() { - existing, err := client.Get(ctx, resGroup, zoneName, privatedns.A, name) + existing, err := client.Get(ctx, resourceId.ResourceGroup, resourceId.PrivateDnsZoneName, privatedns.A, resourceId.AName) if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing Private DNS A Record %q (Private Zone %q / Resource Group %q): %s", name, zoneName, resGroup, err) + return 
fmt.Errorf("checking for presence of existing %s: %+v", resourceId, err) } } - if existing.ID != nil && *existing.ID != "" { + if !utils.ResponseWasNotFound(existing.Response) { return tf.ImportAsExistsError("azurerm_private_dns_a_record", *existing.ID) } } @@ -98,7 +98,7 @@ func resourceArmPrivateDnsARecordCreateUpdate(d *schema.ResourceData, meta inter t := d.Get("tags").(map[string]interface{}) parameters := privatedns.RecordSet{ - Name: &name, + Name: utils.String(resourceId.AName), RecordSetProperties: &privatedns.RecordSetProperties{ Metadata: tags.Expand(t), TTL: &ttl, @@ -108,76 +108,59 @@ func resourceArmPrivateDnsARecordCreateUpdate(d *schema.ResourceData, meta inter eTag := "" ifNoneMatch := "" // set to empty to allow updates to records after creation - if _, err := client.CreateOrUpdate(ctx, resGroup, zoneName, privatedns.A, name, parameters, eTag, ifNoneMatch); err != nil { - return fmt.Errorf("Error creating/updating Private DNS A Record %q (Zone %q / Resource Group %q): %s", name, zoneName, resGroup, err) - } - - resp, err := client.Get(ctx, resGroup, zoneName, privatedns.A, name) - if err != nil { - return fmt.Errorf("Error retrieving Private DNS A Record %q (Zone %q / Resource Group %q): %s", name, zoneName, resGroup, err) - } - - if resp.ID == nil { - return fmt.Errorf("Cannot read Private DNS A Record %s (resource group %s) ID", name, resGroup) + if _, err := client.CreateOrUpdate(ctx, resourceId.ResourceGroup, resourceId.PrivateDnsZoneName, privatedns.A, resourceId.AName, parameters, eTag, ifNoneMatch); err != nil { + return fmt.Errorf("creating/updating %s: %+v", resourceId, err) } - d.SetId(*resp.ID) - - return resourceArmPrivateDnsARecordRead(d, meta) + d.SetId(resourceId.ID()) + return resourcePrivateDnsARecordRead(d, meta) } -func resourceArmPrivateDnsARecordRead(d *schema.ResourceData, meta interface{}) error { +func resourcePrivateDnsARecordRead(d *schema.ResourceData, meta interface{}) error { dnsClient := meta.(*clients.Client).PrivateDns.RecordSetsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.ARecordID(d.Id()) if err != nil { return err } - resGroup := id.ResourceGroup - name := id.Path["A"] - zoneName := id.Path["privateDnsZones"] - - resp, err := dnsClient.Get(ctx, resGroup, zoneName, privatedns.A, name) + resp, err := dnsClient.Get(ctx, id.ResourceGroup, id.PrivateDnsZoneName, privatedns.A, id.AName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("Error reading Private DNS A record %s: %+v", name, err) + return fmt.Errorf("retrieving %s: %+v", id, err) } - d.Set("name", name) - d.Set("resource_group_name", resGroup) - d.Set("zone_name", zoneName) + d.Set("name", id.AName) + d.Set("zone_name", id.PrivateDnsZoneName) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("ttl", resp.TTL) d.Set("fqdn", resp.Fqdn) if err := d.Set("records", flattenAzureRmPrivateDnsARecords(resp.ARecords)); err != nil { return err } + return tags.FlattenAndSet(d, resp.Metadata) } -func resourceArmPrivateDnsARecordDelete(d *schema.ResourceData, meta interface{}) error { +func resourcePrivateDnsARecordDelete(d *schema.ResourceData, meta interface{}) error { dnsClient := meta.(*clients.Client).PrivateDns.RecordSetsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.ARecordID(d.Id()) if err != nil { 
return err } - resGroup := id.ResourceGroup - name := id.Path["A"] - zoneName := id.Path["privateDnsZones"] - - resp, err := dnsClient.Delete(ctx, resGroup, zoneName, privatedns.A, name, "") - if resp.StatusCode != http.StatusOK { - return fmt.Errorf("Error deleting Private DNS A Record %s: %+v", name, err) + if _, err := dnsClient.Delete(ctx, id.ResourceGroup, id.PrivateDnsZoneName, privatedns.A, id.AName, ""); err != nil { + return fmt.Errorf("deleting %s: %+v", id, err) } return nil diff --git a/azurerm/internal/services/privatedns/private_dns_a_record_resource_test.go b/azurerm/internal/services/privatedns/private_dns_a_record_resource_test.go new file mode 100644 index 000000000000..ef132fcc3a2c --- /dev/null +++ b/azurerm/internal/services/privatedns/private_dns_a_record_resource_test.go @@ -0,0 +1,233 @@ +package privatedns_test + +import ( + "context" + "fmt" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/privatedns/mgmt/2018-09-01/privatedns" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type PrivateDnsARecordResource struct { +} + +func TestAccPrivateDnsARecord_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_a_record", "test") + r := PrivateDnsARecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("fqdn").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccPrivateDnsARecord_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_a_record", "test") + r := PrivateDnsARecordResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccPrivateDnsARecord_updateRecords(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_a_record", "test") + r := PrivateDnsARecordResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("records.#").HasValue("2"), + ), + }, + { + Config: r.updateRecords(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("records.#").HasValue("3"), + ), + }, + }) +} + +func TestAccPrivateDnsARecord_withTags(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_a_record", "test") + r := PrivateDnsARecordResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("2"), + ), + }, + { + Config: r.withTagsUpdate(data), + Check: resource.ComposeTestCheckFunc( 
+ check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func (t PrivateDnsARecordResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.ARecordID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.PrivateDns.RecordSetsClient.Get(ctx, id.ResourceGroup, id.PrivateDnsZoneName, privatedns.A, id.AName) + if err != nil { + return nil, fmt.Errorf("reading Private DNS A Record (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (PrivateDnsARecordResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_a_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 300 + records = ["1.2.3.4", "1.2.4.5"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r PrivateDnsARecordResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_private_dns_a_record" "import" { + name = azurerm_private_dns_a_record.test.name + resource_group_name = azurerm_private_dns_a_record.test.resource_group_name + zone_name = azurerm_private_dns_a_record.test.zone_name + ttl = 300 + records = ["1.2.3.4", "1.2.4.5"] +} +`, r.basic(data)) +} + +func (PrivateDnsARecordResource) updateRecords(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_a_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 300 + records = ["1.2.3.4", "1.2.4.5", "1.2.3.7"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (PrivateDnsARecordResource) withTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_a_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 300 + records = ["1.2.3.4", "1.2.4.5"] + + tags = { + environment = "Production" + cost_center = "MSFT" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (PrivateDnsARecordResource) withTagsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "acctestzone%d.com" +
resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_a_record" "test" { + name = "myarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 300 + records = ["1.2.3.4", "1.2.4.5"] + + tags = { + environment = "staging" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/privatedns/private_dns_aaaa_record_resource.go b/azurerm/internal/services/privatedns/private_dns_aaaa_record_resource.go index 5ac0bc8b06aa..4bce6860dc74 100644 --- a/azurerm/internal/services/privatedns/private_dns_aaaa_record_resource.go +++ b/azurerm/internal/services/privatedns/private_dns_aaaa_record_resource.go @@ -2,7 +2,6 @@ package privatedns import ( "fmt" - "net/http" "time" "github.com/Azure/azure-sdk-for-go/services/privatedns/mgmt/2018-09-01/privatedns" @@ -10,20 +9,23 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmPrivateDnsAaaaRecord() *schema.Resource { +func resourcePrivateDnsAaaaRecord() *schema.Resource { return &schema.Resource{ - Create: resourceArmPrivateDnsAaaaRecordCreateUpdate, - Read: resourceArmPrivateDnsAaaaRecordRead, - Update: resourceArmPrivateDnsAaaaRecordCreateUpdate, - Delete: resourceArmPrivateDnsAaaaRecordDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, + Create: resourcePrivateDnsAaaaRecordCreateUpdate, + Read: resourcePrivateDnsAaaaRecordRead, + Update: resourcePrivateDnsAaaaRecordCreateUpdate, + Delete: resourcePrivateDnsAaaaRecordDelete, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.AaaaRecordID(id) + return err + }), Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -69,24 +71,22 @@ func resourceArmPrivateDnsAaaaRecord() *schema.Resource { } } -func resourceArmPrivateDnsAaaaRecordCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourcePrivateDnsAaaaRecordCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).PrivateDns.RecordSetsClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - name := d.Get("name").(string) - resGroup := d.Get("resource_group_name").(string) - zoneName := d.Get("zone_name").(string) - + resourceId := parse.NewAaaaRecordID(subscriptionId, d.Get("resource_group_name").(string), d.Get("zone_name").(string), d.Get("name").(string)) if d.IsNewResource() { - existing, err := client.Get(ctx, resGroup, zoneName, privatedns.AAAA, name) + existing, err := client.Get(ctx, resourceId.ResourceGroup, resourceId.PrivateDnsZoneName, privatedns.AAAA, resourceId.AAAAName) if err != nil { if 
!utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing Private DNS AAAA Record %q (Private Zone %q / Resource Group %q): %s", name, zoneName, resGroup, err) + return fmt.Errorf("checking for presence of existing %s: %+v", resourceId, err) } } - if existing.ID != nil && *existing.ID != "" { + if !utils.ResponseWasNotFound(existing.Response) { return tf.ImportAsExistsError("azurerm_private_dns_aaaa_record", *existing.ID) } } @@ -95,7 +95,7 @@ func resourceArmPrivateDnsAaaaRecordCreateUpdate(d *schema.ResourceData, meta in t := d.Get("tags").(map[string]interface{}) parameters := privatedns.RecordSet{ - Name: &name, + Name: utils.String(resourceId.AAAAName), RecordSetProperties: &privatedns.RecordSetProperties{ Metadata: tags.Expand(t), TTL: &ttl, @@ -105,50 +105,37 @@ func resourceArmPrivateDnsAaaaRecordCreateUpdate(d *schema.ResourceData, meta in eTag := "" ifNoneMatch := "" // set to empty to allow updates to records after creation - if _, err := client.CreateOrUpdate(ctx, resGroup, zoneName, privatedns.AAAA, name, parameters, eTag, ifNoneMatch); err != nil { - return fmt.Errorf("Error creating/updating Private DNS AAAA Record %q (Zone %q / Resource Group %q): %s", name, zoneName, resGroup, err) - } - - resp, err := client.Get(ctx, resGroup, zoneName, privatedns.AAAA, name) - if err != nil { - return fmt.Errorf("Error retrieving Private DNS AAAA Record %q (Zone %q / Resource Group %q): %s", name, zoneName, resGroup, err) + if _, err := client.CreateOrUpdate(ctx, resourceId.ResourceGroup, resourceId.PrivateDnsZoneName, privatedns.AAAA, resourceId.AAAAName, parameters, eTag, ifNoneMatch); err != nil { + return fmt.Errorf("creating/updating %s: %+v", resourceId, err) } - if resp.ID == nil { - return fmt.Errorf("Cannot read Private DNS AAAA Record %s (resource group %s) ID", name, resGroup) - } - - d.SetId(*resp.ID) - - return resourceArmPrivateDnsAaaaRecordRead(d, meta) + d.SetId(resourceId.ID()) + return resourcePrivateDnsAaaaRecordRead(d, meta) } -func resourceArmPrivateDnsAaaaRecordRead(d *schema.ResourceData, meta interface{}) error { +func resourcePrivateDnsAaaaRecordRead(d *schema.ResourceData, meta interface{}) error { dnsClient := meta.(*clients.Client).PrivateDns.RecordSetsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.AaaaRecordID(d.Id()) if err != nil { return err } - resGroup := id.ResourceGroup - name := id.Path["AAAA"] - zoneName := id.Path["privateDnsZones"] - - resp, err := dnsClient.Get(ctx, resGroup, zoneName, privatedns.AAAA, name) + resp, err := dnsClient.Get(ctx, id.ResourceGroup, id.PrivateDnsZoneName, privatedns.AAAA, id.AAAAName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("Error reading Private DNS AAAA record %s: %+v", name, err) + return fmt.Errorf("retrieving %s: %+v", id, err) } - d.Set("name", name) - d.Set("resource_group_name", resGroup) - d.Set("zone_name", zoneName) + d.Set("name", id.AAAAName) + d.Set("zone_name", id.PrivateDnsZoneName) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("ttl", resp.TTL) d.Set("fqdn", resp.Fqdn) @@ -158,23 +145,18 @@ func resourceArmPrivateDnsAaaaRecordRead(d *schema.ResourceData, meta interface{ return tags.FlattenAndSet(d, resp.Metadata) } -func resourceArmPrivateDnsAaaaRecordDelete(d *schema.ResourceData, meta interface{}) error { +func resourcePrivateDnsAaaaRecordDelete(d 
*schema.ResourceData, meta interface{}) error { dnsClient := meta.(*clients.Client).PrivateDns.RecordSetsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.AaaaRecordID(d.Id()) if err != nil { return err } - resGroup := id.ResourceGroup - name := id.Path["AAAA"] - zoneName := id.Path["privateDnsZones"] - - resp, err := dnsClient.Delete(ctx, resGroup, zoneName, privatedns.AAAA, name, "") - if resp.StatusCode != http.StatusOK { - return fmt.Errorf("Error deleting Private DNS AAAA Record %s: %+v", name, err) + if _, err := dnsClient.Delete(ctx, id.ResourceGroup, id.PrivateDnsZoneName, privatedns.AAAA, id.AAAAName, ""); err != nil { + return fmt.Errorf("deleting %s: %+v", id, err) } return nil diff --git a/azurerm/internal/services/privatedns/private_dns_aaaa_record_resource_test.go b/azurerm/internal/services/privatedns/private_dns_aaaa_record_resource_test.go new file mode 100644 index 000000000000..50e36f3b6d47 --- /dev/null +++ b/azurerm/internal/services/privatedns/private_dns_aaaa_record_resource_test.go @@ -0,0 +1,233 @@ +package privatedns_test + +import ( + "context" + "fmt" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/privatedns/mgmt/2018-09-01/privatedns" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type PrivateDnsAAAARecordResource struct { +} + +func TestAccPrivateDnsAaaaRecord_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_aaaa_record", "test") + r := PrivateDnsAAAARecordResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("fqdn").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccPrivateDnsAaaaRecord_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_aaaa_record", "test") + r := PrivateDnsAAAARecordResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccPrivateDnsAaaaRecord_updateRecords(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_aaaa_record", "test") + r := PrivateDnsAAAARecordResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("records.#").HasValue("2"), + ), + }, + { + Config: r.updateRecords(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("records.#").HasValue("3"), + ), + }, + }) +} + +func TestAccPrivateDnsAaaaRecord_withTags(t *testing.T) { + data := acceptance.BuildTestData(t, 
"azurerm_private_dns_aaaa_record", "test") + r := PrivateDnsAAAARecordResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("2"), + ), + }, + { + Config: r.withTagsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func (t PrivateDnsAAAARecordResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.AaaaRecordID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.PrivateDns.RecordSetsClient.Get(ctx, id.ResourceGroup, id.PrivateDnsZoneName, privatedns.AAAA, id.AAAAName) + if err != nil { + return nil, fmt.Errorf("reading Private DNS AAAA Record (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (PrivateDnsAAAARecordResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_aaaa_record" "test" { + name = "myaaaarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 300 + records = ["fd5d:70bc:930e:d008:0000:0000:0000:7334", "fd5d:70bc:930e:d008::7335"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r PrivateDnsAAAARecordResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_private_dns_aaaa_record" "import" { + name = azurerm_private_dns_aaaa_record.test.name + resource_group_name = azurerm_private_dns_aaaa_record.test.resource_group_name + zone_name = azurerm_private_dns_aaaa_record.test.zone_name + ttl = 300 + records = ["fd5d:70bc:930e:d008:0000:0000:0000:7334", "fd5d:70bc:930e:d008::7335"] +} +`, r.basic(data)) +} + +func (PrivateDnsAAAARecordResource) updateRecords(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_aaaa_record" "test" { + name = "myaaaarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 300 + records = ["fd5d:70bc:930e:d008:0000:0000:0000:7334", "fd5d:70bc:930e:d008::7335", "fd73:5e76:3ab5:d2e9::1"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (PrivateDnsAAAARecordResource) withTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_aaaa_record" "test" { + name = "myaaaarecord%d" + 
resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 300 + records = ["fd5d:70bc:930e:d008:0000:0000:0000:7334", "fd5d:70bc:930e:d008::7335"] + + tags = { + environment = "Production" + cost_center = "MSFT" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (PrivateDnsAAAARecordResource) withTagsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_aaaa_record" "test" { + name = "myaaaarecord%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 300 + records = ["fd5d:70bc:930e:d008:0000:0000:0000:7334", "fd5d:70bc:930e:d008::7335"] + + tags = { + environment = "staging" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/privatedns/private_dns_cname_record_resource.go b/azurerm/internal/services/privatedns/private_dns_cname_record_resource.go index 06ba9c4df53e..896d34753586 100644 --- a/azurerm/internal/services/privatedns/private_dns_cname_record_resource.go +++ b/azurerm/internal/services/privatedns/private_dns_cname_record_resource.go @@ -11,20 +11,23 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmPrivateDnsCNameRecord() *schema.Resource { +func resourcePrivateDnsCNameRecord() *schema.Resource { return &schema.Resource{ - Create: resourceArmPrivateDnsCNameRecordCreateUpdate, - Read: resourceArmPrivateDnsCNameRecordRead, - Update: resourceArmPrivateDnsCNameRecordCreateUpdate, - Delete: resourceArmPrivateDnsCNameRecordDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, + Create: resourcePrivateDnsCNameRecordCreateUpdate, + Read: resourcePrivateDnsCNameRecordRead, + Update: resourcePrivateDnsCNameRecordCreateUpdate, + Delete: resourcePrivateDnsCNameRecordDelete, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.CnameRecordID(id) + return err + }), Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -74,24 +77,22 @@ func resourceArmPrivateDnsCNameRecord() *schema.Resource { } } -func resourceArmPrivateDnsCNameRecordCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourcePrivateDnsCNameRecordCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).PrivateDns.RecordSetsClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := 
timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - name := d.Get("name").(string) - resGroup := d.Get("resource_group_name").(string) - zoneName := d.Get("zone_name").(string) - + resourceId := parse.NewCnameRecordID(subscriptionId, d.Get("resource_group_name").(string), d.Get("zone_name").(string), d.Get("name").(string)) if d.IsNewResource() { - existing, err := client.Get(ctx, resGroup, zoneName, privatedns.CNAME, name) + existing, err := client.Get(ctx, resourceId.ResourceGroup, resourceId.PrivateDnsZoneName, privatedns.CNAME, resourceId.CNAMEName) if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing Private DNS CNAME Record %q (Zone %q / Resource Group %q): %s", name, zoneName, resGroup, err) + return fmt.Errorf("checking for presence of existing %s: %+v", resourceId, err) } } - if existing.ID != nil && *existing.ID != "" { + if !utils.ResponseWasNotFound(existing.Response) { return tf.ImportAsExistsError("azurerm_private_dns_cname_record", *existing.ID) } } @@ -101,7 +102,7 @@ func resourceArmPrivateDnsCNameRecordCreateUpdate(d *schema.ResourceData, meta i t := d.Get("tags").(map[string]interface{}) parameters := privatedns.RecordSet{ - Name: &name, + Name: utils.String(resourceId.CNAMEName), RecordSetProperties: &privatedns.RecordSetProperties{ Metadata: tags.Expand(t), TTL: &ttl, @@ -113,50 +114,38 @@ func resourceArmPrivateDnsCNameRecordCreateUpdate(d *schema.ResourceData, meta i eTag := "" ifNoneMatch := "" // set to empty to allow updates to records after creation - if _, err := client.CreateOrUpdate(ctx, resGroup, zoneName, privatedns.CNAME, name, parameters, eTag, ifNoneMatch); err != nil { - return fmt.Errorf("Error creating/updating Private DNS CNAME Record %q (Zone %q / Resource Group %q): %s", name, zoneName, resGroup, err) - } - - resp, err := client.Get(ctx, resGroup, zoneName, privatedns.CNAME, name) - if err != nil { - return fmt.Errorf("Error retrieving Private DNS CNAME Record %q (Zone %q / Resource Group %q): %s", name, zoneName, resGroup, err) - } - - if resp.ID == nil { - return fmt.Errorf("Cannot read Private DNS CNAME Record %s (resource group %s) ID", name, resGroup) + if _, err := client.CreateOrUpdate(ctx, resourceId.ResourceGroup, resourceId.PrivateDnsZoneName, privatedns.CNAME, resourceId.CNAMEName, parameters, eTag, ifNoneMatch); err != nil { + return fmt.Errorf("creating/updating %s: %+v", resourceId, err) } - d.SetId(*resp.ID) - - return resourceArmPrivateDnsCNameRecordRead(d, meta) + d.SetId(resourceId.ID()) + return resourcePrivateDnsCNameRecordRead(d, meta) } -func resourceArmPrivateDnsCNameRecordRead(d *schema.ResourceData, meta interface{}) error { +func resourcePrivateDnsCNameRecordRead(d *schema.ResourceData, meta interface{}) error { dnsClient := meta.(*clients.Client).PrivateDns.RecordSetsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.CnameRecordID(d.Id()) if err != nil { return err } - resGroup := id.ResourceGroup - name := id.Path["CNAME"] - zoneName := id.Path["privateDnsZones"] - - resp, err := dnsClient.Get(ctx, resGroup, zoneName, privatedns.CNAME, name) + resp, err := dnsClient.Get(ctx, id.ResourceGroup, id.PrivateDnsZoneName, privatedns.CNAME, id.CNAMEName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("Error reading Private DNS CNAME record %s: %+v", name, err) + 
+ return fmt.Errorf("retrieving %s: %+v", id, err) } - d.Set("name", name) - d.Set("resource_group_name", resGroup) - d.Set("zone_name", zoneName) + d.Set("name", id.CNAMEName) + d.Set("zone_name", id.PrivateDnsZoneName) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("ttl", resp.TTL) d.Set("fqdn", resp.Fqdn) @@ -169,23 +158,18 @@ func resourceArmPrivateDnsCNameRecordRead(d *schema.ResourceData, meta interface return tags.FlattenAndSet(d, resp.Metadata) } -func resourceArmPrivateDnsCNameRecordDelete(d *schema.ResourceData, meta interface{}) error { +func resourcePrivateDnsCNameRecordDelete(d *schema.ResourceData, meta interface{}) error { dnsClient := meta.(*clients.Client).PrivateDns.RecordSetsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.CnameRecordID(d.Id()) if err != nil { return err } - resGroup := id.ResourceGroup - name := id.Path["CNAME"] - zoneName := id.Path["privateDnsZones"] - - _, err = dnsClient.Delete(ctx, resGroup, zoneName, privatedns.CNAME, name, "") - if err != nil { - return fmt.Errorf("Error deleting Private DNS CNAME Record %s: %+v", name, err) + if _, err = dnsClient.Delete(ctx, id.ResourceGroup, id.PrivateDnsZoneName, privatedns.CNAME, id.CNAMEName, ""); err != nil { + return fmt.Errorf("deleting %s: %+v", id, err) } return nil diff --git a/azurerm/internal/services/privatedns/private_dns_cname_record_resource_test.go b/azurerm/internal/services/privatedns/private_dns_cname_record_resource_test.go new file mode 100644 index 000000000000..aada48725b6e --- /dev/null +++ b/azurerm/internal/services/privatedns/private_dns_cname_record_resource_test.go @@ -0,0 +1,271 @@ +package privatedns_test + +import ( + "context" + "fmt" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/privatedns/mgmt/2018-09-01/privatedns" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type PrivateDnsCNameRecordResource struct { +} + +func TestAccPrivateDnsCNameRecord_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_cname_record", "test") + r := PrivateDnsCNameRecordResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("fqdn").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccPrivateDnsCNameRecord_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_cname_record", "test") + r := PrivateDnsCNameRecordResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccPrivateDnsCNameRecord_subdomain(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_cname_record", "test") + r := 
PrivateDnsCNameRecordResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.subdomain(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("record").HasValue("test.contoso.com"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccPrivateDnsCNameRecord_updateRecords(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_cname_record", "test") + r := PrivateDnsCNameRecordResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.updateRecords(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + }) +} + +func TestAccPrivateDnsCNameRecord_withTags(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_cname_record", "test") + r := PrivateDnsCNameRecordResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("2"), + ), + }, + { + Config: r.withTagsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func (t PrivateDnsCNameRecordResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.CnameRecordID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.PrivateDns.RecordSetsClient.Get(ctx, id.ResourceGroup, id.PrivateDnsZoneName, privatedns.CNAME, id.CNAMEName) + if err != nil { + return nil, fmt.Errorf("reading Private DNS CName Record (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (PrivateDnsCNameRecordResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_cname_record" "test" { + name = "acctestcname%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 300 + record = "contoso.com" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r PrivateDnsCNameRecordResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_private_dns_cname_record" "import" { + name = azurerm_private_dns_cname_record.test.name + resource_group_name = azurerm_private_dns_cname_record.test.resource_group_name + zone_name = azurerm_private_dns_cname_record.test.zone_name + ttl = 300 + record = "contoso.com" +} +`, r.basic(data)) +} + +func (PrivateDnsCNameRecordResource) subdomain(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_cname_record" "test" { + 
name = "acctestcname%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 300 + record = "test.contoso.com" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (PrivateDnsCNameRecordResource) updateRecords(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_cname_record" "test" { + name = "acctestcname%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 300 + record = "contoso.com" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (PrivateDnsCNameRecordResource) withTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_cname_record" "test" { + name = "acctestcname%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 300 + record = "contoso.com" + + tags = { + environment = "Production" + cost_center = "MSFT" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (PrivateDnsCNameRecordResource) withTagsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_cname_record" "test" { + name = "acctestcname%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 300 + record = "contoso.com" + + tags = { + environment = "staging" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/privatedns/private_dns_mx_record_resource.go b/azurerm/internal/services/privatedns/private_dns_mx_record_resource.go index 82f5aa07747d..059b8223cd78 100644 --- a/azurerm/internal/services/privatedns/private_dns_mx_record_resource.go +++ b/azurerm/internal/services/privatedns/private_dns_mx_record_resource.go @@ -11,20 +11,23 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func 
resourceArmPrivateDnsMxRecord() *schema.Resource { +func resourcePrivateDnsMxRecord() *schema.Resource { return &schema.Resource{ - Create: resourceArmPrivateDnsMxRecordCreateUpdate, - Read: resourceArmPrivateDnsMxRecordRead, - Update: resourceArmPrivateDnsMxRecordCreateUpdate, - Delete: resourceArmPrivateDnsMxRecordDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, + Create: resourcePrivateDnsMxRecordCreateUpdate, + Read: resourcePrivateDnsMxRecordRead, + Update: resourcePrivateDnsMxRecordCreateUpdate, + Delete: resourcePrivateDnsMxRecordDelete, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.MxRecordID(id) + return err + }), Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -90,24 +93,22 @@ func resourceArmPrivateDnsMxRecord() *schema.Resource { } } -func resourceArmPrivateDnsMxRecordCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourcePrivateDnsMxRecordCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).PrivateDns.RecordSetsClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - name := d.Get("name").(string) - resGroup := d.Get("resource_group_name").(string) - zoneName := d.Get("zone_name").(string) - + resourceId := parse.NewMxRecordID(subscriptionId, d.Get("resource_group_name").(string), d.Get("zone_name").(string), d.Get("name").(string)) if d.IsNewResource() { - existing, err := client.Get(ctx, resGroup, zoneName, privatedns.MX, name) + existing, err := client.Get(ctx, resourceId.ResourceGroup, resourceId.PrivateDnsZoneName, privatedns.MX, resourceId.MXName) if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing Private DNS MX Record %q (Zone %q / Resource Group %q): %s", name, zoneName, resGroup, err) + return fmt.Errorf("checking for presence of existing %s: %+v", resourceId, err) } } - if existing.ID != nil && *existing.ID != "" { + if !utils.ResponseWasNotFound(existing.Response) { return tf.ImportAsExistsError("azurerm_private_dns_mx_record", *existing.ID) } } @@ -116,7 +117,7 @@ func resourceArmPrivateDnsMxRecordCreateUpdate(d *schema.ResourceData, meta inte t := d.Get("tags").(map[string]interface{}) parameters := privatedns.RecordSet{ - Name: &name, + Name: utils.String(resourceId.MXName), RecordSetProperties: &privatedns.RecordSetProperties{ Metadata: tags.Expand(t), TTL: &ttl, @@ -124,50 +125,38 @@ func resourceArmPrivateDnsMxRecordCreateUpdate(d *schema.ResourceData, meta inte }, } - if _, err := client.CreateOrUpdate(ctx, resGroup, zoneName, privatedns.MX, name, parameters, "", ""); err != nil { - return fmt.Errorf("Error creating/updating Private DNS MX Record %q (Zone %q / Resource Group %q): %s", name, zoneName, resGroup, err) - } - - resp, err := client.Get(ctx, resGroup, zoneName, privatedns.MX, name) - if err != nil { - return fmt.Errorf("Error retrieving Private DNS MX Record %q (Zone %q / Resource Group %q): %s", name, zoneName, resGroup, err) - } - - if resp.ID == nil { - return fmt.Errorf("Cannot read Private DNS MX Record %s (resource group %s) ID", name, resGroup) + if _, err := client.CreateOrUpdate(ctx, resourceId.ResourceGroup, resourceId.PrivateDnsZoneName, privatedns.MX, resourceId.MXName, parameters, "", ""); err != nil { + return fmt.Errorf("creating/updating %s: %+v", 
resourceId, err) } - d.SetId(*resp.ID) - - return resourceArmPrivateDnsMxRecordRead(d, meta) + d.SetId(resourceId.ID()) + return resourcePrivateDnsMxRecordRead(d, meta) } -func resourceArmPrivateDnsMxRecordRead(d *schema.ResourceData, meta interface{}) error { +func resourcePrivateDnsMxRecordRead(d *schema.ResourceData, meta interface{}) error { dnsClient := meta.(*clients.Client).PrivateDns.RecordSetsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.MxRecordID(d.Id()) if err != nil { return err } - resGroup := id.ResourceGroup - name := id.Path["MX"] - zoneName := id.Path["privateDnsZones"] - - resp, err := dnsClient.Get(ctx, resGroup, zoneName, privatedns.MX, name) + resp, err := dnsClient.Get(ctx, id.ResourceGroup, id.PrivateDnsZoneName, privatedns.MX, id.MXName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("Error reading Private DNS MX record %s: %+v", name, err) + + return fmt.Errorf("retrieving %s: %+v", id, err) } - d.Set("name", name) - d.Set("resource_group_name", resGroup) - d.Set("zone_name", zoneName) + d.Set("name", id.MXName) + d.Set("zone_name", id.PrivateDnsZoneName) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("ttl", resp.TTL) d.Set("fqdn", resp.Fqdn) @@ -178,22 +167,18 @@ func resourceArmPrivateDnsMxRecordRead(d *schema.ResourceData, meta interface{}) return tags.FlattenAndSet(d, resp.Metadata) } -func resourceArmPrivateDnsMxRecordDelete(d *schema.ResourceData, meta interface{}) error { +func resourcePrivateDnsMxRecordDelete(d *schema.ResourceData, meta interface{}) error { dnsClient := meta.(*clients.Client).PrivateDns.RecordSetsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.MxRecordID(d.Id()) if err != nil { return err } - resGroup := id.ResourceGroup - name := id.Path["MX"] - zoneName := id.Path["privateDnsZones"] - - if _, err = dnsClient.Delete(ctx, resGroup, zoneName, privatedns.MX, name, ""); err != nil { - return fmt.Errorf("Error deleting Private DNS MX Record %s: %+v", name, err) + if _, err = dnsClient.Delete(ctx, id.ResourceGroup, id.PrivateDnsZoneName, privatedns.MX, id.MXName, ""); err != nil { + return fmt.Errorf("deleting %s: %+v", id, err) } return nil diff --git a/azurerm/internal/services/privatedns/private_dns_mx_record_resource_test.go b/azurerm/internal/services/privatedns/private_dns_mx_record_resource_test.go new file mode 100644 index 000000000000..3758af96ef24 --- /dev/null +++ b/azurerm/internal/services/privatedns/private_dns_mx_record_resource_test.go @@ -0,0 +1,319 @@ +package privatedns_test + +import ( + "context" + "fmt" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/privatedns/mgmt/2018-09-01/privatedns" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type PrivateDnsMxRecordResource struct { +} + +func 
TestAccPrivateDnsMxRecord_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_mx_record", "test") + r := PrivateDnsMxRecordResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("fqdn").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccPrivateDnsMxRecord_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_mx_record", "test") + r := PrivateDnsMxRecordResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccPrivateDnsMxRecord_updateRecords(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_mx_record", "test") + r := PrivateDnsMxRecordResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("record.#").HasValue("2"), + ), + }, + { + Config: r.updateRecords(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("record.#").HasValue("3"), + ), + }, + }) +} + +func TestAccPrivateDnsMxRecord_withTags(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_mx_record", "test") + r := PrivateDnsMxRecordResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("2"), + ), + }, + { + Config: r.withTagsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccPrivateDnsMxRecord_emptyName(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_mx_record", "test") + r := PrivateDnsMxRecordResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.emptyName(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t PrivateDnsMxRecordResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.MxRecordID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.PrivateDns.RecordSetsClient.Get(ctx, id.ResourceGroup, id.PrivateDnsZoneName, privatedns.MX, id.MXName) + if err != nil { + return nil, fmt.Errorf("reading Private DNS MX Record (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (PrivateDnsMxRecordResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-prvdns-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "testzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_mx_record" "test" { + name = "testaccmx%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 
300 + record { + preference = 10 + exchange = "mx1.contoso.com" + } + + record { + preference = 10 + exchange = "mx2.contoso.com" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (PrivateDnsMxRecordResource) emptyName(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-prvdns-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "testzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_mx_record" "test" { + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 300 + record { + preference = 10 + exchange = "mx1.contoso.com" + } + + record { + preference = 10 + exchange = "mx2.contoso.com" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r PrivateDnsMxRecordResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_private_dns_mx_record" "import" { + name = azurerm_private_dns_mx_record.test.name + resource_group_name = azurerm_private_dns_mx_record.test.resource_group_name + zone_name = azurerm_private_dns_mx_record.test.zone_name + ttl = 300 + record { + preference = 10 + exchange = "mx1.contoso.com" + } + record { + preference = 10 + exchange = "mx2.contoso.com" + } +} +`, r.basic(data)) +} + +func (PrivateDnsMxRecordResource) updateRecords(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-prvdns-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "testzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_mx_record" "test" { + name = "testaccmx%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 300 + record { + preference = 10 + exchange = "mx1.contoso.com" + } + record { + preference = 10 + exchange = "mx2.contoso.com" + } + record { + preference = 20 + exchange = "backupmx.contoso.com" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (PrivateDnsMxRecordResource) withTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-prvdns-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "testzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_mx_record" "test" { + name = "testaccmx%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 300 + record { + preference = 10 + exchange = "mx1.contoso.com" + } + record { + preference = 10 + exchange = "mx2.contoso.com" + } + + tags = { + environment = "Production" + cost_center = "MSFT" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (PrivateDnsMxRecordResource) withTagsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-prvdns-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "testzone%d.com" + 
resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_mx_record" "test" { + name = "testaccmx%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 300 + record { + preference = 10 + exchange = "mx1.contoso.com" + } + record { + preference = 10 + exchange = "mx2.contoso.com" + } + + tags = { + environment = "staging" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/privatedns/private_dns_ptr_record_resource.go b/azurerm/internal/services/privatedns/private_dns_ptr_record_resource.go index 8b280f956978..fedf2085b5d3 100644 --- a/azurerm/internal/services/privatedns/private_dns_ptr_record_resource.go +++ b/azurerm/internal/services/privatedns/private_dns_ptr_record_resource.go @@ -11,20 +11,23 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmPrivateDnsPtrRecord() *schema.Resource { +func resourcePrivateDnsPtrRecord() *schema.Resource { return &schema.Resource{ - Create: resourceArmPrivateDnsPtrRecordCreateUpdate, - Read: resourceArmPrivateDnsPtrRecordRead, - Update: resourceArmPrivateDnsPtrRecordCreateUpdate, - Delete: resourceArmPrivateDnsPtrRecordDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, + Create: resourcePrivateDnsPtrRecordCreateUpdate, + Read: resourcePrivateDnsPtrRecordRead, + Update: resourcePrivateDnsPtrRecordCreateUpdate, + Delete: resourcePrivateDnsPtrRecordDelete, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.PtrRecordID(id) + return err + }), Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -75,24 +78,22 @@ func resourceArmPrivateDnsPtrRecord() *schema.Resource { } } -func resourceArmPrivateDnsPtrRecordCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourcePrivateDnsPtrRecordCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).PrivateDns.RecordSetsClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - name := d.Get("name").(string) - resGroup := d.Get("resource_group_name").(string) - zoneName := d.Get("zone_name").(string) - + resourceId := parse.NewPtrRecordID(subscriptionId, d.Get("resource_group_name").(string), d.Get("zone_name").(string), d.Get("name").(string)) if d.IsNewResource() { - existing, err := client.Get(ctx, resGroup, zoneName, privatedns.PTR, name) + existing, err := client.Get(ctx, resourceId.ResourceGroup, resourceId.PrivateDnsZoneName, privatedns.PTR, resourceId.PTRName) if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing Private 
DNS PTR Record %q (Zone %q / Resource Group %q): %s", name, zoneName, resGroup, err) + return fmt.Errorf("checking for presence of existing %s: %+v", resourceId, err) } } - if existing.ID != nil && *existing.ID != "" { + if !utils.ResponseWasNotFound(existing.Response) { return tf.ImportAsExistsError("azurerm_private_dns_ptr_record", *existing.ID) } } @@ -101,7 +102,7 @@ func resourceArmPrivateDnsPtrRecordCreateUpdate(d *schema.ResourceData, meta int t := d.Get("tags").(map[string]interface{}) parameters := privatedns.RecordSet{ - Name: &name, + Name: utils.String(resourceId.PTRName), RecordSetProperties: &privatedns.RecordSetProperties{ Metadata: tags.Expand(t), TTL: &ttl, @@ -111,79 +112,61 @@ func resourceArmPrivateDnsPtrRecordCreateUpdate(d *schema.ResourceData, meta int eTag := "" ifNoneMatch := "" // set to empty to allow updates to records after creation - if _, err := client.CreateOrUpdate(ctx, resGroup, zoneName, privatedns.PTR, name, parameters, eTag, ifNoneMatch); err != nil { - return fmt.Errorf("Error creating/updating Private DNS PTR Record %q (Zone %q / Resource Group %q): %s", name, zoneName, resGroup, err) - } - - resp, err := client.Get(ctx, resGroup, zoneName, privatedns.PTR, name) - if err != nil { - return fmt.Errorf("Error retrieving Private DNS PTR Record %q (Zone %q / Resource Group %q): %s", name, zoneName, resGroup, err) - } - - if resp.ID == nil { - return fmt.Errorf("Cannot read Private DNS PTR Record %s (resource group %s) ID", name, resGroup) + if _, err := client.CreateOrUpdate(ctx, resourceId.ResourceGroup, resourceId.PrivateDnsZoneName, privatedns.PTR, resourceId.PTRName, parameters, eTag, ifNoneMatch); err != nil { + return fmt.Errorf("creating/updating %s: %+v", resourceId, err) } - d.SetId(*resp.ID) - - return resourceArmPrivateDnsPtrRecordRead(d, meta) + d.SetId(resourceId.ID()) + return resourcePrivateDnsPtrRecordRead(d, meta) } -func resourceArmPrivateDnsPtrRecordRead(d *schema.ResourceData, meta interface{}) error { +func resourcePrivateDnsPtrRecordRead(d *schema.ResourceData, meta interface{}) error { dnsClient := meta.(*clients.Client).PrivateDns.RecordSetsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.PtrRecordID(d.Id()) if err != nil { return err } - resGroup := id.ResourceGroup - name := id.Path["PTR"] - zoneName := id.Path["privateDnsZones"] - - resp, err := dnsClient.Get(ctx, resGroup, zoneName, privatedns.PTR, name) + resp, err := dnsClient.Get(ctx, id.ResourceGroup, id.PrivateDnsZoneName, privatedns.PTR, id.PTRName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("Error reading Private DNS PTR record %s: %+v", name, err) + return fmt.Errorf("retrieving %s: %+v", id, err) } - d.Set("name", name) - d.Set("resource_group_name", resGroup) - d.Set("zone_name", zoneName) + d.Set("name", id.PTRName) + d.Set("zone_name", id.PrivateDnsZoneName) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("ttl", resp.TTL) d.Set("fqdn", resp.Fqdn) if props := resp.RecordSetProperties; props != nil { - if err := d.Set("records", flattenAzureRmPrivateDnsPtrRecords(resp.PtrRecords)); err != nil { - return fmt.Errorf("Error setting `records`: %+v", err) + if err := d.Set("records", flattenAzureRmPrivateDnsPtrRecords(props.PtrRecords)); err != nil { + return fmt.Errorf("setting `records`: %+v", err) } } return tags.FlattenAndSet(d, resp.Metadata) } -func resourceArmPrivateDnsPtrRecordDelete(d 
*schema.ResourceData, meta interface{}) error { +func resourcePrivateDnsPtrRecordDelete(d *schema.ResourceData, meta interface{}) error { dnsClient := meta.(*clients.Client).PrivateDns.RecordSetsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.PtrRecordID(d.Id()) if err != nil { return err } - resGroup := id.ResourceGroup - name := id.Path["PTR"] - zoneName := id.Path["privateDnsZones"] - - _, err = dnsClient.Delete(ctx, resGroup, zoneName, privatedns.PTR, name, "") - if err != nil { - return fmt.Errorf("Error deleting Private DNS PTR Record %s: %+v", name, err) + if _, err = dnsClient.Delete(ctx, id.ResourceGroup, id.PrivateDnsZoneName, privatedns.PTR, id.PTRName, ""); err != nil { + return fmt.Errorf("deleting %s: %+v", id, err) } return nil diff --git a/azurerm/internal/services/privatedns/private_dns_ptr_record_resource_test.go b/azurerm/internal/services/privatedns/private_dns_ptr_record_resource_test.go new file mode 100644 index 000000000000..c4abe6ab2a2e --- /dev/null +++ b/azurerm/internal/services/privatedns/private_dns_ptr_record_resource_test.go @@ -0,0 +1,232 @@ +package privatedns_test + +import ( + "context" + "fmt" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/privatedns/mgmt/2018-09-01/privatedns" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type PrivateDnsPtrRecordResource struct { +} + +func TestAccPrivateDnsPtrRecord_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_ptr_record", "test") + r := PrivateDnsPtrRecordResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("fqdn").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccPrivateDnsPtrRecord_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_ptr_record", "test") + r := PrivateDnsPtrRecordResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccPrivateDnsPtrRecord_updateRecords(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_ptr_record", "test") + r := PrivateDnsPtrRecordResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("records.#").HasValue("2"), + ), + }, + { + Config: r.updateRecords(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("records.#").HasValue("3"), + ), + }, + }) +} + +func TestAccPrivateDnsPtrRecord_withTags(t *testing.T) { + data := 
acceptance.BuildTestData(t, "azurerm_private_dns_ptr_record", "test") + r := PrivateDnsPtrRecordResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("2"), + ), + }, + { + Config: r.withTagsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func (t PrivateDnsPtrRecordResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.PtrRecordID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.PrivateDns.RecordSetsClient.Get(ctx, id.ResourceGroup, id.PrivateDnsZoneName, privatedns.PTR, id.PTRName) + if err != nil { + return nil, fmt.Errorf("reading Private DNS PTR Record (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (PrivateDnsPtrRecordResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "%d.0.10.in-addr.arpa" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_ptr_record" "test" { + name = "%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 300 + records = ["test.contoso.com", "test2.contoso.com"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r PrivateDnsPtrRecordResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_private_dns_ptr_record" "import" { + name = azurerm_private_dns_ptr_record.test.name + resource_group_name = azurerm_private_dns_ptr_record.test.resource_group_name + zone_name = azurerm_private_dns_ptr_record.test.zone_name + ttl = 300 + records = ["test.contoso.com", "test2.contoso.com"] +} +`, r.basic(data)) +} + +func (PrivateDnsPtrRecordResource) updateRecords(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "%d.0.10.in-addr.arpa" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_ptr_record" "test" { + name = "%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 300 + records = ["test.contoso.com", "test2.contoso.com", "test3.contoso.com"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (PrivateDnsPtrRecordResource) withTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "%d.0.10.in-addr.arpa" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_ptr_record" "test" { + name = "%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 300 + records = 
["test.contoso.com", "test2.contoso.com"] + + tags = { + environment = "Production" + cost_center = "MSFT" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (PrivateDnsPtrRecordResource) withTagsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "%d.0.10.in-addr.arpa" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_ptr_record" "test" { + name = "%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 300 + records = ["test.contoso.com", "test2.contoso.com"] + + tags = { + environment = "staging" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/privatedns/private_dns_srv_record_resource.go b/azurerm/internal/services/privatedns/private_dns_srv_record_resource.go index 3af9d653622e..e36b42a51d6c 100644 --- a/azurerm/internal/services/privatedns/private_dns_srv_record_resource.go +++ b/azurerm/internal/services/privatedns/private_dns_srv_record_resource.go @@ -11,20 +11,23 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmPrivateDnsSrvRecord() *schema.Resource { +func resourcePrivateDnsSrvRecord() *schema.Resource { return &schema.Resource{ - Create: resourceArmPrivateDnsSrvRecordCreateUpdate, - Read: resourceArmPrivateDnsSrvRecordRead, - Update: resourceArmPrivateDnsSrvRecordCreateUpdate, - Delete: resourceArmPrivateDnsSrvRecordDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, + Create: resourcePrivateDnsSrvRecordCreateUpdate, + Read: resourcePrivateDnsSrvRecordRead, + Update: resourcePrivateDnsSrvRecordCreateUpdate, + Delete: resourcePrivateDnsSrvRecordDelete, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.SrvRecordID(id) + return err + }), Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -100,25 +103,23 @@ func resourceArmPrivateDnsSrvRecord() *schema.Resource { } } -func resourceArmPrivateDnsSrvRecordCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourcePrivateDnsSrvRecordCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).PrivateDns.RecordSetsClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - name := d.Get("name").(string) - resGroup := d.Get("resource_group_name").(string) - zoneName := d.Get("zone_name").(string) - + resourceId := parse.NewSrvRecordID(subscriptionId, 
d.Get("resource_group_name").(string), d.Get("zone_name").(string), d.Get("name").(string)) if d.IsNewResource() { - existing, err := client.Get(ctx, resGroup, zoneName, privatedns.SRV, name) + existing, err := client.Get(ctx, resourceId.ResourceGroup, resourceId.PrivateDnsZoneName, privatedns.SRV, resourceId.SRVName) if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing Private DNS SRV Record %q (Zone %q / Resource Group %q): %s", name, zoneName, resGroup, err) + return fmt.Errorf("checking for presence of %s: %+v", resourceId, err) } } - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_private_dns_srv_record", *existing.ID) + if !utils.ResponseWasNotFound(existing.Response) { + return tf.ImportAsExistsError("azurerm_private_dns_srv_record", resourceId.ID()) } } @@ -126,7 +127,7 @@ func resourceArmPrivateDnsSrvRecordCreateUpdate(d *schema.ResourceData, meta int t := d.Get("tags").(map[string]interface{}) parameters := privatedns.RecordSet{ - Name: &name, + Name: utils.String(resourceId.SRVName), RecordSetProperties: &privatedns.RecordSetProperties{ Metadata: tags.Expand(t), TTL: &ttl, @@ -134,50 +135,37 @@ func resourceArmPrivateDnsSrvRecordCreateUpdate(d *schema.ResourceData, meta int }, } - if _, err := client.CreateOrUpdate(ctx, resGroup, zoneName, privatedns.SRV, name, parameters, "", ""); err != nil { - return fmt.Errorf("Error creating/updating Private DNS SRV Record %q (Zone %q / Resource Group %q): %s", name, zoneName, resGroup, err) - } - - resp, err := client.Get(ctx, resGroup, zoneName, privatedns.SRV, name) - if err != nil { - return fmt.Errorf("Error retrieving Private DNS SRV Record %q (Zone %q / Resource Group %q): %s", name, zoneName, resGroup, err) - } - - if resp.ID == nil { - return fmt.Errorf("Cannot read Private DNS SRV Record %s (resource group %s) ID", name, resGroup) + if _, err := client.CreateOrUpdate(ctx, resourceId.ResourceGroup, resourceId.PrivateDnsZoneName, privatedns.SRV, resourceId.SRVName, parameters, "", ""); err != nil { + return fmt.Errorf("creating/updating %s: %+v", resourceId, err) } - d.SetId(*resp.ID) - - return resourceArmPrivateDnsSrvRecordRead(d, meta) + d.SetId(resourceId.ID()) + return resourcePrivateDnsSrvRecordRead(d, meta) } -func resourceArmPrivateDnsSrvRecordRead(d *schema.ResourceData, meta interface{}) error { +func resourcePrivateDnsSrvRecordRead(d *schema.ResourceData, meta interface{}) error { dnsClient := meta.(*clients.Client).PrivateDns.RecordSetsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.SrvRecordID(d.Id()) if err != nil { return err } - resGroup := id.ResourceGroup - name := id.Path["SRV"] - zoneName := id.Path["privateDnsZones"] - - resp, err := dnsClient.Get(ctx, resGroup, zoneName, privatedns.SRV, name) + resp, err := dnsClient.Get(ctx, id.ResourceGroup, id.PrivateDnsZoneName, privatedns.SRV, id.SRVName) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("Error reading Private DNS SRV record %s: %+v", name, err) + return fmt.Errorf("retrieving %s: %+v", id, err) } - d.Set("name", name) - d.Set("resource_group_name", resGroup) - d.Set("zone_name", zoneName) + d.Set("name", id.SRVName) + d.Set("zone_name", id.PrivateDnsZoneName) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("ttl", resp.TTL) d.Set("fqdn", resp.Fqdn) @@ -188,23 +176,18 @@ 
func resourceArmPrivateDnsSrvRecordRead(d *schema.ResourceData, meta interface{} return tags.FlattenAndSet(d, resp.Metadata) } -func resourceArmPrivateDnsSrvRecordDelete(d *schema.ResourceData, meta interface{}) error { +func resourcePrivateDnsSrvRecordDelete(d *schema.ResourceData, meta interface{}) error { dnsClient := meta.(*clients.Client).PrivateDns.RecordSetsClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.SrvRecordID(d.Id()) if err != nil { return err } - resGroup := id.ResourceGroup - name := id.Path["SRV"] - zoneName := id.Path["privateDnsZones"] - - _, err = dnsClient.Delete(ctx, resGroup, zoneName, privatedns.SRV, name, "") - if err != nil { - return fmt.Errorf("Error deleting Private DNS SRV Record %s: %+v", name, err) + if _, err = dnsClient.Delete(ctx, id.ResourceGroup, id.PrivateDnsZoneName, privatedns.SRV, id.SRVName, ""); err != nil { + return fmt.Errorf("deleting %s: %+v", id, err) } return nil diff --git a/azurerm/internal/services/privatedns/private_dns_srv_record_resource_test.go b/azurerm/internal/services/privatedns/private_dns_srv_record_resource_test.go new file mode 100644 index 000000000000..25a28afa14fc --- /dev/null +++ b/azurerm/internal/services/privatedns/private_dns_srv_record_resource_test.go @@ -0,0 +1,294 @@ +package privatedns_test + +import ( + "context" + "fmt" + "testing" + + "github.com/Azure/azure-sdk-for-go/services/privatedns/mgmt/2018-09-01/privatedns" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type PrivateDnsSrvRecordResource struct { +} + +func TestAccPrivateDnsSrvRecord_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_srv_record", "test") + r := PrivateDnsSrvRecordResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("fqdn").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccPrivateDnsSrvRecord_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_srv_record", "test") + r := PrivateDnsSrvRecordResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccPrivateDnsSrvRecord_updateRecords(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_srv_record", "test") + r := PrivateDnsSrvRecordResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("record.#").HasValue("2"), + ), + }, + { + Config: r.updateRecords(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), 
+ check.That(data.ResourceName).Key("record.#").HasValue("3"), + ), + }, + }) +} + +func TestAccPrivateDnsSrvRecord_withTags(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_srv_record", "test") + r := PrivateDnsSrvRecordResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("2"), + ), + }, + { + Config: r.withTagsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func (t PrivateDnsSrvRecordResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.SrvRecordID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.PrivateDns.RecordSetsClient.Get(ctx, id.ResourceGroup, id.PrivateDnsZoneName, privatedns.SRV, id.SRVName) + if err != nil { + return nil, fmt.Errorf("reading Private DNS SRV Record (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (PrivateDnsSrvRecordResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-prvdns-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "testzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_srv_record" "test" { + name = "testaccsrv%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 300 + record { + priority = 1 + weight = 5 + port = 8080 + target = "target1.contoso.com" + } + + record { + priority = 10 + weight = 10 + port = 8080 + target = "target2.contoso.com" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r PrivateDnsSrvRecordResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_private_dns_srv_record" "import" { + name = azurerm_private_dns_srv_record.test.name + resource_group_name = azurerm_private_dns_srv_record.test.resource_group_name + zone_name = azurerm_private_dns_srv_record.test.zone_name + ttl = 300 + record { + priority = 1 + weight = 5 + port = 8080 + target = "target1.contoso.com" + } + record { + priority = 10 + weight = 10 + port = 8080 + target = "target2.contoso.com" + } +} +`, r.basic(data)) +} + +func (PrivateDnsSrvRecordResource) updateRecords(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "testzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_srv_record" "test" { + name = "test%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 300 + record { + priority = 1 + weight = 5 + port = 8080 + target = "target1.contoso.com" + } + record { + priority = 10 + weight = 10 + port = 8080 + target = "target2.contoso.com" + } + record { + priority = 20 + weight = 100 + port = 8080 + target = "target3.contoso.com" + } +} +`, data.RandomInteger, data.Locations.Primary, 
data.RandomInteger, data.RandomInteger) +} + +func (PrivateDnsSrvRecordResource) withTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "testzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_srv_record" "test" { + name = "test%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 300 + record { + priority = 1 + weight = 5 + port = 8080 + target = "target1.contoso.com" + } + record { + priority = 10 + weight = 10 + port = 8080 + target = "target2.contoso.com" + } + + tags = { + environment = "Production" + cost_center = "MSFT" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (PrivateDnsSrvRecordResource) withTagsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "testzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_srv_record" "test" { + name = "test%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 300 + record { + priority = 1 + weight = 5 + port = 8080 + target = "target1.contoso.com" + } + record { + priority = 10 + weight = 10 + port = 8080 + target = "target2.contoso.com" + } + + tags = { + environment = "staging" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/privatedns/private_dns_txt_record_resource.go b/azurerm/internal/services/privatedns/private_dns_txt_record_resource.go index cb48020a7ffa..c4e9470f7054 100644 --- a/azurerm/internal/services/privatedns/private_dns_txt_record_resource.go +++ b/azurerm/internal/services/privatedns/private_dns_txt_record_resource.go @@ -12,20 +12,23 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmPrivateDnsTxtRecord() *schema.Resource { +func resourcePrivateDnsTxtRecord() *schema.Resource { return &schema.Resource{ - Create: resourceArmPrivateDnsTxtRecordCreateUpdate, - Read: resourceArmPrivateDnsTxtRecordRead, - Update: resourceArmPrivateDnsTxtRecordCreateUpdate, - Delete: resourceArmPrivateDnsTxtRecordDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, + Create: resourcePrivateDnsTxtRecordCreateUpdate, + Read: resourcePrivateDnsTxtRecordRead, + Update: resourcePrivateDnsTxtRecordCreateUpdate, + Delete: resourcePrivateDnsTxtRecordDelete, + Importer: 
azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.TxtRecordID(id) + return err + }), Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -83,30 +86,28 @@ func resourceArmPrivateDnsTxtRecord() *schema.Resource { } } -func resourceArmPrivateDnsTxtRecordCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourcePrivateDnsTxtRecordCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).PrivateDns.RecordSetsClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - name := d.Get("name").(string) - resGroup := d.Get("resource_group_name").(string) - zoneName := d.Get("zone_name").(string) - + resourceId := parse.NewTxtRecordID(subscriptionId, d.Get("resource_group_name").(string), d.Get("zone_name").(string), d.Get("name").(string)) if d.IsNewResource() { - existing, err := client.Get(ctx, resGroup, zoneName, privatedns.TXT, name) + existing, err := client.Get(ctx, resourceId.ResourceGroup, resourceId.PrivateDnsZoneName, privatedns.TXT, resourceId.TXTName) if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing Private DNS TXT Record %q (Zone %q / Resource Group %q): %s", name, zoneName, resGroup, err) + return fmt.Errorf("checking for presence of %s: %+v", resourceId, err) } } - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_private_dns_txt_record", *existing.ID) + if !utils.ResponseWasNotFound(existing.Response) { + return tf.ImportAsExistsError("azurerm_private_dns_txt_record", resourceId.ID()) } } parameters := privatedns.RecordSet{ - Name: &name, + Name: utils.String(resourceId.TXTName), RecordSetProperties: &privatedns.RecordSetProperties{ Metadata: tags.Expand(d.Get("tags").(map[string]interface{})), TTL: utils.Int64(int64(d.Get("ttl").(int))), @@ -114,77 +115,59 @@ func resourceArmPrivateDnsTxtRecordCreateUpdate(d *schema.ResourceData, meta int }, } - if _, err := client.CreateOrUpdate(ctx, resGroup, zoneName, privatedns.TXT, name, parameters, "", ""); err != nil { - return fmt.Errorf("Error creating/updating Private DNS TXT Record %q (Zone %q / Resource Group %q): %s", name, zoneName, resGroup, err) - } - - resp, err := client.Get(ctx, resGroup, zoneName, privatedns.TXT, name) - if err != nil { - return fmt.Errorf("Error retrieving Private DNS TXT Record %q (Zone %q / Resource Group %q): %s", name, zoneName, resGroup, err) - } - - if resp.ID == nil { - return fmt.Errorf("Cannot read Private DNS TXT Record %s (resource group %s) ID", name, resGroup) + if _, err := client.CreateOrUpdate(ctx, resourceId.ResourceGroup, resourceId.PrivateDnsZoneName, privatedns.TXT, resourceId.TXTName, parameters, "", ""); err != nil { + return fmt.Errorf("creating/updating %s: %+v", resourceId, err) } - d.SetId(*resp.ID) - - return resourceArmPrivateDnsTxtRecordRead(d, meta) + d.SetId(resourceId.ID()) + return resourcePrivateDnsTxtRecordRead(d, meta) } -func resourceArmPrivateDnsTxtRecordRead(d *schema.ResourceData, meta interface{}) error { +func resourcePrivateDnsTxtRecordRead(d *schema.ResourceData, meta interface{}) error { dnsClient := meta.(*clients.Client).PrivateDns.RecordSetsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.TxtRecordID(d.Id()) if 
err != nil {
 		return err
 	}
 
-	resGroup := id.ResourceGroup
-	name := id.Path["TXT"]
-	zoneName := id.Path["privateDnsZones"]
-
-	resp, err := dnsClient.Get(ctx, resGroup, zoneName, privatedns.TXT, name)
+	resp, err := dnsClient.Get(ctx, id.ResourceGroup, id.PrivateDnsZoneName, privatedns.TXT, id.TXTName)
 	if err != nil {
 		if utils.ResponseWasNotFound(resp.Response) {
 			d.SetId("")
 			return nil
 		}
-		return fmt.Errorf("Error reading Private DNS TXT record %s: %+v", name, err)
+		return fmt.Errorf("retrieving %s: %+v", id, err)
 	}
 
-	d.Set("name", name)
-	d.Set("resource_group_name", resGroup)
-	d.Set("zone_name", zoneName)
+	d.Set("name", id.TXTName)
+	d.Set("resource_group_name", id.ResourceGroup)
+	d.Set("zone_name", id.PrivateDnsZoneName)
+
 	d.Set("ttl", resp.TTL)
 	d.Set("fqdn", resp.Fqdn)
 
 	if err := d.Set("record", flattenAzureRmPrivateDnsTxtRecords(resp.TxtRecords)); err != nil {
-		return fmt.Errorf("setting `record`: %s", err)
+		return fmt.Errorf("setting `record`: %+v", err)
 	}
 
 	return tags.FlattenAndSet(d, resp.Metadata)
 }
 
-func resourceArmPrivateDnsTxtRecordDelete(d *schema.ResourceData, meta interface{}) error {
+func resourcePrivateDnsTxtRecordDelete(d *schema.ResourceData, meta interface{}) error {
 	dnsClient := meta.(*clients.Client).PrivateDns.RecordSetsClient
 	ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d)
 	defer cancel()
 
-	id, err := azure.ParseAzureResourceID(d.Id())
+	id, err := parse.TxtRecordID(d.Id())
 	if err != nil {
 		return err
 	}
 
-	resGroup := id.ResourceGroup
-	name := id.Path["TXT"]
-	zoneName := id.Path["privateDnsZones"]
-
-	_, err = dnsClient.Delete(ctx, resGroup, zoneName, privatedns.TXT, name, "")
-	if err != nil {
-		return fmt.Errorf("Error deleting Private DNS TXT Record %s: %+v", name, err)
+	if _, err = dnsClient.Delete(ctx, id.ResourceGroup, id.PrivateDnsZoneName, privatedns.TXT, id.TXTName, ""); err != nil {
+		return fmt.Errorf("deleting %s: %+v", id, err)
 	}
 
 	return nil
diff --git a/azurerm/internal/services/privatedns/private_dns_txt_record_resource_test.go b/azurerm/internal/services/privatedns/private_dns_txt_record_resource_test.go
new file mode 100644
index 000000000000..f670052c5a89
--- /dev/null
+++ b/azurerm/internal/services/privatedns/private_dns_txt_record_resource_test.go
@@ -0,0 +1,282 @@
+package privatedns_test
+
+import (
+	"context"
+	"fmt"
+	"testing"
+
+	"github.com/Azure/azure-sdk-for-go/services/privatedns/mgmt/2018-09-01/privatedns"
+	"github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+	"github.com/hashicorp/terraform-plugin-sdk/terraform"
+	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance"
+	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check"
+	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
+	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/parse"
+	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
+)
+
+type PrivateDnsTxtRecordResource struct {
+}
+
+func TestAccPrivateDnsTxtRecord_basic(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_private_dns_txt_record", "test")
+	r := PrivateDnsTxtRecordResource{}
+	data.ResourceTest(t, r, []resource.TestStep{
+		{
+			Config: r.basic(data),
+			Check: resource.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+				check.That(data.ResourceName).Key("fqdn").Exists(),
+			),
+		},
+		data.ImportStep(),
+	})
+}
+
+func TestAccPrivateDnsTxtRecord_requiresImport(t
*testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_txt_record", "test") + r := PrivateDnsTxtRecordResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccPrivateDnsTxtRecord_updateRecords(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_txt_record", "test") + r := PrivateDnsTxtRecordResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("record.#").HasValue("2"), + ), + }, + data.ImportStep(), + { + Config: r.updateRecords(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("record.#").HasValue("3"), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("record.#").HasValue("2"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccPrivateDnsTxtRecord_withTags(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_txt_record", "test") + r := PrivateDnsTxtRecordResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("2"), + ), + }, + data.ImportStep(), + { + Config: r.withTagsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func (t PrivateDnsTxtRecordResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.TxtRecordID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.PrivateDns.RecordSetsClient.Get(ctx, id.ResourceGroup, id.PrivateDnsZoneName, privatedns.TXT, id.TXTName) + if err != nil { + return nil, fmt.Errorf("reading Private DNS TXT Record (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (PrivateDnsTxtRecordResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-prvdns-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "testzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_txt_record" "test" { + name = "testacctxt%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 300 + + record { + value = "Quick brown fox" + } + + record { + value = "A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long 
text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r PrivateDnsTxtRecordResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_private_dns_txt_record" "import" { + name = azurerm_private_dns_txt_record.test.name + resource_group_name = azurerm_private_dns_txt_record.test.resource_group_name + zone_name = azurerm_private_dns_txt_record.test.zone_name + ttl = 300 + + record { + value = "Quick brown fox" + } + + record { + value = "A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......" + } +} +`, r.basic(data)) +} + +func (PrivateDnsTxtRecordResource) updateRecords(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-prvdns-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "testzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_txt_record" "test" { + name = "testacctxt%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 300 + + record { + value = "Quick brown fox" + } + + record { + value = "A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......" 
+ } + + record { + value = "I'm a record too'" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (PrivateDnsTxtRecordResource) withTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-prvdns-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "testzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_txt_record" "test" { + name = "test%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 300 + + record { + value = "Quick brown fox" + } + + record { + value = "A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......" + } + + tags = { + environment = "Production" + cost_center = "MSFT" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (PrivateDnsTxtRecordResource) withTagsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-prvdns-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "testzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_txt_record" "test" { + name = "test%d" + resource_group_name = azurerm_resource_group.test.name + zone_name = azurerm_private_dns_zone.test.name + ttl = 300 + + record { + value = "Quick brown fox" + } + + record { + value = "A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......" 
+ } + + tags = { + environment = "staging" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/privatedns/private_dns_zone_data_source.go b/azurerm/internal/services/privatedns/private_dns_zone_data_source.go index 145f5caa1556..5be59ffaafe2 100644 --- a/azurerm/internal/services/privatedns/private_dns_zone_data_source.go +++ b/azurerm/internal/services/privatedns/private_dns_zone_data_source.go @@ -8,16 +8,16 @@ import ( "github.com/Azure/azure-sdk-for-go/services/privatedns/mgmt/2018-09-01/privatedns" "github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2020-06-01/resources" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func dataSourceArmPrivateDnsZone() *schema.Resource { +func dataSourcePrivateDnsZone() *schema.Resource { return &schema.Resource{ - Read: dataSourceArmPrivateDnsZoneRead, + Read: dataSourcePrivateDnsZoneRead, Timeouts: &schema.ResourceTimeout{ Read: schema.DefaultTimeout(5 * time.Minute), @@ -60,17 +60,16 @@ func dataSourceArmPrivateDnsZone() *schema.Resource { } } -func dataSourceArmPrivateDnsZoneRead(d *schema.ResourceData, meta interface{}) error { +func dataSourcePrivateDnsZoneRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).PrivateDns.PrivateZonesClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() name := d.Get("name").(string) resourceGroup := d.Get("resource_group_name").(string) - var ( - resp *privatedns.PrivateZone - ) + var resp *privatedns.PrivateZone if resourceGroup != "" { zone, err := client.Get(ctx, resourceGroup, name) if err != nil { @@ -96,10 +95,9 @@ func dataSourceArmPrivateDnsZoneRead(d *schema.ResourceData, meta interface{}) e resourceGroup = zone.resourceGroup } - if resp.ID == nil || *resp.ID == "" { - return fmt.Errorf("retrieving Private DNS Zone %q (Resource Group %q)", name, resourceGroup) - } - d.SetId(*resp.ID) + resourceId := parse.NewPrivateDnsZoneID(subscriptionId, resourceGroup, name) + d.SetId(resourceId.ID()) + d.Set("name", name) d.Set("resource_group_name", resourceGroup) @@ -121,7 +119,6 @@ type privateDnsZone struct { func findPrivateZone(ctx context.Context, client *privatedns.PrivateZonesClient, resourcesClient *resources.Client, name string) (*privateDnsZone, error) { filter := fmt.Sprintf("resourceType eq 'Microsoft.Network/privateDnsZones' and name eq '%s'", name) privateZones, err := resourcesClient.List(ctx, filter, "", nil) - if err != nil { return nil, fmt.Errorf("Error listing Private DNS Zones: %+v", err) } @@ -135,16 +132,14 @@ func findPrivateZone(ctx context.Context, client *privatedns.PrivateZonesClient, continue } - id, err := azure.ParseAzureResourceID(*z.ID) - + id, err := parse.PrivateDnsZoneID(*z.ID) if err != nil { continue } - zone, err := client.Get(ctx, id.ResourceGroup, name) - + zone, err := client.Get(ctx, id.ResourceGroup, id.Name) if err != nil { - 
return nil, fmt.Errorf("Error retrieving Private DNS Zone %q in resource group %q: %+v", name, id.ResourceGroup, err) + return nil, fmt.Errorf("retrieving %s: %+v", id, err) } return &privateDnsZone{ diff --git a/azurerm/internal/services/privatedns/private_dns_zone_data_source_test.go b/azurerm/internal/services/privatedns/private_dns_zone_data_source_test.go new file mode 100644 index 000000000000..376a04e1733d --- /dev/null +++ b/azurerm/internal/services/privatedns/private_dns_zone_data_source_test.go @@ -0,0 +1,138 @@ +package privatedns_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type PrivateDnsZoneDatasource struct { +} + +func TestAccDataSourcePrivateDNSZone_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_private_dns_zone", "test") + r := PrivateDnsZoneDatasource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), + }, + }) +} + +func TestAccDataSourcePrivateDNSZone_tags(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_private_dns_zone", "test") + r := PrivateDnsZoneDatasource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.tags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + check.That(data.ResourceName).Key("tags.hello").HasValue("world"), + ), + }, + }) +} + +func TestAccDataSourcePrivateDNSZone_withoutResourceGroupName(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_private_dns_zone", "test") + r := PrivateDnsZoneDatasource{} + resourceGroupName := fmt.Sprintf("acctestRG-%d", data.RandomInteger) + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.onlyNamePrep(data, resourceGroupName), + }, + { + Config: r.onlyName(data, resourceGroupName), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("resource_group_name").HasValue(resourceGroupName), + ), + }, + }) +} + +func (PrivateDnsZoneDatasource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "acctestzone%d.internal" + resource_group_name = azurerm_resource_group.test.name +} + +data "azurerm_private_dns_zone" "test" { + name = azurerm_private_dns_zone.test.name + resource_group_name = azurerm_private_dns_zone.test.resource_group_name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (PrivateDnsZoneDatasource) tags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "acctestzone%d.internal" + resource_group_name = azurerm_resource_group.test.name + + tags = { + hello = "world" + } +} + +data "azurerm_private_dns_zone" "test" { + name = azurerm_private_dns_zone.test.name + resource_group_name = azurerm_private_dns_zone.test.resource_group_name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (PrivateDnsZoneDatasource) 
onlyNamePrep(data acceptance.TestData, resourceGroupName string) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "%s" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "acctestzone%d.internal" + resource_group_name = azurerm_resource_group.test.name +} +`, resourceGroupName, data.Locations.Primary, data.RandomInteger) +} + +func (r PrivateDnsZoneDatasource) onlyName(data acceptance.TestData, resourceGroupName string) string { + return fmt.Sprintf(` +%s + +data "azurerm_private_dns_zone" "test" { + name = azurerm_private_dns_zone.test.name +} +`, r.onlyNamePrep(data, resourceGroupName)) +} diff --git a/azurerm/internal/services/privatedns/private_dns_zone_resource.go b/azurerm/internal/services/privatedns/private_dns_zone_resource.go index 862189f943bf..fb32990dfea8 100644 --- a/azurerm/internal/services/privatedns/private_dns_zone_resource.go +++ b/azurerm/internal/services/privatedns/private_dns_zone_resource.go @@ -2,28 +2,34 @@ package privatedns import ( "fmt" + "strings" "time" "github.com/Azure/azure-sdk-for-go/services/privatedns/mgmt/2018-09-01/privatedns" "github.com/hashicorp/go-azure-helpers/response" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmPrivateDnsZone() *schema.Resource { +func resourcePrivateDnsZone() *schema.Resource { return &schema.Resource{ - Create: resourceArmPrivateDnsZoneCreateUpdate, - Read: resourceArmPrivateDnsZoneRead, - Update: resourceArmPrivateDnsZoneCreateUpdate, - Delete: resourceArmPrivateDnsZoneDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, + Create: resourcePrivateDnsZoneCreateUpdate, + Read: resourcePrivateDnsZoneRead, + Update: resourcePrivateDnsZoneCreateUpdate, + Delete: resourcePrivateDnsZoneDelete, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.PrivateDnsZoneID(id) + return err + }), Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -61,29 +67,104 @@ func resourceArmPrivateDnsZone() *schema.Resource { "resource_group_name": azure.SchemaResourceGroupNameDiffSuppress(), + "soa_record": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Computed: true, + ForceNew: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "email": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.PrivateDnsZoneSOARecordEmail, + }, + + "expire_time": { + Type: schema.TypeInt, + Optional: true, + Default: 2419200, + ValidateFunc: validation.IntAtLeast(0), + }, + + "minimum_ttl": { + Type: schema.TypeInt, + 
Optional: true, + Default: 10, + ValidateFunc: validation.IntAtLeast(0), + }, + + "refresh_time": { + Type: schema.TypeInt, + Optional: true, + Default: 3600, + ValidateFunc: validation.IntAtLeast(0), + }, + + "retry_time": { + Type: schema.TypeInt, + Optional: true, + Default: 300, + ValidateFunc: validation.IntAtLeast(0), + }, + + "ttl": { + Type: schema.TypeInt, + Optional: true, + Default: 3600, + ValidateFunc: validation.IntBetween(0, 2147483647), + }, + + "tags": tags.Schema(), + + "fqdn": { + Type: schema.TypeString, + Computed: true, + }, + + // This field should be able to be updated since DNS Record Sets API allows to update it. + // So the issue is submitted on https://github.com/Azure/azure-rest-api-specs/issues/11674 + // Once the issue is fixed, the field will be updated to `Required` property. + "host_name": { + Type: schema.TypeString, + Computed: true, + }, + + // This field should be able to be updated since DNS Record Sets API allows to update it. + // So the issue is submitted on https://github.com/Azure/azure-rest-api-specs/issues/11674 + // Once the issue is fixed, the field will be updated to `Optional` property with `Default` attribute. + "serial_number": { + Type: schema.TypeInt, + Computed: true, + }, + }, + }, + }, + "tags": tags.Schema(), }, } } -func resourceArmPrivateDnsZoneCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourcePrivateDnsZoneCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).PrivateDns.PrivateZonesClient + recordSetsClient := meta.(*clients.Client).PrivateDns.RecordSetsClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - name := d.Get("name").(string) - resGroup := d.Get("resource_group_name").(string) - + resourceId := parse.NewPrivateDnsZoneID(subscriptionId, d.Get("resource_group_name").(string), d.Get("name").(string)) if d.IsNewResource() { - existing, err := client.Get(ctx, resGroup, name) + existing, err := client.Get(ctx, resourceId.ResourceGroup, resourceId.Name) if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("error checking for presence of existing Private DNS Zone %q (Resource Group %q): %s", name, resGroup, err) + return fmt.Errorf("checking for presence of existing Private DNS Zone %q (Resource Group %q): %+v", resourceId.Name, resourceId.ResourceGroup, err) } } if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_private_dns_zone", *existing.ID) + return tf.ImportAsExistsError("azurerm_private_dns_zone", resourceId.ID()) } } @@ -97,53 +178,67 @@ func resourceArmPrivateDnsZoneCreateUpdate(d *schema.ResourceData, meta interfac etag := "" ifNoneMatch := "" // set to empty to allow updates to records after creation - future, err := client.CreateOrUpdate(ctx, resGroup, name, parameters, etag, ifNoneMatch) + future, err := client.CreateOrUpdate(ctx, resourceId.ResourceGroup, resourceId.Name, parameters, etag, ifNoneMatch) if err != nil { - return fmt.Errorf("error creating/updating Private DNS Zone %q (Resource Group %q): %s", name, resGroup, err) + return fmt.Errorf("creating/updating Private DNS Zone %q (Resource Group %q): %s", resourceId.Name, resourceId.ResourceGroup, err) } if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("error waiting for Private DNS Zone %q to become available: %+v", name, err) + return fmt.Errorf("waiting for 
create/update of Private DNS Zone %q (Resource Group %q): %+v", resourceId.Name, resourceId.ResourceGroup, err) } - resp, err := client.Get(ctx, resGroup, name) - if err != nil { - return fmt.Errorf("error retrieving Private DNS Zone %q (Resource Group %q): %s", name, resGroup, err) - } + if v, ok := d.GetOk("soa_record"); ok { + soaRecordRaw := v.([]interface{})[0].(map[string]interface{}) + soaRecord := expandPrivateDNSZoneSOARecord(soaRecordRaw) + rsParameters := privatedns.RecordSet{ + RecordSetProperties: &privatedns.RecordSetProperties{ + TTL: utils.Int64(int64(soaRecordRaw["ttl"].(int))), + Metadata: tags.Expand(soaRecordRaw["tags"].(map[string]interface{})), + SoaRecord: soaRecord, + }, + } - if resp.ID == nil { - return fmt.Errorf("cannot read Private DNS Zone %q (Resource Group %q) ID", name, resGroup) - } + val := fmt.Sprintf("%s%s", resourceId.Name, strings.TrimSuffix(*soaRecord.Email, ".")) + if len(val) > 253 { + return fmt.Errorf("the value %q for `email` which is concatenated with Private DNS Zone `name` cannot exceed 253 characters excluding a trailing period", val) + } - d.SetId(*resp.ID) + if _, err := recordSetsClient.CreateOrUpdate(ctx, resourceId.ResourceGroup, resourceId.Name, privatedns.SOA, "@", rsParameters, etag, ifNoneMatch); err != nil { + return fmt.Errorf("creating/updating Private DNS SOA Record @ (Zone %q / Resource Group %q): %s", resourceId.Name, resourceId.ResourceGroup, err) + } + } - return resourceArmPrivateDnsZoneRead(d, meta) + d.SetId(resourceId.ID()) + return resourcePrivateDnsZoneRead(d, meta) } -func resourceArmPrivateDnsZoneRead(d *schema.ResourceData, meta interface{}) error { +func resourcePrivateDnsZoneRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).PrivateDns.PrivateZonesClient + recordSetsClient := meta.(*clients.Client).PrivateDns.RecordSetsClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.PrivateDnsZoneID(d.Id()) if err != nil { return err } - resGroup := id.ResourceGroup - name := id.Path["privateDnsZones"] - - resp, err := client.Get(ctx, resGroup, name) + resp, err := client.Get(ctx, id.ResourceGroup, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("error reading Private DNS Zone %q (Resource Group %q): %+v", name, resGroup, err) + return fmt.Errorf("retrieving Private DNS Zone %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } - d.Set("name", name) - d.Set("resource_group_name", resGroup) + recordSetResp, err := recordSetsClient.Get(ctx, id.ResourceGroup, id.Name, privatedns.SOA, "@") + if err != nil { + return fmt.Errorf("reading DNS SOA record @: %v", err) + } + + d.Set("name", id.Name) + d.Set("resource_group_name", id.ResourceGroup) if props := resp.PrivateZoneProperties; props != nil { d.Set("number_of_record_sets", props.NumberOfRecordSets) @@ -152,37 +247,121 @@ func resourceArmPrivateDnsZoneRead(d *schema.ResourceData, meta interface{}) err d.Set("max_number_of_virtual_network_links_with_registration", props.MaxNumberOfVirtualNetworkLinksWithRegistration) } + if err := d.Set("soa_record", flattenPrivateDNSZoneSOARecord(&recordSetResp)); err != nil { + return fmt.Errorf("setting `soa_record`: %+v", err) + } + return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmPrivateDnsZoneDelete(d *schema.ResourceData, meta interface{}) error { +func resourcePrivateDnsZoneDelete(d *schema.ResourceData, meta 
interface{}) error { client := meta.(*clients.Client).PrivateDns.PrivateZonesClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.PrivateDnsZoneID(d.Id()) if err != nil { return err } - resGroup := id.ResourceGroup - name := id.Path["privateDnsZones"] - etag := "" - future, err := client.Delete(ctx, resGroup, name, etag) + future, err := client.Delete(ctx, id.ResourceGroup, id.Name, etag) if err != nil { if response.WasNotFound(future.Response()) { return nil } - return fmt.Errorf("error deleting Private DNS Zone %s (resource group %s): %+v", name, resGroup, err) + return fmt.Errorf("error deleting Private DNS Zone %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { if response.WasNotFound(future.Response()) { return nil } - return fmt.Errorf("error deleting Private DNS Zone %s (resource group %s): %+v", name, resGroup, err) + return fmt.Errorf("error deleting Private DNS Zone %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) } return nil } + +func expandPrivateDNSZoneSOARecord(input map[string]interface{}) *privatedns.SoaRecord { + return &privatedns.SoaRecord{ + Email: utils.String(input["email"].(string)), + ExpireTime: utils.Int64(int64(input["expire_time"].(int))), + MinimumTTL: utils.Int64(int64(input["minimum_ttl"].(int))), + RefreshTime: utils.Int64(int64(input["refresh_time"].(int))), + RetryTime: utils.Int64(int64(input["retry_time"].(int))), + } +} + +func flattenPrivateDNSZoneSOARecord(input *privatedns.RecordSet) []interface{} { + if input == nil { + return make([]interface{}, 0) + } + + ttl := 0 + if input.TTL != nil { + ttl = int(*input.TTL) + } + + metaData := make(map[string]interface{}) + if input.Metadata != nil { + metaData = tags.Flatten(input.Metadata) + } + + fqdn := "" + if input.Fqdn != nil { + fqdn = *input.Fqdn + } + + email := "" + hostName := "" + expireTime := 0 + minimumTTL := 0 + refreshTime := 0 + retryTime := 0 + serialNumber := 0 + if input.SoaRecord != nil { + if input.SoaRecord.Email != nil { + email = *input.SoaRecord.Email + } + + if input.SoaRecord.Host != nil { + hostName = *input.SoaRecord.Host + } + + if input.SoaRecord.ExpireTime != nil { + expireTime = int(*input.SoaRecord.ExpireTime) + } + + if input.SoaRecord.MinimumTTL != nil { + minimumTTL = int(*input.SoaRecord.MinimumTTL) + } + + if input.SoaRecord.RefreshTime != nil { + refreshTime = int(*input.SoaRecord.RefreshTime) + } + + if input.SoaRecord.RetryTime != nil { + retryTime = int(*input.SoaRecord.RetryTime) + } + + if input.SoaRecord.SerialNumber != nil { + serialNumber = int(*input.SoaRecord.SerialNumber) + } + } + + return []interface{}{ + map[string]interface{}{ + "email": email, + "host_name": hostName, + "expire_time": expireTime, + "minimum_ttl": minimumTTL, + "refresh_time": refreshTime, + "retry_time": retryTime, + "serial_number": serialNumber, + "ttl": ttl, + "tags": metaData, + "fqdn": fqdn, + }, + } +} diff --git a/azurerm/internal/services/privatedns/private_dns_zone_resource_test.go b/azurerm/internal/services/privatedns/private_dns_zone_resource_test.go new file mode 100644 index 000000000000..ae5aad5c9cae --- /dev/null +++ b/azurerm/internal/services/privatedns/private_dns_zone_resource_test.go @@ -0,0 +1,238 @@ +package privatedns_test + +import ( + "context" + "fmt" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" + + 
"github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/parse" +) + +type PrivateDnsZoneResource struct { +} + +func TestAccPrivateDnsZone_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_zone", "test") + r := PrivateDnsZoneResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccPrivateDnsZone_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_zone", "test") + r := PrivateDnsZoneResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccPrivateDnsZone_withTags(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_zone", "test") + r := PrivateDnsZoneResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("2"), + ), + }, + { + Config: r.withTagsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccPrivateDnsZone_withSOARecord(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_zone", "test") + r := PrivateDnsZoneResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withBasicSOARecord(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.withCompletedSOARecord(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.withBasicSOARecord(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t PrivateDnsZoneResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.PrivateDnsZoneID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.PrivateDns.PrivateZonesClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("reading Private DNS Zone (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (PrivateDnsZoneResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func 
(r PrivateDnsZoneResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_private_dns_zone" "import" { + name = azurerm_private_dns_zone.test.name + resource_group_name = azurerm_private_dns_zone.test.resource_group_name +} +`, r.basic(data)) +} + +func (PrivateDnsZoneResource) withTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name + + tags = { + environment = "Production" + cost_center = "MSFT" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (PrivateDnsZoneResource) withTagsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name + + tags = { + environment = "staging" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (PrivateDnsZoneResource) withBasicSOARecord(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-privatedns-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name + + soa_record { + email = "testemail.com" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (PrivateDnsZoneResource) withCompletedSOARecord(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-privatedns-%d" + location = "%s" +} + +resource "azurerm_private_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name + + soa_record { + email = "testemail.com" + expire_time = 2419200 + minimum_ttl = 200 + refresh_time = 2600 + retry_time = 200 + ttl = 100 + + tags = { + ENv = "Test" + } + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/privatedns/private_dns_zone_virtual_network_link_resource.go b/azurerm/internal/services/privatedns/private_dns_zone_virtual_network_link_resource.go index 32f2c3af248b..83a7638a4b42 100644 --- a/azurerm/internal/services/privatedns/private_dns_zone_virtual_network_link_resource.go +++ b/azurerm/internal/services/privatedns/private_dns_zone_virtual_network_link_resource.go @@ -10,7 +10,11 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/parse" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" @@ -18,15 +22,16 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) -func resourceArmPrivateDnsZoneVirtualNetworkLink() *schema.Resource { +func resourcePrivateDnsZoneVirtualNetworkLink() *schema.Resource { return &schema.Resource{ - Create: resourceArmPrivateDnsZoneVirtualNetworkLinkCreateUpdate, - Read: resourceArmPrivateDnsZoneVirtualNetworkLinkRead, - Update: resourceArmPrivateDnsZoneVirtualNetworkLinkCreateUpdate, - Delete: resourceArmPrivateDnsZoneVirtualNetworkLinkDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, + Create: resourcePrivateDnsZoneVirtualNetworkLinkCreateUpdate, + Read: resourcePrivateDnsZoneVirtualNetworkLinkRead, + Update: resourcePrivateDnsZoneVirtualNetworkLinkCreateUpdate, + Delete: resourcePrivateDnsZoneVirtualNetworkLinkDelete, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.VirtualNetworkLinkID(id) + return err + }), Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(30 * time.Minute), @@ -35,12 +40,14 @@ func resourceArmPrivateDnsZoneVirtualNetworkLink() *schema.Resource { Delete: schema.DefaultTimeout(30 * time.Minute), }, + // TODO: these can become case-sensitive with a state migration Schema: map[string]*schema.Schema{ "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, + Type: schema.TypeString, + Required: true, + ForceNew: true, + // TODO: make this case sensitive once the API's fixed https://github.com/Azure/azure-rest-api-specs/issues/10933 + DiffSuppressFunc: suppress.CaseDifference, }, "private_dns_zone_name": { @@ -63,6 +70,7 @@ func resourceArmPrivateDnsZoneVirtualNetworkLink() *schema.Resource { Default: false, }, + // TODO: make this case sensitive once the API's fixed https://github.com/Azure/azure-rest-api-specs/issues/10933 "resource_group_name": azure.SchemaResourceGroupNameDiffSuppress(), "tags": tags.Schema(), @@ -70,27 +78,26 @@ func resourceArmPrivateDnsZoneVirtualNetworkLink() *schema.Resource { } } -func resourceArmPrivateDnsZoneVirtualNetworkLinkCreateUpdate(d *schema.ResourceData, meta interface{}) error { +func resourcePrivateDnsZoneVirtualNetworkLinkCreateUpdate(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).PrivateDns.VirtualNetworkLinksClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - name := d.Get("name").(string) - dnsZoneName := d.Get("private_dns_zone_name").(string) vNetID := d.Get("virtual_network_id").(string) registrationEnabled := d.Get("registration_enabled").(bool) - resGroup := d.Get("resource_group_name").(string) + resourceId := parse.NewVirtualNetworkLinkID(subscriptionId, d.Get("resource_group_name").(string), d.Get("private_dns_zone_name").(string), d.Get("name").(string)) if d.IsNewResource() { - existing, err := client.Get(ctx, resGroup, dnsZoneName, name) + existing, err := client.Get(ctx, resourceId.ResourceGroup, resourceId.PrivateDnsZoneName, resourceId.Name) if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("error checking for presence of existing Private DNS Zone Virtual network link %q (Resource Group %q): %s", name, resGroup, err) + return 
fmt.Errorf("checking for presence of existing Virtual Network Link %q (Private DNS Zone %q / Resource Group %q): %s", resourceId.Name, resourceId.PrivateDnsZoneName, resourceId.ResourceGroup, err) } } - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_private_dns_zone_virtual_network_link", *existing.ID) + if !utils.ResponseWasNotFound(existing.Response) { + return tf.ImportAsExistsError("azurerm_private_dns_zone_virtual_network_link", resourceId.ID()) } } @@ -111,55 +118,41 @@ func resourceArmPrivateDnsZoneVirtualNetworkLinkCreateUpdate(d *schema.ResourceD etag := "" ifNoneMatch := "" // set to empty to allow updates to records after creation - future, err := client.CreateOrUpdate(ctx, resGroup, dnsZoneName, name, parameters, etag, ifNoneMatch) + future, err := client.CreateOrUpdate(ctx, resourceId.ResourceGroup, resourceId.PrivateDnsZoneName, resourceId.Name, parameters, etag, ifNoneMatch) if err != nil { - return fmt.Errorf("error creating/updating Private DNS Zone Virtual network link %q (Resource Group %q): %s", name, resGroup, err) + return fmt.Errorf("creating/updating Virtual Network Link %q (Private DNS Zone %q / Resource Group %q): %+v", resourceId.Name, resourceId.PrivateDnsZoneName, resourceId.ResourceGroup, err) } if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("error waiting for Private DNS Zone Virtual network link %q to become available: %+v", name, err) - } - - resp, err := client.Get(ctx, resGroup, dnsZoneName, name) - if err != nil { - return fmt.Errorf("error retrieving Private DNS Zone Virtual network link %q (Resource Group %q): %s", name, resGroup, err) - } - - if resp.ID == nil { - return fmt.Errorf("cannot read Private DNS Zone Virtual network link %q (Resource Group %q) ID", name, resGroup) + return fmt.Errorf("waiting for Virtual Network Link %q (Private DNS Zone %q / Resource Group %q) to become available: %+v", resourceId.Name, resourceId.PrivateDnsZoneName, resourceId.ResourceGroup, err) } - d.SetId(*resp.ID) - - return resourceArmPrivateDnsZoneVirtualNetworkLinkRead(d, meta) + d.SetId(resourceId.ID()) + return resourcePrivateDnsZoneVirtualNetworkLinkRead(d, meta) } -func resourceArmPrivateDnsZoneVirtualNetworkLinkRead(d *schema.ResourceData, meta interface{}) error { +func resourcePrivateDnsZoneVirtualNetworkLinkRead(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).PrivateDns.VirtualNetworkLinksClient ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.VirtualNetworkLinkID(d.Id()) if err != nil { return err } - resGroup := id.ResourceGroup - dnsZoneName := id.Path["privateDnsZones"] - name := id.Path["virtualNetworkLinks"] - - resp, err := client.Get(ctx, resGroup, dnsZoneName, name) + resp, err := client.Get(ctx, id.ResourceGroup, id.PrivateDnsZoneName, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("error reading Private DNS Zone Virtual network link %q (Resource Group %q): %+v", name, resGroup, err) + return fmt.Errorf("reading Virtual Network Link %q (Private DNS Zone %q / Resource Group %q): %+v", id.Name, id.PrivateDnsZoneName, id.ResourceGroup, err) } - d.Set("name", name) - d.Set("resource_group_name", resGroup) - d.Set("private_dns_zone_name", dnsZoneName) + d.Set("name", id.Name) + d.Set("private_dns_zone_name", id.PrivateDnsZoneName) + d.Set("resource_group_name", 
id.ResourceGroup) if props := resp.VirtualNetworkLinkProperties; props != nil { d.Set("registration_enabled", props.RegistrationEnabled) @@ -172,47 +165,43 @@ func resourceArmPrivateDnsZoneVirtualNetworkLinkRead(d *schema.ResourceData, met return tags.FlattenAndSet(d, resp.Tags) } -func resourceArmPrivateDnsZoneVirtualNetworkLinkDelete(d *schema.ResourceData, meta interface{}) error { +func resourcePrivateDnsZoneVirtualNetworkLinkDelete(d *schema.ResourceData, meta interface{}) error { client := meta.(*clients.Client).PrivateDns.VirtualNetworkLinksClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.VirtualNetworkLinkID(d.Id()) if err != nil { return err } - resGroup := id.ResourceGroup - dnsZoneName := id.Path["privateDnsZones"] - name := id.Path["virtualNetworkLinks"] - etag := "" - if future, err := client.Delete(ctx, resGroup, dnsZoneName, name, etag); err != nil { + if future, err := client.Delete(ctx, id.ResourceGroup, id.PrivateDnsZoneName, id.Name, etag); err != nil { if response.WasNotFound(future.Response()) { return nil } - return fmt.Errorf("error deleting Virtual Network Link %q (Private DNS Zone %q / Resource Group %q): %+v", name, dnsZoneName, resGroup, err) + return fmt.Errorf("deleting Virtual Network Link %q (Private DNS Zone %q / Resource Group %q): %+v", id.Name, id.PrivateDnsZoneName, id.ResourceGroup, err) } // whilst the Delete above returns a Future, the Azure API's broken such that even though it's marked as "gone" // it's still kicking around - so we have to poll until this is actually gone - log.Printf("[DEBUG] Waiting for Virtual Network Link %q (Private DNS Zone %q / Resource Group %q) to be deleted", name, dnsZoneName, resGroup) + log.Printf("[DEBUG] Waiting for Virtual Network Link %q (Private DNS Zone %q / Resource Group %q) to be deleted", id.Name, id.PrivateDnsZoneName, id.ResourceGroup) stateConf := &resource.StateChangeConf{ Pending: []string{"Available"}, Target: []string{"NotFound"}, Refresh: func() (interface{}, string, error) { - log.Printf("[DEBUG] Checking to see if Virtual Network Link %q (Private DNS Zone %q / Resource Group %q) is available", name, dnsZoneName, resGroup) - resp, err := client.Get(ctx, resGroup, dnsZoneName, name) + log.Printf("[DEBUG] Checking to see if Virtual Network Link %q (Private DNS Zone %q / Resource Group %q) is still available", id.Name, id.PrivateDnsZoneName, id.ResourceGroup) + resp, err := client.Get(ctx, id.ResourceGroup, id.PrivateDnsZoneName, id.Name) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[DEBUG] Virtual Network Link %q (Private DNS Zone %q / Resource Group %q) was not found", name, dnsZoneName, resGroup) + log.Printf("[DEBUG] Virtual Network Link %q (Private DNS Zone %q / Resource Group %q) was not found", id.Name, id.PrivateDnsZoneName, id.ResourceGroup) return "NotFound", "NotFound", nil } return "", "error", err } - log.Printf("[DEBUG] Virtual Network Link %q (Private DNS Zone %q / Resource Group %q) still exists", name, dnsZoneName, resGroup) + log.Printf("[DEBUG] Virtual Network Link %q (Private DNS Zone %q / Resource Group %q) still exists", id.Name, id.PrivateDnsZoneName, id.ResourceGroup) return "Available", "Available", nil }, Delay: 30 * time.Second, @@ -222,7 +211,7 @@ func resourceArmPrivateDnsZoneVirtualNetworkLinkDelete(d *schema.ResourceData, m } if _, err := stateConf.WaitForState(); err != nil { - return fmt.Errorf("error waiting for deletion of 
Virtual Network Link %q (Private DNS Zone %q / Resource Group %q): %+v", name, dnsZoneName, resGroup, err) + return fmt.Errorf("waiting for deletion of Virtual Network Link %q (Private DNS Zone %q / Resource Group %q): %+v", id.Name, id.PrivateDnsZoneName, id.ResourceGroup, err) } return nil diff --git a/azurerm/internal/services/privatedns/private_dns_zone_virtual_network_link_resource_test.go b/azurerm/internal/services/privatedns/private_dns_zone_virtual_network_link_resource_test.go new file mode 100644 index 000000000000..23e686311ced --- /dev/null +++ b/azurerm/internal/services/privatedns/private_dns_zone_virtual_network_link_resource_test.go @@ -0,0 +1,217 @@ +package privatedns_test + +import ( + "context" + "fmt" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/parse" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type PrivateDnsZoneVirtualNetworkLinkResource struct { +} + +func TestAccPrivateDnsZoneVirtualNetworkLink_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_zone_virtual_network_link", "test") + r := PrivateDnsZoneVirtualNetworkLinkResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccPrivateDnsZoneVirtualNetworkLink_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_zone_virtual_network_link", "test") + r := PrivateDnsZoneVirtualNetworkLinkResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccPrivateDnsZoneVirtualNetworkLink_withTags(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_private_dns_zone_virtual_network_link", "test") + r := PrivateDnsZoneVirtualNetworkLinkResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withTags(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("2"), + ), + }, + { + Config: r.withTagsUpdate(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("tags.%").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func (t PrivateDnsZoneVirtualNetworkLinkResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.VirtualNetworkLinkID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.PrivateDns.VirtualNetworkLinksClient.Get(ctx, id.ResourceGroup, id.PrivateDnsZoneName, id.Name) + if err != nil { + return nil, fmt.Errorf("reading Private DNS Zone Virtual Network Link (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (PrivateDnsZoneVirtualNetworkLinkResource) basic(data 
acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "vnet%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + address_space = ["10.0.0.0/16"] + + subnet { + name = "subnet1" + address_prefix = "10.0.1.0/24" + } +} + +resource "azurerm_private_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_private_dns_zone_virtual_network_link" "test" { + name = "acctestVnetZone%d.com" + private_dns_zone_name = azurerm_private_dns_zone.test.name + virtual_network_id = azurerm_virtual_network.test.id + resource_group_name = azurerm_resource_group.test.name +} + +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (r PrivateDnsZoneVirtualNetworkLinkResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_private_dns_zone_virtual_network_link" "import" { + name = azurerm_private_dns_zone_virtual_network_link.test.name + private_dns_zone_name = azurerm_private_dns_zone_virtual_network_link.test.private_dns_zone_name + virtual_network_id = azurerm_private_dns_zone_virtual_network_link.test.virtual_network_id + resource_group_name = azurerm_private_dns_zone_virtual_network_link.test.resource_group_name +} +`, r.basic(data)) +} + +func (PrivateDnsZoneVirtualNetworkLinkResource) withTags(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "vnet%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + address_space = ["10.0.0.0/16"] + + subnet { + name = "subnet1" + address_prefix = "10.0.1.0/24" + } +} + +resource "azurerm_private_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = "${azurerm_resource_group.test.name}" +} + +resource "azurerm_private_dns_zone_virtual_network_link" "test" { + name = "acctestVnetZone%d.com" + private_dns_zone_name = azurerm_private_dns_zone.test.name + virtual_network_id = azurerm_virtual_network.test.id + resource_group_name = azurerm_resource_group.test.name + + tags = { + environment = "Production" + cost_center = "MSFT" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (PrivateDnsZoneVirtualNetworkLinkResource) withTagsUpdate(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "vnet%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + address_space = ["10.0.0.0/16"] + + subnet { + name = "subnet1" + address_prefix = "10.0.1.0/24" + } +} + +resource "azurerm_private_dns_zone" "test" { + name = "acctestzone%d.com" + resource_group_name = "${azurerm_resource_group.test.name}" +} + +resource "azurerm_private_dns_zone_virtual_network_link" "test" { + name = "acctestVnetZone%d.com" + private_dns_zone_name = azurerm_private_dns_zone.test.name + virtual_network_id = 
azurerm_virtual_network.test.id + resource_group_name = azurerm_resource_group.test.name + + tags = { + environment = "staging" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/privatedns/registration.go b/azurerm/internal/services/privatedns/registration.go index a51d8b99d167..ab9b38a45769 100644 --- a/azurerm/internal/services/privatedns/registration.go +++ b/azurerm/internal/services/privatedns/registration.go @@ -21,21 +21,21 @@ func (r Registration) WebsiteCategories() []string { // SupportedDataSources returns the supported Data Sources supported by this Service func (r Registration) SupportedDataSources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_private_dns_zone": dataSourceArmPrivateDnsZone(), + "azurerm_private_dns_zone": dataSourcePrivateDnsZone(), } } // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_private_dns_zone": resourceArmPrivateDnsZone(), - "azurerm_private_dns_a_record": resourceArmPrivateDnsARecord(), - "azurerm_private_dns_aaaa_record": resourceArmPrivateDnsAaaaRecord(), - "azurerm_private_dns_cname_record": resourceArmPrivateDnsCNameRecord(), - "azurerm_private_dns_mx_record": resourceArmPrivateDnsMxRecord(), - "azurerm_private_dns_ptr_record": resourceArmPrivateDnsPtrRecord(), - "azurerm_private_dns_srv_record": resourceArmPrivateDnsSrvRecord(), - "azurerm_private_dns_txt_record": resourceArmPrivateDnsTxtRecord(), - "azurerm_private_dns_zone_virtual_network_link": resourceArmPrivateDnsZoneVirtualNetworkLink(), + "azurerm_private_dns_zone": resourcePrivateDnsZone(), + "azurerm_private_dns_a_record": resourcePrivateDnsARecord(), + "azurerm_private_dns_aaaa_record": resourcePrivateDnsAaaaRecord(), + "azurerm_private_dns_cname_record": resourcePrivateDnsCNameRecord(), + "azurerm_private_dns_mx_record": resourcePrivateDnsMxRecord(), + "azurerm_private_dns_ptr_record": resourcePrivateDnsPtrRecord(), + "azurerm_private_dns_srv_record": resourcePrivateDnsSrvRecord(), + "azurerm_private_dns_txt_record": resourcePrivateDnsTxtRecord(), + "azurerm_private_dns_zone_virtual_network_link": resourcePrivateDnsZoneVirtualNetworkLink(), } } diff --git a/azurerm/internal/services/privatedns/resourceids.go b/azurerm/internal/services/privatedns/resourceids.go new file mode 100644 index 000000000000..d40038268c3e --- /dev/null +++ b/azurerm/internal/services/privatedns/resourceids.go @@ -0,0 +1,12 @@ +package privatedns + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=PrivateDnsZone -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=VirtualNetworkLink -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/virtualNetworkLinks/virtualNetworkLink1 + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=ARecord -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/A/eh1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=AaaaRecord 
-id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/AAAA/eheh1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=CnameRecord -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/CNAME/name1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=MxRecord -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/MX/mx1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=PtrRecord -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/PTR/ptr1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=SrvRecord -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/SRV/srv1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=TxtRecord -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/TXT/txt1 diff --git a/azurerm/internal/services/privatedns/tests/private_dns_a_record_resource_test.go b/azurerm/internal/services/privatedns/tests/private_dns_a_record_resource_test.go deleted file mode 100644 index bab6ad1ce765..000000000000 --- a/azurerm/internal/services/privatedns/tests/private_dns_a_record_resource_test.go +++ /dev/null @@ -1,290 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/privatedns/mgmt/2018-09-01/privatedns" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMPrivateDnsARecord_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_a_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsARecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsARecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsARecordExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "fqdn"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMPrivateDnsARecord_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_a_record", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsARecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsARecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsARecordExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMPrivateDnsARecord_requiresImport), - }, - }) -} - -func TestAccAzureRMPrivateDnsARecord_updateRecords(t *testing.T) { - data := acceptance.BuildTestData(t, 
"azurerm_private_dns_a_record", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsARecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsARecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsARecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "records.#", "2"), - ), - }, - { - Config: testAccAzureRMPrivateDnsARecord_updateRecords(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsARecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "records.#", "3"), - ), - }, - }, - }) -} - -func TestAccAzureRMPrivateDnsARecord_withTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_a_record", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsARecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsARecord_withTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsARecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - ), - }, - { - Config: testAccAzureRMPrivateDnsARecord_withTagsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsARecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMPrivateDnsARecordExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).PrivateDns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - aName := rs.Primary.Attributes["name"] - zoneName := rs.Primary.Attributes["zone_name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Private DNS A record: %s", aName) - } - - resp, err := conn.Get(ctx, resourceGroup, zoneName, privatedns.A, aName) - if err != nil { - return fmt.Errorf("Bad: Get A RecordSet: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Private DNS A record %s (resource group: %s) does not exist", aName, resourceGroup) - } - - return nil - } -} - -func testCheckAzureRMPrivateDnsARecordDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).PrivateDns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_private_dns_a_record" { - continue - } - - aName := rs.Primary.Attributes["name"] - zoneName := rs.Primary.Attributes["zone_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, zoneName, privatedns.A, aName) - - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return nil - } - - return err - } - - return fmt.Errorf("Private DNS A record still exists:\n%#v", resp.RecordSetProperties) 
- } - - return nil -} - -func testAccAzureRMPrivateDnsARecord_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_a_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_private_dns_zone.test.name - ttl = 300 - records = ["1.2.3.4", "1.2.4.5"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPrivateDnsARecord_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMPrivateDnsARecord_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_private_dns_a_record" "import" { - name = azurerm_private_dns_a_record.test.name - resource_group_name = azurerm_private_dns_a_record.test.resource_group_name - zone_name = azurerm_private_dns_a_record.test.zone_name - ttl = 300 - records = ["1.2.3.4", "1.2.4.5"] -} -`, template) -} - -func testAccAzureRMPrivateDnsARecord_updateRecords(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_a_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_private_dns_zone.test.name - ttl = 300 - records = ["1.2.3.4", "1.2.4.5", "1.2.3.7"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPrivateDnsARecord_withTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_a_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_private_dns_zone.test.name - ttl = 300 - records = ["1.2.3.4", "1.2.4.5"] - - tags = { - environment = "Production" - cost_center = "MSFT" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPrivateDnsARecord_withTagsUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_a_record" "test" { - name = "myarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_private_dns_zone.test.name - ttl = 300 - records = ["1.2.3.4", "1.2.4.5"] - - tags = { - environment = "staging" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/privatedns/tests/private_dns_aaaa_record_resource_test.go 
b/azurerm/internal/services/privatedns/tests/private_dns_aaaa_record_resource_test.go deleted file mode 100644 index eeafc879740a..000000000000 --- a/azurerm/internal/services/privatedns/tests/private_dns_aaaa_record_resource_test.go +++ /dev/null @@ -1,290 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/privatedns/mgmt/2018-09-01/privatedns" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMPrivateDnsAaaaRecord_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_aaaa_record", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsAaaaRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsAaaaRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsAaaaRecordExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "fqdn"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMPrivateDnsAaaaRecord_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_aaaa_record", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsAaaaRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsAaaaRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsAaaaRecordExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMPrivateDnsAaaaRecord_requiresImport), - }, - }) -} - -func TestAccAzureRMPrivateDnsAaaaRecord_updateRecords(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_aaaa_record", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsAaaaRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsAaaaRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsAaaaRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "records.#", "2"), - ), - }, - { - Config: testAccAzureRMPrivateDnsAaaaRecord_updateRecords(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsAaaaRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "records.#", "3"), - ), - }, - }, - }) -} - -func TestAccAzureRMPrivateDnsAaaaRecord_withTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_aaaa_record", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsAaaaRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsAaaaRecord_withTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsAaaaRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - ), - }, - { - 
Config: testAccAzureRMPrivateDnsAaaaRecord_withTagsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsAaaaRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMPrivateDnsAaaaRecordExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).PrivateDns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - aaaaName := rs.Primary.Attributes["name"] - zoneName := rs.Primary.Attributes["zone_name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Private DNS AAAA record: %s", aaaaName) - } - - resp, err := conn.Get(ctx, resourceGroup, zoneName, privatedns.AAAA, aaaaName) - if err != nil { - return fmt.Errorf("Bad: Get AAAA RecordSet: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Private DNS AAAA record %s (resource group: %s) does not exist", aaaaName, resourceGroup) - } - - return nil - } -} - -func testCheckAzureRMPrivateDnsAaaaRecordDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).PrivateDns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_private_dns_aaaa_record" { - continue - } - - aaaaName := rs.Primary.Attributes["name"] - zoneName := rs.Primary.Attributes["zone_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, zoneName, privatedns.AAAA, aaaaName) - - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return nil - } - - return err - } - - return fmt.Errorf("Private DNS AAAA record still exists:\n%#v", resp.RecordSetProperties) - } - - return nil -} - -func testAccAzureRMPrivateDnsAaaaRecord_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_aaaa_record" "test" { - name = "myaaaarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_private_dns_zone.test.name - ttl = 300 - records = ["fd5d:70bc:930e:d008:0000:0000:0000:7334", "fd5d:70bc:930e:d008::7335"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPrivateDnsAaaaRecord_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMPrivateDnsAaaaRecord_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_private_dns_aaaa_record" "import" { - name = azurerm_private_dns_aaaa_record.test.name - resource_group_name = azurerm_private_dns_aaaa_record.test.resource_group_name - zone_name = azurerm_private_dns_aaaa_record.test.zone_name - ttl = 300 - records = ["fd5d:70bc:930e:d008:0000:0000:0000:7334", "fd5d:70bc:930e:d008::7335"] -} -`, template) -} - -func 
testAccAzureRMPrivateDnsAaaaRecord_updateRecords(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_aaaa_record" "test" { - name = "myaaaarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_private_dns_zone.test.name - ttl = 300 - records = ["fd5d:70bc:930e:d008:0000:0000:0000:7334", "fd5d:70bc:930e:d008::7335", "fd73:5e76:3ab5:d2e9::1"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPrivateDnsAaaaRecord_withTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_aaaa_record" "test" { - name = "myaaaarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_private_dns_zone.test.name - ttl = 300 - records = ["fd5d:70bc:930e:d008:0000:0000:0000:7334", "fd5d:70bc:930e:d008::7335"] - - tags = { - environment = "Production" - cost_center = "MSFT" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPrivateDnsAaaaRecord_withTagsUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_aaaa_record" "test" { - name = "myaaaarecord%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_private_dns_zone.test.name - ttl = 300 - records = ["fd5d:70bc:930e:d008:0000:0000:0000:7334", "fd5d:70bc:930e:d008::7335"] - - tags = { - environment = "staging" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/privatedns/tests/private_dns_cname_record_resource_test.go b/azurerm/internal/services/privatedns/tests/private_dns_cname_record_resource_test.go deleted file mode 100644 index 45a284e6d886..000000000000 --- a/azurerm/internal/services/privatedns/tests/private_dns_cname_record_resource_test.go +++ /dev/null @@ -1,332 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/privatedns/mgmt/2018-09-01/privatedns" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMPrivateDnsCNameRecord_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_cname_record", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: 
testCheckAzureRMPrivateDnsCNameRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsCNameRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsCNameRecordExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "fqdn"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMPrivateDnsCNameRecord_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_cname_record", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsCNameRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsCNameRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsCNameRecordExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMPrivateDnsCNameRecord_requiresImport), - }, - }) -} - -func TestAccAzureRMPrivateDnsCNameRecord_subdomain(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_cname_record", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsCNameRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsCNameRecord_subdomain(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsCNameRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "record", "test.contoso.com"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMPrivateDnsCNameRecord_updateRecords(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_cname_record", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsCNameRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsCNameRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsCNameRecordExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMPrivateDnsCNameRecord_updateRecords(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsCNameRecordExists(data.ResourceName), - ), - }, - }, - }) -} - -func TestAccAzureRMPrivateDnsCNameRecord_withTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_cname_record", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsCNameRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsCNameRecord_withTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsCNameRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - ), - }, - { - Config: testAccAzureRMPrivateDnsCNameRecord_withTagsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsCNameRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMPrivateDnsCNameRecordExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := 
acceptance.AzureProvider.Meta().(*clients.Client).PrivateDns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - aName := rs.Primary.Attributes["name"] - zoneName := rs.Primary.Attributes["zone_name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Private DNS CNAME record: %s", aName) - } - - resp, err := conn.Get(ctx, resourceGroup, zoneName, privatedns.CNAME, aName) - if err != nil { - return fmt.Errorf("Bad: Get CNAME RecordSet: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Private DNS CNAME record %s (resource group: %s) does not exist", aName, resourceGroup) - } - - return nil - } -} - -func testCheckAzureRMPrivateDnsCNameRecordDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).PrivateDns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_private_dns_cname_record" { - continue - } - - aName := rs.Primary.Attributes["name"] - zoneName := rs.Primary.Attributes["zone_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, zoneName, privatedns.CNAME, aName) - - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return nil - } - - return err - } - - return fmt.Errorf("Private DNS CNAME record still exists:\n%#v", resp.RecordSetProperties) - } - - return nil -} - -func testAccAzureRMPrivateDnsCNameRecord_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_cname_record" "test" { - name = "acctestcname%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_private_dns_zone.test.name - ttl = 300 - record = "contoso.com" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPrivateDnsCNameRecord_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMPrivateDnsCNameRecord_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_private_dns_cname_record" "import" { - name = azurerm_private_dns_cname_record.test.name - resource_group_name = azurerm_private_dns_cname_record.test.resource_group_name - zone_name = azurerm_private_dns_cname_record.test.zone_name - ttl = 300 - record = "contoso.com" -} -`, template) -} - -func testAccAzureRMPrivateDnsCNameRecord_subdomain(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_cname_record" "test" { - name = "acctestcname%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = 
azurerm_private_dns_zone.test.name - ttl = 300 - record = "test.contoso.com" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPrivateDnsCNameRecord_updateRecords(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_cname_record" "test" { - name = "acctestcname%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_private_dns_zone.test.name - ttl = 300 - record = "contoso.com" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPrivateDnsCNameRecord_withTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_cname_record" "test" { - name = "acctestcname%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_private_dns_zone.test.name - ttl = 300 - record = "contoso.com" - - tags = { - environment = "Production" - cost_center = "MSFT" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPrivateDnsCNameRecord_withTagsUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_cname_record" "test" { - name = "acctestcname%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_private_dns_zone.test.name - ttl = 300 - record = "contoso.com" - - tags = { - environment = "staging" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/privatedns/tests/private_dns_mx_record_resource_test.go b/azurerm/internal/services/privatedns/tests/private_dns_mx_record_resource_test.go deleted file mode 100644 index 4880e385b2bb..000000000000 --- a/azurerm/internal/services/privatedns/tests/private_dns_mx_record_resource_test.go +++ /dev/null @@ -1,380 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/privatedns/mgmt/2018-09-01/privatedns" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMPrivateDnsMxRecord_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_mx_record", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsMxRecordDestroy, - Steps: []resource.TestStep{ 
- { - Config: testAccAzureRMPrivateDnsMxRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsMxRecordExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "fqdn"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMPrivateDnsMxRecord_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_mx_record", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsMxRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsMxRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsMxRecordExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMPrivateDnsMxRecord_requiresImport), - }, - }) -} - -func TestAccAzureRMPrivateDnsMxRecord_updateRecords(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_mx_record", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsMxRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsMxRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsMxRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "record.#", "2"), - ), - }, - { - Config: testAccAzureRMPrivateDnsMxRecord_updateRecords(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsMxRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "record.#", "3"), - ), - }, - }, - }) -} - -func TestAccAzureRMPrivateDnsMxRecord_withTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_mx_record", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsMxRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsMxRecord_withTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsMxRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - ), - }, - { - Config: testAccAzureRMPrivateDnsMxRecord_withTagsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsMxRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMPrivateDnsMxRecord_emptyName(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_mx_record", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsMxRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsMxRecord_emptyName(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsMxRecordExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMPrivateDnsMxRecordExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).PrivateDns.RecordSetsClient - ctx := 
acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - mxName := rs.Primary.Attributes["name"] - zoneName := rs.Primary.Attributes["zone_name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Private DNS MX record: %s", mxName) - } - - resp, err := conn.Get(ctx, resourceGroup, zoneName, privatedns.MX, mxName) - if err != nil { - return fmt.Errorf("Bad: Get MX RecordSet: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Private DNS MX record %s (resource group: %s) does not exist", mxName, resourceGroup) - } - - return nil - } -} - -func testCheckAzureRMPrivateDnsMxRecordDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).PrivateDns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_private_dns_mx_record" { - continue - } - - mxName := rs.Primary.Attributes["name"] - zoneName := rs.Primary.Attributes["zone_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, zoneName, privatedns.MX, mxName) - - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return nil - } - - return err - } - - return fmt.Errorf("Private DNS MX record still exists:\n%#v", resp.RecordSetProperties) - } - - return nil -} - -func testAccAzureRMPrivateDnsMxRecord_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-prvdns-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "testzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_mx_record" "test" { - name = "testaccmx%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_private_dns_zone.test.name - ttl = 300 - record { - preference = 10 - exchange = "mx1.contoso.com" - } - - record { - preference = 10 - exchange = "mx2.contoso.com" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPrivateDnsMxRecord_emptyName(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-prvdns-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "testzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_mx_record" "test" { - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_private_dns_zone.test.name - ttl = 300 - record { - preference = 10 - exchange = "mx1.contoso.com" - } - - record { - preference = 10 - exchange = "mx2.contoso.com" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMPrivateDnsMxRecord_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMPrivateDnsMxRecord_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_private_dns_mx_record" "import" { - name = azurerm_private_dns_mx_record.test.name - resource_group_name = 
azurerm_private_dns_mx_record.test.resource_group_name - zone_name = azurerm_private_dns_mx_record.test.zone_name - ttl = 300 - record { - preference = 10 - exchange = "mx1.contoso.com" - } - record { - preference = 10 - exchange = "mx2.contoso.com" - } -} -`, template) -} - -func testAccAzureRMPrivateDnsMxRecord_updateRecords(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-prvdns-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "testzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_mx_record" "test" { - name = "testaccmx%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_private_dns_zone.test.name - ttl = 300 - record { - preference = 10 - exchange = "mx1.contoso.com" - } - record { - preference = 10 - exchange = "mx2.contoso.com" - } - record { - preference = 20 - exchange = "backupmx.contoso.com" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPrivateDnsMxRecord_withTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-prvdns-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "testzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_mx_record" "test" { - name = "testaccmx%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_private_dns_zone.test.name - ttl = 300 - record { - preference = 10 - exchange = "mx1.contoso.com" - } - record { - preference = 10 - exchange = "mx2.contoso.com" - } - - tags = { - environment = "Production" - cost_center = "MSFT" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPrivateDnsMxRecord_withTagsUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-prvdns-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "testzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_mx_record" "test" { - name = "testaccmx%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_private_dns_zone.test.name - ttl = 300 - record { - preference = 10 - exchange = "mx1.contoso.com" - } - record { - preference = 10 - exchange = "mx2.contoso.com" - } - - tags = { - environment = "staging" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/privatedns/tests/private_dns_ptr_record_resource_test.go b/azurerm/internal/services/privatedns/tests/private_dns_ptr_record_resource_test.go deleted file mode 100644 index 251156aade37..000000000000 --- a/azurerm/internal/services/privatedns/tests/private_dns_ptr_record_resource_test.go +++ /dev/null @@ -1,289 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/privatedns/mgmt/2018-09-01/privatedns" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMPrivateDnsPtrRecord_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_ptr_record", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsPtrRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsPtrRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsPtrRecordExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "fqdn"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMPrivateDnsPtrRecord_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_ptr_record", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsPtrRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsPtrRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsPtrRecordExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMPrivateDnsPtrRecord_requiresImport), - }, - }) -} - -func TestAccAzureRMPrivateDnsPtrRecord_updateRecords(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_ptr_record", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsPtrRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsPtrRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsPtrRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "records.#", "2"), - ), - }, - { - Config: testAccAzureRMPrivateDnsPtrRecord_updateRecords(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsPtrRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "records.#", "3"), - ), - }, - }, - }) -} - -func TestAccAzureRMPrivateDnsPtrRecord_withTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_ptr_record", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsPtrRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsPtrRecord_withTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsPtrRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - ), - }, - { - Config: testAccAzureRMPrivateDnsPtrRecord_withTagsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsPtrRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMPrivateDnsPtrRecordExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).PrivateDns.RecordSetsClient - ctx := 
acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - ptrName := rs.Primary.Attributes["name"] - zoneName := rs.Primary.Attributes["zone_name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Private DNS PTR record: %s", ptrName) - } - - resp, err := conn.Get(ctx, resourceGroup, zoneName, privatedns.PTR, ptrName) - if err != nil { - return fmt.Errorf("Bad: Get PTR RecordSet: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Private DNS PTR record %s (resource group: %s) does not exist", ptrName, resourceGroup) - } - - return nil - } -} - -func testCheckAzureRMPrivateDnsPtrRecordDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).PrivateDns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_private_dns_ptr_record" { - continue - } - - ptrName := rs.Primary.Attributes["name"] - zoneName := rs.Primary.Attributes["zone_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, zoneName, privatedns.PTR, ptrName) - - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return nil - } - - return err - } - - return fmt.Errorf("Private DNS PTR record still exists:\n%#v", resp.RecordSetProperties) - } - - return nil -} - -func testAccAzureRMPrivateDnsPtrRecord_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "%d.0.10.in-addr.arpa" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_ptr_record" "test" { - name = "%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_private_dns_zone.test.name - ttl = 300 - records = ["test.contoso.com", "test2.contoso.com"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPrivateDnsPtrRecord_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMPrivateDnsPtrRecord_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_private_dns_ptr_record" "import" { - name = azurerm_private_dns_ptr_record.test.name - resource_group_name = azurerm_private_dns_ptr_record.test.resource_group_name - zone_name = azurerm_private_dns_ptr_record.test.zone_name - ttl = 300 - records = ["test.contoso.com", "test2.contoso.com"] -} -`, template) -} - -func testAccAzureRMPrivateDnsPtrRecord_updateRecords(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "%d.0.10.in-addr.arpa" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_ptr_record" "test" { - name = "%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_private_dns_zone.test.name - ttl = 300 - records = ["test.contoso.com", "test2.contoso.com", 
"test3.contoso.com"] -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPrivateDnsPtrRecord_withTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "%d.0.10.in-addr.arpa" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_ptr_record" "test" { - name = "%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_private_dns_zone.test.name - ttl = 300 - records = ["test.contoso.com", "test2.contoso.com"] - - tags = { - environment = "Production" - cost_center = "MSFT" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPrivateDnsPtrRecord_withTagsUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "%d.0.10.in-addr.arpa" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_ptr_record" "test" { - name = "%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_private_dns_zone.test.name - ttl = 300 - records = ["test.contoso.com", "test2.contoso.com"] - - tags = { - environment = "staging" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/privatedns/tests/private_dns_srv_record_resource_test.go b/azurerm/internal/services/privatedns/tests/private_dns_srv_record_resource_test.go deleted file mode 100644 index c83d745391f5..000000000000 --- a/azurerm/internal/services/privatedns/tests/private_dns_srv_record_resource_test.go +++ /dev/null @@ -1,351 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/privatedns/mgmt/2018-09-01/privatedns" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMPrivateDnsSrvRecord_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_srv_record", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsSrvRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsSrvRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsSrvRecordExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "fqdn"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMPrivateDnsSrvRecord_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_srv_record", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsSrvRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsSrvRecord_basic(data), - 
Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsSrvRecordExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMPrivateDnsSrvRecord_requiresImport), - }, - }) -} - -func TestAccAzureRMPrivateDnsSrvRecord_updateRecords(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_srv_record", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsSrvRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsSrvRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsSrvRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "record.#", "2"), - ), - }, - { - Config: testAccAzureRMPrivateDnsSrvRecord_updateRecords(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsSrvRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "record.#", "3"), - ), - }, - }, - }) -} - -func TestAccAzureRMPrivateDnsSrvRecord_withTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_srv_record", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsSrvRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsSrvRecord_withTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsSrvRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - ), - }, - { - Config: testAccAzureRMPrivateDnsSrvRecord_withTagsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsSrvRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMPrivateDnsSrvRecordExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).PrivateDns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - srvName := rs.Primary.Attributes["name"] - zoneName := rs.Primary.Attributes["zone_name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Private DNS SRV record: %s", srvName) - } - - resp, err := conn.Get(ctx, resourceGroup, zoneName, privatedns.SRV, srvName) - if err != nil { - return fmt.Errorf("Bad: Get SRV RecordSet: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Private DNS SRV record %s (resource group: %s) does not exist", srvName, resourceGroup) - } - - return nil - } -} - -func testCheckAzureRMPrivateDnsSrvRecordDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).PrivateDns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_private_dns_srv_record" { - continue - } - - srvName := rs.Primary.Attributes["name"] - zoneName := 
rs.Primary.Attributes["zone_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, zoneName, privatedns.SRV, srvName) - - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return nil - } - - return err - } - - return fmt.Errorf("Private DNS SRV record still exists:\n%#v", resp.RecordSetProperties) - } - - return nil -} - -func testAccAzureRMPrivateDnsSrvRecord_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-prvdns-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "testzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_srv_record" "test" { - name = "testaccsrv%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_private_dns_zone.test.name - ttl = 300 - record { - priority = 1 - weight = 5 - port = 8080 - target = "target1.contoso.com" - } - - record { - priority = 10 - weight = 10 - port = 8080 - target = "target2.contoso.com" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPrivateDnsSrvRecord_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMPrivateDnsSrvRecord_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_private_dns_srv_record" "import" { - name = azurerm_private_dns_srv_record.test.name - resource_group_name = azurerm_private_dns_srv_record.test.resource_group_name - zone_name = azurerm_private_dns_srv_record.test.zone_name - ttl = 300 - record { - priority = 1 - weight = 5 - port = 8080 - target = "target1.contoso.com" - } - record { - priority = 10 - weight = 10 - port = 8080 - target = "target2.contoso.com" - } -} -`, template) -} - -func testAccAzureRMPrivateDnsSrvRecord_updateRecords(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "testzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_srv_record" "test" { - name = "test%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_private_dns_zone.test.name - ttl = 300 - record { - priority = 1 - weight = 5 - port = 8080 - target = "target1.contoso.com" - } - record { - priority = 10 - weight = 10 - port = 8080 - target = "target2.contoso.com" - } - record { - priority = 20 - weight = 100 - port = 8080 - target = "target3.contoso.com" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPrivateDnsSrvRecord_withTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "testzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_srv_record" "test" { - name = "test%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_private_dns_zone.test.name - ttl = 300 - record { - priority = 1 - weight = 5 - port = 8080 - target = "target1.contoso.com" - } - record { - priority = 10 - weight = 10 - port = 8080 - target = 
"target2.contoso.com" - } - - tags = { - environment = "Production" - cost_center = "MSFT" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPrivateDnsSrvRecord_withTagsUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "testzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_srv_record" "test" { - name = "test%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_private_dns_zone.test.name - ttl = 300 - record { - priority = 1 - weight = 5 - port = 8080 - target = "target1.contoso.com" - } - record { - priority = 10 - weight = 10 - port = 8080 - target = "target2.contoso.com" - } - - tags = { - environment = "staging" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/privatedns/tests/private_dns_txt_record_resource_test.go b/azurerm/internal/services/privatedns/tests/private_dns_txt_record_resource_test.go deleted file mode 100644 index b96326ba0302..000000000000 --- a/azurerm/internal/services/privatedns/tests/private_dns_txt_record_resource_test.go +++ /dev/null @@ -1,339 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/Azure/azure-sdk-for-go/services/privatedns/mgmt/2018-09-01/privatedns" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMPrivateDnsTxtRecord_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_txt_record", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsTxtRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsTxtRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsTxtRecordExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "fqdn"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMPrivateDnsTxtRecord_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_txt_record", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsTxtRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsTxtRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsTxtRecordExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMPrivateDnsTxtRecord_requiresImport), - }, - }) -} - -func TestAccAzureRMPrivateDnsTxtRecord_updateRecords(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_txt_record", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsTxtRecordDestroy, - Steps: []resource.TestStep{ 
- { - Config: testAccAzureRMPrivateDnsTxtRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsTxtRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "record.#", "2"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMPrivateDnsTxtRecord_updateRecords(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsTxtRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "record.#", "3"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMPrivateDnsTxtRecord_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsTxtRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "record.#", "2"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMPrivateDnsTxtRecord_withTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_txt_record", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsTxtRecordDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsTxtRecord_withTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsTxtRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMPrivateDnsTxtRecord_withTagsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsTxtRecordExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMPrivateDnsTxtRecordExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).PrivateDns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - txtName := rs.Primary.Attributes["name"] - zoneName := rs.Primary.Attributes["zone_name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Private DNS TXT record: %s", txtName) - } - - resp, err := conn.Get(ctx, resourceGroup, zoneName, privatedns.TXT, txtName) - if err != nil { - return fmt.Errorf("Bad: Get TXT RecordSet: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Private DNS TXT record %s (resource group: %s) does not exist", txtName, resourceGroup) - } - - return nil - } -} - -func testCheckAzureRMPrivateDnsTxtRecordDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).PrivateDns.RecordSetsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_private_dns_txt_record" { - continue - } - - txtName := rs.Primary.Attributes["name"] - zoneName := rs.Primary.Attributes["zone_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, zoneName, privatedns.TXT, txtName) - - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return nil - 
} - - return err - } - - return fmt.Errorf("Private DNS TXT record still exists:\n%#v", resp.RecordSetProperties) - } - - return nil -} - -func testAccAzureRMPrivateDnsTxtRecord_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-prvdns-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "testzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_txt_record" "test" { - name = "testacctxt%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_private_dns_zone.test.name - ttl = 300 - - record { - value = "Quick brown fox" - } - - record { - value = "A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPrivateDnsTxtRecord_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMPrivateDnsTxtRecord_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_private_dns_txt_record" "import" { - name = azurerm_private_dns_txt_record.test.name - resource_group_name = azurerm_private_dns_txt_record.test.resource_group_name - zone_name = azurerm_private_dns_txt_record.test.zone_name - ttl = 300 - - record { - value = "Quick brown fox" - } - - record { - value = "A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......" 
- } -} -`, template) -} - -func testAccAzureRMPrivateDnsTxtRecord_updateRecords(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-prvdns-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "testzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_txt_record" "test" { - name = "test%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_private_dns_zone.test.name - ttl = 300 - - record { - value = "Quick brown fox" - } - - record { - value = "A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......" - } - - record { - value = "I'm a record too'" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPrivateDnsTxtRecord_withTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-prvdns-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "testzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_txt_record" "test" { - name = "test%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_private_dns_zone.test.name - ttl = 300 - - record { - value = "Quick brown fox" - } - - record { - value = "A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......" 
- } - - tags = { - environment = "Production" - cost_center = "MSFT" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPrivateDnsTxtRecord_withTagsUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-prvdns-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "testzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_private_dns_txt_record" "test" { - name = "test%d" - resource_group_name = azurerm_resource_group.test.name - zone_name = azurerm_private_dns_zone.test.name - ttl = 300 - - record { - value = "Quick brown fox" - } - - record { - value = "A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......A long text......" - } - - tags = { - environment = "staging" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/privatedns/tests/private_dns_zone_data_source_test.go b/azurerm/internal/services/privatedns/tests/private_dns_zone_data_source_test.go deleted file mode 100644 index 94589ba11a7b..000000000000 --- a/azurerm/internal/services/privatedns/tests/private_dns_zone_data_source_test.go +++ /dev/null @@ -1,147 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMPrivateDNSZone_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_private_dns_zone", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsZoneDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourcePrivateDNSZone_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMPrivateDNSZone_tags(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_private_dns_zone", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsZoneDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourcePrivateDNSZone_tags(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.hello", "world"), - ), - }, - }, - }) -} - -func TestAccDataSourceAzureRMPrivateDNSZone_withoutResourceGroupName(t *testing.T) { - data := acceptance.BuildTestData(t, 
"data.azurerm_private_dns_zone", "test") - resourceGroupName := fmt.Sprintf("acctestRG-%d", data.RandomInteger) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsZoneDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourcePrivateDNSZone_onlyNamePrep(data, resourceGroupName), - }, - { - Config: testAccDataSourcePrivateDNSZone_onlyName(data, resourceGroupName), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "resource_group_name", resourceGroupName), - ), - }, - }, - }) -} - -func testAccDataSourcePrivateDNSZone_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "acctestzone%d.internal" - resource_group_name = azurerm_resource_group.test.name -} - -data "azurerm_private_dns_zone" "test" { - name = azurerm_private_dns_zone.test.name - resource_group_name = azurerm_private_dns_zone.test.resource_group_name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccDataSourcePrivateDNSZone_tags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "acctestzone%d.internal" - resource_group_name = azurerm_resource_group.test.name - - tags = { - hello = "world" - } -} - -data "azurerm_private_dns_zone" "test" { - name = azurerm_private_dns_zone.test.name - resource_group_name = azurerm_private_dns_zone.test.resource_group_name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccDataSourcePrivateDNSZone_onlyNamePrep(data acceptance.TestData, resourceGroupName string) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "%s" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "acctestzone%d.internal" - resource_group_name = azurerm_resource_group.test.name -} -`, resourceGroupName, data.Locations.Primary, data.RandomInteger) -} - -func testAccDataSourcePrivateDNSZone_onlyName(data acceptance.TestData, resourceGroupName string) string { - template := testAccDataSourcePrivateDNSZone_onlyNamePrep(data, resourceGroupName) - return fmt.Sprintf(` -%s - -data "azurerm_private_dns_zone" "test" { - name = azurerm_private_dns_zone.test.name -} -`, template) -} diff --git a/azurerm/internal/services/privatedns/tests/private_dns_zone_resource_test.go b/azurerm/internal/services/privatedns/tests/private_dns_zone_resource_test.go deleted file mode 100644 index 236637b05cd7..000000000000 --- a/azurerm/internal/services/privatedns/tests/private_dns_zone_resource_test.go +++ /dev/null @@ -1,206 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMPrivateDnsZone_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_zone", 
"test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsZoneDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsZone_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsZoneExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMPrivateDnsZone_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_zone", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsZoneDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsZone_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsZoneExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMPrivateDnsZone_requiresImport), - }, - }) -} - -func TestAccAzureRMPrivateDnsZone_withTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_zone", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsZoneDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsZone_withTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsZoneExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - ), - }, - { - Config: testAccAzureRMPrivateDnsZone_withTagsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsZoneExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMPrivateDnsZoneExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).PrivateDns.PrivateZonesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - zoneName := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Private DNS zone: %s", zoneName) - } - - resp, err := client.Get(ctx, resourceGroup, zoneName) - if err != nil { - return fmt.Errorf("Bad: Get Private DNS zone: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Private DNS zone %s (resource group: %s) does not exist", zoneName, resourceGroup) - } - - return nil - } -} - -func testCheckAzureRMPrivateDnsZoneDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).PrivateDns.PrivateZonesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_private_dns_zone" { - continue - } - - zoneName := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, zoneName) - if err != nil { - if resp.StatusCode == http.StatusNotFound { - 
return nil - } - - return err - } - - return fmt.Errorf("Private DNS zone still exists:\n%#v", resp) - } - - return nil -} - -func testAccAzureRMPrivateDnsZone_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMPrivateDnsZone_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMPrivateDnsZone_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_private_dns_zone" "import" { - name = azurerm_private_dns_zone.test.name - resource_group_name = azurerm_private_dns_zone.test.resource_group_name -} -`, template) -} - -func testAccAzureRMPrivateDnsZone_withTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name - - tags = { - environment = "Production" - cost_center = "MSFT" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMPrivateDnsZone_withTagsUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_private_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = azurerm_resource_group.test.name - - tags = { - environment = "staging" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/privatedns/tests/private_dns_zone_virtual_network_link_resource_test.go b/azurerm/internal/services/privatedns/tests/private_dns_zone_virtual_network_link_resource_test.go deleted file mode 100644 index 5527780404c1..000000000000 --- a/azurerm/internal/services/privatedns/tests/private_dns_zone_virtual_network_link_resource_test.go +++ /dev/null @@ -1,270 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMPrivateDnsZoneVirtualNetworkLink_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_zone_virtual_network_link", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsZoneVirtualNetworkLinkDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsZoneVirtualNetworkLink_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsZoneVirtualNetworkLinkExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMPrivateDnsZoneVirtualNetworkLink_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, 
"azurerm_private_dns_zone_virtual_network_link", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsZoneVirtualNetworkLinkDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsZoneVirtualNetworkLink_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsZoneVirtualNetworkLinkExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMPrivateDnsZoneVirtualNetworkLink_requiresImport), - }, - }) -} - -func TestAccAzureRMPrivateDnsZoneVirtualNetworkLink_withTags(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_private_dns_zone_virtual_network_link", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMPrivateDnsZoneVirtualNetworkLinkDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMPrivateDnsZoneVirtualNetworkLink_withTags(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsZoneVirtualNetworkLinkExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "2"), - ), - }, - { - Config: testAccAzureRMPrivateDnsZoneVirtualNetworkLink_withTagsUpdate(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMPrivateDnsZoneVirtualNetworkLinkExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMPrivateDnsZoneVirtualNetworkLinkExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).PrivateDns.VirtualNetworkLinksClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - dnsZoneName := rs.Primary.Attributes["private_dns_zone_name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Private DNS zone virtual network link: %s", name) - } - - resp, err := client.Get(ctx, resourceGroup, dnsZoneName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: virtual network link %q (Private DNS zone %q / resource group: %s) does not exist", name, dnsZoneName, resourceGroup) - } - - return fmt.Errorf("Bad: Get Private DNS zone virtual network link: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMPrivateDnsZoneVirtualNetworkLinkDestroy(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).PrivateDns.VirtualNetworkLinksClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_private_dns_zone_virtual_network_link" { - continue - } - - name := rs.Primary.Attributes["name"] - dnsZoneName := rs.Primary.Attributes["private_dns_zone_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, dnsZoneName, name) - if err != nil { - if resp.StatusCode == http.StatusNotFound { - return nil - } - - return err 
- } - - return fmt.Errorf("Private DNS zone virtual network link still exists:\n%#v", resp) - } - - return nil -} - -func testAccAzureRMPrivateDnsZoneVirtualNetworkLink_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "vnet%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - address_space = ["10.0.0.0/16"] - - subnet { - name = "subnet1" - address_prefix = "10.0.1.0/24" - } -} - -resource "azurerm_private_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = "${azurerm_resource_group.test.name}" -} - -resource "azurerm_private_dns_zone_virtual_network_link" "test" { - name = "acctest%d" - private_dns_zone_name = "${azurerm_private_dns_zone.test.name}" - virtual_network_id = "${azurerm_virtual_network.test.id}" - resource_group_name = "${azurerm_resource_group.test.name}" -} - -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPrivateDnsZoneVirtualNetworkLink_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMPrivateDnsZoneVirtualNetworkLink_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_private_dns_zone_virtual_network_link" "import" { - name = azurerm_private_dns_zone_virtual_network_link.test.name - private_dns_zone_name = azurerm_private_dns_zone_virtual_network_link.test.private_dns_zone_name - virtual_network_id = azurerm_private_dns_zone_virtual_network_link.test.virtual_network_id - resource_group_name = azurerm_private_dns_zone_virtual_network_link.test.resource_group_name -} -`, template) -} - -func testAccAzureRMPrivateDnsZoneVirtualNetworkLink_withTags(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "vnet%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - address_space = ["10.0.0.0/16"] - - subnet { - name = "subnet1" - address_prefix = "10.0.1.0/24" - } -} - -resource "azurerm_private_dns_zone" "test" { - name = "acctestzone%d.com" - resource_group_name = "${azurerm_resource_group.test.name}" -} - -resource "azurerm_private_dns_zone_virtual_network_link" "test" { - name = "acctest%d" - private_dns_zone_name = "${azurerm_private_dns_zone.test.name}" - virtual_network_id = "${azurerm_virtual_network.test.id}" - resource_group_name = "${azurerm_resource_group.test.name}" - - tags = { - environment = "Production" - cost_center = "MSFT" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMPrivateDnsZoneVirtualNetworkLink_withTagsUpdate(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "vnet%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - address_space = ["10.0.0.0/16"] - - subnet { - name = "subnet1" - address_prefix = "10.0.1.0/24" - } -} - -resource "azurerm_private_dns_zone" "test" { - name = 
"acctestzone%d.com" - resource_group_name = "${azurerm_resource_group.test.name}" -} - -resource "azurerm_private_dns_zone_virtual_network_link" "test" { - name = "acctestzone%d.com" - private_dns_zone_name = "${azurerm_private_dns_zone.test.name}" - virtual_network_id = "${azurerm_virtual_network.test.id}" - resource_group_name = "${azurerm_resource_group.test.name}" - - tags = { - environment = "staging" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/privatedns/validate/a_record_id.go b/azurerm/internal/services/privatedns/validate/a_record_id.go new file mode 100644 index 000000000000..0c08d02259b0 --- /dev/null +++ b/azurerm/internal/services/privatedns/validate/a_record_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/parse" +) + +func ARecordID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.ARecordID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/privatedns/validate/a_record_id_test.go b/azurerm/internal/services/privatedns/validate/a_record_id_test.go new file mode 100644 index 000000000000..137de55ed200 --- /dev/null +++ b/azurerm/internal/services/privatedns/validate/a_record_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestARecordID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/", + Valid: false, + }, + + { + // missing AName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/", + Valid: false, + }, + + { + // missing value for AName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/A/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/A/eh1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/PRIVATEDNSZONES/PRIVATEDNSZONE1/A/EH1", + Valid: false, + }, + } + for _, 
tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := ARecordID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/privatedns/validate/aaaa_record_id.go b/azurerm/internal/services/privatedns/validate/aaaa_record_id.go new file mode 100644 index 000000000000..000d12c0e45c --- /dev/null +++ b/azurerm/internal/services/privatedns/validate/aaaa_record_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/parse" +) + +func AaaaRecordID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.AaaaRecordID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/privatedns/validate/aaaa_record_id_test.go b/azurerm/internal/services/privatedns/validate/aaaa_record_id_test.go new file mode 100644 index 000000000000..722ad2db77dd --- /dev/null +++ b/azurerm/internal/services/privatedns/validate/aaaa_record_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestAaaaRecordID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/", + Valid: false, + }, + + { + // missing AAAAName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/", + Valid: false, + }, + + { + // missing value for AAAAName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/AAAA/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/AAAA/eheh1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/PRIVATEDNSZONES/PRIVATEDNSZONE1/AAAA/EHEH1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := AaaaRecordID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git 
a/azurerm/internal/services/privatedns/validate/cname_record_id.go b/azurerm/internal/services/privatedns/validate/cname_record_id.go new file mode 100644 index 000000000000..2d175107f849 --- /dev/null +++ b/azurerm/internal/services/privatedns/validate/cname_record_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/parse" +) + +func CnameRecordID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.CnameRecordID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/privatedns/validate/cname_record_id_test.go b/azurerm/internal/services/privatedns/validate/cname_record_id_test.go new file mode 100644 index 000000000000..0a36d0a361f7 --- /dev/null +++ b/azurerm/internal/services/privatedns/validate/cname_record_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestCnameRecordID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/", + Valid: false, + }, + + { + // missing CNAMEName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/", + Valid: false, + }, + + { + // missing value for CNAMEName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/CNAME/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/CNAME/name1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/PRIVATEDNSZONES/PRIVATEDNSZONE1/CNAME/NAME1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := CnameRecordID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/privatedns/validate/mx_record_id.go b/azurerm/internal/services/privatedns/validate/mx_record_id.go new file mode 100644 index 000000000000..160be61c8271 --- /dev/null +++ 
b/azurerm/internal/services/privatedns/validate/mx_record_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/parse" +) + +func MxRecordID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.MxRecordID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/privatedns/validate/mx_record_id_test.go b/azurerm/internal/services/privatedns/validate/mx_record_id_test.go new file mode 100644 index 000000000000..3856df0dd637 --- /dev/null +++ b/azurerm/internal/services/privatedns/validate/mx_record_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestMxRecordID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/", + Valid: false, + }, + + { + // missing MXName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/", + Valid: false, + }, + + { + // missing value for MXName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/MX/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/MX/mx1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/PRIVATEDNSZONES/PRIVATEDNSZONE1/MX/MX1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := MxRecordID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/privatedns/validate/private_dns_zone_id.go b/azurerm/internal/services/privatedns/validate/private_dns_zone_id.go new file mode 100644 index 000000000000..a1ad170e499c --- /dev/null +++ b/azurerm/internal/services/privatedns/validate/private_dns_zone_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/parse" +) + +func PrivateDnsZoneID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.PrivateDnsZoneID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/privatedns/validate/private_dns_zone_id_test.go b/azurerm/internal/services/privatedns/validate/private_dns_zone_id_test.go new file mode 100644 index 000000000000..273feaa285b2 --- /dev/null +++ b/azurerm/internal/services/privatedns/validate/private_dns_zone_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestPrivateDnsZoneID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/PRIVATEDNSZONES/PRIVATEDNSZONE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := PrivateDnsZoneID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/privatedns/validate/private_dns_zone_soa_record_email.go b/azurerm/internal/services/privatedns/validate/private_dns_zone_soa_record_email.go new file mode 100644 index 000000000000..8301b6a4e02f --- /dev/null +++ b/azurerm/internal/services/privatedns/validate/private_dns_zone_soa_record_email.go @@ -0,0 +1,41 @@ +package validate + +import ( + "fmt" + "regexp" + "strings" +) + +func PrivateDnsZoneSOARecordEmail(v interface{}, k string) (warnings []string, errors []error) { + value := v.(string) + + if len(value) == 0 { + errors = append(errors, fmt.Errorf("%q cannot be an empty string: %q", k, v)) + return warnings, errors + } + + vSegments := strings.Split(value, ".") + if len(vSegments) < 2 || len(vSegments) > 34 { + errors = append(errors, fmt.Errorf("%q must be between 2 and 34 segments", k)) + return warnings, errors + } + + for _, segment := range vSegments { + if segment == "" { + errors = append(errors, fmt.Errorf("%q cannot contain consecutive period", k)) + return warnings, errors + } + + if len(segment) > 63 { + errors = append(errors, 
fmt.Errorf("the each segment of the `email` must contain between 1 and 63 characters")) + return warnings, errors + } + } + + if !regexp.MustCompile(`^[a-zA-Z\d._-]+$`).MatchString(value) { + errors = append(errors, fmt.Errorf("%q only contains letters, numbers, underscores, dashes and periods", k)) + return warnings, errors + } + + return warnings, errors +} diff --git a/azurerm/internal/services/privatedns/validate/private_dns_zone_soa_record_email_test.go b/azurerm/internal/services/privatedns/validate/private_dns_zone_soa_record_email_test.go new file mode 100644 index 000000000000..7f68a88aa584 --- /dev/null +++ b/azurerm/internal/services/privatedns/validate/private_dns_zone_soa_record_email_test.go @@ -0,0 +1,72 @@ +package validate + +import ( + "strings" + "testing" +) + +func TestPrivateDNSZoneSOARecordEmail(t *testing.T) { + cases := []struct { + Value string + Errors int + }{ + { + Value: "", + Errors: 1, + }, + { + Value: "a..com", + Errors: 1, + }, + { + Value: ".a.com", + Errors: 1, + }, + { + Value: "a.com.", + Errors: 1, + }, + { + Value: "a", + Errors: 1, + }, + { + Value: "a@.com.", + Errors: 1, + }, + { + Value: "a.com", + Errors: 0, + }, + { + Value: strings.Repeat("a.", 33) + "com", + Errors: 0, + }, + { + Value: strings.Repeat("a.", 34) + "com", + Errors: 1, + }, + { + Value: "a-b.com", + Errors: 0, + }, + { + Value: strings.Repeat("s", 63) + ".com", + Errors: 0, + }, + { + Value: strings.Repeat("s", 64) + ".com", + Errors: 1, + }, + } + + for _, tc := range cases { + t.Run(tc.Value, func(t *testing.T) { + _, errors := PrivateDnsZoneSOARecordEmail(tc.Value, "email") + + if len(errors) != tc.Errors { + t.Fatalf("Expected DNSZoneSOARecordEmail to return %d error(s) not %d", tc.Errors, len(errors)) + } + }) + } +} diff --git a/azurerm/internal/services/privatedns/validate/ptr_record_id.go b/azurerm/internal/services/privatedns/validate/ptr_record_id.go new file mode 100644 index 000000000000..95c9cecf3040 --- /dev/null +++ b/azurerm/internal/services/privatedns/validate/ptr_record_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/parse" +) + +func PtrRecordID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.PtrRecordID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/privatedns/validate/ptr_record_id_test.go b/azurerm/internal/services/privatedns/validate/ptr_record_id_test.go new file mode 100644 index 000000000000..8170964a3ba4 --- /dev/null +++ b/azurerm/internal/services/privatedns/validate/ptr_record_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestPtrRecordID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/", + Valid: false, + }, + + { + // missing PTRName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/", + Valid: false, + }, + + { + // missing value for PTRName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/PTR/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/PTR/ptr1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/PRIVATEDNSZONES/PRIVATEDNSZONE1/PTR/PTR1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := PtrRecordID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/privatedns/validate/srv_record_id.go b/azurerm/internal/services/privatedns/validate/srv_record_id.go new file mode 100644 index 000000000000..7552aa88c716 --- /dev/null +++ b/azurerm/internal/services/privatedns/validate/srv_record_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/parse" +) + +func SrvRecordID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.SrvRecordID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/privatedns/validate/srv_record_id_test.go b/azurerm/internal/services/privatedns/validate/srv_record_id_test.go new file mode 100644 index 000000000000..cc109ab8cf5b --- /dev/null +++ b/azurerm/internal/services/privatedns/validate/srv_record_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestSrvRecordID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + 
}, + + { + // missing value for PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/", + Valid: false, + }, + + { + // missing SRVName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/", + Valid: false, + }, + + { + // missing value for SRVName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/SRV/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/SRV/srv1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/PRIVATEDNSZONES/PRIVATEDNSZONE1/SRV/SRV1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := SrvRecordID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/privatedns/validate/txt_record_id.go b/azurerm/internal/services/privatedns/validate/txt_record_id.go new file mode 100644 index 000000000000..3cb509f0d70e --- /dev/null +++ b/azurerm/internal/services/privatedns/validate/txt_record_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/parse" +) + +func TxtRecordID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.TxtRecordID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/privatedns/validate/txt_record_id_test.go b/azurerm/internal/services/privatedns/validate/txt_record_id_test.go new file mode 100644 index 000000000000..390bbcc051f3 --- /dev/null +++ b/azurerm/internal/services/privatedns/validate/txt_record_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestTxtRecordID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/", + Valid: false, + }, + + { + // missing TXTName + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/", + Valid: false, + }, + + { + // missing value for TXTName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/TXT/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/TXT/txt1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/PRIVATEDNSZONES/PRIVATEDNSZONE1/TXT/TXT1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := TxtRecordID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/privatedns/validate/virtual_network_link_id.go b/azurerm/internal/services/privatedns/validate/virtual_network_link_id.go new file mode 100644 index 000000000000..2b859a54214f --- /dev/null +++ b/azurerm/internal/services/privatedns/validate/virtual_network_link_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/privatedns/parse" +) + +func VirtualNetworkLinkID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.VirtualNetworkLinkID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/privatedns/validate/virtual_network_link_id_test.go b/azurerm/internal/services/privatedns/validate/virtual_network_link_id_test.go new file mode 100644 index 000000000000..ad87350b81b8 --- /dev/null +++ b/azurerm/internal/services/privatedns/validate/virtual_network_link_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestVirtualNetworkLinkID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", + Valid: false, + }, + + { + // missing value for PrivateDnsZoneName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/", + Valid: false, + }, + + { + // missing 
value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/virtualNetworkLinks/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/privateDnsZones/privateDnsZone1/virtualNetworkLinks/virtualNetworkLink1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/PRIVATEDNSZONES/PRIVATEDNSZONE1/VIRTUALNETWORKLINKS/VIRTUALNETWORKLINK1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := VirtualNetworkLinkID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/recoveryservices/backup_container_storage_account_resource.go b/azurerm/internal/services/recoveryservices/backup_container_storage_account_resource.go new file mode 100644 index 000000000000..598f77945bd9 --- /dev/null +++ b/azurerm/internal/services/recoveryservices/backup_container_storage_account_resource.go @@ -0,0 +1,247 @@ +package recoveryservices + +import ( + "context" + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/recoveryservices/mgmt/2019-05-13/backup" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceBackupProtectionContainerStorageAccount() *schema.Resource { + return &schema.Resource{ + Create: resourceBackupProtectionContainerStorageAccountCreate, + Read: resourceBackupProtectionContainerStorageAccountRead, + Update: nil, + Delete: resourceBackupProtectionContainerStorageAccountDelete, + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "resource_group_name": azure.SchemaResourceGroupName(), + + "recovery_vault_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateRecoveryServicesVaultName, + }, + "storage_account_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateResourceID, + }, + }, + } +} + +func resourceBackupProtectionContainerStorageAccountCreate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).RecoveryServices.BackupProtectionContainersClient + opStatusClient := meta.(*clients.Client).RecoveryServices.BackupOperationStatusesClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + resGroup := d.Get("resource_group_name").(string) + vaultName := d.Get("recovery_vault_name").(string) + storageAccountID := 
d.Get("storage_account_id").(string) + + parsedStorageAccountID, err := azure.ParseAzureResourceID(storageAccountID) + if err != nil { + return fmt.Errorf("[ERROR] Unable to parse storage_account_id '%s': %+v", storageAccountID, err) + } + accountName, hasName := parsedStorageAccountID.Path["storageAccounts"] + if !hasName { + return fmt.Errorf("[ERROR] parsed storage_account_id '%s' doesn't contain 'storageAccounts'", storageAccountID) + } + + containerName := fmt.Sprintf("StorageContainer;storage;%s;%s", parsedStorageAccountID.ResourceGroup, accountName) + + if d.IsNewResource() { + existing, err := client.Get(ctx, vaultName, resGroup, "Azure", containerName) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("Error checking for presence of existing recovery services protection container %s (Vault %s): %+v", containerName, vaultName, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_backup_protection_container_storage", azure.HandleAzureSdkForGoBug2824(*existing.ID)) + } + } + + parameters := backup.ProtectionContainerResource{ + Properties: &backup.AzureStorageContainer{ + SourceResourceID: &storageAccountID, + FriendlyName: &accountName, + BackupManagementType: backup.ManagementTypeAzureStorage, + ContainerType: backup.ContainerTypeStorageContainer1, + }, + } + + resp, err := client.Register(ctx, vaultName, resGroup, "Azure", containerName, parameters) + if err != nil { + return fmt.Errorf("Error registering backup protection container %s (Vault %s): %+v", containerName, vaultName, err) + } + + locationURL, err := resp.Response.Location() // Operation ID found in the Location header + if locationURL == nil || err != nil { + return fmt.Errorf("Unable to determine operation URL for protection container registration status for %s. 
(Vault %s): Location header missing or empty", containerName, vaultName) + } + + opResourceID := azure.HandleAzureSdkForGoBug2824(locationURL.Path) + + parsedLocation, err := azure.ParseAzureResourceID(opResourceID) + if err != nil { + return err + } + + operationID := parsedLocation.Path["operationResults"] + if _, err = resourceBackupProtectionContainerStorageAccountWaitForOperation(ctx, opStatusClient, vaultName, resGroup, operationID, d); err != nil { + return err + } + + resp, err = client.Get(ctx, vaultName, resGroup, "Azure", containerName) + if err != nil { + return fmt.Errorf("Error retrieving site recovery protection container %s (Vault %s): %+v", containerName, vaultName, err) + } + + d.SetId(azure.HandleAzureSdkForGoBug2824(*resp.ID)) + + return resourceBackupProtectionContainerStorageAccountRead(d, meta) +} + +func resourceBackupProtectionContainerStorageAccountRead(d *schema.ResourceData, meta interface{}) error { + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + + resGroup := id.ResourceGroup + vaultName := id.Path["vaults"] + fabricName := id.Path["backupFabrics"] + containerName := id.Path["protectionContainers"] + + client := meta.(*clients.Client).RecoveryServices.BackupProtectionContainersClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + resp, err := client.Get(ctx, vaultName, resGroup, fabricName, containerName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + d.SetId("") + return nil + } + return fmt.Errorf("Error making Read request on backup protection container %s (Vault %s): %+v", containerName, vaultName, err) + } + + d.Set("resource_group_name", resGroup) + d.Set("recovery_vault_name", vaultName) + + if properties, ok := resp.Properties.AsAzureStorageContainer(); ok && properties != nil { + d.Set("storage_account_id", properties.SourceResourceID) + } + + return nil +} + +func resourceBackupProtectionContainerStorageAccountDelete(d *schema.ResourceData, meta interface{}) error { + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + + resGroup := id.ResourceGroup + vaultName := id.Path["vaults"] + fabricName := id.Path["backupFabrics"] + containerName := id.Path["protectionContainers"] + + client := meta.(*clients.Client).RecoveryServices.BackupProtectionContainersClient + opClient := meta.(*clients.Client).RecoveryServices.BackupOperationStatusesClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + resp, err := client.Unregister(ctx, vaultName, resGroup, fabricName, containerName) + if err != nil { + return fmt.Errorf("Error deregistering backup protection container %s (Vault %s): %+v", containerName, vaultName, err) + } + + locationURL, err := resp.Response.Location() + if err != nil || locationURL == nil { + return fmt.Errorf("Error unregistering backup protection container %s (Vault %s): Location header missing or empty", containerName, vaultName) + } + + opResourceID := azure.HandleAzureSdkForGoBug2824(locationURL.Path) + + parsedLocation, err := azure.ParseAzureResourceID(opResourceID) + if err != nil { + return err + } + operationID := parsedLocation.Path["backupOperationResults"] + + if _, err = resourceBackupProtectionContainerStorageAccountWaitForOperation(ctx, opClient, vaultName, resGroup, operationID, d); err != nil { + return err + } + + return nil +} + +// nolint unused - linter mistakenly things this function isn't used? 
+func resourceBackupProtectionContainerStorageAccountWaitForOperation(ctx context.Context, client *backup.OperationStatusesClient, vaultName, resourceGroup, operationID string, d *schema.ResourceData) (backup.OperationStatus, error) { + state := &resource.StateChangeConf{ + MinTimeout: 10 * time.Second, + Delay: 10 * time.Second, + Pending: []string{"InProgress"}, + Target: []string{"Succeeded"}, + Refresh: resourceBackupProtectionContainerStorageAccountCheckOperation(ctx, client, vaultName, resourceGroup, operationID), + ContinuousTargetOccurence: 5, // Without this buffer, file share backups and storage account deletions may fail if performed immediately after creating/destroying the container + } + + if d.IsNewResource() { + state.Timeout = d.Timeout(schema.TimeoutCreate) + } else { + state.Timeout = d.Timeout(schema.TimeoutUpdate) + } + + log.Printf("[DEBUG] Waiting for backup container operation %q (Vault %q) to complete", operationID, vaultName) + resp, err := state.WaitForState() + if err != nil { + return resp.(backup.OperationStatus), err + } + return resp.(backup.OperationStatus), nil +} + +func resourceBackupProtectionContainerStorageAccountCheckOperation(ctx context.Context, client *backup.OperationStatusesClient, vaultName, resourceGroup, operationID string) resource.StateRefreshFunc { + return func() (interface{}, string, error) { + resp, err := client.Get(ctx, vaultName, resourceGroup, operationID) + if err != nil { + return resp, "Error", fmt.Errorf("Error making Read request on Recovery Service Protection Container operation %q (Vault %q in Resource Group %q): %+v", operationID, vaultName, resourceGroup, err) + } + + if opErr := resp.Error; opErr != nil { + errMsg := "No upstream error message" + if opErr.Message != nil { + errMsg = *opErr.Message + } + err = fmt.Errorf("Recovery Service Protection Container operation status failed with status %q (Vault %q Resource Group %q Operation ID %q): %+v", resp.Status, vaultName, resourceGroup, operationID, errMsg) + } + + return resp, string(resp.Status), err + } +} diff --git a/azurerm/internal/services/recoveryservices/backup_container_storage_account_resource_test.go b/azurerm/internal/services/recoveryservices/backup_container_storage_account_resource_test.go new file mode 100644 index 000000000000..117e3af4d29b --- /dev/null +++ b/azurerm/internal/services/recoveryservices/backup_container_storage_account_resource_test.go @@ -0,0 +1,89 @@ +package recoveryservices_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type BackupProtectionContainerStorageAccountResource struct { +} + +func TestAccBackupProtectionContainerStorageAccount_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_backup_container_storage_account", "test") + r := BackupProtectionContainerStorageAccountResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + 
data.ImportStep(), + }) +} + +func (t BackupProtectionContainerStorageAccountResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + resGroup := id.ResourceGroup + vaultName := id.Path["vaults"] + fabricName := id.Path["backupFabrics"] + containerName := id.Path["protectionContainers"] + + resp, err := clients.RecoveryServices.BackupProtectionContainersClient.Get(ctx, vaultName, resGroup, fabricName, containerName) + if err != nil { + return nil, fmt.Errorf("reading site recovery protection container (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (BackupProtectionContainerStorageAccountResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-backup-%d" + location = "%s" +} + +resource "azurerm_recovery_services_vault" "testvlt" { + name = "acctest-vault-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + + soft_delete_enabled = false +} + +resource "azurerm_storage_account" "test" { + name = "unlikely23exst2acct%s" + resource_group_name = azurerm_resource_group.test.name + + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_backup_container_storage_account" "test" { + resource_group_name = azurerm_resource_group.test.name + recovery_vault_name = azurerm_recovery_services_vault.testvlt.name + storage_account_id = azurerm_storage_account.test.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomString) +} diff --git a/azurerm/internal/services/recoveryservices/backup_policy_file_share_resource.go b/azurerm/internal/services/recoveryservices/backup_policy_file_share_resource.go new file mode 100644 index 000000000000..a7ff6b196b9f --- /dev/null +++ b/azurerm/internal/services/recoveryservices/backup_policy_file_share_resource.go @@ -0,0 +1,370 @@ +package recoveryservices + +import ( + "context" + "fmt" + "log" + "regexp" + "strings" + "time" + + "github.com/Azure/azure-sdk-for-go/services/recoveryservices/mgmt/2019-05-13/backup" + "github.com/Azure/go-autorest/autorest/date" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceBackupProtectionPolicyFileShare() *schema.Resource { + return &schema.Resource{ + Create: resourceBackupProtectionPolicyFileShareCreateUpdate, + Read: resourceBackupProtectionPolicyFileShareRead, + Update: resourceBackupProtectionPolicyFileShareCreateUpdate, + Delete: resourceBackupProtectionPolicyFileShareDelete, + + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + + Timeouts: &schema.ResourceTimeout{ 
+ Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringMatch( + regexp.MustCompile("^[a-zA-Z][-_!a-zA-Z0-9]{2,149}$"), + "Backup Policy name must be 3 - 150 characters long, start with a letter, contain only letters and numbers.", + ), + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "recovery_vault_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateRecoveryServicesVaultName, + }, + + "timezone": { + Type: schema.TypeString, + Optional: true, + Default: "UTC", + }, + + "backup": { + Type: schema.TypeList, + MaxItems: 1, + Required: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + + "frequency": { + Type: schema.TypeString, + Required: true, + DiffSuppressFunc: suppress.CaseDifference, + ValidateFunc: validation.StringInSlice([]string{ + string(backup.ScheduleRunTypeDaily), + }, false), + }, + + "time": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringMatch( + regexp.MustCompile("^([01][0-9]|[2][0-3]):([03][0])$"), // time must be on the hour or half past + "Time of day must match the format HH:mm where HH is 00-23 and mm is 00 or 30", + ), + }, + }, + }, + }, + + "retention_daily": { + Type: schema.TypeList, + MaxItems: 1, + Required: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "count": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validation.IntBetween(1, 180), + }, + }, + }, + }, + }, + } +} + +func resourceBackupProtectionPolicyFileShareCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).RecoveryServices.ProtectionPoliciesClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + policyName := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + vaultName := d.Get("recovery_vault_name").(string) + + log.Printf("[DEBUG] Creating/updating Recovery Service Protection Policy %s (resource group %q)", policyName, resourceGroup) + + // getting this ready now because its shared between *everything*, time is... 
complicated for this resource + timeOfDay := d.Get("backup.0.time").(string) + dateOfDay, err := time.Parse(time.RFC3339, fmt.Sprintf("2018-07-30T%s:00Z", timeOfDay)) + if err != nil { + return fmt.Errorf("Error generating time from %q for policy %q (Resource Group %q): %+v", timeOfDay, policyName, resourceGroup, err) + } + times := append(make([]date.Time, 0), date.Time{Time: dateOfDay}) + + if d.IsNewResource() { + existing, err2 := client.Get(ctx, vaultName, resourceGroup, policyName) + if err2 != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("Error checking for presence of existing Recovery Service Protection Policy %q (Resource Group %q): %+v", policyName, resourceGroup, err2) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_backup_policy_file_share", *existing.ID) + } + } + + policy := backup.ProtectionPolicyResource{ + Properties: &backup.AzureFileShareProtectionPolicy{ + TimeZone: utils.String(d.Get("timezone").(string)), + BackupManagementType: backup.BackupManagementTypeAzureStorage, + WorkLoadType: backup.WorkloadTypeAzureFileShare, + SchedulePolicy: expandBackupProtectionPolicyFileShareSchedule(d, times), + RetentionPolicy: &backup.LongTermRetentionPolicy{ // SimpleRetentionPolicy only has duration property ¯\_(ツ)_/¯ + RetentionPolicyType: backup.RetentionPolicyTypeLongTermRetentionPolicy, + DailySchedule: expandBackupProtectionPolicyFileShareRetentionDaily(d, times), + }, + }, + } + if _, err = client.CreateOrUpdate(ctx, vaultName, resourceGroup, policyName, policy); err != nil { + return fmt.Errorf("Error creating/updating Recovery Service Protection Policy %q (Resource Group %q): %+v", policyName, resourceGroup, err) + } + + resp, err := resourceBackupProtectionPolicyFileShareWaitForUpdate(ctx, client, vaultName, resourceGroup, policyName, d) + if err != nil { + return err + } + + id := strings.Replace(*resp.ID, "Subscriptions", "subscriptions", 1) + d.SetId(id) + + return resourceBackupProtectionPolicyFileShareRead(d, meta) +} + +func resourceBackupProtectionPolicyFileShareRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).RecoveryServices.ProtectionPoliciesClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + + policyName := id.Path["backupPolicies"] + vaultName := id.Path["vaults"] + resourceGroup := id.ResourceGroup + + log.Printf("[DEBUG] Reading Recovery Service Protection Policy %q (resource group %q)", policyName, resourceGroup) + + resp, err := client.Get(ctx, vaultName, resourceGroup, policyName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + d.SetId("") + return nil + } + + return fmt.Errorf("Error making Read request on Recovery Service Protection Policy %q (Resource Group %q): %+v", policyName, resourceGroup, err) + } + + d.Set("name", policyName) + d.Set("resource_group_name", resourceGroup) + d.Set("recovery_vault_name", vaultName) + + if properties, ok := resp.Properties.AsAzureFileShareProtectionPolicy(); ok && properties != nil { + d.Set("timezone", properties.TimeZone) + + if schedule, ok := properties.SchedulePolicy.AsSimpleSchedulePolicy(); ok && schedule != nil { + if err := d.Set("backup", flattenBackupProtectionPolicyFileShareSchedule(schedule)); err != nil { + return fmt.Errorf("Error setting `backup`: %+v", err) + } + } + + if retention, ok := 
properties.RetentionPolicy.AsLongTermRetentionPolicy(); ok && retention != nil { + if s := retention.DailySchedule; s != nil { + if err := d.Set("retention_daily", flattenBackupProtectionPolicyFileShareRetentionDaily(s)); err != nil { + return fmt.Errorf("Error setting `retention_daily`: %+v", err) + } + } else { + d.Set("retention_daily", nil) + } + } + } + + return nil +} + +func resourceBackupProtectionPolicyFileShareDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).RecoveryServices.ProtectionPoliciesClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + + policyName := id.Path["backupPolicies"] + resourceGroup := id.ResourceGroup + vaultName := id.Path["vaults"] + + log.Printf("[DEBUG] Deleting Recovery Service Protection Policy %q (resource group %q)", policyName, resourceGroup) + + resp, err := client.Delete(ctx, vaultName, resourceGroup, policyName) + if err != nil { + if !utils.ResponseWasNotFound(resp) { + return fmt.Errorf("Error issuing delete request for Recovery Service Protection Policy %q (Resource Group %q): %+v", policyName, resourceGroup, err) + } + } + + if _, err := resourceBackupProtectionPolicyFileShareWaitForDeletion(ctx, client, vaultName, resourceGroup, policyName, d); err != nil { + return err + } + + return nil +} + +func expandBackupProtectionPolicyFileShareSchedule(d *schema.ResourceData, times []date.Time) *backup.SimpleSchedulePolicy { + if bb, ok := d.Get("backup").([]interface{}); ok && len(bb) > 0 { + block := bb[0].(map[string]interface{}) + + schedule := backup.SimpleSchedulePolicy{ // LongTermSchedulePolicy has no properties + SchedulePolicyType: backup.SchedulePolicyTypeSimpleSchedulePolicy, + ScheduleRunTimes: ×, + } + + if v, ok := block["frequency"].(string); ok { + schedule.ScheduleRunFrequency = backup.ScheduleRunType(v) + } + + return &schedule + } + + return nil +} + +func expandBackupProtectionPolicyFileShareRetentionDaily(d *schema.ResourceData, times []date.Time) *backup.DailyRetentionSchedule { + if rb, ok := d.Get("retention_daily").([]interface{}); ok && len(rb) > 0 { + block := rb[0].(map[string]interface{}) + + return &backup.DailyRetentionSchedule{ + RetentionTimes: ×, + RetentionDuration: &backup.RetentionDuration{ + Count: utils.Int32(int32(block["count"].(int))), + DurationType: backup.RetentionDurationTypeDays, + }, + } + } + + return nil +} + +func flattenBackupProtectionPolicyFileShareSchedule(schedule *backup.SimpleSchedulePolicy) []interface{} { + block := map[string]interface{}{} + + block["frequency"] = string(schedule.ScheduleRunFrequency) + + if times := schedule.ScheduleRunTimes; times != nil && len(*times) > 0 { + block["time"] = (*times)[0].Format("15:04") + } + + return []interface{}{block} +} + +func flattenBackupProtectionPolicyFileShareRetentionDaily(daily *backup.DailyRetentionSchedule) []interface{} { + block := map[string]interface{}{} + + if duration := daily.RetentionDuration; duration != nil { + if v := duration.Count; v != nil { + block["count"] = *v + } + } + + return []interface{}{block} +} + +func resourceBackupProtectionPolicyFileShareWaitForUpdate(ctx context.Context, client *backup.ProtectionPoliciesClient, vaultName, resourceGroup, policyName string, d *schema.ResourceData) (backup.ProtectionPolicyResource, error) { + state := &resource.StateChangeConf{ + MinTimeout: 30 * time.Second, + Delay: 10 * time.Second, + Pending: 
[]string{"NotFound"}, + Target: []string{"Found"}, + Refresh: resourceBackupProtectionPolicyFileShareRefreshFunc(ctx, client, vaultName, resourceGroup, policyName), + } + + if d.IsNewResource() { + state.Timeout = d.Timeout(schema.TimeoutCreate) + } else { + state.Timeout = d.Timeout(schema.TimeoutUpdate) + } + + resp, err := state.WaitForState() + if err != nil { + return resp.(backup.ProtectionPolicyResource), fmt.Errorf("Error waiting for the Recovery Service Protection Policy %q to update (Resource Group %q): %+v", policyName, resourceGroup, err) + } + + return resp.(backup.ProtectionPolicyResource), nil +} + +func resourceBackupProtectionPolicyFileShareWaitForDeletion(ctx context.Context, client *backup.ProtectionPoliciesClient, vaultName, resourceGroup, policyName string, d *schema.ResourceData) (backup.ProtectionPolicyResource, error) { + state := &resource.StateChangeConf{ + MinTimeout: 30 * time.Second, + Delay: 10 * time.Second, + Pending: []string{"Found"}, + Target: []string{"NotFound"}, + Refresh: resourceBackupProtectionPolicyFileShareRefreshFunc(ctx, client, vaultName, resourceGroup, policyName), + Timeout: d.Timeout(schema.TimeoutDelete), + } + + resp, err := state.WaitForState() + if err != nil { + return resp.(backup.ProtectionPolicyResource), fmt.Errorf("Error waiting for the Recovery Service Protection Policy %q to be missing (Resource Group %q): %+v", policyName, resourceGroup, err) + } + + return resp.(backup.ProtectionPolicyResource), nil +} + +func resourceBackupProtectionPolicyFileShareRefreshFunc(ctx context.Context, client *backup.ProtectionPoliciesClient, vaultName, resourceGroup, policyName string) resource.StateRefreshFunc { + return func() (interface{}, string, error) { + resp, err := client.Get(ctx, vaultName, resourceGroup, policyName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return resp, "NotFound", nil + } + + return resp, "Error", fmt.Errorf("Error making Read request on Recovery Service Protection Policy %q (Resource Group %q): %+v", policyName, resourceGroup, err) + } + + return resp, "Found", nil + } +} diff --git a/azurerm/internal/services/recoveryservices/backup_policy_file_share_resource_test.go b/azurerm/internal/services/recoveryservices/backup_policy_file_share_resource_test.go new file mode 100644 index 000000000000..d8b49b5bf899 --- /dev/null +++ b/azurerm/internal/services/recoveryservices/backup_policy_file_share_resource_test.go @@ -0,0 +1,187 @@ +package recoveryservices_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type BackupProtectionPolicyFileShareResource struct { +} + +func TestAccBackupProtectionPolicyFileShare_basicDaily(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_backup_policy_file_share", "test") + r := BackupProtectionPolicyFileShareResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicDaily(data), + Check: checkAccBackupProtectionPolicyFileShare_basicDaily(data.ResourceName, data.RandomInteger), + }, + data.ImportStep(), + }) +} + +func TestAccBackupProtectionPolicyFileShare_requiresImport(t 
*testing.T) { + data := acceptance.BuildTestData(t, "azurerm_backup_policy_file_share", "test") + r := BackupProtectionPolicyFileShareResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicDaily(data), + Check: checkAccBackupProtectionPolicyFileShare_basicDaily(data.ResourceName, data.RandomInteger), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccBackupProtectionPolicyFileShare_updateDaily(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_backup_policy_file_share", "test") + r := BackupProtectionPolicyFileShareResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicDaily(data), + Check: checkAccBackupProtectionPolicyFileShare_basicDaily(data.ResourceName, data.RandomInteger), + }, + data.ImportStep(), + { + Config: r.updateDaily(data), + Check: checkAccBackupProtectionPolicyFileShare_updateDaily(data.ResourceName, data.RandomInteger), + }, + data.ImportStep(), + }) +} + +func (t BackupProtectionPolicyFileShareResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + policyName := id.Path["backupPolicies"] + vaultName := id.Path["vaults"] + resourceGroup := id.ResourceGroup + + resp, err := clients.RecoveryServices.ProtectionPoliciesClient.Get(ctx, vaultName, resourceGroup, policyName) + if err != nil { + return nil, fmt.Errorf("reading Recovery Service Protection Policy (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (BackupProtectionPolicyFileShareResource) base(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-backup-%d" + location = "%s" +} + +resource "azurerm_recovery_services_vault" "test" { + name = "acctest-RSV-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + + soft_delete_enabled = false +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r BackupProtectionPolicyFileShareResource) basicDaily(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_backup_policy_file_share" "test" { + name = "acctest-PFS-%d" + resource_group_name = azurerm_resource_group.test.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + + backup { + frequency = "Daily" + time = "23:00" + } + + retention_daily { + count = 10 + } +} +`, r.base(data), data.RandomInteger) +} + +func (r BackupProtectionPolicyFileShareResource) updateDaily(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_backup_policy_file_share" "test" { + name = "acctest-PFS-%d" + resource_group_name = azurerm_resource_group.test.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + + backup { + frequency = "Daily" + time = "23:30" + } + + retention_daily { + count = 180 + } +} +`, r.base(data), data.RandomInteger) +} + +func (BackupProtectionPolicyFileShareResource) requiresImport(data acceptance.TestData) string { + template := BackupProtectionPolicyFileShareResource{}.basicDaily(data) + return fmt.Sprintf(` +%s + +resource "azurerm_backup_policy_file_share" "import" { + name = azurerm_backup_policy_file_share.test.name + resource_group_name = azurerm_backup_policy_file_share.test.resource_group_name + recovery_vault_name = 
azurerm_backup_policy_file_share.test.recovery_vault_name + + backup { + frequency = "Daily" + time = "23:00" + } + + retention_daily { + count = 10 + } +} +`, template) +} + +func checkAccBackupProtectionPolicyFileShare_basicDaily(resourceName string, ri int) resource.TestCheckFunc { + return resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "name", fmt.Sprintf("acctest-PFS-%d", ri)), + resource.TestCheckResourceAttr(resourceName, "resource_group_name", fmt.Sprintf("acctestRG-backup-%d", ri)), + resource.TestCheckResourceAttr(resourceName, "recovery_vault_name", fmt.Sprintf("acctest-RSV-%d", ri)), + resource.TestCheckResourceAttr(resourceName, "backup.0.frequency", "Daily"), + resource.TestCheckResourceAttr(resourceName, "backup.0.time", "23:00"), + resource.TestCheckResourceAttr(resourceName, "retention_daily.0.count", "10"), + ) +} + +func checkAccBackupProtectionPolicyFileShare_updateDaily(resourceName string, ri int) resource.TestCheckFunc { + return resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "name", fmt.Sprintf("acctest-PFS-%d", ri)), + resource.TestCheckResourceAttr(resourceName, "resource_group_name", fmt.Sprintf("acctestRG-backup-%d", ri)), + resource.TestCheckResourceAttr(resourceName, "recovery_vault_name", fmt.Sprintf("acctest-RSV-%d", ri)), + resource.TestCheckResourceAttr(resourceName, "backup.0.frequency", "Daily"), + resource.TestCheckResourceAttr(resourceName, "backup.0.time", "23:30"), + resource.TestCheckResourceAttr(resourceName, "retention_daily.0.count", "180"), + ) +} diff --git a/azurerm/internal/services/recoveryservices/backup_policy_vm_data_source.go b/azurerm/internal/services/recoveryservices/backup_policy_vm_data_source.go new file mode 100644 index 000000000000..623c5922984d --- /dev/null +++ b/azurerm/internal/services/recoveryservices/backup_policy_vm_data_source.go @@ -0,0 +1,68 @@ +package recoveryservices + +import ( + "fmt" + "log" + "strings" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func dataSourceBackupPolicyVm() *schema.Resource { + return &schema.Resource{ + Read: dataSourceBackupPolicyVmRead, + + Timeouts: &schema.ResourceTimeout{ + Read: schema.DefaultTimeout(5 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + }, + + "recovery_vault_name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: azure.ValidateRecoveryServicesVaultName, + }, + + "resource_group_name": azure.SchemaResourceGroupNameForDataSource(), + + "tags": tags.SchemaDataSource(), + }, + } +} + +func dataSourceBackupPolicyVmRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).RecoveryServices.ProtectionPoliciesClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + vaultName := d.Get("recovery_vault_name").(string) + + log.Printf("[DEBUG] Reading Recovery Service Policy %q (resource group %q)", name, 
resourceGroup) + + protectionPolicy, err := client.Get(ctx, vaultName, resourceGroup, name) + if err != nil { + if utils.ResponseWasNotFound(protectionPolicy.Response) { + return fmt.Errorf("Error: Backup Policy %q (Resource Group %q) was not found", name, resourceGroup) + } + + return fmt.Errorf("Error making Read request on Backup Policy %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + id := strings.Replace(*protectionPolicy.ID, "Subscriptions", "subscriptions", 1) + d.SetId(id) + + return tags.FlattenAndSet(d, protectionPolicy.Tags) +} diff --git a/azurerm/internal/services/recoveryservices/backup_policy_vm_data_source_test.go b/azurerm/internal/services/recoveryservices/backup_policy_vm_data_source_test.go new file mode 100644 index 000000000000..8592e659a53d --- /dev/null +++ b/azurerm/internal/services/recoveryservices/backup_policy_vm_data_source_test.go @@ -0,0 +1,42 @@ +package recoveryservices_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type BackupProtectionPolicyVMDataSource struct { +} + +func TestAccDataSourceBackupPolicyVm_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_backup_policy_vm", "test") + r := BackupProtectionPolicyVMDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("name").Exists(), + check.That(data.ResourceName).Key("recovery_vault_name").Exists(), + check.That(data.ResourceName).Key("resource_group_name").Exists(), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + ), + }, + }) +} + +func (BackupProtectionPolicyVMDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_backup_policy_vm" "test" { + name = azurerm_backup_policy_vm.test.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, BackupProtectionPolicyVMResource{}.basicDaily(data)) +} diff --git a/azurerm/internal/services/recoveryservices/backup_policy_vm_resource.go b/azurerm/internal/services/recoveryservices/backup_policy_vm_resource.go new file mode 100644 index 000000000000..489fda5dda72 --- /dev/null +++ b/azurerm/internal/services/recoveryservices/backup_policy_vm_resource.go @@ -0,0 +1,781 @@ +package recoveryservices + +import ( + "context" + "fmt" + "log" + "regexp" + "strings" + "time" + + "github.com/Azure/azure-sdk-for-go/services/recoveryservices/mgmt/2019-05-13/backup" + "github.com/Azure/go-autorest/autorest/date" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/set" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceBackupProtectionPolicyVM() *schema.Resource { + return &schema.Resource{ + Create: resourceBackupProtectionPolicyVMCreateUpdate, + Read: resourceBackupProtectionPolicyVMRead, + Update: resourceBackupProtectionPolicyVMCreateUpdate, + Delete: resourceBackupProtectionPolicyVMDelete, + + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringMatch( + regexp.MustCompile("^[a-zA-Z][-_!a-zA-Z0-9]{2,149}$"), + "Backup Policy name must be 3 - 150 characters long, start with a letter, contain only letters and numbers.", + ), + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "instant_restore_retention_days": { + Type: schema.TypeInt, + Optional: true, + Computed: true, + ValidateFunc: validation.IntBetween(1, 5), + }, + + "recovery_vault_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateRecoveryServicesVaultName, + }, + + "timezone": { + Type: schema.TypeString, + Optional: true, + Default: "UTC", + }, + + "backup": { + Type: schema.TypeList, + MaxItems: 1, + Required: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + + "frequency": { + Type: schema.TypeString, + Required: true, + DiffSuppressFunc: suppress.CaseDifference, + ValidateFunc: validation.StringInSlice([]string{ + string(backup.ScheduleRunTypeDaily), + string(backup.ScheduleRunTypeWeekly), + }, true), + }, + + "time": { // applies to all backup schedules & retention times (they all must be the same) + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringMatch( + regexp.MustCompile("^([01][0-9]|[2][0-3]):([03][0])$"), // time must be on the hour or half past + "Time of day must match the format HH:mm where HH is 00-23 and mm is 00 or 30", + ), + }, + + "weekdays": { // only for weekly + Type: schema.TypeSet, + Optional: true, + Set: set.HashStringIgnoreCase, + Elem: &schema.Schema{ + Type: schema.TypeString, + DiffSuppressFunc: suppress.CaseDifference, + ValidateFunc: validation.IsDayOfTheWeek(true), + }, + }, + }, + }, + }, + + "retention_daily": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "count": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validation.IntBetween(1, 9999), // Azure no longer supports less than 7 daily backups. 
This should be updated in 3.0 provider + + }, + }, + }, + }, + + "retention_weekly": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "count": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validation.IntBetween(1, 9999), + }, + + "weekdays": { + Type: schema.TypeSet, + Required: true, + Set: set.HashStringIgnoreCase, + Elem: &schema.Schema{ + Type: schema.TypeString, + DiffSuppressFunc: suppress.CaseDifference, + ValidateFunc: validation.IsDayOfTheWeek(true), + }, + }, + }, + }, + }, + + "retention_monthly": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "count": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validation.IntBetween(1, 9999), + }, + + "weeks": { + Type: schema.TypeSet, + Required: true, + Set: set.HashStringIgnoreCase, + Elem: &schema.Schema{ + Type: schema.TypeString, + DiffSuppressFunc: suppress.CaseDifference, + ValidateFunc: validation.StringInSlice([]string{ + string(backup.WeekOfMonthFirst), + string(backup.WeekOfMonthSecond), + string(backup.WeekOfMonthThird), + string(backup.WeekOfMonthFourth), + string(backup.WeekOfMonthLast), + }, true), + }, + }, + + "weekdays": { + Type: schema.TypeSet, + Required: true, + Set: set.HashStringIgnoreCase, + Elem: &schema.Schema{ + Type: schema.TypeString, + DiffSuppressFunc: suppress.CaseDifference, + ValidateFunc: validation.IsDayOfTheWeek(true), + }, + }, + }, + }, + }, + + "retention_yearly": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "count": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validation.IntBetween(1, 9999), + }, + + "months": { + Type: schema.TypeSet, + Required: true, + Set: set.HashStringIgnoreCase, + Elem: &schema.Schema{ + Type: schema.TypeString, + DiffSuppressFunc: suppress.CaseDifference, + ValidateFunc: validation.IsMonth(true), + }, + }, + + "weeks": { + Type: schema.TypeSet, + Required: true, + Set: set.HashStringIgnoreCase, + Elem: &schema.Schema{ + Type: schema.TypeString, + DiffSuppressFunc: suppress.CaseDifference, + ValidateFunc: validation.StringInSlice([]string{ + string(backup.WeekOfMonthFirst), + string(backup.WeekOfMonthSecond), + string(backup.WeekOfMonthThird), + string(backup.WeekOfMonthFourth), + string(backup.WeekOfMonthLast), + }, true), + }, + }, + + "weekdays": { + Type: schema.TypeSet, + Required: true, + Set: set.HashStringIgnoreCase, + Elem: &schema.Schema{ + Type: schema.TypeString, + DiffSuppressFunc: suppress.CaseDifference, + ValidateFunc: validation.IsDayOfTheWeek(true), + }, + }, + }, + }, + }, + + "tags": tags.Schema(), + }, + + // if daily, we need daily retention + // if weekly daily cannot be set, and we need weekly + CustomizeDiff: func(diff *schema.ResourceDiff, v interface{}) error { + _, hasDaily := diff.GetOk("retention_daily") + _, hasWeekly := diff.GetOk("retention_weekly") + + frequencyI, _ := diff.GetOk("backup.0.frequency") + switch strings.ToLower(frequencyI.(string)) { + case "daily": + if !hasDaily { + return fmt.Errorf("`retention_daily` must be set when backup.0.frequency is daily") + } + + if _, ok := diff.GetOk("backup.0.weekdays"); ok { + return fmt.Errorf("`backup.0.weekdays` should be not set when backup.0.frequency is daily") + } + case "weekly": + if hasDaily { + return fmt.Errorf("`retention_daily` must be not set when backup.0.frequency is weekly") + } + if !hasWeekly { + return 
fmt.Errorf("`retention_weekly` must be set when backup.0.frequency is weekly") + } + default: + return fmt.Errorf("Unrecognized value for backup.0.frequency") + } + return nil + }, + } +} + +func resourceBackupProtectionPolicyVMCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).RecoveryServices.ProtectionPoliciesClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + policyName := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + vaultName := d.Get("recovery_vault_name").(string) + t := d.Get("tags").(map[string]interface{}) + + log.Printf("[DEBUG] Creating/updating Azure Backup Protection Policy %s (resource group %q)", policyName, resourceGroup) + + // getting this ready now because its shared between *everything*, time is... complicated for this resource + timeOfDay := d.Get("backup.0.time").(string) + dateOfDay, err := time.Parse(time.RFC3339, fmt.Sprintf("2018-07-30T%s:00Z", timeOfDay)) + if err != nil { + return fmt.Errorf("Error generating time from %q for policy %q (Resource Group %q): %+v", timeOfDay, policyName, resourceGroup, err) + } + times := append(make([]date.Time, 0), date.Time{Time: dateOfDay}) + + if d.IsNewResource() { + existing, err2 := client.Get(ctx, vaultName, resourceGroup, policyName) + if err2 != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("Error checking for presence of existing Azure Backup Protection Policy %q (Resource Group %q): %+v", policyName, resourceGroup, err2) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_backup_policy_vm", *existing.ID) + } + } + + // Less than 7 daily backups is no longer supported for create/update + if (d.IsNewResource() || d.HasChange("retention_daily.0.count")) && (d.Get("retention_daily.0.count").(int) > 1 && d.Get("retention_daily.0.count").(int) < 7) { + return fmt.Errorf("The Azure API has recently changed behaviour so that provisioning a `count` for the `retention_daily` field can no longer be less than 7 days for new/updates to existing Backup Policies. 
Please ensure that `count` is at least 7, currently %d", d.Get("retention_daily.0.count").(int)) + } + + vmProtectionPolicyProperties := &backup.AzureIaaSVMProtectionPolicy{ + TimeZone: utils.String(d.Get("timezone").(string)), + BackupManagementType: backup.BackupManagementTypeAzureIaasVM, + SchedulePolicy: expandBackupProtectionPolicyVMSchedule(d, times), + RetentionPolicy: &backup.LongTermRetentionPolicy{ // SimpleRetentionPolicy only has duration property ¯\_(ツ)_/¯ + RetentionPolicyType: backup.RetentionPolicyTypeLongTermRetentionPolicy, + DailySchedule: expandBackupProtectionPolicyVMRetentionDaily(d, times), + WeeklySchedule: expandBackupProtectionPolicyVMRetentionWeekly(d, times), + MonthlySchedule: expandBackupProtectionPolicyVMRetentionMonthly(d, times), + YearlySchedule: expandBackupProtectionPolicyVMRetentionYearly(d, times), + }, + } + + if d.HasChange("instant_restore_retention_days") { + vmProtectionPolicyProperties.InstantRpRetentionRangeInDays = utils.Int32(int32(d.Get("instant_restore_retention_days").(int))) + } + + policy := backup.ProtectionPolicyResource{ + Tags: tags.Expand(t), + Properties: vmProtectionPolicyProperties, + } + + if _, err = client.CreateOrUpdate(ctx, vaultName, resourceGroup, policyName, policy); err != nil { + return fmt.Errorf("Error creating/updating Azure Backup Protection Policy %q (Resource Group %q): %+v", policyName, resourceGroup, err) + } + + resp, err := resourceBackupProtectionPolicyVMWaitForUpdate(ctx, client, vaultName, resourceGroup, policyName, d) + if err != nil { + return err + } + + id := strings.Replace(*resp.ID, "Subscriptions", "subscriptions", 1) + d.SetId(id) + + return resourceBackupProtectionPolicyVMRead(d, meta) +} + +func resourceBackupProtectionPolicyVMRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).RecoveryServices.ProtectionPoliciesClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + + policyName := id.Path["backupPolicies"] + vaultName := id.Path["vaults"] + resourceGroup := id.ResourceGroup + + log.Printf("[DEBUG] Reading Azure Backup Protection Policy %q (resource group %q)", policyName, resourceGroup) + + resp, err := client.Get(ctx, vaultName, resourceGroup, policyName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + d.SetId("") + return nil + } + + return fmt.Errorf("Error making Read request on Azure Backup Protection Policy %q (Resource Group %q): %+v", policyName, resourceGroup, err) + } + + d.Set("name", policyName) + d.Set("resource_group_name", resourceGroup) + d.Set("recovery_vault_name", vaultName) + + if properties, ok := resp.Properties.AsAzureIaaSVMProtectionPolicy(); ok && properties != nil { + d.Set("timezone", properties.TimeZone) + d.Set("instant_restore_retention_days", properties.InstantRpRetentionRangeInDays) + + if schedule, ok := properties.SchedulePolicy.AsSimpleSchedulePolicy(); ok && schedule != nil { + if err := d.Set("backup", flattenBackupProtectionPolicyVMSchedule(schedule)); err != nil { + return fmt.Errorf("Error setting `backup`: %+v", err) + } + } + + if retention, ok := properties.RetentionPolicy.AsLongTermRetentionPolicy(); ok && retention != nil { + if s := retention.DailySchedule; s != nil { + if err := d.Set("retention_daily", flattenBackupProtectionPolicyVMRetentionDaily(s)); err != nil { + return fmt.Errorf("Error setting `retention_daily`: %+v", err) + } + } else { 
d.Set("retention_daily", nil) + } + + if s := retention.WeeklySchedule; s != nil { + if err := d.Set("retention_weekly", flattenBackupProtectionPolicyVMRetentionWeekly(s)); err != nil { + return fmt.Errorf("Error setting `retention_weekly`: %+v", err) + } + } else { + d.Set("retention_weekly", nil) + } + + if s := retention.MonthlySchedule; s != nil { + if err := d.Set("retention_monthly", flattenBackupProtectionPolicyVMRetentionMonthly(s)); err != nil { + return fmt.Errorf("Error setting `retention_monthly`: %+v", err) + } + } else { + d.Set("retention_monthly", nil) + } + + if s := retention.YearlySchedule; s != nil { + if err := d.Set("retention_yearly", flattenBackupProtectionPolicyVMRetentionYearly(s)); err != nil { + return fmt.Errorf("Error setting `retention_yearly`: %+v", err) + } + } else { + d.Set("retention_yearly", nil) + } + } + } + + return tags.FlattenAndSet(d, resp.Tags) +} + +func resourceBackupProtectionPolicyVMDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).RecoveryServices.ProtectionPoliciesClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + + policyName := id.Path["backupPolicies"] + resourceGroup := id.ResourceGroup + vaultName := id.Path["vaults"] + + log.Printf("[DEBUG] Deleting Azure Backup Protected Item %q (resource group %q)", policyName, resourceGroup) + + resp, err := client.Delete(ctx, vaultName, resourceGroup, policyName) + if err != nil { + if !utils.ResponseWasNotFound(resp) { + return fmt.Errorf("Error issuing delete request for Azure Backup Protection Policy %q (Resource Group %q): %+v", policyName, resourceGroup, err) + } + } + + if _, err := resourceBackupProtectionPolicyVMWaitForDeletion(ctx, client, vaultName, resourceGroup, policyName, d); err != nil { + return err + } + + return nil +} + +func expandBackupProtectionPolicyVMSchedule(d *schema.ResourceData, times []date.Time) *backup.SimpleSchedulePolicy { + if bb, ok := d.Get("backup").([]interface{}); ok && len(bb) > 0 { + block := bb[0].(map[string]interface{}) + + schedule := backup.SimpleSchedulePolicy{ // LongTermSchedulePolicy has no properties + SchedulePolicyType: backup.SchedulePolicyTypeSimpleSchedulePolicy, + ScheduleRunTimes: ×, + } + + if v, ok := block["frequency"].(string); ok { + schedule.ScheduleRunFrequency = backup.ScheduleRunType(v) + } + + if v, ok := block["weekdays"].(*schema.Set); ok { + days := make([]backup.DayOfWeek, 0) + for _, day := range v.List() { + days = append(days, backup.DayOfWeek(day.(string))) + } + schedule.ScheduleRunDays = &days + } + + return &schedule + } + + return nil +} + +func expandBackupProtectionPolicyVMRetentionDaily(d *schema.ResourceData, times []date.Time) *backup.DailyRetentionSchedule { + if rb, ok := d.Get("retention_daily").([]interface{}); ok && len(rb) > 0 { + block := rb[0].(map[string]interface{}) + + return &backup.DailyRetentionSchedule{ + RetentionTimes: ×, + RetentionDuration: &backup.RetentionDuration{ + Count: utils.Int32(int32(block["count"].(int))), + DurationType: backup.RetentionDurationTypeDays, + }, + } + } + + return nil +} + +func expandBackupProtectionPolicyVMRetentionWeekly(d *schema.ResourceData, times []date.Time) *backup.WeeklyRetentionSchedule { + if rb, ok := d.Get("retention_weekly").([]interface{}); ok && len(rb) > 0 { + block := rb[0].(map[string]interface{}) + + retention := backup.WeeklyRetentionSchedule{ + RetentionTimes: ×, + 
RetentionDuration: &backup.RetentionDuration{ + Count: utils.Int32(int32(block["count"].(int))), + DurationType: backup.RetentionDurationTypeWeeks, + }, + } + + if v, ok := block["weekdays"].(*schema.Set); ok { + days := make([]backup.DayOfWeek, 0) + for _, day := range v.List() { + days = append(days, backup.DayOfWeek(day.(string))) + } + retention.DaysOfTheWeek = &days + } + + return &retention + } + + return nil +} + +func expandBackupProtectionPolicyVMRetentionMonthly(d *schema.ResourceData, times []date.Time) *backup.MonthlyRetentionSchedule { + if rb, ok := d.Get("retention_monthly").([]interface{}); ok && len(rb) > 0 { + block := rb[0].(map[string]interface{}) + + retention := backup.MonthlyRetentionSchedule{ + RetentionScheduleFormatType: backup.RetentionScheduleFormatWeekly, // this is always weekly ¯\_(ツ)_/¯ + RetentionScheduleDaily: nil, // and this is always nil.. + RetentionScheduleWeekly: expandBackupProtectionPolicyVMRetentionWeeklyFormat(block), + RetentionTimes: ×, + RetentionDuration: &backup.RetentionDuration{ + Count: utils.Int32(int32(block["count"].(int))), + DurationType: backup.RetentionDurationTypeMonths, + }, + } + + return &retention + } + + return nil +} + +func expandBackupProtectionPolicyVMRetentionYearly(d *schema.ResourceData, times []date.Time) *backup.YearlyRetentionSchedule { + if rb, ok := d.Get("retention_yearly").([]interface{}); ok && len(rb) > 0 { + block := rb[0].(map[string]interface{}) + + retention := backup.YearlyRetentionSchedule{ + RetentionScheduleFormatType: backup.RetentionScheduleFormatWeekly, // this is always weekly ¯\_(ツ)_/¯ + RetentionScheduleDaily: nil, // and this is always nil.. + RetentionScheduleWeekly: expandBackupProtectionPolicyVMRetentionWeeklyFormat(block), + RetentionTimes: ×, + RetentionDuration: &backup.RetentionDuration{ + Count: utils.Int32(int32(block["count"].(int))), + DurationType: backup.RetentionDurationTypeYears, + }, + } + + if v, ok := block["months"].(*schema.Set); ok { + months := make([]backup.MonthOfYear, 0) + for _, month := range v.List() { + months = append(months, backup.MonthOfYear(month.(string))) + } + retention.MonthsOfYear = &months + } + + return &retention + } + + return nil +} + +func expandBackupProtectionPolicyVMRetentionWeeklyFormat(block map[string]interface{}) *backup.WeeklyRetentionFormat { + weekly := backup.WeeklyRetentionFormat{} + + if v, ok := block["weekdays"].(*schema.Set); ok { + days := make([]backup.DayOfWeek, 0) + for _, day := range v.List() { + days = append(days, backup.DayOfWeek(day.(string))) + } + weekly.DaysOfTheWeek = &days + } + + if v, ok := block["weeks"].(*schema.Set); ok { + weeks := make([]backup.WeekOfMonth, 0) + for _, week := range v.List() { + weeks = append(weeks, backup.WeekOfMonth(week.(string))) + } + weekly.WeeksOfTheMonth = &weeks + } + + return &weekly +} + +func flattenBackupProtectionPolicyVMSchedule(schedule *backup.SimpleSchedulePolicy) []interface{} { + block := map[string]interface{}{} + + block["frequency"] = string(schedule.ScheduleRunFrequency) + + if times := schedule.ScheduleRunTimes; times != nil && len(*times) > 0 { + block["time"] = (*times)[0].Format("15:04") + } + + if days := schedule.ScheduleRunDays; days != nil { + weekdays := make([]interface{}, 0) + for _, d := range *days { + weekdays = append(weekdays, string(d)) + } + block["weekdays"] = schema.NewSet(schema.HashString, weekdays) + } + + return []interface{}{block} +} + +func flattenBackupProtectionPolicyVMRetentionDaily(daily *backup.DailyRetentionSchedule) []interface{} { + block 
:= map[string]interface{}{} + + if duration := daily.RetentionDuration; duration != nil { + if v := duration.Count; v != nil { + block["count"] = *v + } + } + + return []interface{}{block} +} + +func flattenBackupProtectionPolicyVMRetentionWeekly(weekly *backup.WeeklyRetentionSchedule) []interface{} { + block := map[string]interface{}{} + + if duration := weekly.RetentionDuration; duration != nil { + if v := duration.Count; v != nil { + block["count"] = *v + } + } + + if days := weekly.DaysOfTheWeek; days != nil { + weekdays := make([]interface{}, 0) + for _, d := range *days { + weekdays = append(weekdays, string(d)) + } + block["weekdays"] = schema.NewSet(schema.HashString, weekdays) + } + + return []interface{}{block} +} + +func flattenBackupProtectionPolicyVMRetentionMonthly(monthly *backup.MonthlyRetentionSchedule) []interface{} { + block := map[string]interface{}{} + + if duration := monthly.RetentionDuration; duration != nil { + if v := duration.Count; v != nil { + block["count"] = *v + } + } + + if weekly := monthly.RetentionScheduleWeekly; weekly != nil { + block["weekdays"], block["weeks"] = flattenBackupProtectionPolicyVMRetentionWeeklyFormat(weekly) + } + + return []interface{}{block} +} + +func flattenBackupProtectionPolicyVMRetentionYearly(yearly *backup.YearlyRetentionSchedule) []interface{} { + block := map[string]interface{}{} + + if duration := yearly.RetentionDuration; duration != nil { + if v := duration.Count; v != nil { + block["count"] = *v + } + } + + if weekly := yearly.RetentionScheduleWeekly; weekly != nil { + block["weekdays"], block["weeks"] = flattenBackupProtectionPolicyVMRetentionWeeklyFormat(weekly) + } + + if months := yearly.MonthsOfYear; months != nil { + slice := make([]interface{}, 0) + for _, d := range *months { + slice = append(slice, string(d)) + } + block["months"] = schema.NewSet(schema.HashString, slice) + } + + return []interface{}{block} +} + +func flattenBackupProtectionPolicyVMRetentionWeeklyFormat(retention *backup.WeeklyRetentionFormat) (weekdays, weeks *schema.Set) { + if days := retention.DaysOfTheWeek; days != nil { + slice := make([]interface{}, 0) + for _, d := range *days { + slice = append(slice, string(d)) + } + weekdays = schema.NewSet(schema.HashString, slice) + } + + if days := retention.WeeksOfTheMonth; days != nil { + slice := make([]interface{}, 0) + for _, d := range *days { + slice = append(slice, string(d)) + } + weeks = schema.NewSet(schema.HashString, slice) + } + + return weekdays, weeks +} + +func resourceBackupProtectionPolicyVMWaitForUpdate(ctx context.Context, client *backup.ProtectionPoliciesClient, vaultName, resourceGroup, policyName string, d *schema.ResourceData) (backup.ProtectionPolicyResource, error) { + state := &resource.StateChangeConf{ + MinTimeout: 30 * time.Second, + Delay: 10 * time.Second, + Pending: []string{"NotFound"}, + Target: []string{"Found"}, + Refresh: resourceBackupProtectionPolicyVMRefreshFunc(ctx, client, vaultName, resourceGroup, policyName), + } + + if d.IsNewResource() { + state.Timeout = d.Timeout(schema.TimeoutCreate) + } else { + state.Timeout = d.Timeout(schema.TimeoutUpdate) + } + + resp, err := state.WaitForState() + if err != nil { + return resp.(backup.ProtectionPolicyResource), fmt.Errorf("Error waiting for the Azure Backup Protection Policy %q to be true (Resource Group %q) to provision: %+v", policyName, resourceGroup, err) + } + + return resp.(backup.ProtectionPolicyResource), nil +} + +func resourceBackupProtectionPolicyVMWaitForDeletion(ctx context.Context, client 
*backup.ProtectionPoliciesClient, vaultName, resourceGroup, policyName string, d *schema.ResourceData) (backup.ProtectionPolicyResource, error) { + state := &resource.StateChangeConf{ + MinTimeout: 30 * time.Second, + Delay: 10 * time.Second, + Pending: []string{"Found"}, + Target: []string{"NotFound"}, + Refresh: resourceBackupProtectionPolicyVMRefreshFunc(ctx, client, vaultName, resourceGroup, policyName), + Timeout: d.Timeout(schema.TimeoutDelete), + } + + resp, err := state.WaitForState() + if err != nil { + return resp.(backup.ProtectionPolicyResource), fmt.Errorf("Error waiting for the Azure Backup Protection Policy %q to be false (Resource Group %q) to provision: %+v", policyName, resourceGroup, err) + } + + return resp.(backup.ProtectionPolicyResource), nil +} + +func resourceBackupProtectionPolicyVMRefreshFunc(ctx context.Context, client *backup.ProtectionPoliciesClient, vaultName, resourceGroup, policyName string) resource.StateRefreshFunc { + return func() (interface{}, string, error) { + resp, err := client.Get(ctx, vaultName, resourceGroup, policyName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return resp, "NotFound", nil + } + + return resp, "Error", fmt.Errorf("Error making Read request on Azure Backup Protection Policy %q (Resource Group %q): %+v", policyName, resourceGroup, err) + } + + return resp, "Found", nil + } +} diff --git a/azurerm/internal/services/recoveryservices/backup_policy_vm_resource_test.go b/azurerm/internal/services/recoveryservices/backup_policy_vm_resource_test.go new file mode 100644 index 000000000000..91c2fbbf64f4 --- /dev/null +++ b/azurerm/internal/services/recoveryservices/backup_policy_vm_resource_test.go @@ -0,0 +1,506 @@ +package recoveryservices_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type BackupProtectionPolicyVMResource struct { +} + +func TestAccBackupProtectionPolicyVM_basicDaily(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_backup_policy_vm", "test") + r := BackupProtectionPolicyVMResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicDaily(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("backup.0.frequency").HasValue("Daily"), + check.That(data.ResourceName).Key("backup.0.time").HasValue("23:00"), + check.That(data.ResourceName).Key("retention_daily.0.count").HasValue("10"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccBackupProtectionPolicyVM_withInstantRestoreRetentionRangeUpdate(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_backup_policy_vm", "test") + r := BackupProtectionPolicyVMResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicDaily(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basicDailyWithInstantRestoreRetentionRange(data), + Check: 
resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basicDaily(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccBackupProtectionPolicyVM_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_backup_policy_vm", "test") + r := BackupProtectionPolicyVMResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicDaily(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccBackupProtectionPolicyVM_basicWeekly(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_backup_policy_vm", "test") + r := BackupProtectionPolicyVMResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicWeekly(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccBackupProtectionPolicyVM_completeDaily(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_backup_policy_vm", "test") + r := BackupProtectionPolicyVMResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.completeDaily(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccBackupProtectionPolicyVM_completeWeekly(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_backup_policy_vm", "test") + r := BackupProtectionPolicyVMResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.completeWeekly(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("backup.0.frequency").HasValue("Weekly"), + check.That(data.ResourceName).Key("backup.0.time").HasValue("23:00"), + check.That(data.ResourceName).Key("backup.0.weekdays.#").HasValue("4"), + check.That(data.ResourceName).Key("retention_weekly.0.count").HasValue("42"), + check.That(data.ResourceName).Key("retention_weekly.0.weekdays.#").HasValue("4"), + check.That(data.ResourceName).Key("retention_monthly.0.count").HasValue("7"), + check.That(data.ResourceName).Key("retention_monthly.0.weekdays.#").HasValue("4"), + check.That(data.ResourceName).Key("retention_monthly.0.weeks.#").HasValue("2"), + check.That(data.ResourceName).Key("retention_yearly.0.count").HasValue("77"), + check.That(data.ResourceName).Key("retention_yearly.0.weekdays.#").HasValue("4"), + check.That(data.ResourceName).Key("retention_yearly.0.weeks.#").HasValue("2"), + check.That(data.ResourceName).Key("retention_yearly.0.months.#").HasValue("2"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccBackupProtectionPolicyVM_updateDaily(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_backup_policy_vm", "test") + r := BackupProtectionPolicyVMResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicDaily(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.completeDaily(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("backup.0.frequency").HasValue("Daily"), + 
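+ // the assertions below confirm the daily schedule plus the weekly/monthly/yearly retention blocks introduced by completeDaily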
check.That(data.ResourceName).Key("backup.0.time").HasValue("23:00"), + check.That(data.ResourceName).Key("retention_daily.0.count").HasValue("10"), + check.That(data.ResourceName).Key("retention_weekly.0.count").HasValue("42"), + check.That(data.ResourceName).Key("retention_weekly.0.weekdays.#").HasValue("2"), + check.That(data.ResourceName).Key("retention_monthly.0.count").HasValue("7"), + check.That(data.ResourceName).Key("retention_monthly.0.weekdays.#").HasValue("2"), + check.That(data.ResourceName).Key("retention_monthly.0.weeks.#").HasValue("2"), + check.That(data.ResourceName).Key("retention_yearly.0.count").HasValue("77"), + check.That(data.ResourceName).Key("retention_yearly.0.weekdays.#").HasValue("2"), + check.That(data.ResourceName).Key("retention_yearly.0.weeks.#").HasValue("2"), + check.That(data.ResourceName).Key("retention_yearly.0.months.#").HasValue("2"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccBackupProtectionPolicyVM_updateWeekly(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_backup_policy_vm", "test") + r := BackupProtectionPolicyVMResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicWeekly(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.completeWeekly(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("backup.0.frequency").HasValue("Weekly"), + check.That(data.ResourceName).Key("backup.0.time").HasValue("23:00"), + check.That(data.ResourceName).Key("backup.0.weekdays.#").HasValue("4"), + check.That(data.ResourceName).Key("retention_weekly.0.count").HasValue("42"), + check.That(data.ResourceName).Key("retention_weekly.0.weekdays.#").HasValue("4"), + check.That(data.ResourceName).Key("retention_monthly.0.count").HasValue("7"), + check.That(data.ResourceName).Key("retention_monthly.0.weekdays.#").HasValue("4"), + check.That(data.ResourceName).Key("retention_monthly.0.weeks.#").HasValue("2"), + check.That(data.ResourceName).Key("retention_yearly.0.count").HasValue("77"), + check.That(data.ResourceName).Key("retention_yearly.0.weekdays.#").HasValue("4"), + check.That(data.ResourceName).Key("retention_yearly.0.weeks.#").HasValue("2"), + check.That(data.ResourceName).Key("retention_yearly.0.months.#").HasValue("2"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccBackupProtectionPolicyVM_updateDailyToWeekly(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_backup_policy_vm", "test") + r := BackupProtectionPolicyVMResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicDaily(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basicWeekly(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccBackupProtectionPolicyVM_updateWeeklyToDaily(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_backup_policy_vm", "test") + r := BackupProtectionPolicyVMResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicWeekly(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basicDaily(data), + Check: resource.ComposeAggregateTestCheckFunc( + 
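+ // verify the in-place update from the basic weekly policy to the complete weekly policy, including every retention tier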
check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccBackupProtectionPolicyVM_updateWeeklyToPartial(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_backup_policy_vm", "test") + r := BackupProtectionPolicyVMResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.completeWeekly(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.completeWeeklyPartial(data), + Check: resource.ComposeAggregateTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t BackupProtectionPolicyVMResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + policyName := id.Path["backupPolicies"] + vaultName := id.Path["vaults"] + resourceGroup := id.ResourceGroup + + resp, err := clients.RecoveryServices.ProtectionPoliciesClient.Get(ctx, vaultName, resourceGroup, policyName) + if err != nil { + return nil, fmt.Errorf("reading Recovery Service Protection Policy (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (BackupProtectionPolicyVMResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-backup-%d" + location = "%s" +} + +resource "azurerm_recovery_services_vault" "test" { + name = "acctest-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + + soft_delete_enabled = false +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r BackupProtectionPolicyVMResource) basicDaily(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_backup_policy_vm" "test" { + name = "acctest-%d" + resource_group_name = azurerm_resource_group.test.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + + backup { + frequency = "Daily" + time = "23:00" + } + + retention_daily { + count = 10 + } +} +`, r.template(data), data.RandomInteger) +} + +func (r BackupProtectionPolicyVMResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_backup_policy_vm" "import" { + name = azurerm_backup_policy_vm.test.name + resource_group_name = azurerm_backup_policy_vm.test.resource_group_name + recovery_vault_name = azurerm_backup_policy_vm.test.recovery_vault_name + + backup { + frequency = "Daily" + time = "23:00" + } + + retention_daily { + count = 10 + } +} +`, r.template(data)) +} + +func (r BackupProtectionPolicyVMResource) basicWeekly(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_backup_policy_vm" "test" { + name = "acctest-%d" + resource_group_name = azurerm_resource_group.test.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + + backup { + frequency = "Weekly" + time = "23:00" + weekdays = ["Sunday", "Wednesday"] + } + + retention_weekly { + count = 42 + weekdays = ["Sunday", "Wednesday"] + } +} +`, r.template(data), data.RandomInteger) +} + +func (r BackupProtectionPolicyVMResource) completeDaily(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_backup_policy_vm" "test" { + name = "acctest-%d" + resource_group_name = 
azurerm_resource_group.test.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + + backup { + frequency = "Daily" + time = "23:00" + } + + retention_daily { + count = 10 + } + + retention_weekly { + count = 42 + weekdays = ["Sunday", "Wednesday"] + } + + retention_monthly { + count = 7 + weekdays = ["Sunday", "Wednesday"] + weeks = ["First", "Last"] + } + + retention_yearly { + count = 77 + weekdays = ["Sunday", "Wednesday"] + weeks = ["First", "Last"] + months = ["January", "July"] + } +} +`, r.template(data), data.RandomInteger) +} + +func (r BackupProtectionPolicyVMResource) completeWeekly(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_backup_policy_vm" "test" { + name = "acctest-%d" + resource_group_name = azurerm_resource_group.test.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + + backup { + frequency = "Weekly" + time = "23:00" + weekdays = ["Sunday", "Wednesday", "Friday", "Saturday"] + } + + retention_weekly { + count = 42 + weekdays = ["Sunday", "Wednesday", "Friday", "Saturday"] + } + + retention_monthly { + count = 7 + weekdays = ["Sunday", "Wednesday", "Friday", "Saturday"] + weeks = ["First", "Last"] + } + + retention_yearly { + count = 77 + weekdays = ["Sunday", "Wednesday", "Friday", "Saturday"] + weeks = ["First", "Last"] + months = ["January", "July"] + } +} +`, r.template(data), data.RandomInteger) +} + +func (r BackupProtectionPolicyVMResource) completeWeeklyPartial(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_backup_policy_vm" "test" { + name = "acctest-%d" + resource_group_name = azurerm_resource_group.test.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + + backup { + frequency = "Weekly" + time = "23:00" + weekdays = ["Sunday", "Wednesday", "Friday"] + } + + retention_weekly { + count = 42 + weekdays = ["Sunday", "Wednesday", "Friday"] + } + + retention_monthly { + count = 7 + weekdays = ["Sunday", "Wednesday"] + weeks = ["First", "Last"] + } + + retention_yearly { + count = 77 + weekdays = ["Sunday"] + weeks = ["Last"] + months = ["January"] + } +} +`, r.template(data), data.RandomInteger) +} + +func (r BackupProtectionPolicyVMResource) basicDailyWithInstantRestoreRetentionRange(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_backup_policy_vm" "test" { + name = "acctest-BPVM-%d" + resource_group_name = azurerm_resource_group.test.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + instant_restore_retention_days = 5 + backup { + frequency = "Daily" + time = "23:00" + } + + retention_daily { + count = 10 + } +} +`, r.template(data), data.RandomInteger) +} diff --git a/azurerm/internal/services/recoveryservices/backup_protected_file_share_resource.go b/azurerm/internal/services/recoveryservices/backup_protected_file_share_resource.go new file mode 100644 index 000000000000..e12f537aa225 --- /dev/null +++ b/azurerm/internal/services/recoveryservices/backup_protected_file_share_resource.go @@ -0,0 +1,290 @@ +package recoveryservices + +import ( + "context" + "fmt" + "log" + "strings" + "time" + + "github.com/Azure/azure-sdk-for-go/services/recoveryservices/mgmt/2019-05-13/backup" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/storage/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceBackupProtectedFileShare() *schema.Resource { + return &schema.Resource{ + Create: resourceBackupProtectedFileShareCreateUpdate, + Read: resourceBackupProtectedFileShareRead, + Update: resourceBackupProtectedFileShareCreateUpdate, + Delete: resourceBackupProtectedFileShareDelete, + + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(80 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(80 * time.Minute), + Delete: schema.DefaultTimeout(80 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + + "resource_group_name": azure.SchemaResourceGroupName(), + + "recovery_vault_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateRecoveryServicesVaultName, + }, + + "source_storage_account_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateResourceID, + }, + + "source_file_share_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.StorageShareName, + }, + + "backup_policy_id": { + Type: schema.TypeString, + Required: true, + ValidateFunc: azure.ValidateResourceID, + }, + }, + } +} + +func resourceBackupProtectedFileShareCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).RecoveryServices.ProtectedItemsClient + opClient := meta.(*clients.Client).RecoveryServices.BackupOperationStatusesClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + resourceGroup := d.Get("resource_group_name").(string) + + vaultName := d.Get("recovery_vault_name").(string) + storageAccountID := d.Get("source_storage_account_id").(string) + fileShareName := d.Get("source_file_share_name").(string) + policyID := d.Get("backup_policy_id").(string) + + // get storage account name from id + parsedStorageAccountID, err := azure.ParseAzureResourceID(storageAccountID) + if err != nil { + return fmt.Errorf("[ERROR] Unable to parse source_storage_account_id '%s': %+v", storageAccountID, err) + } + accountName, hasName := parsedStorageAccountID.Path["storageAccounts"] + if !hasName { + return fmt.Errorf("[ERROR] parsed source_storage_account_id '%s' doesn't contain 'storageAccounts'", storageAccountID) + } + + protectedItemName := fmt.Sprintf("AzureFileShare;%s", fileShareName) + containerName := fmt.Sprintf("StorageContainer;storage;%s;%s", parsedStorageAccountID.ResourceGroup, accountName) + + log.Printf("[DEBUG] Creating/updating Recovery Service Protected File Share %q (Container Name %q)", protectedItemName, containerName) + + if d.IsNewResource() { + existing, err2 := client.Get(ctx, vaultName, resourceGroup, "Azure", containerName, protectedItemName, "") + if err2 != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("Error checking for presence of existing Recovery Service Protected File Share %q (Resource Group %q): %+v", protectedItemName, resourceGroup, err2) + } + } + + if existing.ID != nil && *existing.ID != "" { + return 
tf.ImportAsExistsError("azurerm_backup_protected_file_share", *existing.ID) + } + } + + item := backup.ProtectedItemResource{ + Properties: &backup.AzureFileshareProtectedItem{ + PolicyID: &policyID, + ProtectedItemType: backup.ProtectedItemTypeAzureFileShareProtectedItem, + WorkloadType: backup.DataSourceTypeAzureFileShare, + SourceResourceID: utils.String(storageAccountID), + FriendlyName: utils.String(fileShareName), + }, + } + + resp, err := client.CreateOrUpdate(ctx, vaultName, resourceGroup, "Azure", containerName, protectedItemName, item) + if err != nil { + return fmt.Errorf("Error creating/updating Recovery Service Protected File Share %q (Resource Group %q): %+v", protectedItemName, resourceGroup, err) + } + + locationURL, err := resp.Response.Location() + if err != nil || locationURL == nil { + return fmt.Errorf("Error creating/updating Azure File Share backup item %q (Vault %q): Location header missing or empty", containerName, vaultName) + } + + opResourceID := azure.HandleAzureSdkForGoBug2824(locationURL.Path) + + parsedLocation, err := azure.ParseAzureResourceID(opResourceID) + if err != nil { + return err + } + operationID := parsedLocation.Path["operationResults"] + + if _, err := resourceBackupProtectedFileShareWaitForOperation(ctx, opClient, vaultName, resourceGroup, operationID, d); err != nil { + return err + } + + resp, err = client.Get(ctx, vaultName, resourceGroup, "Azure", containerName, protectedItemName, "") + + if err != nil { + return fmt.Errorf("Error creating/udpating Azure File Share backup item %q (Vault %q): %+v", protectedItemName, vaultName, err) + } + + id := strings.Replace(*resp.ID, "Subscriptions", "subscriptions", 1) // This code is a workaround for this bug https://github.com/Azure/azure-sdk-for-go/issues/2824 + d.SetId(id) + + return resourceBackupProtectedFileShareRead(d, meta) +} + +func resourceBackupProtectedFileShareRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).RecoveryServices.ProtectedItemsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + + protectedItemName := id.Path["protectedItems"] + vaultName := id.Path["vaults"] + resourceGroup := id.ResourceGroup + containerName := id.Path["protectionContainers"] + + log.Printf("[DEBUG] Reading Recovery Service Protected File Share %q (resource group %q)", protectedItemName, resourceGroup) + + resp, err := client.Get(ctx, vaultName, resourceGroup, "Azure", containerName, protectedItemName, "") + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + d.SetId("") + return nil + } + + return fmt.Errorf("Error making Read request on Recovery Service Protected File Share %q (Vault %q Resource Group %q): %+v", protectedItemName, vaultName, resourceGroup, err) + } + + d.Set("resource_group_name", resourceGroup) + d.Set("recovery_vault_name", vaultName) + + if properties := resp.Properties; properties != nil { + if item, ok := properties.AsAzureFileshareProtectedItem(); ok { + sourceResourceID := strings.Replace(*item.SourceResourceID, "Microsoft.storage", "Microsoft.Storage", 1) // The SDK is returning inconsistent capitalization + d.Set("source_storage_account_id", sourceResourceID) + d.Set("source_file_share_name", item.FriendlyName) + + if v := item.PolicyID; v != nil { + d.Set("backup_policy_id", strings.Replace(*v, "Subscriptions", "subscriptions", 1)) + } + } + } + + return nil +} + +func 
resourceBackupProtectedFileShareDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).RecoveryServices.ProtectedItemsClient + opClient := meta.(*clients.Client).RecoveryServices.BackupOperationStatusesClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + + protectedItemName := id.Path["protectedItems"] + resourceGroup := id.ResourceGroup + vaultName := id.Path["vaults"] + containerName := id.Path["protectionContainers"] + + log.Printf("[DEBUG] Deleting Recovery Service Protected Item %q (resource group %q)", protectedItemName, resourceGroup) + + resp, err := client.Delete(ctx, vaultName, resourceGroup, "Azure", containerName, protectedItemName) + if err != nil { + if !utils.ResponseWasNotFound(resp) { + return fmt.Errorf("Error issuing delete request for Recovery Service Protected File Share %q (Resource Group %q): %+v", protectedItemName, resourceGroup, err) + } + } + + locationURL, err := resp.Response.Location() + if err != nil || locationURL == nil { + return fmt.Errorf("Error deleting Azure File Share backups item %s (Vault %s): Location header missing or empty", containerName, vaultName) + } + + opResourceID := azure.HandleAzureSdkForGoBug2824(locationURL.Path) + + parsedLocation, err := azure.ParseAzureResourceID(opResourceID) + if err != nil { + return err + } + operationID := parsedLocation.Path["backupOperationResults"] // This is different for create and delete requests ¯\_(ツ)_/¯ + + if _, err := resourceBackupProtectedFileShareWaitForOperation(ctx, opClient, vaultName, resourceGroup, operationID, d); err != nil { + return err + } + + return nil +} + +// nolint unused - linter mistakenly things this function isn't used? 
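+// resourceBackupProtectedFileShareWaitForOperation polls the backup operation status until it reports Succeeded, or until the create/update timeout expires.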
+func resourceBackupProtectedFileShareWaitForOperation(ctx context.Context, client *backup.OperationStatusesClient, vaultName, resourceGroup, operationID string, d *schema.ResourceData) (backup.OperationStatus, error) { + state := &resource.StateChangeConf{ + MinTimeout: 10 * time.Second, + Delay: 10 * time.Second, + Pending: []string{"InProgress"}, + Target: []string{"Succeeded"}, + Refresh: resourceBackupProtectedFileShareCheckOperation(ctx, client, vaultName, resourceGroup, operationID), + } + + if d.IsNewResource() { + state.Timeout = d.Timeout(schema.TimeoutCreate) + } else { + state.Timeout = d.Timeout(schema.TimeoutUpdate) + } + + log.Printf("[DEBUG] Waiting for backup operation %s (Vault %s) to complete", operationID, vaultName) + resp, err := state.WaitForState() + if err != nil { + return resp.(backup.OperationStatus), err + } + return resp.(backup.OperationStatus), nil +} + +func resourceBackupProtectedFileShareCheckOperation(ctx context.Context, client *backup.OperationStatusesClient, vaultName, resourceGroup, operationID string) resource.StateRefreshFunc { + return func() (interface{}, string, error) { + resp, err := client.Get(ctx, vaultName, resourceGroup, operationID) + if err != nil { + return resp, "Error", fmt.Errorf("Error making Read request on Recovery Service Protection Container operation %q (Vault %q in Resource Group %q): %+v", operationID, vaultName, resourceGroup, err) + } + + if opErr := resp.Error; opErr != nil { + errMsg := "No upstream error message" + if opErr.Message != nil { + errMsg = *opErr.Message + } + err = fmt.Errorf("Azure Backup operation status failed with status %q (Vault %q Resource Group %q Operation ID %q): %+v", resp.Status, vaultName, resourceGroup, operationID, errMsg) + } + + log.Printf("[DEBUG] Backup operation %s status is %s", operationID, string(resp.Status)) + return resp, string(resp.Status), err + } +} diff --git a/azurerm/internal/services/recoveryservices/backup_protected_file_share_resource_test.go b/azurerm/internal/services/recoveryservices/backup_protected_file_share_resource_test.go new file mode 100644 index 000000000000..046a97ecd3d9 --- /dev/null +++ b/azurerm/internal/services/recoveryservices/backup_protected_file_share_resource_test.go @@ -0,0 +1,232 @@ +package recoveryservices_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type BackupProtectedFileShareResource struct { +} + +// TODO: These tests fail because enabling backup on file shares with no content +func TestAccBackupProtectedFileShare_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_backup_protected_file_share", "test") + r := BackupProtectedFileShareResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("resource_group_name").Exists(), + ), + }, + data.ImportStep(), + { + // vault cannot be deleted unless we unregister all backups + 
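+ // r.base omits the protected item and container registration, so applying it first lets the vault be destroyed cleanly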
Config: r.base(data), + }, + }) +} + +func TestAccBackupProtectedFileShare_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_backup_protected_file_share", "test") + r := BackupProtectedFileShareResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("resource_group_name").Exists(), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + { + // vault cannot be deleted unless we unregister all backups + Config: r.base(data), + }, + }) +} + +func TestAccBackupProtectedFileShare_updateBackupPolicyId(t *testing.T) { + fBackupPolicyResourceName := "azurerm_backup_policy_file_share.test1" + sBackupPolicyResourceName := "azurerm_backup_policy_file_share.test2" + + data := acceptance.BuildTestData(t, "azurerm_backup_protected_file_share", "test") + r := BackupProtectedFileShareResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + // Create resources and link first backup policy id + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrPair(data.ResourceName, "backup_policy_id", fBackupPolicyResourceName, "id"), + ), + }, + { + // Modify backup policy id to the second one + // Set Destroy false to prevent error from cleaning up dangling resource + Config: r.updatePolicy(data), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrPair(data.ResourceName, "backup_policy_id", sBackupPolicyResourceName, "id"), + ), + }, + { + // Remove protected items first before the associated policies are deleted + Config: r.base(data), + }, + }) +} + +func (t BackupProtectedFileShareResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + protectedItemName := id.Path["protectedItems"] + vaultName := id.Path["vaults"] + resourceGroup := id.ResourceGroup + containerName := id.Path["protectionContainers"] + + resp, err := clients.RecoveryServices.ProtectedItemsClient.Get(ctx, vaultName, resourceGroup, "Azure", containerName, protectedItemName, "") + if err != nil { + return nil, fmt.Errorf("reading Recovery Service Protected File Share (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (BackupProtectedFileShareResource) base(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-backup-%[1]d" + location = "%[2]s" +} + +resource "azurerm_storage_account" "test" { + name = "acctest%[3]s" + location = "${azurerm_resource_group.test.location}" + resource_group_name = "${azurerm_resource_group.test.name}" + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_storage_share" "test" { + name = "acctest-ss-%[1]d" + storage_account_name = "${azurerm_storage_account.test.name}" + metadata = {} + + lifecycle { + ignore_changes = [metadata] // Ignore changes Azure Backup makes to the metadata + } +} + +resource "azurerm_recovery_services_vault" "test" { + name = "acctest-VAULT-%[1]d" + location = "${azurerm_resource_group.test.location}" + resource_group_name = "${azurerm_resource_group.test.name}" + sku = "Standard" + + soft_delete_enabled = false +} + +resource "azurerm_backup_policy_file_share" "test1" { + name = "acctest-PFS-%[1]d" + resource_group_name = 
"${azurerm_resource_group.test.name}" + recovery_vault_name = "${azurerm_recovery_services_vault.test.name}" + + backup { + frequency = "Daily" + time = "23:00" + } + + retention_daily { + count = 10 + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString) +} + +func (r BackupProtectedFileShareResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_backup_container_storage_account" "test" { + resource_group_name = azurerm_resource_group.test.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + storage_account_id = azurerm_storage_account.test.id +} + +resource "azurerm_backup_protected_file_share" "test" { + resource_group_name = azurerm_resource_group.test.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + source_storage_account_id = azurerm_backup_container_storage_account.test.storage_account_id + source_file_share_name = azurerm_storage_share.test.name + backup_policy_id = azurerm_backup_policy_file_share.test1.id +} +`, r.base(data)) +} + +func (r BackupProtectedFileShareResource) updatePolicy(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_backup_policy_file_share" "test2" { + name = "acctest-%d-Secondary" + resource_group_name = azurerm_resource_group.test.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + + backup { + frequency = "Daily" + time = "23:00" + } + + retention_daily { + count = 10 + } +} + +resource "azurerm_backup_container_storage_account" "test" { + resource_group_name = azurerm_resource_group.test.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + storage_account_id = azurerm_storage_account.test.id +} + +resource "azurerm_backup_protected_file_share" "test" { + resource_group_name = azurerm_resource_group.test.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + source_storage_account_id = azurerm_backup_container_storage_account.test.storage_account_id + source_file_share_name = azurerm_storage_share.test.name + backup_policy_id = azurerm_backup_policy_file_share.test2.id +} +`, r.base(data), data.RandomInteger) +} + +func (r BackupProtectedFileShareResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_backup_protected_file_share" "test_import" { + resource_group_name = azurerm_resource_group.test.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + source_storage_account_id = azurerm_storage_account.test.id + source_file_share_name = azurerm_storage_share.test.name + backup_policy_id = azurerm_backup_policy_file_share.test1.id +} +`, r.base(data)) +} diff --git a/azurerm/internal/services/recoveryservices/backup_protected_vm_resource.go b/azurerm/internal/services/recoveryservices/backup_protected_vm_resource.go new file mode 100644 index 000000000000..e27d0cef7805 --- /dev/null +++ b/azurerm/internal/services/recoveryservices/backup_protected_vm_resource.go @@ -0,0 +1,281 @@ +package recoveryservices + +import ( + "context" + "fmt" + "log" + "strings" + "time" + + "github.com/Azure/azure-sdk-for-go/services/recoveryservices/mgmt/2019-05-13/backup" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceRecoveryServicesBackupProtectedVM() *schema.Resource { + return &schema.Resource{ + Create: resourceRecoveryServicesBackupProtectedVMCreateUpdate, + Read: resourceRecoveryServicesBackupProtectedVMRead, + Update: resourceRecoveryServicesBackupProtectedVMCreateUpdate, + Delete: resourceRecoveryServicesBackupProtectedVMDelete, + + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(80 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(80 * time.Minute), + Delete: schema.DefaultTimeout(80 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + + "resource_group_name": azure.SchemaResourceGroupName(), + + "recovery_vault_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateRecoveryServicesVaultName, + }, + + "source_vm_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateResourceID, + }, + + "backup_policy_id": { + Type: schema.TypeString, + Required: true, + ValidateFunc: azure.ValidateResourceID, + }, + + "tags": tags.Schema(), + }, + } +} + +func resourceRecoveryServicesBackupProtectedVMCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).RecoveryServices.ProtectedItemsClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + resourceGroup := d.Get("resource_group_name").(string) + t := d.Get("tags").(map[string]interface{}) + + vaultName := d.Get("recovery_vault_name").(string) + vmId := d.Get("source_vm_id").(string) + policyId := d.Get("backup_policy_id").(string) + + // get VM name from id + parsedVmId, err := azure.ParseAzureResourceID(vmId) + if err != nil { + return fmt.Errorf("[ERROR] Unable to parse source_vm_id '%s': %+v", vmId, err) + } + vmName, hasName := parsedVmId.Path["virtualMachines"] + if !hasName { + return fmt.Errorf("[ERROR] parsed source_vm_id '%s' doesn't contain 'virtualMachines'", vmId) + } + + protectedItemName := fmt.Sprintf("VM;iaasvmcontainerv2;%s;%s", parsedVmId.ResourceGroup, vmName) + containerName := fmt.Sprintf("iaasvmcontainer;iaasvmcontainerv2;%s;%s", parsedVmId.ResourceGroup, vmName) + + log.Printf("[DEBUG] Creating/updating Azure Backup Protected VM %s (resource group %q)", protectedItemName, resourceGroup) + + if d.IsNewResource() { + existing, err2 := client.Get(ctx, vaultName, resourceGroup, "Azure", containerName, protectedItemName, "") + if err2 != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("Error checking for presence of existing Azure Backup Protected VM %q (Resource Group %q): %+v", protectedItemName, resourceGroup, err2) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_backup_protected_vm", *existing.ID) + } + } + + item := backup.ProtectedItemResource{ + Tags: tags.Expand(t), + Properties: &backup.AzureIaaSComputeVMProtectedItem{ + PolicyID: &policyId, + ProtectedItemType: backup.ProtectedItemTypeMicrosoftClassicComputevirtualMachines, + WorkloadType: backup.DataSourceTypeVM, + 
SourceResourceID: utils.String(vmId), + FriendlyName: utils.String(vmName), + VirtualMachineID: utils.String(vmId), + }, + } + + if _, err = client.CreateOrUpdate(ctx, vaultName, resourceGroup, "Azure", containerName, protectedItemName, item); err != nil { + return fmt.Errorf("Error creating/updating Azure Backup Protected VM %q (Resource Group %q): %+v", protectedItemName, resourceGroup, err) + } + + resp, err := resourceRecoveryServicesBackupProtectedVMWaitForStateCreateUpdate(ctx, client, vaultName, resourceGroup, containerName, protectedItemName, policyId, d) + if err != nil { + return err + } + + id := strings.Replace(*resp.ID, "Subscriptions", "subscriptions", 1) // This code is a workaround for this bug https://github.com/Azure/azure-sdk-for-go/issues/2824 + d.SetId(id) + + return resourceRecoveryServicesBackupProtectedVMRead(d, meta) +} + +func resourceRecoveryServicesBackupProtectedVMRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).RecoveryServices.ProtectedItemsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + + protectedItemName := id.Path["protectedItems"] + vaultName := id.Path["vaults"] + resourceGroup := id.ResourceGroup + containerName := id.Path["protectionContainers"] + + log.Printf("[DEBUG] Reading Azure Backup Protected VM %q (resource group %q)", protectedItemName, resourceGroup) + + resp, err := client.Get(ctx, vaultName, resourceGroup, "Azure", containerName, protectedItemName, "") + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + d.SetId("") + return nil + } + + return fmt.Errorf("Error making Read request on Azure Backup Protected VM %q (Resource Group %q): %+v", protectedItemName, resourceGroup, err) + } + + d.Set("resource_group_name", resourceGroup) + d.Set("recovery_vault_name", vaultName) + + if properties := resp.Properties; properties != nil { + if vm, ok := properties.AsAzureIaaSComputeVMProtectedItem(); ok { + d.Set("source_vm_id", vm.SourceResourceID) + + if v := vm.PolicyID; v != nil { + d.Set("backup_policy_id", strings.Replace(*v, "Subscriptions", "subscriptions", 1)) + } + } + } + + return tags.FlattenAndSet(d, resp.Tags) +} + +func resourceRecoveryServicesBackupProtectedVMDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).RecoveryServices.ProtectedItemsClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + + protectedItemName := id.Path["protectedItems"] + resourceGroup := id.ResourceGroup + vaultName := id.Path["vaults"] + containerName := id.Path["protectionContainers"] + + log.Printf("[DEBUG] Deleting Azure Backup Protected Item %q (resource group %q)", protectedItemName, resourceGroup) + + resp, err := client.Delete(ctx, vaultName, resourceGroup, "Azure", containerName, protectedItemName) + if err != nil { + if !utils.ResponseWasNotFound(resp) { + return fmt.Errorf("Error issuing delete request for Azure Backup Protected VM %q (Resource Group %q): %+v", protectedItemName, resourceGroup, err) + } + } + + if _, err := resourceRecoveryServicesBackupProtectedVMWaitForDeletion(ctx, client, vaultName, resourceGroup, containerName, protectedItemName, "", d); err != nil { + return err + } + + return nil +} + +func resourceRecoveryServicesBackupProtectedVMWaitForStateCreateUpdate(ctx 
context.Context, client *backup.ProtectedItemsClient, vaultName, resourceGroup, containerName, protectedItemName string, policyId string, d *schema.ResourceData) (backup.ProtectedItemResource, error) { + state := &resource.StateChangeConf{ + MinTimeout: 30 * time.Second, + Delay: 10 * time.Second, + Pending: []string{"NotFound"}, + Target: []string{"Found"}, + Refresh: resourceRecoveryServicesBackupProtectedVMRefreshFunc(ctx, client, vaultName, resourceGroup, containerName, protectedItemName, policyId, true), + } + + if d.IsNewResource() { + state.Timeout = d.Timeout(schema.TimeoutCreate) + } else { + state.Timeout = d.Timeout(schema.TimeoutUpdate) + } + + resp, err := state.WaitForState() + if err != nil { + i, _ := resp.(backup.ProtectedItemResource) + return i, fmt.Errorf("Error waiting for the Azure Backup Protected VM %q to be true (Resource Group %q) to provision: %+v", protectedItemName, resourceGroup, err) + } + + return resp.(backup.ProtectedItemResource), nil +} + +func resourceRecoveryServicesBackupProtectedVMWaitForDeletion(ctx context.Context, client *backup.ProtectedItemsClient, vaultName, resourceGroup, containerName, protectedItemName string, policyId string, d *schema.ResourceData) (backup.ProtectedItemResource, error) { + state := &resource.StateChangeConf{ + MinTimeout: 30 * time.Second, + Delay: 10 * time.Second, + Pending: []string{"Found"}, + Target: []string{"NotFound"}, + Refresh: resourceRecoveryServicesBackupProtectedVMRefreshFunc(ctx, client, vaultName, resourceGroup, containerName, protectedItemName, policyId, false), + Timeout: d.Timeout(schema.TimeoutDelete), + } + + resp, err := state.WaitForState() + if err != nil { + i, _ := resp.(backup.ProtectedItemResource) + return i, fmt.Errorf("Error waiting for the Azure Backup Protected VM %q to be false (Resource Group %q) to provision: %+v", protectedItemName, resourceGroup, err) + } + + return resp.(backup.ProtectedItemResource), nil +} + +func resourceRecoveryServicesBackupProtectedVMRefreshFunc(ctx context.Context, client *backup.ProtectedItemsClient, vaultName, resourceGroup, containerName, protectedItemName string, policyId string, newResource bool) resource.StateRefreshFunc { + // TODO: split this into two functions + return func() (interface{}, string, error) { + resp, err := client.Get(ctx, vaultName, resourceGroup, "Azure", containerName, protectedItemName, "") + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return resp, "NotFound", nil + } + + return resp, "Error", fmt.Errorf("Error making Read request on Azure Backup Protected VM %q (Resource Group %q): %+v", protectedItemName, resourceGroup, err) + } else if !newResource && policyId != "" { + if properties := resp.Properties; properties != nil { + if vm, ok := properties.AsAzureIaaSComputeVMProtectedItem(); ok { + if v := vm.PolicyID; v != nil { + if strings.Replace(*v, "Subscriptions", "subscriptions", 1) != policyId { + return resp, "NotFound", nil + } + } else { + return resp, "Error", fmt.Errorf("Error reading policy ID attribute nil on Azure Backup Protected VM %q (Resource Group %q)", protectedItemName, resourceGroup) + } + } else { + return resp, "Error", fmt.Errorf("Error reading properties on Azure Backup Protected VM %q (Resource Group %q)", protectedItemName, resourceGroup) + } + } else { + return resp, "Error", fmt.Errorf("Error reading properties on empty Azure Backup Protected VM %q (Resource Group %q)", protectedItemName, resourceGroup) + } + } + return resp, "Found", nil + } +} diff --git 
a/azurerm/internal/services/recoveryservices/backup_protected_vm_resource_test.go b/azurerm/internal/services/recoveryservices/backup_protected_vm_resource_test.go new file mode 100644 index 000000000000..f6c0760839d8 --- /dev/null +++ b/azurerm/internal/services/recoveryservices/backup_protected_vm_resource_test.go @@ -0,0 +1,557 @@ +package recoveryservices_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type BackupProtectedVmResource struct { +} + +func TestAccBackupProtectedVm_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_backup_protected_vm", "test") + r := BackupProtectedVmResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("resource_group_name").Exists(), + ), + }, + data.ImportStep(), + { + // vault cannot be deleted unless we unregister all backups + Config: r.base(data), + }, + }) +} + +func TestAccBackupProtectedVm_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_backup_protected_vm", "test") + r := BackupProtectedVmResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("resource_group_name").Exists(), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + { + // vault cannot be deleted unless we unregister all backups + Config: r.base(data), + }, + }) +} + +func TestAccBackupProtectedVm_separateResourceGroups(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_backup_protected_vm", "test") + r := BackupProtectedVmResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.separateResourceGroups(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("resource_group_name").Exists(), + ), + }, + data.ImportStep(), + { + // vault cannot be deleted unless we unregister all backups + Config: r.additionalVault(data), + }, + }) +} + +func TestAccBackupProtectedVm_updateBackupPolicyId(t *testing.T) { + virtualMachine := "azurerm_virtual_machine.test" + fBackupPolicyResourceName := "azurerm_backup_policy_vm.test" + sBackupPolicyResourceName := "azurerm_backup_policy_vm.test_change_backup" + data := acceptance.BuildTestData(t, "azurerm_backup_protected_vm", "test") + r := BackupProtectedVmResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { // Create resources and link first backup policy id + ResourceName: fBackupPolicyResourceName, + Config: r.linkFirstBackupPolicy(data), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrPair(data.ResourceName, "backup_policy_id", fBackupPolicyResourceName, "id"), + ), + }, + { // Modify backup policy id to the second one + // Set Destroy false to prevent error from cleaning up dangling 
resource + ResourceName: sBackupPolicyResourceName, + Config: r.linkSecondBackupPolicy(data), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrPair(data.ResourceName, "backup_policy_id", sBackupPolicyResourceName, "id"), + ), + }, + { + // Remove backup policy link + // Backup policy link will need to be removed first so the VM's backup policy subsequently reverts to Default + // Azure API is quite sensitive, adding the step to control resource cleanup order + ResourceName: fBackupPolicyResourceName, + Config: r.withVM(data), + }, + { + // Then VM can be removed + ResourceName: virtualMachine, + Config: r.withSecondPolicy(data), + }, + { + // Remove backup policies and vault + ResourceName: data.ResourceName, + Config: r.basePolicyTest(data), + }, + }) +} + +func (t BackupProtectedVmResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + protectedItemName := id.Path["protectedItems"] + vaultName := id.Path["vaults"] + resourceGroup := id.ResourceGroup + containerName := id.Path["protectionContainers"] + + resp, err := clients.RecoveryServices.ProtectedItemsClient.Get(ctx, vaultName, resourceGroup, "Azure", containerName, protectedItemName, "") + if err != nil { + return nil, fmt.Errorf("reading Recovery Service Protected VM (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (BackupProtectedVmResource) base(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-backup-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "vnet" + location = azurerm_resource_group.test.location + address_space = ["10.0.0.0/16"] + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctest_subnet" + virtual_network_name = azurerm_virtual_network.test.name + resource_group_name = azurerm_resource_group.test.name + address_prefix = "10.0.10.0/24" +} + +resource "azurerm_network_interface" "test" { + name = "acctest_nic" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + ip_configuration { + name = "acctestipconfig" + subnet_id = azurerm_subnet.test.id + private_ip_address_allocation = "Dynamic" + public_ip_address_id = azurerm_public_ip.test.id + } +} + +resource "azurerm_public_ip" "test" { + name = "acctest-ip" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Dynamic" + domain_name_label = "acctestip%d" +} + +resource "azurerm_storage_account" "test" { + name = "acctest%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_managed_disk" "test" { + name = "acctest-datadisk" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + storage_account_type = "Standard_LRS" + create_option = "Empty" + disk_size_gb = "1023" +} + +resource "azurerm_virtual_machine" "test" { + name = "acctestvm" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + vm_size = "Standard_A0" + network_interface_ids = [azurerm_network_interface.test.id] + + 
storage_image_reference { + publisher = "Canonical" + offer = "UbuntuServer" + sku = "16.04-LTS" + version = "latest" + } + + storage_os_disk { + name = "acctest-osdisk" + managed_disk_type = "Standard_LRS" + caching = "ReadWrite" + create_option = "FromImage" + } + + storage_data_disk { + name = "acctest-datadisk" + managed_disk_id = azurerm_managed_disk.test.id + managed_disk_type = "Standard_LRS" + disk_size_gb = azurerm_managed_disk.test.disk_size_gb + create_option = "Attach" + lun = 0 + } + + os_profile { + computer_name = "acctest" + admin_username = "vmadmin" + admin_password = "Password123!@#" + } + + os_profile_linux_config { + disable_password_authentication = false + } + + boot_diagnostics { + enabled = true + storage_uri = azurerm_storage_account.test.primary_blob_endpoint + } +} + +resource "azurerm_recovery_services_vault" "test" { + name = "acctest-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + + soft_delete_enabled = false +} + +resource "azurerm_backup_policy_vm" "test" { + name = "acctest-%d" + resource_group_name = azurerm_resource_group.test.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + + backup { + frequency = "Daily" + time = "23:00" + } + + retention_daily { + count = 10 + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomString, data.RandomInteger, data.RandomInteger) +} + +func (r BackupProtectedVmResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_backup_protected_vm" "test" { + resource_group_name = azurerm_resource_group.test.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + source_vm_id = azurerm_virtual_machine.test.id + backup_policy_id = azurerm_backup_policy_vm.test.id +} +`, r.base(data)) +} + +// For update backup policy id test +func (BackupProtectedVmResource) basePolicyTest(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-backup-%d-1" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "vnet" + location = azurerm_resource_group.test.location + address_space = ["10.0.0.0/16"] + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "acctest_subnet" + virtual_network_name = azurerm_virtual_network.test.name + resource_group_name = azurerm_resource_group.test.name + address_prefix = "10.0.10.0/24" +} + +resource "azurerm_network_interface" "test" { + name = "acctest_nic" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + ip_configuration { + name = "acctestipconfig" + subnet_id = azurerm_subnet.test.id + private_ip_address_allocation = "Dynamic" + public_ip_address_id = azurerm_public_ip.test.id + } +} + +resource "azurerm_public_ip" "test" { + name = "acctest-ip" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Dynamic" + domain_name_label = "acctestip%d" +} + +resource "azurerm_storage_account" "test" { + name = "acctest%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_managed_disk" "test" { + name = "acctest-datadisk" + location = azurerm_resource_group.test.location + 
resource_group_name = azurerm_resource_group.test.name + storage_account_type = "Standard_LRS" + create_option = "Empty" + disk_size_gb = "1023" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomString) +} + +// For update backup policy id test +func (r BackupProtectedVmResource) withVault(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_recovery_services_vault" "test" { + name = "acctest-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + + soft_delete_enabled = false +} +`, r.base(data), data.RandomInteger) +} + +// For update backup policy id test +func (r BackupProtectedVmResource) withFirstPolicy(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_backup_policy_vm" "test" { + name = "acctest-%d" + resource_group_name = azurerm_resource_group.test.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + + backup { + frequency = "Daily" + time = "23:00" + } + + retention_daily { + count = 10 + } +} +`, r.withVault(data), data.RandomInteger) +} + +// For update backup policy id test +func (r BackupProtectedVmResource) withSecondPolicy(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_backup_policy_vm" "test_change_backup" { + name = "acctest2-%d" + resource_group_name = azurerm_resource_group.test.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + + backup { + frequency = "Daily" + time = "23:00" + } + + retention_daily { + count = 15 + } +} +`, r.withFirstPolicy(data), data.RandomInteger) +} + +// For update backup policy id test +func (r BackupProtectedVmResource) withVM(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_virtual_machine" "test" { + name = "acctestvm-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + vm_size = "Standard_A0" + network_interface_ids = [azurerm_network_interface.test.id] + delete_os_disk_on_termination = true + + storage_image_reference { + publisher = "Canonical" + offer = "UbuntuServer" + sku = "16.04-LTS" + version = "latest" + } + + storage_os_disk { + name = "acctest-osdisk" + managed_disk_type = "Standard_LRS" + caching = "ReadWrite" + create_option = "FromImage" + } + + storage_data_disk { + name = "acctest-datadisk" + managed_disk_id = azurerm_managed_disk.test.id + managed_disk_type = "Standard_LRS" + disk_size_gb = azurerm_managed_disk.test.disk_size_gb + create_option = "Attach" + lun = 0 + } + + os_profile { + computer_name = "acctest" + admin_username = "vmadmin" + admin_password = "Password123!@#" + } + + os_profile_linux_config { + disable_password_authentication = false + } + + boot_diagnostics { + enabled = true + storage_uri = azurerm_storage_account.test.primary_blob_endpoint + } +} +`, r.withSecondPolicy(data), data.RandomInteger) +} + +// For update backup policy id test +func (r BackupProtectedVmResource) linkFirstBackupPolicy(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_backup_protected_vm" "test" { + resource_group_name = azurerm_resource_group.test.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + source_vm_id = azurerm_virtual_machine.test.id + backup_policy_id = azurerm_backup_policy_vm.test.id +} +`, r.withVM(data)) +} + +// For update backup policy id test +func (r BackupProtectedVmResource) linkSecondBackupPolicy(data 
acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_backup_protected_vm" "test" { + resource_group_name = azurerm_resource_group.test.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + source_vm_id = azurerm_virtual_machine.test.id + backup_policy_id = azurerm_backup_policy_vm.test_change_backup.id +} +`, r.withVM(data)) +} + +func (r BackupProtectedVmResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_backup_protected_vm" "import" { + resource_group_name = azurerm_backup_protected_vm.test.resource_group_name + recovery_vault_name = azurerm_backup_protected_vm.test.recovery_vault_name + source_vm_id = azurerm_backup_protected_vm.test.source_vm_id + backup_policy_id = azurerm_backup_protected_vm.test.backup_policy_id +} +`, r.basic(data)) +} + +func (r BackupProtectedVmResource) additionalVault(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_resource_group" "test2" { + name = "acctestRG-backup-%d-2" + location = "%s" +} + +resource "azurerm_recovery_services_vault" "test2" { + name = "acctest2-%d" + location = azurerm_resource_group.test2.location + resource_group_name = azurerm_resource_group.test2.name + sku = "Standard" + + soft_delete_enabled = false +} + +resource "azurerm_backup_policy_vm" "test2" { + name = "acctest2-%d" + resource_group_name = azurerm_resource_group.test2.name + recovery_vault_name = azurerm_recovery_services_vault.test2.name + + backup { + frequency = "Daily" + time = "23:00" + } + + retention_daily { + count = 10 + } +} +`, r.base(data), data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r BackupProtectedVmResource) separateResourceGroups(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_backup_protected_vm" "test" { + resource_group_name = azurerm_resource_group.test2.name + recovery_vault_name = azurerm_recovery_services_vault.test2.name + backup_policy_id = azurerm_backup_policy_vm.test2.id + source_vm_id = azurerm_virtual_machine.test.id +} +`, r.additionalVault(data)) +} diff --git a/azurerm/internal/services/recoveryservices/data_source_backup_policy_vm.go b/azurerm/internal/services/recoveryservices/data_source_backup_policy_vm.go deleted file mode 100644 index 626a073105c9..000000000000 --- a/azurerm/internal/services/recoveryservices/data_source_backup_policy_vm.go +++ /dev/null @@ -1,68 +0,0 @@ -package recoveryservices - -import ( - "fmt" - "log" - "strings" - "time" - - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func dataSourceArmBackupPolicyVm() *schema.Resource { - return &schema.Resource{ - Read: dataSourceArmBackupPolicyVmRead, - - Timeouts: &schema.ResourceTimeout{ - Read: schema.DefaultTimeout(5 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - }, - - "recovery_vault_name": { - Type: schema.TypeString, - Required: true, - ValidateFunc: azure.ValidateRecoveryServicesVaultName, - }, - - "resource_group_name": 
azure.SchemaResourceGroupNameForDataSource(), - - "tags": tags.SchemaDataSource(), - }, - } -} - -func dataSourceArmBackupPolicyVmRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).RecoveryServices.ProtectionPoliciesClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := d.Get("name").(string) - resourceGroup := d.Get("resource_group_name").(string) - vaultName := d.Get("recovery_vault_name").(string) - - log.Printf("[DEBUG] Reading Recovery Service Policy %q (resource group %q)", name, resourceGroup) - - protectionPolicy, err := client.Get(ctx, vaultName, resourceGroup, name) - if err != nil { - if utils.ResponseWasNotFound(protectionPolicy.Response) { - return fmt.Errorf("Error: Backup Policy %q (Resource Group %q) was not found", name, resourceGroup) - } - - return fmt.Errorf("Error making Read request on Backup Policy %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - id := strings.Replace(*protectionPolicy.ID, "Subscriptions", "subscriptions", 1) - d.SetId(id) - - return tags.FlattenAndSet(d, protectionPolicy.Tags) -} diff --git a/azurerm/internal/services/recoveryservices/data_source_recovery_services_vault.go b/azurerm/internal/services/recoveryservices/data_source_recovery_services_vault.go deleted file mode 100644 index a4df80744156..000000000000 --- a/azurerm/internal/services/recoveryservices/data_source_recovery_services_vault.go +++ /dev/null @@ -1,73 +0,0 @@ -package recoveryservices - -import ( - "fmt" - "log" - "time" - - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func dataSourceArmRecoveryServicesVault() *schema.Resource { - return &schema.Resource{ - Read: dataSourceArmRecoveryServicesVaultRead, - - Timeouts: &schema.ResourceTimeout{ - Read: schema.DefaultTimeout(5 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - }, - - "location": azure.SchemaLocationForDataSource(), - - "resource_group_name": azure.SchemaResourceGroupNameForDataSource(), - - "tags": tags.SchemaDataSource(), - - "sku": { - Type: schema.TypeString, - Computed: true, - }, - }, - } -} - -func dataSourceArmRecoveryServicesVaultRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).RecoveryServices.VaultsClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := d.Get("name").(string) - resourceGroup := d.Get("resource_group_name").(string) - - log.Printf("[DEBUG] Reading Recovery Service Vault %q (resource group %q)", name, resourceGroup) - - vault, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if utils.ResponseWasNotFound(vault.Response) { - return fmt.Errorf("Error: Recovery Services Vault %q (Resource Group %q) was not found", name, resourceGroup) - } - - return fmt.Errorf("Error making Read request on Recovery Service Vault %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - d.SetId(*vault.ID) - d.Set("name", vault.Name) - d.Set("location", azure.NormalizeLocation(*vault.Location)) - 
d.Set("resource_group_name", resourceGroup) - - if sku := vault.Sku; sku != nil { - d.Set("sku", string(sku.Name)) - } - - return tags.FlattenAndSet(d, vault.Tags) -} diff --git a/azurerm/internal/services/recoveryservices/recovery_services_vault_data_source.go b/azurerm/internal/services/recoveryservices/recovery_services_vault_data_source.go new file mode 100644 index 000000000000..651f76ac4dd5 --- /dev/null +++ b/azurerm/internal/services/recoveryservices/recovery_services_vault_data_source.go @@ -0,0 +1,73 @@ +package recoveryservices + +import ( + "fmt" + "log" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func dataSourceRecoveryServicesVault() *schema.Resource { + return &schema.Resource{ + Read: dataSourceRecoveryServicesVaultRead, + + Timeouts: &schema.ResourceTimeout{ + Read: schema.DefaultTimeout(5 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + }, + + "location": azure.SchemaLocationForDataSource(), + + "resource_group_name": azure.SchemaResourceGroupNameForDataSource(), + + "tags": tags.SchemaDataSource(), + + "sku": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} + +func dataSourceRecoveryServicesVaultRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).RecoveryServices.VaultsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + + log.Printf("[DEBUG] Reading Recovery Service Vault %q (resource group %q)", name, resourceGroup) + + vault, err := client.Get(ctx, resourceGroup, name) + if err != nil { + if utils.ResponseWasNotFound(vault.Response) { + return fmt.Errorf("Error: Recovery Services Vault %q (Resource Group %q) was not found", name, resourceGroup) + } + + return fmt.Errorf("Error making Read request on Recovery Service Vault %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + d.SetId(*vault.ID) + d.Set("name", vault.Name) + d.Set("location", azure.NormalizeLocation(*vault.Location)) + d.Set("resource_group_name", resourceGroup) + + if sku := vault.Sku; sku != nil { + d.Set("sku", string(sku.Name)) + } + + return tags.FlattenAndSet(d, vault.Tags) +} diff --git a/azurerm/internal/services/recoveryservices/recovery_services_vault_data_source_test.go b/azurerm/internal/services/recoveryservices/recovery_services_vault_data_source_test.go new file mode 100644 index 000000000000..fec92980479d --- /dev/null +++ b/azurerm/internal/services/recoveryservices/recovery_services_vault_data_source_test.go @@ -0,0 +1,42 @@ +package recoveryservices_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type RecoveryServicesVaultDataSource struct { +} + +func TestAccDataSourceAzureRMRecoveryServicesVault_basic(t *testing.T) { + data := 
acceptance.BuildTestData(t, "data.azurerm_recovery_services_vault", "test") + r := RecoveryServicesVaultDataSource{} + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("name").Exists(), + check.That(data.ResourceName).Key("location").Exists(), + check.That(data.ResourceName).Key("resource_group_name").Exists(), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + check.That(data.ResourceName).Key("sku").HasValue("Standard"), + ), + }, + }) +} + +func (RecoveryServicesVaultDataSource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_recovery_services_vault" "test" { + name = azurerm_recovery_services_vault.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, RecoveryServicesVaultResource{}.basic(data)) +} diff --git a/azurerm/internal/services/recoveryservices/recovery_services_vault_resource.go b/azurerm/internal/services/recoveryservices/recovery_services_vault_resource.go new file mode 100644 index 000000000000..ac3c5d61e641 --- /dev/null +++ b/azurerm/internal/services/recoveryservices/recovery_services_vault_resource.go @@ -0,0 +1,304 @@ +package recoveryservices + +import ( + "fmt" + "log" + "strings" + "time" + + "github.com/Azure/azure-sdk-for-go/services/recoveryservices/mgmt/2016-06-01/recoveryservices" + "github.com/Azure/azure-sdk-for-go/services/recoveryservices/mgmt/2019-05-13/backup" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceRecoveryServicesVault() *schema.Resource { + return &schema.Resource{ + Create: resourceRecoveryServicesVaultCreateUpdate, + Read: resourceRecoveryServicesVaultRead, + Update: resourceRecoveryServicesVaultCreateUpdate, + Delete: resourceRecoveryServicesVaultDelete, + + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateRecoveryServicesVaultName, + }, + + "location": azure.SchemaLocation(), + + "resource_group_name": azure.SchemaResourceGroupName(), + + "identity": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "type": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{ + string(recoveryservices.SystemAssigned), + }, false), + }, + + "principal_id": { + Type: schema.TypeString, + Computed: true, + }, + + "tenant_id": { + 
Type: schema.TypeString, + Computed: true, + }, + }, + }, + }, + + "tags": tags.Schema(), + + "sku": { + Type: schema.TypeString, + Required: true, + DiffSuppressFunc: suppress.CaseDifference, + ValidateFunc: validation.StringInSlice([]string{ + string(recoveryservices.RS0), + string(recoveryservices.Standard), + }, true), + }, + + "soft_delete_enabled": { + Type: schema.TypeBool, + Optional: true, + Default: true, + }, + }, + } +} + +func resourceRecoveryServicesVaultCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).RecoveryServices.VaultsClient + cfgsClient := meta.(*clients.Client).RecoveryServices.VaultsConfigsClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + location := d.Get("location").(string) + resourceGroup := d.Get("resource_group_name").(string) + t := d.Get("tags").(map[string]interface{}) + + log.Printf("[DEBUG] Creating/updating Recovery Service Vault %q (resource group %q)", name, resourceGroup) + + if d.IsNewResource() { + existing, err := client.Get(ctx, resourceGroup, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("Error checking for presence of existing Recovery Service Vault %q (Resource Group %q): %+v", name, resourceGroup, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_recovery_services_vault", *existing.ID) + } + } + + vault := recoveryservices.Vault{ + Location: utils.String(location), + Tags: tags.Expand(t), + Identity: expandValutIdentity(d.Get("identity").([]interface{})), + Sku: &recoveryservices.Sku{ + Name: recoveryservices.SkuName(d.Get("sku").(string)), + }, + Properties: &recoveryservices.VaultProperties{}, + } + + vault, err := client.CreateOrUpdate(ctx, resourceGroup, name, vault) + if err != nil { + return fmt.Errorf("Error creating/updating Recovery Service Vault %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + cfg := backup.ResourceVaultConfigResource{ + Properties: &backup.ResourceVaultConfig{ + EnhancedSecurityState: backup.EnhancedSecurityStateEnabled, // always enabled + }, + } + + if sd := d.Get("soft_delete_enabled").(bool); sd { + cfg.Properties.SoftDeleteFeatureState = backup.SoftDeleteFeatureStateEnabled + } else { + cfg.Properties.SoftDeleteFeatureState = backup.SoftDeleteFeatureStateDisabled + } + + stateConf := &resource.StateChangeConf{ + Pending: []string{"syncing"}, + Target: []string{"success"}, + MinTimeout: 30 * time.Second, + Refresh: func() (interface{}, string, error) { + resp, err := cfgsClient.Update(ctx, name, resourceGroup, cfg) + if err != nil { + if strings.Contains(err.Error(), "ResourceNotYetSynced") { + return resp, "syncing", nil + } + return resp, "error", fmt.Errorf("Error updating Recovery Service Vault Cfg %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + return resp, "success", nil + }, + } + + if d.IsNewResource() { + stateConf.Timeout = d.Timeout(schema.TimeoutCreate) + } else { + stateConf.Timeout = d.Timeout(schema.TimeoutUpdate) + } + + if _, err := stateConf.WaitForState(); err != nil { + return fmt.Errorf("Error waiting for on update for Recovery Service Vault %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + read, err := client.Get(ctx, resourceGroup, name) + if err != nil { + return fmt.Errorf("Error issuing read request for Recovery Service Vault %q (Resource Group %q): %+v", name, resourceGroup, err) + } + if read.ID == 
nil { + return fmt.Errorf("Error Recovery Service Vault %q (Resource Group %q): read returned nil", name, resourceGroup) + } + + d.SetId(*vault.ID) + + return resourceRecoveryServicesVaultRead(d, meta) +} + +func resourceRecoveryServicesVaultRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).RecoveryServices.VaultsClient + cfgsClient := meta.(*clients.Client).RecoveryServices.VaultsConfigsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + + name := id.Path["vaults"] + resourceGroup := id.ResourceGroup + + log.Printf("[DEBUG] Reading Recovery Service Vault %q (resource group %q)", name, resourceGroup) + + resp, err := client.Get(ctx, resourceGroup, name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + d.SetId("") + return nil + } + + return fmt.Errorf("Error making Read request on Recovery Service Vault %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + d.Set("name", resp.Name) + d.Set("resource_group_name", resourceGroup) + if location := resp.Location; location != nil { + d.Set("location", azure.NormalizeLocation(*location)) + } + + if sku := resp.Sku; sku != nil { + d.Set("sku", string(sku.Name)) + } + + cfg, err := cfgsClient.Get(ctx, name, resourceGroup) + if err != nil { + return fmt.Errorf("Error reading Recovery Service Vault Cfg %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + if props := cfg.Properties; props != nil { + d.Set("soft_delete_enabled", props.SoftDeleteFeatureState == backup.SoftDeleteFeatureStateEnabled) + } + + if err := d.Set("identity", flattenVaultIdentity(resp.Identity)); err != nil { + return fmt.Errorf("Error setting `identity`: %+v", err) + } + + return tags.FlattenAndSet(d, resp.Tags) +} + +func resourceRecoveryServicesVaultDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).RecoveryServices.VaultsClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + + name := id.Path["vaults"] + resourceGroup := id.ResourceGroup + + log.Printf("[DEBUG] Deleting Recovery Service Vault %q (resource group %q)", name, resourceGroup) + + resp, err := client.Delete(ctx, resourceGroup, name) + if err != nil { + if !utils.ResponseWasNotFound(resp) { + return fmt.Errorf("Error issuing delete request for Recovery Service Vault %q (Resource Group %q): %+v", name, resourceGroup, err) + } + } + + return nil +} + +func expandValutIdentity(input []interface{}) *recoveryservices.IdentityData { + if len(input) == 0 { + return nil + } + + v := input[0].(map[string]interface{}) + return &recoveryservices.IdentityData{ + Type: recoveryservices.ResourceIdentityType(v["type"].(string)), + } +} + +func flattenVaultIdentity(input *recoveryservices.IdentityData) []interface{} { + if input == nil { + return []interface{}{} + } + + principalID := "" + if input.PrincipalID != nil { + principalID = *input.PrincipalID + } + + tenantID := "" + if input.TenantID != nil { + tenantID = *input.TenantID + } + + return []interface{}{ + map[string]interface{}{ + "type": string(input.Type), + "principal_id": principalID, + "tenant_id": tenantID, + }, + } +} diff --git a/azurerm/internal/services/recoveryservices/recovery_services_vault_resource_test.go 
b/azurerm/internal/services/recoveryservices/recovery_services_vault_resource_test.go new file mode 100644 index 000000000000..ffe3d70d3ee4 --- /dev/null +++ b/azurerm/internal/services/recoveryservices/recovery_services_vault_resource_test.go @@ -0,0 +1,212 @@ +package recoveryservices_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type RecoveryServicesVaultResource struct { +} + +func TestAccRecoveryServicesVault_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_recovery_services_vault", "test") + r := RecoveryServicesVaultResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccRecoveryServicesVault_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_recovery_services_vault", "test") + r := RecoveryServicesVaultResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccRecoveryServicesVault_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_recovery_services_vault", "test") + r := RecoveryServicesVaultResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccRecoveryServicesVault_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_recovery_services_vault", "test") + r := RecoveryServicesVaultResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("name").Exists(), + check.That(data.ResourceName).Key("location").Exists(), + check.That(data.ResourceName).Key("resource_group_name").Exists(), + check.That(data.ResourceName).Key("tags.%").HasValue("0"), + check.That(data.ResourceName).Key("sku").HasValue("Standard"), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccRecoveryServicesVault_basicWithIdentity(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_recovery_services_vault", "test") + r := RecoveryServicesVaultResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basicWithIdentity(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + 
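+// Exists is the acceptance-test existence check for azurerm_recovery_services_vault:
+// it parses the vault name and resource group out of the state ID and asks the
+// Vaults client whether the resource is still present in Azure, returning *bool
+// as the acceptance framework expects.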
+func (t RecoveryServicesVaultResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + name := id.Path["vaults"] + resourceGroup := id.ResourceGroup + + resp, err := clients.RecoveryServices.VaultsClient.Get(ctx, resourceGroup, name) + if err != nil { + return nil, fmt.Errorf("reading Recovery Service Vault (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (RecoveryServicesVaultResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-recovery-%d" + location = "%s" +} + +resource "azurerm_recovery_services_vault" "test" { + name = "acctest-Vault-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + + soft_delete_enabled = false +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (RecoveryServicesVaultResource) basicWithIdentity(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-recovery-%d" + location = "%s" +} + +resource "azurerm_recovery_services_vault" "test" { + name = "acctest-Vault-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + + identity { + type = "SystemAssigned" + } + + soft_delete_enabled = false +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (RecoveryServicesVaultResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-recovery-%d" + location = "%s" +} + +resource "azurerm_recovery_services_vault" "test" { + name = "acctest-Vault-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + + soft_delete_enabled = false +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r RecoveryServicesVaultResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_recovery_services_vault" "import" { + name = azurerm_recovery_services_vault.test.name + location = azurerm_recovery_services_vault.test.location + resource_group_name = azurerm_recovery_services_vault.test.resource_group_name + sku = azurerm_recovery_services_vault.test.sku +} +`, r.basic(data)) +} diff --git a/azurerm/internal/services/recoveryservices/registration.go b/azurerm/internal/services/recoveryservices/registration.go index 4ad4baf1c1ed..b2344add2531 100644 --- a/azurerm/internal/services/recoveryservices/registration.go +++ b/azurerm/internal/services/recoveryservices/registration.go @@ -21,25 +21,26 @@ func (r Registration) WebsiteCategories() []string { // SupportedDataSources returns the supported Data Sources supported by this Service func (r Registration) SupportedDataSources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_recovery_services_vault": dataSourceArmRecoveryServicesVault(), - "azurerm_backup_policy_vm": dataSourceArmBackupPolicyVm(), + "azurerm_recovery_services_vault": dataSourceRecoveryServicesVault(), + "azurerm_backup_policy_vm": dataSourceBackupPolicyVm(), } } +// todo 
- this package should probably be split into backup, recovery, and site recovery? // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_backup_container_storage_account": resourceArmBackupProtectionContainerStorageAccount(), - "azurerm_backup_policy_file_share": resourceArmBackupProtectionPolicyFileShare(), - "azurerm_backup_protected_file_share": resourceArmBackupProtectedFileShare(), - "azurerm_backup_protected_vm": resourceArmRecoveryServicesBackupProtectedVM(), - "azurerm_backup_policy_vm": resourceArmBackupProtectionPolicyVM(), - "azurerm_recovery_services_vault": resourceArmRecoveryServicesVault(), - "azurerm_site_recovery_fabric": resourceArmSiteRecoveryFabric(), - "azurerm_site_recovery_network_mapping": resourceArmSiteRecoveryNetworkMapping(), - "azurerm_site_recovery_protection_container": resourceArmSiteRecoveryProtectionContainer(), - "azurerm_site_recovery_protection_container_mapping": resourceArmSiteRecoveryProtectionContainerMapping(), - "azurerm_site_recovery_replicated_vm": resourceArmSiteRecoveryReplicatedVM(), - "azurerm_site_recovery_replication_policy": resourceArmSiteRecoveryReplicationPolicy(), + "azurerm_backup_container_storage_account": resourceBackupProtectionContainerStorageAccount(), + "azurerm_backup_policy_file_share": resourceBackupProtectionPolicyFileShare(), + "azurerm_backup_protected_file_share": resourceBackupProtectedFileShare(), + "azurerm_backup_protected_vm": resourceRecoveryServicesBackupProtectedVM(), + "azurerm_backup_policy_vm": resourceBackupProtectionPolicyVM(), + "azurerm_recovery_services_vault": resourceRecoveryServicesVault(), + "azurerm_site_recovery_fabric": resourceSiteRecoveryFabric(), + "azurerm_site_recovery_network_mapping": resourceSiteRecoveryNetworkMapping(), + "azurerm_site_recovery_protection_container": resourceSiteRecoveryProtectionContainer(), + "azurerm_site_recovery_protection_container_mapping": resourceSiteRecoveryProtectionContainerMapping(), + "azurerm_site_recovery_replicated_vm": resourceSiteRecoveryReplicatedVM(), + "azurerm_site_recovery_replication_policy": resourceSiteRecoveryReplicationPolicy(), } } diff --git a/azurerm/internal/services/recoveryservices/resource_arm_backup_container_storage_account.go b/azurerm/internal/services/recoveryservices/resource_arm_backup_container_storage_account.go deleted file mode 100644 index d6088fad2b80..000000000000 --- a/azurerm/internal/services/recoveryservices/resource_arm_backup_container_storage_account.go +++ /dev/null @@ -1,247 +0,0 @@ -package recoveryservices - -import ( - "context" - "fmt" - "log" - "time" - - "github.com/Azure/azure-sdk-for-go/services/recoveryservices/mgmt/2019-05-13/backup" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmBackupProtectionContainerStorageAccount() *schema.Resource { - return &schema.Resource{ - Create: resourceArmBackupProtectionContainerStorageAccountCreate, - Read: 
resourceArmBackupProtectionContainerStorageAccountRead, - Update: nil, - Delete: resourceArmBackupProtectionContainerStorageAccountDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "resource_group_name": azure.SchemaResourceGroupName(), - - "recovery_vault_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateRecoveryServicesVaultName, - }, - "storage_account_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateResourceID, - }, - }, - } -} - -func resourceArmBackupProtectionContainerStorageAccountCreate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).RecoveryServices.BackupProtectionContainersClient - opStatusClient := meta.(*clients.Client).RecoveryServices.BackupOperationStatusesClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - resGroup := d.Get("resource_group_name").(string) - vaultName := d.Get("recovery_vault_name").(string) - storageAccountID := d.Get("storage_account_id").(string) - - parsedStorageAccountID, err := azure.ParseAzureResourceID(storageAccountID) - if err != nil { - return fmt.Errorf("[ERROR] Unable to parse storage_account_id '%s': %+v", storageAccountID, err) - } - accountName, hasName := parsedStorageAccountID.Path["storageAccounts"] - if !hasName { - return fmt.Errorf("[ERROR] parsed storage_account_id '%s' doesn't contain 'storageAccounts'", storageAccountID) - } - - containerName := fmt.Sprintf("StorageContainer;storage;%s;%s", parsedStorageAccountID.ResourceGroup, accountName) - - if d.IsNewResource() { - existing, err := client.Get(ctx, vaultName, resGroup, "Azure", containerName) - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing recovery services protection container %s (Vault %s): %+v", containerName, vaultName, err) - } - } - - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_backup_protection_container_storage", azure.HandleAzureSdkForGoBug2824(*existing.ID)) - } - } - - parameters := backup.ProtectionContainerResource{ - Properties: &backup.AzureStorageContainer{ - SourceResourceID: &storageAccountID, - FriendlyName: &accountName, - BackupManagementType: backup.ManagementTypeAzureStorage, - ContainerType: backup.ContainerTypeStorageContainer1, - }, - } - - resp, err := client.Register(ctx, vaultName, resGroup, "Azure", containerName, parameters) - if err != nil { - return fmt.Errorf("Error registering backup protection container %s (Vault %s): %+v", containerName, vaultName, err) - } - - locationURL, err := resp.Response.Location() // Operation ID found in the Location header - if locationURL == nil || err != nil { - return fmt.Errorf("Unable to determine operation URL for protection container registration status for %s. 
(Vault %s): Location header missing or empty", containerName, vaultName) - } - - opResourceID := azure.HandleAzureSdkForGoBug2824(locationURL.Path) - - parsedLocation, err := azure.ParseAzureResourceID(opResourceID) - if err != nil { - return err - } - - operationID := parsedLocation.Path["operationResults"] - if _, err = resourceArmBackupProtectionContainerStorageAccountWaitForOperation(ctx, opStatusClient, vaultName, resGroup, operationID, d); err != nil { - return err - } - - resp, err = client.Get(ctx, vaultName, resGroup, "Azure", containerName) - if err != nil { - return fmt.Errorf("Error retrieving site recovery protection container %s (Vault %s): %+v", containerName, vaultName, err) - } - - d.SetId(azure.HandleAzureSdkForGoBug2824(*resp.ID)) - - return resourceArmBackupProtectionContainerStorageAccountRead(d, meta) -} - -func resourceArmBackupProtectionContainerStorageAccountRead(d *schema.ResourceData, meta interface{}) error { - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - - resGroup := id.ResourceGroup - vaultName := id.Path["vaults"] - fabricName := id.Path["backupFabrics"] - containerName := id.Path["protectionContainers"] - - client := meta.(*clients.Client).RecoveryServices.BackupProtectionContainersClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - resp, err := client.Get(ctx, vaultName, resGroup, fabricName, containerName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - d.SetId("") - return nil - } - return fmt.Errorf("Error making Read request on backup protection container %s (Vault %s): %+v", containerName, vaultName, err) - } - - d.Set("resource_group_name", resGroup) - d.Set("recovery_vault_name", vaultName) - - if properties, ok := resp.Properties.AsAzureStorageContainer(); ok && properties != nil { - d.Set("storage_account_id", properties.SourceResourceID) - } - - return nil -} - -func resourceArmBackupProtectionContainerStorageAccountDelete(d *schema.ResourceData, meta interface{}) error { - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - - resGroup := id.ResourceGroup - vaultName := id.Path["vaults"] - fabricName := id.Path["backupFabrics"] - containerName := id.Path["protectionContainers"] - - client := meta.(*clients.Client).RecoveryServices.BackupProtectionContainersClient - opClient := meta.(*clients.Client).RecoveryServices.BackupOperationStatusesClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - resp, err := client.Unregister(ctx, vaultName, resGroup, fabricName, containerName) - if err != nil { - return fmt.Errorf("Error deregistering backup protection container %s (Vault %s): %+v", containerName, vaultName, err) - } - - locationURL, err := resp.Response.Location() - if err != nil || locationURL == nil { - return fmt.Errorf("Error unregistering backup protection container %s (Vault %s): Location header missing or empty", containerName, vaultName) - } - - opResourceID := azure.HandleAzureSdkForGoBug2824(locationURL.Path) - - parsedLocation, err := azure.ParseAzureResourceID(opResourceID) - if err != nil { - return err - } - operationID := parsedLocation.Path["backupOperationResults"] - - if _, err = resourceArmBackupProtectionContainerStorageAccountWaitForOperation(ctx, opClient, vaultName, resGroup, operationID, d); err != nil { - return err - } - - return nil -} - -// nolint unused - linter mistakenly things this function isn't used? 
-func resourceArmBackupProtectionContainerStorageAccountWaitForOperation(ctx context.Context, client *backup.OperationStatusesClient, vaultName, resourceGroup, operationID string, d *schema.ResourceData) (backup.OperationStatus, error) { - state := &resource.StateChangeConf{ - MinTimeout: 10 * time.Second, - Delay: 10 * time.Second, - Pending: []string{"InProgress"}, - Target: []string{"Succeeded"}, - Refresh: resourceArmBackupProtectionContainerStorageAccountCheckOperation(ctx, client, vaultName, resourceGroup, operationID), - ContinuousTargetOccurence: 5, // Without this buffer, file share backups and storage account deletions may fail if performed immediately after creating/destroying the container - } - - if d.IsNewResource() { - state.Timeout = d.Timeout(schema.TimeoutCreate) - } else { - state.Timeout = d.Timeout(schema.TimeoutUpdate) - } - - log.Printf("[DEBUG] Waiting for backup container operation %q (Vault %q) to complete", operationID, vaultName) - resp, err := state.WaitForState() - if err != nil { - return resp.(backup.OperationStatus), err - } - return resp.(backup.OperationStatus), nil -} - -func resourceArmBackupProtectionContainerStorageAccountCheckOperation(ctx context.Context, client *backup.OperationStatusesClient, vaultName, resourceGroup, operationID string) resource.StateRefreshFunc { - return func() (interface{}, string, error) { - resp, err := client.Get(ctx, vaultName, resourceGroup, operationID) - if err != nil { - return resp, "Error", fmt.Errorf("Error making Read request on Recovery Service Protection Container operation %q (Vault %q in Resource Group %q): %+v", operationID, vaultName, resourceGroup, err) - } - - if opErr := resp.Error; opErr != nil { - errMsg := "No upstream error message" - if opErr.Message != nil { - errMsg = *opErr.Message - } - err = fmt.Errorf("Recovery Service Protection Container operation status failed with status %q (Vault %q Resource Group %q Operation ID %q): %+v", resp.Status, vaultName, resourceGroup, operationID, errMsg) - } - - return resp, string(resp.Status), err - } -} diff --git a/azurerm/internal/services/recoveryservices/resource_arm_backup_policy_file_share.go b/azurerm/internal/services/recoveryservices/resource_arm_backup_policy_file_share.go deleted file mode 100644 index b0ce697c7926..000000000000 --- a/azurerm/internal/services/recoveryservices/resource_arm_backup_policy_file_share.go +++ /dev/null @@ -1,370 +0,0 @@ -package recoveryservices - -import ( - "context" - "fmt" - "log" - "regexp" - "strings" - "time" - - "github.com/Azure/azure-sdk-for-go/services/recoveryservices/mgmt/2019-05-13/backup" - "github.com/Azure/go-autorest/autorest/date" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmBackupProtectionPolicyFileShare() *schema.Resource { - return &schema.Resource{ - Create: resourceArmBackupProtectionPolicyFileShareCreateUpdate, - Read: 
resourceArmBackupProtectionPolicyFileShareRead, - Update: resourceArmBackupProtectionPolicyFileShareCreateUpdate, - Delete: resourceArmBackupProtectionPolicyFileShareDelete, - - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringMatch( - regexp.MustCompile("^[a-zA-Z][-_!a-zA-Z0-9]{2,149}$"), - "Backup Policy name must be 3 - 150 characters long, start with a letter, contain only letters and numbers.", - ), - }, - - "resource_group_name": azure.SchemaResourceGroupName(), - - "recovery_vault_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateRecoveryServicesVaultName, - }, - - "timezone": { - Type: schema.TypeString, - Optional: true, - Default: "UTC", - }, - - "backup": { - Type: schema.TypeList, - MaxItems: 1, - Required: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - - "frequency": { - Type: schema.TypeString, - Required: true, - DiffSuppressFunc: suppress.CaseDifference, - ValidateFunc: validation.StringInSlice([]string{ - string(backup.ScheduleRunTypeDaily), - }, false), - }, - - "time": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringMatch( - regexp.MustCompile("^([01][0-9]|[2][0-3]):([03][0])$"), // time must be on the hour or half past - "Time of day must match the format HH:mm where HH is 00-23 and mm is 00 or 30", - ), - }, - }, - }, - }, - - "retention_daily": { - Type: schema.TypeList, - MaxItems: 1, - Required: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "count": { - Type: schema.TypeInt, - Required: true, - ValidateFunc: validation.IntBetween(1, 180), - }, - }, - }, - }, - }, - } -} - -func resourceArmBackupProtectionPolicyFileShareCreateUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).RecoveryServices.ProtectionPoliciesClient - ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - - policyName := d.Get("name").(string) - resourceGroup := d.Get("resource_group_name").(string) - vaultName := d.Get("recovery_vault_name").(string) - - log.Printf("[DEBUG] Creating/updating Recovery Service Protection Policy %s (resource group %q)", policyName, resourceGroup) - - // getting this ready now because its shared between *everything*, time is... 
complicated for this resource - timeOfDay := d.Get("backup.0.time").(string) - dateOfDay, err := time.Parse(time.RFC3339, fmt.Sprintf("2018-07-30T%s:00Z", timeOfDay)) - if err != nil { - return fmt.Errorf("Error generating time from %q for policy %q (Resource Group %q): %+v", timeOfDay, policyName, resourceGroup, err) - } - times := append(make([]date.Time, 0), date.Time{Time: dateOfDay}) - - if d.IsNewResource() { - existing, err2 := client.Get(ctx, vaultName, resourceGroup, policyName) - if err2 != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing Recovery Service Protection Policy %q (Resource Group %q): %+v", policyName, resourceGroup, err2) - } - } - - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_backup_policy_file_share", *existing.ID) - } - } - - policy := backup.ProtectionPolicyResource{ - Properties: &backup.AzureFileShareProtectionPolicy{ - TimeZone: utils.String(d.Get("timezone").(string)), - BackupManagementType: backup.BackupManagementTypeAzureStorage, - WorkLoadType: backup.WorkloadTypeAzureFileShare, - SchedulePolicy: expandArmBackupProtectionPolicyFileShareSchedule(d, times), - RetentionPolicy: &backup.LongTermRetentionPolicy{ // SimpleRetentionPolicy only has duration property ¯\_(ツ)_/¯ - RetentionPolicyType: backup.RetentionPolicyTypeLongTermRetentionPolicy, - DailySchedule: expandArmBackupProtectionPolicyFileShareRetentionDaily(d, times), - }, - }, - } - if _, err = client.CreateOrUpdate(ctx, vaultName, resourceGroup, policyName, policy); err != nil { - return fmt.Errorf("Error creating/updating Recovery Service Protection Policy %q (Resource Group %q): %+v", policyName, resourceGroup, err) - } - - resp, err := resourceArmBackupProtectionPolicyFileShareWaitForUpdate(ctx, client, vaultName, resourceGroup, policyName, d) - if err != nil { - return err - } - - id := strings.Replace(*resp.ID, "Subscriptions", "subscriptions", 1) - d.SetId(id) - - return resourceArmBackupProtectionPolicyFileShareRead(d, meta) -} - -func resourceArmBackupProtectionPolicyFileShareRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).RecoveryServices.ProtectionPoliciesClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - - policyName := id.Path["backupPolicies"] - vaultName := id.Path["vaults"] - resourceGroup := id.ResourceGroup - - log.Printf("[DEBUG] Reading Recovery Service Protection Policy %q (resource group %q)", policyName, resourceGroup) - - resp, err := client.Get(ctx, vaultName, resourceGroup, policyName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - d.SetId("") - return nil - } - - return fmt.Errorf("Error making Read request on Recovery Service Protection Policy %q (Resource Group %q): %+v", policyName, resourceGroup, err) - } - - d.Set("name", policyName) - d.Set("resource_group_name", resourceGroup) - d.Set("recovery_vault_name", vaultName) - - if properties, ok := resp.Properties.AsAzureFileShareProtectionPolicy(); ok && properties != nil { - d.Set("timezone", properties.TimeZone) - - if schedule, ok := properties.SchedulePolicy.AsSimpleSchedulePolicy(); ok && schedule != nil { - if err := d.Set("backup", flattenArmBackupProtectionPolicyFileShareSchedule(schedule)); err != nil { - return fmt.Errorf("Error setting `backup`: %+v", err) - } - } - - if retention, ok := 
properties.RetentionPolicy.AsLongTermRetentionPolicy(); ok && retention != nil { - if s := retention.DailySchedule; s != nil { - if err := d.Set("retention_daily", flattenArmBackupProtectionPolicyFileShareRetentionDaily(s)); err != nil { - return fmt.Errorf("Error setting `retention_daily`: %+v", err) - } - } else { - d.Set("retention_daily", nil) - } - } - } - - return nil -} - -func resourceArmBackupProtectionPolicyFileShareDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).RecoveryServices.ProtectionPoliciesClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - - policyName := id.Path["backupPolicies"] - resourceGroup := id.ResourceGroup - vaultName := id.Path["vaults"] - - log.Printf("[DEBUG] Deleting Recovery Service Protection Policy %q (resource group %q)", policyName, resourceGroup) - - resp, err := client.Delete(ctx, vaultName, resourceGroup, policyName) - if err != nil { - if !utils.ResponseWasNotFound(resp) { - return fmt.Errorf("Error issuing delete request for Recovery Service Protection Policy %q (Resource Group %q): %+v", policyName, resourceGroup, err) - } - } - - if _, err := resourceArmBackupProtectionPolicyFileShareWaitForDeletion(ctx, client, vaultName, resourceGroup, policyName, d); err != nil { - return err - } - - return nil -} - -func expandArmBackupProtectionPolicyFileShareSchedule(d *schema.ResourceData, times []date.Time) *backup.SimpleSchedulePolicy { - if bb, ok := d.Get("backup").([]interface{}); ok && len(bb) > 0 { - block := bb[0].(map[string]interface{}) - - schedule := backup.SimpleSchedulePolicy{ // LongTermSchedulePolicy has no properties - SchedulePolicyType: backup.SchedulePolicyTypeSimpleSchedulePolicy, - ScheduleRunTimes: ×, - } - - if v, ok := block["frequency"].(string); ok { - schedule.ScheduleRunFrequency = backup.ScheduleRunType(v) - } - - return &schedule - } - - return nil -} - -func expandArmBackupProtectionPolicyFileShareRetentionDaily(d *schema.ResourceData, times []date.Time) *backup.DailyRetentionSchedule { - if rb, ok := d.Get("retention_daily").([]interface{}); ok && len(rb) > 0 { - block := rb[0].(map[string]interface{}) - - return &backup.DailyRetentionSchedule{ - RetentionTimes: ×, - RetentionDuration: &backup.RetentionDuration{ - Count: utils.Int32(int32(block["count"].(int))), - DurationType: backup.RetentionDurationTypeDays, - }, - } - } - - return nil -} - -func flattenArmBackupProtectionPolicyFileShareSchedule(schedule *backup.SimpleSchedulePolicy) []interface{} { - block := map[string]interface{}{} - - block["frequency"] = string(schedule.ScheduleRunFrequency) - - if times := schedule.ScheduleRunTimes; times != nil && len(*times) > 0 { - block["time"] = (*times)[0].Format("15:04") - } - - return []interface{}{block} -} - -func flattenArmBackupProtectionPolicyFileShareRetentionDaily(daily *backup.DailyRetentionSchedule) []interface{} { - block := map[string]interface{}{} - - if duration := daily.RetentionDuration; duration != nil { - if v := duration.Count; v != nil { - block["count"] = *v - } - } - - return []interface{}{block} -} - -func resourceArmBackupProtectionPolicyFileShareWaitForUpdate(ctx context.Context, client *backup.ProtectionPoliciesClient, vaultName, resourceGroup, policyName string, d *schema.ResourceData) (backup.ProtectionPolicyResource, error) { - state := &resource.StateChangeConf{ - MinTimeout: 30 * time.Second, - Delay: 10 * time.Second, 
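
For reference, a minimal standalone sketch (not provider code) of how the `backup.0.time` value above is handled: the "HH:mm" string is parsed against a fixed reference date so it can be sent as an RFC3339 timestamp, then formatted back with the "15:04" layout when flattening. The sample value is made up.

package main

import (
	"fmt"
	"time"
)

func main() {
	timeOfDay := "22:30" // hypothetical value for `backup.0.time`

	// Same trick as the create/update path: pin the clock time to an arbitrary date.
	parsed, err := time.Parse(time.RFC3339, fmt.Sprintf("2018-07-30T%s:00Z", timeOfDay))
	if err != nil {
		panic(err)
	}

	fmt.Println(parsed.UTC())           // 2018-07-30 22:30:00 +0000 UTC
	fmt.Println(parsed.Format("15:04")) // 22:30 - what the flatten path writes back into state
}
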
- Pending: []string{"NotFound"}, - Target: []string{"Found"}, - Refresh: resourceArmBackupProtectionPolicyFileShareRefreshFunc(ctx, client, vaultName, resourceGroup, policyName), - } - - if d.IsNewResource() { - state.Timeout = d.Timeout(schema.TimeoutCreate) - } else { - state.Timeout = d.Timeout(schema.TimeoutUpdate) - } - - resp, err := state.WaitForState() - if err != nil { - return resp.(backup.ProtectionPolicyResource), fmt.Errorf("Error waiting for the Recovery Service Protection Policy %q to update (Resource Group %q): %+v", policyName, resourceGroup, err) - } - - return resp.(backup.ProtectionPolicyResource), nil -} - -func resourceArmBackupProtectionPolicyFileShareWaitForDeletion(ctx context.Context, client *backup.ProtectionPoliciesClient, vaultName, resourceGroup, policyName string, d *schema.ResourceData) (backup.ProtectionPolicyResource, error) { - state := &resource.StateChangeConf{ - MinTimeout: 30 * time.Second, - Delay: 10 * time.Second, - Pending: []string{"Found"}, - Target: []string{"NotFound"}, - Refresh: resourceArmBackupProtectionPolicyFileShareRefreshFunc(ctx, client, vaultName, resourceGroup, policyName), - Timeout: d.Timeout(schema.TimeoutDelete), - } - - resp, err := state.WaitForState() - if err != nil { - return resp.(backup.ProtectionPolicyResource), fmt.Errorf("Error waiting for the Recovery Service Protection Policy %q to be missing (Resource Group %q): %+v", policyName, resourceGroup, err) - } - - return resp.(backup.ProtectionPolicyResource), nil -} - -func resourceArmBackupProtectionPolicyFileShareRefreshFunc(ctx context.Context, client *backup.ProtectionPoliciesClient, vaultName, resourceGroup, policyName string) resource.StateRefreshFunc { - return func() (interface{}, string, error) { - resp, err := client.Get(ctx, vaultName, resourceGroup, policyName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return resp, "NotFound", nil - } - - return resp, "Error", fmt.Errorf("Error making Read request on Recovery Service Protection Policy %q (Resource Group %q): %+v", policyName, resourceGroup, err) - } - - return resp, "Found", nil - } -} diff --git a/azurerm/internal/services/recoveryservices/resource_arm_backup_policy_vm.go b/azurerm/internal/services/recoveryservices/resource_arm_backup_policy_vm.go deleted file mode 100644 index c482b75a58bf..000000000000 --- a/azurerm/internal/services/recoveryservices/resource_arm_backup_policy_vm.go +++ /dev/null @@ -1,766 +0,0 @@ -package recoveryservices - -import ( - "context" - "fmt" - "log" - "regexp" - "strings" - "time" - - "github.com/Azure/azure-sdk-for-go/services/recoveryservices/mgmt/2019-05-13/backup" - "github.com/Azure/go-autorest/autorest/date" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/set" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - 
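
The wait helpers above all follow the same `resource.StateChangeConf` polling pattern: a refresh function reports a state string, and `WaitForState` blocks until that string reaches one of the `Target` values. A self-contained sketch of the pattern with a stubbed refresh function, assuming the vendored terraform-plugin-sdk is available; the timings and states are illustrative only.

package main

import (
	"fmt"
	"time"

	"github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)

func main() {
	attempts := 0

	state := &resource.StateChangeConf{
		MinTimeout: 1 * time.Second,
		Delay:      1 * time.Second,
		Timeout:    1 * time.Minute,
		Pending:    []string{"NotFound"},
		Target:     []string{"Found"},
		// Stand-in for the real refresh functions: pretend the policy
		// becomes visible on the third read.
		Refresh: func() (interface{}, string, error) {
			attempts++
			if attempts < 3 {
				return struct{}{}, "NotFound", nil
			}
			return struct{}{}, "Found", nil
		},
	}

	if _, err := state.WaitForState(); err != nil {
		fmt.Println("polling failed:", err)
		return
	}
	fmt.Printf("reached target state after %d refreshes\n", attempts)
}
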
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmBackupProtectionPolicyVM() *schema.Resource { - return &schema.Resource{ - Create: resourceArmBackupProtectionPolicyVMCreateUpdate, - Read: resourceArmBackupProtectionPolicyVMRead, - Update: resourceArmBackupProtectionPolicyVMCreateUpdate, - Delete: resourceArmBackupProtectionPolicyVMDelete, - - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringMatch( - regexp.MustCompile("^[a-zA-Z][-_!a-zA-Z0-9]{2,149}$"), - "Backup Policy name must be 3 - 150 characters long, start with a letter, contain only letters and numbers.", - ), - }, - - "resource_group_name": azure.SchemaResourceGroupName(), - - "recovery_vault_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateRecoveryServicesVaultName, - }, - - "timezone": { - Type: schema.TypeString, - Optional: true, - Default: "UTC", - }, - - "backup": { - Type: schema.TypeList, - MaxItems: 1, - Required: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - - "frequency": { - Type: schema.TypeString, - Required: true, - DiffSuppressFunc: suppress.CaseDifference, - ValidateFunc: validation.StringInSlice([]string{ - string(backup.ScheduleRunTypeDaily), - string(backup.ScheduleRunTypeWeekly), - }, true), - }, - - "time": { // applies to all backup schedules & retention times (they all must be the same) - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringMatch( - regexp.MustCompile("^([01][0-9]|[2][0-3]):([03][0])$"), // time must be on the hour or half past - "Time of day must match the format HH:mm where HH is 00-23 and mm is 00 or 30", - ), - }, - - "weekdays": { // only for weekly - Type: schema.TypeSet, - Optional: true, - Set: set.HashStringIgnoreCase, - Elem: &schema.Schema{ - Type: schema.TypeString, - DiffSuppressFunc: suppress.CaseDifference, - ValidateFunc: validation.IsDayOfTheWeek(true), - }, - }, - }, - }, - }, - - "retention_daily": { - Type: schema.TypeList, - MaxItems: 1, - Optional: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "count": { - Type: schema.TypeInt, - Required: true, - ValidateFunc: validation.IntBetween(1, 9999), // Azure no longer supports less than 7 daily backups. 
This should be updated in 3.0 provider - - }, - }, - }, - }, - - "retention_weekly": { - Type: schema.TypeList, - MaxItems: 1, - Optional: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "count": { - Type: schema.TypeInt, - Required: true, - ValidateFunc: validation.IntBetween(1, 9999), - }, - - "weekdays": { - Type: schema.TypeSet, - Required: true, - Set: set.HashStringIgnoreCase, - Elem: &schema.Schema{ - Type: schema.TypeString, - DiffSuppressFunc: suppress.CaseDifference, - ValidateFunc: validation.IsDayOfTheWeek(true), - }, - }, - }, - }, - }, - - "retention_monthly": { - Type: schema.TypeList, - MaxItems: 1, - Optional: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "count": { - Type: schema.TypeInt, - Required: true, - ValidateFunc: validation.IntBetween(1, 9999), - }, - - "weeks": { - Type: schema.TypeSet, - Required: true, - Set: set.HashStringIgnoreCase, - Elem: &schema.Schema{ - Type: schema.TypeString, - DiffSuppressFunc: suppress.CaseDifference, - ValidateFunc: validation.StringInSlice([]string{ - string(backup.WeekOfMonthFirst), - string(backup.WeekOfMonthSecond), - string(backup.WeekOfMonthThird), - string(backup.WeekOfMonthFourth), - string(backup.WeekOfMonthLast), - }, true), - }, - }, - - "weekdays": { - Type: schema.TypeSet, - Required: true, - Set: set.HashStringIgnoreCase, - Elem: &schema.Schema{ - Type: schema.TypeString, - DiffSuppressFunc: suppress.CaseDifference, - ValidateFunc: validation.IsDayOfTheWeek(true), - }, - }, - }, - }, - }, - - "retention_yearly": { - Type: schema.TypeList, - MaxItems: 1, - Optional: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "count": { - Type: schema.TypeInt, - Required: true, - ValidateFunc: validation.IntBetween(1, 9999), - }, - - "months": { - Type: schema.TypeSet, - Required: true, - Set: set.HashStringIgnoreCase, - Elem: &schema.Schema{ - Type: schema.TypeString, - DiffSuppressFunc: suppress.CaseDifference, - ValidateFunc: validation.IsMonth(true), - }, - }, - - "weeks": { - Type: schema.TypeSet, - Required: true, - Set: set.HashStringIgnoreCase, - Elem: &schema.Schema{ - Type: schema.TypeString, - DiffSuppressFunc: suppress.CaseDifference, - ValidateFunc: validation.StringInSlice([]string{ - string(backup.WeekOfMonthFirst), - string(backup.WeekOfMonthSecond), - string(backup.WeekOfMonthThird), - string(backup.WeekOfMonthFourth), - string(backup.WeekOfMonthLast), - }, true), - }, - }, - - "weekdays": { - Type: schema.TypeSet, - Required: true, - Set: set.HashStringIgnoreCase, - Elem: &schema.Schema{ - Type: schema.TypeString, - DiffSuppressFunc: suppress.CaseDifference, - ValidateFunc: validation.IsDayOfTheWeek(true), - }, - }, - }, - }, - }, - - "tags": tags.Schema(), - }, - - // if daily, we need daily retention - // if weekly daily cannot be set, and we need weekly - CustomizeDiff: func(diff *schema.ResourceDiff, v interface{}) error { - _, hasDaily := diff.GetOk("retention_daily") - _, hasWeekly := diff.GetOk("retention_weekly") - - frequencyI, _ := diff.GetOk("backup.0.frequency") - switch strings.ToLower(frequencyI.(string)) { - case "daily": - if !hasDaily { - return fmt.Errorf("`retention_daily` must be set when backup.0.frequency is daily") - } - - if _, ok := diff.GetOk("backup.0.weekdays"); ok { - return fmt.Errorf("`backup.0.weekdays` should be not set when backup.0.frequency is daily") - } - case "weekly": - if hasDaily { - return fmt.Errorf("`retention_daily` must be not set when backup.0.frequency is weekly") - } - if !hasWeekly { - return 
fmt.Errorf("`retention_weekly` must be set when backup.0.frequency is weekly") - } - default: - return fmt.Errorf("Unrecognized value for backup.0.frequency") - } - return nil - }, - } -} - -func resourceArmBackupProtectionPolicyVMCreateUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).RecoveryServices.ProtectionPoliciesClient - ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - - policyName := d.Get("name").(string) - resourceGroup := d.Get("resource_group_name").(string) - vaultName := d.Get("recovery_vault_name").(string) - t := d.Get("tags").(map[string]interface{}) - - log.Printf("[DEBUG] Creating/updating Azure Backup Protection Policy %s (resource group %q)", policyName, resourceGroup) - - // getting this ready now because its shared between *everything*, time is... complicated for this resource - timeOfDay := d.Get("backup.0.time").(string) - dateOfDay, err := time.Parse(time.RFC3339, fmt.Sprintf("2018-07-30T%s:00Z", timeOfDay)) - if err != nil { - return fmt.Errorf("Error generating time from %q for policy %q (Resource Group %q): %+v", timeOfDay, policyName, resourceGroup, err) - } - times := append(make([]date.Time, 0), date.Time{Time: dateOfDay}) - - if d.IsNewResource() { - existing, err2 := client.Get(ctx, vaultName, resourceGroup, policyName) - if err2 != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing Azure Backup Protection Policy %q (Resource Group %q): %+v", policyName, resourceGroup, err2) - } - } - - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_backup_policy_vm", *existing.ID) - } - } - - // Less than 7 daily backups is no longer supported for create/update - if (d.IsNewResource() || d.HasChange("retention_daily.0.count")) && (d.Get("retention_daily.0.count").(int) > 1 && d.Get("retention_daily.0.count").(int) < 7) { - return fmt.Errorf("The Azure API has recently changed behaviour so that provisioning a `count` for the `retention_daily` field can no longer be less than 7 days for new/updates to existing Backup Policies. 
Please ensure that `count` is less than 7, currently %d", d.Get("retention_daily.0.count").(int)) - } - - policy := backup.ProtectionPolicyResource{ - Tags: tags.Expand(t), - Properties: &backup.AzureIaaSVMProtectionPolicy{ - TimeZone: utils.String(d.Get("timezone").(string)), - BackupManagementType: backup.BackupManagementTypeAzureIaasVM, - SchedulePolicy: expandArmBackupProtectionPolicyVMSchedule(d, times), - RetentionPolicy: &backup.LongTermRetentionPolicy{ // SimpleRetentionPolicy only has duration property ¯\_(ツ)_/¯ - RetentionPolicyType: backup.RetentionPolicyTypeLongTermRetentionPolicy, - DailySchedule: expandArmBackupProtectionPolicyVMRetentionDaily(d, times), - WeeklySchedule: expandArmBackupProtectionPolicyVMRetentionWeekly(d, times), - MonthlySchedule: expandArmBackupProtectionPolicyVMRetentionMonthly(d, times), - YearlySchedule: expandArmBackupProtectionPolicyVMRetentionYearly(d, times), - }, - }, - } - if _, err = client.CreateOrUpdate(ctx, vaultName, resourceGroup, policyName, policy); err != nil { - return fmt.Errorf("Error creating/updating Azure Backup Protection Policy %q (Resource Group %q): %+v", policyName, resourceGroup, err) - } - - resp, err := resourceArmBackupProtectionPolicyVMWaitForUpdate(ctx, client, vaultName, resourceGroup, policyName, d) - if err != nil { - return err - } - - id := strings.Replace(*resp.ID, "Subscriptions", "subscriptions", 1) - d.SetId(id) - - return resourceArmBackupProtectionPolicyVMRead(d, meta) -} - -func resourceArmBackupProtectionPolicyVMRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).RecoveryServices.ProtectionPoliciesClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - - policyName := id.Path["backupPolicies"] - vaultName := id.Path["vaults"] - resourceGroup := id.ResourceGroup - - log.Printf("[DEBUG] Reading Azure Backup Protection Policy %q (resource group %q)", policyName, resourceGroup) - - resp, err := client.Get(ctx, vaultName, resourceGroup, policyName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - d.SetId("") - return nil - } - - return fmt.Errorf("Error making Read request on Azure Backup Protection Policy %q (Resource Group %q): %+v", policyName, resourceGroup, err) - } - - d.Set("name", policyName) - d.Set("resource_group_name", resourceGroup) - d.Set("recovery_vault_name", vaultName) - - if properties, ok := resp.Properties.AsAzureIaaSVMProtectionPolicy(); ok && properties != nil { - d.Set("timezone", properties.TimeZone) - - if schedule, ok := properties.SchedulePolicy.AsSimpleSchedulePolicy(); ok && schedule != nil { - if err := d.Set("backup", flattenArmBackupProtectionPolicyVMSchedule(schedule)); err != nil { - return fmt.Errorf("Error setting `backup`: %+v", err) - } - } - - if retention, ok := properties.RetentionPolicy.AsLongTermRetentionPolicy(); ok && retention != nil { - if s := retention.DailySchedule; s != nil { - if err := d.Set("retention_daily", flattenArmBackupProtectionPolicyVMRetentionDaily(s)); err != nil { - return fmt.Errorf("Error setting `retention_daily`: %+v", err) - } - } else { - d.Set("retention_daily", nil) - } - - if s := retention.WeeklySchedule; s != nil { - if err := d.Set("retention_weekly", flattenArmBackupProtectionPolicyVMRetentionWeekly(s)); err != nil { - return fmt.Errorf("Error setting `retention_weekly`: %+v", err) - } - } else { - d.Set("retention_weekly", nil) - } - - if s := 
retention.MonthlySchedule; s != nil { - if err := d.Set("retention_monthly", flattenArmBackupProtectionPolicyVMRetentionMonthly(s)); err != nil { - return fmt.Errorf("Error setting `retention_monthly`: %+v", err) - } - } else { - d.Set("retention_monthly", nil) - } - - if s := retention.YearlySchedule; s != nil { - if err := d.Set("retention_yearly", flattenArmBackupProtectionPolicyVMRetentionYearly(s)); err != nil { - return fmt.Errorf("Error setting `retention_yearly`: %+v", err) - } - } else { - d.Set("retention_yearly", nil) - } - } - } - - return tags.FlattenAndSet(d, resp.Tags) -} - -func resourceArmBackupProtectionPolicyVMDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).RecoveryServices.ProtectionPoliciesClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - - policyName := id.Path["backupPolicies"] - resourceGroup := id.ResourceGroup - vaultName := id.Path["vaults"] - - log.Printf("[DEBUG] Deleting Azure Backup Protected Item %q (resource group %q)", policyName, resourceGroup) - - resp, err := client.Delete(ctx, vaultName, resourceGroup, policyName) - if err != nil { - if !utils.ResponseWasNotFound(resp) { - return fmt.Errorf("Error issuing delete request for Azure Backup Protection Policy %q (Resource Group %q): %+v", policyName, resourceGroup, err) - } - } - - if _, err := resourceArmBackupProtectionPolicyVMWaitForDeletion(ctx, client, vaultName, resourceGroup, policyName, d); err != nil { - return err - } - - return nil -} - -func expandArmBackupProtectionPolicyVMSchedule(d *schema.ResourceData, times []date.Time) *backup.SimpleSchedulePolicy { - if bb, ok := d.Get("backup").([]interface{}); ok && len(bb) > 0 { - block := bb[0].(map[string]interface{}) - - schedule := backup.SimpleSchedulePolicy{ // LongTermSchedulePolicy has no properties - SchedulePolicyType: backup.SchedulePolicyTypeSimpleSchedulePolicy, - ScheduleRunTimes: ×, - } - - if v, ok := block["frequency"].(string); ok { - schedule.ScheduleRunFrequency = backup.ScheduleRunType(v) - } - - if v, ok := block["weekdays"].(*schema.Set); ok { - days := make([]backup.DayOfWeek, 0) - for _, day := range v.List() { - days = append(days, backup.DayOfWeek(day.(string))) - } - schedule.ScheduleRunDays = &days - } - - return &schedule - } - - return nil -} - -func expandArmBackupProtectionPolicyVMRetentionDaily(d *schema.ResourceData, times []date.Time) *backup.DailyRetentionSchedule { - if rb, ok := d.Get("retention_daily").([]interface{}); ok && len(rb) > 0 { - block := rb[0].(map[string]interface{}) - - return &backup.DailyRetentionSchedule{ - RetentionTimes: ×, - RetentionDuration: &backup.RetentionDuration{ - Count: utils.Int32(int32(block["count"].(int))), - DurationType: backup.RetentionDurationTypeDays, - }, - } - } - - return nil -} - -func expandArmBackupProtectionPolicyVMRetentionWeekly(d *schema.ResourceData, times []date.Time) *backup.WeeklyRetentionSchedule { - if rb, ok := d.Get("retention_weekly").([]interface{}); ok && len(rb) > 0 { - block := rb[0].(map[string]interface{}) - - retention := backup.WeeklyRetentionSchedule{ - RetentionTimes: ×, - RetentionDuration: &backup.RetentionDuration{ - Count: utils.Int32(int32(block["count"].(int))), - DurationType: backup.RetentionDurationTypeWeeks, - }, - } - - if v, ok := block["weekdays"].(*schema.Set); ok { - days := make([]backup.DayOfWeek, 0) - for _, day := range v.List() { - days = append(days, 
backup.DayOfWeek(day.(string))) - } - retention.DaysOfTheWeek = &days - } - - return &retention - } - - return nil -} - -func expandArmBackupProtectionPolicyVMRetentionMonthly(d *schema.ResourceData, times []date.Time) *backup.MonthlyRetentionSchedule { - if rb, ok := d.Get("retention_monthly").([]interface{}); ok && len(rb) > 0 { - block := rb[0].(map[string]interface{}) - - retention := backup.MonthlyRetentionSchedule{ - RetentionScheduleFormatType: backup.RetentionScheduleFormatWeekly, // this is always weekly ¯\_(ツ)_/¯ - RetentionScheduleDaily: nil, // and this is always nil.. - RetentionScheduleWeekly: expandArmBackupProtectionPolicyVMRetentionWeeklyFormat(block), - RetentionTimes: ×, - RetentionDuration: &backup.RetentionDuration{ - Count: utils.Int32(int32(block["count"].(int))), - DurationType: backup.RetentionDurationTypeMonths, - }, - } - - return &retention - } - - return nil -} - -func expandArmBackupProtectionPolicyVMRetentionYearly(d *schema.ResourceData, times []date.Time) *backup.YearlyRetentionSchedule { - if rb, ok := d.Get("retention_yearly").([]interface{}); ok && len(rb) > 0 { - block := rb[0].(map[string]interface{}) - - retention := backup.YearlyRetentionSchedule{ - RetentionScheduleFormatType: backup.RetentionScheduleFormatWeekly, // this is always weekly ¯\_(ツ)_/¯ - RetentionScheduleDaily: nil, // and this is always nil.. - RetentionScheduleWeekly: expandArmBackupProtectionPolicyVMRetentionWeeklyFormat(block), - RetentionTimes: ×, - RetentionDuration: &backup.RetentionDuration{ - Count: utils.Int32(int32(block["count"].(int))), - DurationType: backup.RetentionDurationTypeYears, - }, - } - - if v, ok := block["months"].(*schema.Set); ok { - months := make([]backup.MonthOfYear, 0) - for _, month := range v.List() { - months = append(months, backup.MonthOfYear(month.(string))) - } - retention.MonthsOfYear = &months - } - - return &retention - } - - return nil -} - -func expandArmBackupProtectionPolicyVMRetentionWeeklyFormat(block map[string]interface{}) *backup.WeeklyRetentionFormat { - weekly := backup.WeeklyRetentionFormat{} - - if v, ok := block["weekdays"].(*schema.Set); ok { - days := make([]backup.DayOfWeek, 0) - for _, day := range v.List() { - days = append(days, backup.DayOfWeek(day.(string))) - } - weekly.DaysOfTheWeek = &days - } - - if v, ok := block["weeks"].(*schema.Set); ok { - weeks := make([]backup.WeekOfMonth, 0) - for _, week := range v.List() { - weeks = append(weeks, backup.WeekOfMonth(week.(string))) - } - weekly.WeeksOfTheMonth = &weeks - } - - return &weekly -} - -func flattenArmBackupProtectionPolicyVMSchedule(schedule *backup.SimpleSchedulePolicy) []interface{} { - block := map[string]interface{}{} - - block["frequency"] = string(schedule.ScheduleRunFrequency) - - if times := schedule.ScheduleRunTimes; times != nil && len(*times) > 0 { - block["time"] = (*times)[0].Format("15:04") - } - - if days := schedule.ScheduleRunDays; days != nil { - weekdays := make([]interface{}, 0) - for _, d := range *days { - weekdays = append(weekdays, string(d)) - } - block["weekdays"] = schema.NewSet(schema.HashString, weekdays) - } - - return []interface{}{block} -} - -func flattenArmBackupProtectionPolicyVMRetentionDaily(daily *backup.DailyRetentionSchedule) []interface{} { - block := map[string]interface{}{} - - if duration := daily.RetentionDuration; duration != nil { - if v := duration.Count; v != nil { - block["count"] = *v - } - } - - return []interface{}{block} -} - -func flattenArmBackupProtectionPolicyVMRetentionWeekly(weekly 
*backup.WeeklyRetentionSchedule) []interface{} { - block := map[string]interface{}{} - - if duration := weekly.RetentionDuration; duration != nil { - if v := duration.Count; v != nil { - block["count"] = *v - } - } - - if days := weekly.DaysOfTheWeek; days != nil { - weekdays := make([]interface{}, 0) - for _, d := range *days { - weekdays = append(weekdays, string(d)) - } - block["weekdays"] = schema.NewSet(schema.HashString, weekdays) - } - - return []interface{}{block} -} - -func flattenArmBackupProtectionPolicyVMRetentionMonthly(monthly *backup.MonthlyRetentionSchedule) []interface{} { - block := map[string]interface{}{} - - if duration := monthly.RetentionDuration; duration != nil { - if v := duration.Count; v != nil { - block["count"] = *v - } - } - - if weekly := monthly.RetentionScheduleWeekly; weekly != nil { - block["weekdays"], block["weeks"] = flattenArmBackupProtectionPolicyVMRetentionWeeklyFormat(weekly) - } - - return []interface{}{block} -} - -func flattenArmBackupProtectionPolicyVMRetentionYearly(yearly *backup.YearlyRetentionSchedule) []interface{} { - block := map[string]interface{}{} - - if duration := yearly.RetentionDuration; duration != nil { - if v := duration.Count; v != nil { - block["count"] = *v - } - } - - if weekly := yearly.RetentionScheduleWeekly; weekly != nil { - block["weekdays"], block["weeks"] = flattenArmBackupProtectionPolicyVMRetentionWeeklyFormat(weekly) - } - - if months := yearly.MonthsOfYear; months != nil { - slice := make([]interface{}, 0) - for _, d := range *months { - slice = append(slice, string(d)) - } - block["months"] = schema.NewSet(schema.HashString, slice) - } - - return []interface{}{block} -} - -func flattenArmBackupProtectionPolicyVMRetentionWeeklyFormat(retention *backup.WeeklyRetentionFormat) (weekdays, weeks *schema.Set) { - if days := retention.DaysOfTheWeek; days != nil { - slice := make([]interface{}, 0) - for _, d := range *days { - slice = append(slice, string(d)) - } - weekdays = schema.NewSet(schema.HashString, slice) - } - - if days := retention.WeeksOfTheMonth; days != nil { - slice := make([]interface{}, 0) - for _, d := range *days { - slice = append(slice, string(d)) - } - weeks = schema.NewSet(schema.HashString, slice) - } - - return weekdays, weeks -} - -func resourceArmBackupProtectionPolicyVMWaitForUpdate(ctx context.Context, client *backup.ProtectionPoliciesClient, vaultName, resourceGroup, policyName string, d *schema.ResourceData) (backup.ProtectionPolicyResource, error) { - state := &resource.StateChangeConf{ - MinTimeout: 30 * time.Second, - Delay: 10 * time.Second, - Pending: []string{"NotFound"}, - Target: []string{"Found"}, - Refresh: resourceArmBackupProtectionPolicyVMRefreshFunc(ctx, client, vaultName, resourceGroup, policyName), - } - - if d.IsNewResource() { - state.Timeout = d.Timeout(schema.TimeoutCreate) - } else { - state.Timeout = d.Timeout(schema.TimeoutUpdate) - } - - resp, err := state.WaitForState() - if err != nil { - return resp.(backup.ProtectionPolicyResource), fmt.Errorf("Error waiting for the Azure Backup Protection Policy %q to be true (Resource Group %q) to provision: %+v", policyName, resourceGroup, err) - } - - return resp.(backup.ProtectionPolicyResource), nil -} - -func resourceArmBackupProtectionPolicyVMWaitForDeletion(ctx context.Context, client *backup.ProtectionPoliciesClient, vaultName, resourceGroup, policyName string, d *schema.ResourceData) (backup.ProtectionPolicyResource, error) { - state := &resource.StateChangeConf{ - MinTimeout: 30 * time.Second, - Delay: 10 * 
time.Second, - Pending: []string{"Found"}, - Target: []string{"NotFound"}, - Refresh: resourceArmBackupProtectionPolicyVMRefreshFunc(ctx, client, vaultName, resourceGroup, policyName), - Timeout: d.Timeout(schema.TimeoutDelete), - } - - resp, err := state.WaitForState() - if err != nil { - return resp.(backup.ProtectionPolicyResource), fmt.Errorf("Error waiting for the Azure Backup Protection Policy %q to be false (Resource Group %q) to provision: %+v", policyName, resourceGroup, err) - } - - return resp.(backup.ProtectionPolicyResource), nil -} - -func resourceArmBackupProtectionPolicyVMRefreshFunc(ctx context.Context, client *backup.ProtectionPoliciesClient, vaultName, resourceGroup, policyName string) resource.StateRefreshFunc { - return func() (interface{}, string, error) { - resp, err := client.Get(ctx, vaultName, resourceGroup, policyName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return resp, "NotFound", nil - } - - return resp, "Error", fmt.Errorf("Error making Read request on Azure Backup Protection Policy %q (Resource Group %q): %+v", policyName, resourceGroup, err) - } - - return resp, "Found", nil - } -} diff --git a/azurerm/internal/services/recoveryservices/resource_arm_backup_protected_file_share.go b/azurerm/internal/services/recoveryservices/resource_arm_backup_protected_file_share.go deleted file mode 100644 index 4ce7ae3834c8..000000000000 --- a/azurerm/internal/services/recoveryservices/resource_arm_backup_protected_file_share.go +++ /dev/null @@ -1,290 +0,0 @@ -package recoveryservices - -import ( - "context" - "fmt" - "log" - "strings" - "time" - - "github.com/Azure/azure-sdk-for-go/services/recoveryservices/mgmt/2019-05-13/backup" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/storage" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmBackupProtectedFileShare() *schema.Resource { - return &schema.Resource{ - Create: resourceArmBackupProtectedFileShareCreateUpdate, - Read: resourceArmBackupProtectedFileShareRead, - Update: resourceArmBackupProtectedFileShareCreateUpdate, - Delete: resourceArmBackupProtectedFileShareDelete, - - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(80 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(80 * time.Minute), - Delete: schema.DefaultTimeout(80 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - - "resource_group_name": azure.SchemaResourceGroupName(), - - "recovery_vault_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateRecoveryServicesVaultName, - }, - - "source_storage_account_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateResourceID, - }, - - "source_file_share_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: storage.ValidateArmStorageShareName, - }, - - 
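
The `azurerm_backup_policy_vm` CustomizeDiff shown earlier enforces a small decision table between `backup.0.frequency` and the retention blocks. A standalone restatement of those rules (not provider code), with two hypothetical calls showing how they play out:

package main

import (
	"fmt"
	"strings"
)

func validatePolicy(frequency string, hasWeekdays, hasRetentionDaily, hasRetentionWeekly bool) error {
	switch strings.ToLower(frequency) {
	case "daily":
		if !hasRetentionDaily {
			return fmt.Errorf("`retention_daily` must be set when backup.0.frequency is daily")
		}
		if hasWeekdays {
			return fmt.Errorf("`backup.0.weekdays` should not be set when backup.0.frequency is daily")
		}
	case "weekly":
		if hasRetentionDaily {
			return fmt.Errorf("`retention_daily` must not be set when backup.0.frequency is weekly")
		}
		if !hasRetentionWeekly {
			return fmt.Errorf("`retention_weekly` must be set when backup.0.frequency is weekly")
		}
	default:
		return fmt.Errorf("unrecognised value %q for backup.0.frequency", frequency)
	}
	return nil
}

func main() {
	fmt.Println(validatePolicy("Daily", false, true, false)) // <nil>
	fmt.Println(validatePolicy("Weekly", true, true, false)) // error: retention_daily must not be set when frequency is weekly
}
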
"backup_policy_id": { - Type: schema.TypeString, - Required: true, - ValidateFunc: azure.ValidateResourceID, - }, - }, - } -} - -func resourceArmBackupProtectedFileShareCreateUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).RecoveryServices.ProtectedItemsClient - opClient := meta.(*clients.Client).RecoveryServices.BackupOperationStatusesClient - ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - - resourceGroup := d.Get("resource_group_name").(string) - - vaultName := d.Get("recovery_vault_name").(string) - storageAccountID := d.Get("source_storage_account_id").(string) - fileShareName := d.Get("source_file_share_name").(string) - policyID := d.Get("backup_policy_id").(string) - - // get storage account name from id - parsedStorageAccountID, err := azure.ParseAzureResourceID(storageAccountID) - if err != nil { - return fmt.Errorf("[ERROR] Unable to parse source_storage_account_id '%s': %+v", storageAccountID, err) - } - accountName, hasName := parsedStorageAccountID.Path["storageAccounts"] - if !hasName { - return fmt.Errorf("[ERROR] parsed source_storage_account_id '%s' doesn't contain 'storageAccounts'", storageAccountID) - } - - protectedItemName := fmt.Sprintf("AzureFileShare;%s", fileShareName) - containerName := fmt.Sprintf("StorageContainer;storage;%s;%s", parsedStorageAccountID.ResourceGroup, accountName) - - log.Printf("[DEBUG] Creating/updating Recovery Service Protected File Share %q (Container Name %q)", protectedItemName, containerName) - - if d.IsNewResource() { - existing, err2 := client.Get(ctx, vaultName, resourceGroup, "Azure", containerName, protectedItemName, "") - if err2 != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing Recovery Service Protected File Share %q (Resource Group %q): %+v", protectedItemName, resourceGroup, err2) - } - } - - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_backup_protected_file_share", *existing.ID) - } - } - - item := backup.ProtectedItemResource{ - Properties: &backup.AzureFileshareProtectedItem{ - PolicyID: &policyID, - ProtectedItemType: backup.ProtectedItemTypeAzureFileShareProtectedItem, - WorkloadType: backup.DataSourceTypeAzureFileShare, - SourceResourceID: utils.String(storageAccountID), - FriendlyName: utils.String(fileShareName), - }, - } - - resp, err := client.CreateOrUpdate(ctx, vaultName, resourceGroup, "Azure", containerName, protectedItemName, item) - if err != nil { - return fmt.Errorf("Error creating/updating Recovery Service Protected File Share %q (Resource Group %q): %+v", protectedItemName, resourceGroup, err) - } - - locationURL, err := resp.Response.Location() - if err != nil || locationURL == nil { - return fmt.Errorf("Error creating/updating Azure File Share backup item %q (Vault %q): Location header missing or empty", containerName, vaultName) - } - - opResourceID := azure.HandleAzureSdkForGoBug2824(locationURL.Path) - - parsedLocation, err := azure.ParseAzureResourceID(opResourceID) - if err != nil { - return err - } - operationID := parsedLocation.Path["operationResults"] - - if _, err := resourceArmBackupProtectedFileShareWaitForOperation(ctx, opClient, vaultName, resourceGroup, operationID, d); err != nil { - return err - } - - resp, err = client.Get(ctx, vaultName, resourceGroup, "Azure", containerName, protectedItemName, "") - - if err != nil { - return fmt.Errorf("Error creating/udpating Azure File Share 
backup item %q (Vault %q): %+v", protectedItemName, vaultName, err) - } - - id := strings.Replace(*resp.ID, "Subscriptions", "subscriptions", 1) // This code is a workaround for this bug https://github.com/Azure/azure-sdk-for-go/issues/2824 - d.SetId(id) - - return resourceArmBackupProtectedFileShareRead(d, meta) -} - -func resourceArmBackupProtectedFileShareRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).RecoveryServices.ProtectedItemsClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - - protectedItemName := id.Path["protectedItems"] - vaultName := id.Path["vaults"] - resourceGroup := id.ResourceGroup - containerName := id.Path["protectionContainers"] - - log.Printf("[DEBUG] Reading Recovery Service Protected File Share %q (resource group %q)", protectedItemName, resourceGroup) - - resp, err := client.Get(ctx, vaultName, resourceGroup, "Azure", containerName, protectedItemName, "") - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - d.SetId("") - return nil - } - - return fmt.Errorf("Error making Read request on Recovery Service Protected File Share %q (Vault %q Resource Group %q): %+v", protectedItemName, vaultName, resourceGroup, err) - } - - d.Set("resource_group_name", resourceGroup) - d.Set("recovery_vault_name", vaultName) - - if properties := resp.Properties; properties != nil { - if item, ok := properties.AsAzureFileshareProtectedItem(); ok { - sourceResourceID := strings.Replace(*item.SourceResourceID, "Microsoft.storage", "Microsoft.Storage", 1) // The SDK is returning inconsistent capitalization - d.Set("source_storage_account_id", sourceResourceID) - d.Set("source_file_share_name", item.FriendlyName) - - if v := item.PolicyID; v != nil { - d.Set("backup_policy_id", strings.Replace(*v, "Subscriptions", "subscriptions", 1)) - } - } - } - - return nil -} - -func resourceArmBackupProtectedFileShareDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).RecoveryServices.ProtectedItemsClient - opClient := meta.(*clients.Client).RecoveryServices.BackupOperationStatusesClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - - protectedItemName := id.Path["protectedItems"] - resourceGroup := id.ResourceGroup - vaultName := id.Path["vaults"] - containerName := id.Path["protectionContainers"] - - log.Printf("[DEBUG] Deleting Recovery Service Protected Item %q (resource group %q)", protectedItemName, resourceGroup) - - resp, err := client.Delete(ctx, vaultName, resourceGroup, "Azure", containerName, protectedItemName) - if err != nil { - if !utils.ResponseWasNotFound(resp) { - return fmt.Errorf("Error issuing delete request for Recovery Service Protected File Share %q (Resource Group %q): %+v", protectedItemName, resourceGroup, err) - } - } - - locationURL, err := resp.Response.Location() - if err != nil || locationURL == nil { - return fmt.Errorf("Error deleting Azure File Share backups item %s (Vault %s): Location header missing or empty", containerName, vaultName) - } - - opResourceID := azure.HandleAzureSdkForGoBug2824(locationURL.Path) - - parsedLocation, err := azure.ParseAzureResourceID(opResourceID) - if err != nil { - return err - } - operationID := parsedLocation.Path["backupOperationResults"] // This is different for create and 
delete requests ¯\_(ツ)_/¯ - - if _, err := resourceArmBackupProtectedFileShareWaitForOperation(ctx, opClient, vaultName, resourceGroup, operationID, d); err != nil { - return err - } - - return nil -} - -// nolint unused - linter mistakenly things this function isn't used? -func resourceArmBackupProtectedFileShareWaitForOperation(ctx context.Context, client *backup.OperationStatusesClient, vaultName, resourceGroup, operationID string, d *schema.ResourceData) (backup.OperationStatus, error) { - state := &resource.StateChangeConf{ - MinTimeout: 10 * time.Second, - Delay: 10 * time.Second, - Pending: []string{"InProgress"}, - Target: []string{"Succeeded"}, - Refresh: resourceArmBackupProtectedFileShareCheckOperation(ctx, client, vaultName, resourceGroup, operationID), - } - - if d.IsNewResource() { - state.Timeout = d.Timeout(schema.TimeoutCreate) - } else { - state.Timeout = d.Timeout(schema.TimeoutUpdate) - } - - log.Printf("[DEBUG] Waiting for backup operation %s (Vault %s) to complete", operationID, vaultName) - resp, err := state.WaitForState() - if err != nil { - return resp.(backup.OperationStatus), err - } - return resp.(backup.OperationStatus), nil -} - -func resourceArmBackupProtectedFileShareCheckOperation(ctx context.Context, client *backup.OperationStatusesClient, vaultName, resourceGroup, operationID string) resource.StateRefreshFunc { - return func() (interface{}, string, error) { - resp, err := client.Get(ctx, vaultName, resourceGroup, operationID) - if err != nil { - return resp, "Error", fmt.Errorf("Error making Read request on Recovery Service Protection Container operation %q (Vault %q in Resource Group %q): %+v", operationID, vaultName, resourceGroup, err) - } - - if opErr := resp.Error; opErr != nil { - errMsg := "No upstream error message" - if opErr.Message != nil { - errMsg = *opErr.Message - } - err = fmt.Errorf("Azure Backup operation status failed with status %q (Vault %q Resource Group %q Operation ID %q): %+v", resp.Status, vaultName, resourceGroup, operationID, errMsg) - } - - log.Printf("[DEBUG] Backup operation %s status is %s", operationID, string(resp.Status)) - return resp, string(resp.Status), err - } -} diff --git a/azurerm/internal/services/recoveryservices/resource_arm_backup_protected_vm.go b/azurerm/internal/services/recoveryservices/resource_arm_backup_protected_vm.go deleted file mode 100644 index d2a7cb3ec3dc..000000000000 --- a/azurerm/internal/services/recoveryservices/resource_arm_backup_protected_vm.go +++ /dev/null @@ -1,281 +0,0 @@ -package recoveryservices - -import ( - "context" - "fmt" - "log" - "strings" - "time" - - "github.com/Azure/azure-sdk-for-go/services/recoveryservices/mgmt/2019-05-13/backup" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmRecoveryServicesBackupProtectedVM() *schema.Resource { - return &schema.Resource{ - Create: resourceArmRecoveryServicesBackupProtectedVMCreateUpdate, - Read: resourceArmRecoveryServicesBackupProtectedVMRead, - 
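
The Read and Delete functions above recover the vault, container, and protected item names through keyed lookups on the parsed resource ID. A rough, stdlib-only stand-in for `azure.ParseAzureResourceID` (not the real helper), run against a hypothetical protected file share ID of the shape these resources appear to use:

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Hypothetical ID for illustration only.
	id := "/subscriptions/00000000-0000-0000-0000-000000000000" +
		"/resourceGroups/example-rg/providers/Microsoft.RecoveryServices" +
		"/vaults/example-vault/backupFabrics/Azure" +
		"/protectionContainers/StorageContainer;storage;example-rg;examplesa" +
		"/protectedItems/AzureFileShare;example-share"

	// Pair the path segments into key/value lookups, roughly what the real
	// helper exposes via id.Path.
	path := map[string]string{}
	segments := strings.Split(strings.Trim(id, "/"), "/")
	for i := 0; i+1 < len(segments); i += 2 {
		path[segments[i]] = segments[i+1]
	}

	fmt.Println(path["vaults"])               // example-vault
	fmt.Println(path["protectionContainers"]) // StorageContainer;storage;example-rg;examplesa
	fmt.Println(path["protectedItems"])       // AzureFileShare;example-share
	fmt.Println(path["resourceGroups"])       // example-rg (the real helper exposes this as id.ResourceGroup)
}
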
Update: resourceArmRecoveryServicesBackupProtectedVMCreateUpdate, - Delete: resourceArmRecoveryServicesBackupProtectedVMDelete, - - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(80 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(80 * time.Minute), - Delete: schema.DefaultTimeout(80 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - - "resource_group_name": azure.SchemaResourceGroupName(), - - "recovery_vault_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateRecoveryServicesVaultName, - }, - - "source_vm_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateResourceID, - }, - - "backup_policy_id": { - Type: schema.TypeString, - Required: true, - ValidateFunc: azure.ValidateResourceID, - }, - - "tags": tags.Schema(), - }, - } -} - -func resourceArmRecoveryServicesBackupProtectedVMCreateUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).RecoveryServices.ProtectedItemsClient - ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - - resourceGroup := d.Get("resource_group_name").(string) - t := d.Get("tags").(map[string]interface{}) - - vaultName := d.Get("recovery_vault_name").(string) - vmId := d.Get("source_vm_id").(string) - policyId := d.Get("backup_policy_id").(string) - - // get VM name from id - parsedVmId, err := azure.ParseAzureResourceID(vmId) - if err != nil { - return fmt.Errorf("[ERROR] Unable to parse source_vm_id '%s': %+v", vmId, err) - } - vmName, hasName := parsedVmId.Path["virtualMachines"] - if !hasName { - return fmt.Errorf("[ERROR] parsed source_vm_id '%s' doesn't contain 'virtualMachines'", vmId) - } - - protectedItemName := fmt.Sprintf("VM;iaasvmcontainerv2;%s;%s", parsedVmId.ResourceGroup, vmName) - containerName := fmt.Sprintf("iaasvmcontainer;iaasvmcontainerv2;%s;%s", parsedVmId.ResourceGroup, vmName) - - log.Printf("[DEBUG] Creating/updating Azure Backup Protected VM %s (resource group %q)", protectedItemName, resourceGroup) - - if d.IsNewResource() { - existing, err2 := client.Get(ctx, vaultName, resourceGroup, "Azure", containerName, protectedItemName, "") - if err2 != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing Azure Backup Protected VM %q (Resource Group %q): %+v", protectedItemName, resourceGroup, err2) - } - } - - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_backup_protected_vm", *existing.ID) - } - } - - item := backup.ProtectedItemResource{ - Tags: tags.Expand(t), - Properties: &backup.AzureIaaSComputeVMProtectedItem{ - PolicyID: &policyId, - ProtectedItemType: backup.ProtectedItemTypeMicrosoftClassicComputevirtualMachines, - WorkloadType: backup.DataSourceTypeVM, - SourceResourceID: utils.String(vmId), - FriendlyName: utils.String(vmName), - VirtualMachineID: utils.String(vmId), - }, - } - - if _, err = client.CreateOrUpdate(ctx, vaultName, resourceGroup, "Azure", containerName, protectedItemName, item); err != nil { - return fmt.Errorf("Error creating/updating Azure Backup Protected VM %q (Resource Group %q): %+v", protectedItemName, resourceGroup, err) - } - - resp, err := resourceArmRecoveryServicesBackupProtectedVMWaitForStateCreateUpdate(ctx, client, vaultName, resourceGroup, containerName, 
protectedItemName, policyId, d) - if err != nil { - return err - } - - id := strings.Replace(*resp.ID, "Subscriptions", "subscriptions", 1) // This code is a workaround for this bug https://github.com/Azure/azure-sdk-for-go/issues/2824 - d.SetId(id) - - return resourceArmRecoveryServicesBackupProtectedVMRead(d, meta) -} - -func resourceArmRecoveryServicesBackupProtectedVMRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).RecoveryServices.ProtectedItemsClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - - protectedItemName := id.Path["protectedItems"] - vaultName := id.Path["vaults"] - resourceGroup := id.ResourceGroup - containerName := id.Path["protectionContainers"] - - log.Printf("[DEBUG] Reading Azure Backup Protected VM %q (resource group %q)", protectedItemName, resourceGroup) - - resp, err := client.Get(ctx, vaultName, resourceGroup, "Azure", containerName, protectedItemName, "") - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - d.SetId("") - return nil - } - - return fmt.Errorf("Error making Read request on Azure Backup Protected VM %q (Resource Group %q): %+v", protectedItemName, resourceGroup, err) - } - - d.Set("resource_group_name", resourceGroup) - d.Set("recovery_vault_name", vaultName) - - if properties := resp.Properties; properties != nil { - if vm, ok := properties.AsAzureIaaSComputeVMProtectedItem(); ok { - d.Set("source_vm_id", vm.SourceResourceID) - - if v := vm.PolicyID; v != nil { - d.Set("backup_policy_id", strings.Replace(*v, "Subscriptions", "subscriptions", 1)) - } - } - } - - return tags.FlattenAndSet(d, resp.Tags) -} - -func resourceArmRecoveryServicesBackupProtectedVMDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).RecoveryServices.ProtectedItemsClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - - protectedItemName := id.Path["protectedItems"] - resourceGroup := id.ResourceGroup - vaultName := id.Path["vaults"] - containerName := id.Path["protectionContainers"] - - log.Printf("[DEBUG] Deleting Azure Backup Protected Item %q (resource group %q)", protectedItemName, resourceGroup) - - resp, err := client.Delete(ctx, vaultName, resourceGroup, "Azure", containerName, protectedItemName) - if err != nil { - if !utils.ResponseWasNotFound(resp) { - return fmt.Errorf("Error issuing delete request for Azure Backup Protected VM %q (Resource Group %q): %+v", protectedItemName, resourceGroup, err) - } - } - - if _, err := resourceArmRecoveryServicesBackupProtectedVMWaitForDeletion(ctx, client, vaultName, resourceGroup, containerName, protectedItemName, "", d); err != nil { - return err - } - - return nil -} - -func resourceArmRecoveryServicesBackupProtectedVMWaitForStateCreateUpdate(ctx context.Context, client *backup.ProtectedItemsClient, vaultName, resourceGroup, containerName, protectedItemName string, policyId string, d *schema.ResourceData) (backup.ProtectedItemResource, error) { - state := &resource.StateChangeConf{ - MinTimeout: 30 * time.Second, - Delay: 10 * time.Second, - Pending: []string{"NotFound"}, - Target: []string{"Found"}, - Refresh: resourceArmRecoveryServicesBackupProtectedVMRefreshFunc(ctx, client, vaultName, resourceGroup, containerName, protectedItemName, policyId, true), - } - - if 
d.IsNewResource() { - state.Timeout = d.Timeout(schema.TimeoutCreate) - } else { - state.Timeout = d.Timeout(schema.TimeoutUpdate) - } - - resp, err := state.WaitForState() - if err != nil { - i, _ := resp.(backup.ProtectedItemResource) - return i, fmt.Errorf("Error waiting for the Azure Backup Protected VM %q to be true (Resource Group %q) to provision: %+v", protectedItemName, resourceGroup, err) - } - - return resp.(backup.ProtectedItemResource), nil -} - -func resourceArmRecoveryServicesBackupProtectedVMWaitForDeletion(ctx context.Context, client *backup.ProtectedItemsClient, vaultName, resourceGroup, containerName, protectedItemName string, policyId string, d *schema.ResourceData) (backup.ProtectedItemResource, error) { - state := &resource.StateChangeConf{ - MinTimeout: 30 * time.Second, - Delay: 10 * time.Second, - Pending: []string{"Found"}, - Target: []string{"NotFound"}, - Refresh: resourceArmRecoveryServicesBackupProtectedVMRefreshFunc(ctx, client, vaultName, resourceGroup, containerName, protectedItemName, policyId, false), - Timeout: d.Timeout(schema.TimeoutDelete), - } - - resp, err := state.WaitForState() - if err != nil { - i, _ := resp.(backup.ProtectedItemResource) - return i, fmt.Errorf("Error waiting for the Azure Backup Protected VM %q to be false (Resource Group %q) to provision: %+v", protectedItemName, resourceGroup, err) - } - - return resp.(backup.ProtectedItemResource), nil -} - -func resourceArmRecoveryServicesBackupProtectedVMRefreshFunc(ctx context.Context, client *backup.ProtectedItemsClient, vaultName, resourceGroup, containerName, protectedItemName string, policyId string, newResource bool) resource.StateRefreshFunc { - // TODO: split this into two functions - return func() (interface{}, string, error) { - resp, err := client.Get(ctx, vaultName, resourceGroup, "Azure", containerName, protectedItemName, "") - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return resp, "NotFound", nil - } - - return resp, "Error", fmt.Errorf("Error making Read request on Azure Backup Protected VM %q (Resource Group %q): %+v", protectedItemName, resourceGroup, err) - } else if !newResource && policyId != "" { - if properties := resp.Properties; properties != nil { - if vm, ok := properties.AsAzureIaaSComputeVMProtectedItem(); ok { - if v := vm.PolicyID; v != nil { - if strings.Replace(*v, "Subscriptions", "subscriptions", 1) != policyId { - return resp, "NotFound", nil - } - } else { - return resp, "Error", fmt.Errorf("Error reading policy ID attribute nil on Azure Backup Protected VM %q (Resource Group %q)", protectedItemName, resourceGroup) - } - } else { - return resp, "Error", fmt.Errorf("Error reading properties on Azure Backup Protected VM %q (Resource Group %q)", protectedItemName, resourceGroup) - } - } else { - return resp, "Error", fmt.Errorf("Error reading properties on empty Azure Backup Protected VM %q (Resource Group %q)", protectedItemName, resourceGroup) - } - } - return resp, "Found", nil - } -} diff --git a/azurerm/internal/services/recoveryservices/resource_arm_recovery_services_vault.go b/azurerm/internal/services/recoveryservices/resource_arm_recovery_services_vault.go deleted file mode 100644 index ea2cff873e88..000000000000 --- a/azurerm/internal/services/recoveryservices/resource_arm_recovery_services_vault.go +++ /dev/null @@ -1,237 +0,0 @@ -package recoveryservices - -import ( - "fmt" - "log" - "strings" - "time" - - "github.com/Azure/azure-sdk-for-go/services/recoveryservices/mgmt/2016-06-01/recoveryservices" - 
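
The `azurerm_backup_protected_vm` create/update path above builds the backup container and protected item names by joining fixed prefixes with the source VM's resource group and name. A tiny sketch with made-up values:

package main

import "fmt"

func main() {
	vmResourceGroup := "example-rg" // hypothetical, parsed from source_vm_id
	vmName := "example-vm"

	protectedItemName := fmt.Sprintf("VM;iaasvmcontainerv2;%s;%s", vmResourceGroup, vmName)
	containerName := fmt.Sprintf("iaasvmcontainer;iaasvmcontainerv2;%s;%s", vmResourceGroup, vmName)

	fmt.Println(protectedItemName) // VM;iaasvmcontainerv2;example-rg;example-vm
	fmt.Println(containerName)     // iaasvmcontainer;iaasvmcontainerv2;example-rg;example-vm
}
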
"github.com/Azure/azure-sdk-for-go/services/recoveryservices/mgmt/2019-05-13/backup" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmRecoveryServicesVault() *schema.Resource { - return &schema.Resource{ - Create: resourceArmRecoveryServicesVaultCreateUpdate, - Read: resourceArmRecoveryServicesVaultRead, - Update: resourceArmRecoveryServicesVaultCreateUpdate, - Delete: resourceArmRecoveryServicesVaultDelete, - - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateRecoveryServicesVaultName, - }, - - "location": azure.SchemaLocation(), - - "resource_group_name": azure.SchemaResourceGroupName(), - - "tags": tags.Schema(), - - "sku": { - Type: schema.TypeString, - Required: true, - DiffSuppressFunc: suppress.CaseDifference, - ValidateFunc: validation.StringInSlice([]string{ - string(recoveryservices.RS0), - string(recoveryservices.Standard), - }, true), - }, - - "soft_delete_enabled": { - Type: schema.TypeBool, - Optional: true, - Default: true, - }, - }, - } -} - -func resourceArmRecoveryServicesVaultCreateUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).RecoveryServices.VaultsClient - cfgsClient := meta.(*clients.Client).RecoveryServices.VaultsConfigsClient - ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := d.Get("name").(string) - location := d.Get("location").(string) - resourceGroup := d.Get("resource_group_name").(string) - t := d.Get("tags").(map[string]interface{}) - - log.Printf("[DEBUG] Creating/updating Recovery Service Vault %q (resource group %q)", name, resourceGroup) - - if d.IsNewResource() { - existing, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing Recovery Service Vault %q (Resource Group %q): %+v", name, resourceGroup, err) - } - } - - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_recovery_services_vault", *existing.ID) - } - } - - vault := recoveryservices.Vault{ - Location: utils.String(location), - Tags: tags.Expand(t), - Sku: &recoveryservices.Sku{ - Name: recoveryservices.SkuName(d.Get("sku").(string)), - }, - Properties: &recoveryservices.VaultProperties{}, - } - - vault, err := client.CreateOrUpdate(ctx, resourceGroup, name, vault) - if err != nil { - return 
fmt.Errorf("Error creating/updating Recovery Service Vault %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - cfg := backup.ResourceVaultConfigResource{ - Properties: &backup.ResourceVaultConfig{ - EnhancedSecurityState: backup.EnhancedSecurityStateEnabled, // always enabled - }, - } - - if sd := d.Get("soft_delete_enabled").(bool); sd { - cfg.Properties.SoftDeleteFeatureState = backup.SoftDeleteFeatureStateEnabled - } else { - cfg.Properties.SoftDeleteFeatureState = backup.SoftDeleteFeatureStateDisabled - } - - stateConf := &resource.StateChangeConf{ - Pending: []string{"syncing"}, - Target: []string{"success"}, - MinTimeout: 30 * time.Second, - Refresh: func() (interface{}, string, error) { - resp, err := cfgsClient.Update(ctx, name, resourceGroup, cfg) - if err != nil { - if strings.Contains(err.Error(), "ResourceNotYetSynced") { - return resp, "syncing", nil - } - return resp, "error", fmt.Errorf("Error updating Recovery Service Vault Cfg %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - return resp, "success", nil - }, - } - - if d.IsNewResource() { - stateConf.Timeout = d.Timeout(schema.TimeoutCreate) - } else { - stateConf.Timeout = d.Timeout(schema.TimeoutUpdate) - } - - if _, err := stateConf.WaitForState(); err != nil { - return fmt.Errorf("Error waiting for on update for Recovery Service Vault %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - read, err := client.Get(ctx, resourceGroup, name) - if err != nil { - return fmt.Errorf("Error issuing read request for Recovery Service Vault %q (Resource Group %q): %+v", name, resourceGroup, err) - } - if read.ID == nil { - return fmt.Errorf("Error Recovery Service Vault %q (Resource Group %q): read returned nil", name, resourceGroup) - } - - d.SetId(*vault.ID) - - return resourceArmRecoveryServicesVaultRead(d, meta) -} - -func resourceArmRecoveryServicesVaultRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).RecoveryServices.VaultsClient - cfgsClient := meta.(*clients.Client).RecoveryServices.VaultsConfigsClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - - name := id.Path["vaults"] - resourceGroup := id.ResourceGroup - - log.Printf("[DEBUG] Reading Recovery Service Vault %q (resource group %q)", name, resourceGroup) - - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - d.SetId("") - return nil - } - - return fmt.Errorf("Error making Read request on Recovery Service Vault %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - d.Set("name", resp.Name) - d.Set("resource_group_name", resourceGroup) - if location := resp.Location; location != nil { - d.Set("location", azure.NormalizeLocation(*location)) - } - - if sku := resp.Sku; sku != nil { - d.Set("sku", string(sku.Name)) - } - - cfg, err := cfgsClient.Get(ctx, name, resourceGroup) - if err != nil { - return fmt.Errorf("Error reading Recovery Service Vault Cfg %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - if props := cfg.Properties; props != nil { - d.Set("soft_delete_enabled", props.SoftDeleteFeatureState == backup.SoftDeleteFeatureStateEnabled) - } - - return tags.FlattenAndSet(d, resp.Tags) -} - -func resourceArmRecoveryServicesVaultDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).RecoveryServices.VaultsClient - ctx, cancel := 
timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - - name := id.Path["vaults"] - resourceGroup := id.ResourceGroup - - log.Printf("[DEBUG] Deleting Recovery Service Vault %q (resource group %q)", name, resourceGroup) - - resp, err := client.Delete(ctx, resourceGroup, name) - if err != nil { - if !utils.ResponseWasNotFound(resp) { - return fmt.Errorf("Error issuing delete request for Recovery Service Vault %q (Resource Group %q): %+v", name, resourceGroup, err) - } - } - - return nil -} diff --git a/azurerm/internal/services/recoveryservices/resource_arm_site_recovery_fabric.go b/azurerm/internal/services/recoveryservices/resource_arm_site_recovery_fabric.go deleted file mode 100644 index 00bae41b7f19..000000000000 --- a/azurerm/internal/services/recoveryservices/resource_arm_site_recovery_fabric.go +++ /dev/null @@ -1,162 +0,0 @@ -package recoveryservices - -import ( - "fmt" - "time" - - "github.com/Azure/azure-sdk-for-go/services/recoveryservices/mgmt/2018-01-10/siterecovery" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmSiteRecoveryFabric() *schema.Resource { - return &schema.Resource{ - Create: resourceArmSiteRecoveryFabricCreate, - Read: resourceArmSiteRecoveryFabricRead, - Update: nil, - Delete: resourceArmSiteRecoveryFabricDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - "resource_group_name": azure.SchemaResourceGroupName(), - - "recovery_vault_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateRecoveryServicesVaultName, - }, - "location": azure.SchemaLocation(), - }, - } -} - -func resourceArmSiteRecoveryFabricCreate(d *schema.ResourceData, meta interface{}) error { - resGroup := d.Get("resource_group_name").(string) - vaultName := d.Get("recovery_vault_name").(string) - location := azure.NormalizeLocation(d.Get("location").(string)) - name := d.Get("name").(string) - - client := meta.(*clients.Client).RecoveryServices.FabricClient(resGroup, vaultName) - ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) - defer cancel() - - if d.IsNewResource() { - existing, err := client.Get(ctx, name) - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing site recovery fabric %s (vault %s): %+v", name, vaultName, err) - } - } - - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_site_recovery_fabric", azure.HandleAzureSdkForGoBug2824(*existing.ID)) - } 
- } - - parameters := siterecovery.FabricCreationInput{ - Properties: &siterecovery.FabricCreationInputProperties{ - CustomDetails: siterecovery.AzureFabricCreationInput{ - InstanceType: "Azure", - Location: &location, - }, - }, - } - - future, err := client.Create(ctx, name, parameters) - if err != nil { - return fmt.Errorf("Error creating site recovery fabric %s (vault %s): %+v", name, vaultName, err) - } - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error creating site recovery fabric %s (vault %s): %+v", name, vaultName, err) - } - - resp, err := client.Get(ctx, name) - if err != nil { - return fmt.Errorf("Error retrieving site recovery fabric %s (vault %s): %+v", name, vaultName, err) - } - - d.SetId(azure.HandleAzureSdkForGoBug2824(*resp.ID)) - - return resourceArmSiteRecoveryFabricRead(d, meta) -} - -func resourceArmSiteRecoveryFabricRead(d *schema.ResourceData, meta interface{}) error { - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - - resGroup := id.ResourceGroup - vaultName := id.Path["vaults"] - name := id.Path["replicationFabrics"] - - client := meta.(*clients.Client).RecoveryServices.FabricClient(resGroup, vaultName) - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - resp, err := client.Get(ctx, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - d.SetId("") - return nil - } - return fmt.Errorf("Error making read request on site recovery fabric %s (vault %s): %+v", name, vaultName, err) - } - - d.Set("name", resp.Name) - d.Set("resource_group_name", resGroup) - if props := resp.Properties; props != nil { - if azureDetails, isAzureDetails := props.CustomDetails.AsAzureFabricSpecificDetails(); isAzureDetails { - d.Set("location", azureDetails.Location) - } - } - d.Set("recovery_vault_name", vaultName) - return nil -} - -func resourceArmSiteRecoveryFabricDelete(d *schema.ResourceData, meta interface{}) error { - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - - resGroup := id.ResourceGroup - vaultName := id.Path["vaults"] - name := id.Path["replicationFabrics"] - - client := meta.(*clients.Client).RecoveryServices.FabricClient(resGroup, vaultName) - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - future, err := client.Delete(ctx, name) - if err != nil { - return fmt.Errorf("Error deleting site recovery fabric %s (vault %s): %+v", name, vaultName, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for deletion of site recovery fabric %s (vault %s): %+v", name, vaultName, err) - } - - return nil -} diff --git a/azurerm/internal/services/recoveryservices/resource_arm_site_recovery_network_mapping.go b/azurerm/internal/services/recoveryservices/resource_arm_site_recovery_network_mapping.go deleted file mode 100644 index 3953ac583ab9..000000000000 --- a/azurerm/internal/services/recoveryservices/resource_arm_site_recovery_network_mapping.go +++ /dev/null @@ -1,218 +0,0 @@ -package recoveryservices - -import ( - "fmt" - "net/http" - "time" - - "github.com/Azure/azure-sdk-for-go/services/recoveryservices/mgmt/2018-01-10/siterecovery" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmSiteRecoveryNetworkMapping() *schema.Resource { - return &schema.Resource{ - Create: resourceArmSiteRecoveryNetworkMappingCreate, - Read: resourceArmSiteRecoveryNetworkMappingRead, - Delete: resourceArmSiteRecoveryNetworkMappingDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - "resource_group_name": azure.SchemaResourceGroupName(), - - "recovery_vault_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateRecoveryServicesVaultName, - }, - "source_recovery_fabric_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - "target_recovery_fabric_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - "source_network_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateResourceID, - DiffSuppressFunc: suppress.CaseDifference, - }, - "target_network_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateResourceID, - DiffSuppressFunc: suppress.CaseDifference, - }, - }, - } -} - -func resourceArmSiteRecoveryNetworkMappingCreate(d *schema.ResourceData, meta interface{}) error { - resGroup := d.Get("resource_group_name").(string) - vaultName := d.Get("recovery_vault_name").(string) - fabricName := d.Get("source_recovery_fabric_name").(string) - targetFabricName := d.Get("target_recovery_fabric_name").(string) - sourceNetworkId := d.Get("source_network_id").(string) - targetNetworkId := d.Get("target_network_id").(string) - name := d.Get("name").(string) - - client := meta.(*clients.Client).RecoveryServices.NetworkMappingClient(resGroup, vaultName) - ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) - defer cancel() - - // get network name from id - parsedSourceNetworkId, err := azure.ParseAzureResourceID(sourceNetworkId) - if err != nil { - return fmt.Errorf("[ERROR] Unable to parse source_network_id '%s' (network mapping %s): %+v", sourceNetworkId, name, err) - } - sourceNetworkName, hasName := parsedSourceNetworkId.Path["virtualNetworks"] - if !hasName { - sourceNetworkName, hasName = parsedSourceNetworkId.Path["virtualnetworks"] // Handle that different APIs return different ID casings - if !hasName { - return fmt.Errorf("[ERROR] parsed source_network_id '%s' doesn't contain 'virtualnetworks'", parsedSourceNetworkId) - } - } - - if d.IsNewResource() { - existing, err := client.Get(ctx, fabricName, sourceNetworkName, name) - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) && - // todo this workaround can be 
removed when this bug is fixed - // https://github.com/Azure/azure-sdk-for-go/issues/8705 - !utils.ResponseWasStatusCode(existing.Response, http.StatusBadRequest) { - return fmt.Errorf("Error checking for presence of existing site recovery network mapping %s (vault %s): %+v", name, vaultName, err) - } - } - - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_site_recovery_network_mapping", azure.HandleAzureSdkForGoBug2824(*existing.ID)) - } - } - - var parameters = siterecovery.CreateNetworkMappingInput{ - Properties: &siterecovery.CreateNetworkMappingInputProperties{ - RecoveryNetworkID: &targetNetworkId, - RecoveryFabricName: &targetFabricName, - FabricSpecificDetails: siterecovery.AzureToAzureCreateNetworkMappingInput{ - PrimaryNetworkID: &sourceNetworkId, - }, - }, - } - future, err := client.Create(ctx, fabricName, sourceNetworkName, name, parameters) - if err != nil { - return fmt.Errorf("Error creating site recovery network mapping %s (vault %s): %+v", name, vaultName, err) - } - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error creating site recovery network mapping %s (vault %s): %+v", name, vaultName, err) - } - - resp, err := client.Get(ctx, fabricName, sourceNetworkName, name) - if err != nil { - return fmt.Errorf("Error retrieving site recovery network mapping %s (vault %s): %+v", name, vaultName, err) - } - - d.SetId(azure.HandleAzureSdkForGoBug2824(*resp.ID)) - - return resourceArmSiteRecoveryNetworkMappingRead(d, meta) -} - -func resourceArmSiteRecoveryNetworkMappingRead(d *schema.ResourceData, meta interface{}) error { - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - - resGroup := id.ResourceGroup - vaultName := id.Path["vaults"] - fabricName := id.Path["replicationFabrics"] - networkName := id.Path["replicationNetworks"] - name := id.Path["replicationNetworkMappings"] - - client := meta.(*clients.Client).RecoveryServices.NetworkMappingClient(resGroup, vaultName) - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - resp, err := client.Get(ctx, fabricName, networkName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - d.SetId("") - return nil - } - return fmt.Errorf("Error making Read request on site recovery network mapping %s (vault %s): %+v", name, vaultName, err) - } - - d.Set("resource_group_name", resGroup) - d.Set("recovery_vault_name", vaultName) - d.Set("source_recovery_fabric_name", fabricName) - d.Set("name", resp.Name) - if props := resp.Properties; props != nil { - d.Set("source_network_id", props.PrimaryNetworkID) - d.Set("target_network_id", props.RecoveryNetworkID) - - targetFabricId, err := azure.ParseAzureResourceID(azure.HandleAzureSdkForGoBug2824(*resp.Properties.RecoveryFabricArmID)) - if err != nil { - return err - } - d.Set("target_recovery_fabric_name", targetFabricId.Path["replicationFabrics"]) - } - - return nil -} - -func resourceArmSiteRecoveryNetworkMappingDelete(d *schema.ResourceData, meta interface{}) error { - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - - resGroup := id.ResourceGroup - vaultName := id.Path["vaults"] - fabricName := id.Path["replicationFabrics"] - networkName := id.Path["replicationNetworks"] - name := id.Path["replicationNetworkMappings"] - - client := meta.(*clients.Client).RecoveryServices.NetworkMappingClient(resGroup, vaultName) - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, 
d) - defer cancel() - - future, err := client.Delete(ctx, fabricName, networkName, name) - if err != nil { - return fmt.Errorf("Error deleting site recovery network mapping %s (vault %s): %+v", name, vaultName, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for deletion of site recovery network mapping %s (vault %s): %+v", name, vaultName, err) - } - - return nil -} diff --git a/azurerm/internal/services/recoveryservices/resource_arm_site_recovery_protection_container.go b/azurerm/internal/services/recoveryservices/resource_arm_site_recovery_protection_container.go deleted file mode 100644 index c7e505fa8dcb..000000000000 --- a/azurerm/internal/services/recoveryservices/resource_arm_site_recovery_protection_container.go +++ /dev/null @@ -1,160 +0,0 @@ -package recoveryservices - -import ( - "fmt" - "time" - - "github.com/Azure/azure-sdk-for-go/services/recoveryservices/mgmt/2018-01-10/siterecovery" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmSiteRecoveryProtectionContainer() *schema.Resource { - return &schema.Resource{ - Create: resourceArmSiteRecoveryProtectionContainerCreate, - Read: resourceArmSiteRecoveryProtectionContainerRead, - Update: nil, - Delete: resourceArmSiteRecoveryProtectionContainerDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - "resource_group_name": azure.SchemaResourceGroupName(), - - "recovery_vault_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateRecoveryServicesVaultName, - }, - "recovery_fabric_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - }, - } -} - -func resourceArmSiteRecoveryProtectionContainerCreate(d *schema.ResourceData, meta interface{}) error { - resGroup := d.Get("resource_group_name").(string) - vaultName := d.Get("recovery_vault_name").(string) - fabricName := d.Get("recovery_fabric_name").(string) - name := d.Get("name").(string) - - client := meta.(*clients.Client).RecoveryServices.ProtectionContainerClient(resGroup, vaultName) - ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) - defer cancel() - - if d.IsNewResource() { - existing, err := client.Get(ctx, fabricName, name) - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing site recovery protection container %s (fabric %s): %+v", name, fabricName, err) - } - } - - if existing.ID != nil && *existing.ID != "" { - return 
tf.ImportAsExistsError("azurerm_site_recovery_protection_container", azure.HandleAzureSdkForGoBug2824(*existing.ID)) - } - } - - parameters := siterecovery.CreateProtectionContainerInput{ - Properties: &siterecovery.CreateProtectionContainerInputProperties{}, - } - - future, err := client.Create(ctx, fabricName, name, parameters) - if err != nil { - return fmt.Errorf("Error creating site recovery protection container %s (fabric %s): %+v", name, fabricName, err) - } - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error creating site recovery protection container %s (fabric %s): %+v", name, fabricName, err) - } - - resp, err := client.Get(ctx, fabricName, name) - if err != nil { - return fmt.Errorf("Error retrieving site recovery protection container %s (fabric %s): %+v", name, fabricName, err) - } - - d.SetId(azure.HandleAzureSdkForGoBug2824(*resp.ID)) - - return resourceArmSiteRecoveryProtectionContainerRead(d, meta) -} - -func resourceArmSiteRecoveryProtectionContainerRead(d *schema.ResourceData, meta interface{}) error { - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - - resGroup := id.ResourceGroup - vaultName := id.Path["vaults"] - fabricName := id.Path["replicationFabrics"] - name := id.Path["replicationProtectionContainers"] - - client := meta.(*clients.Client).RecoveryServices.ProtectionContainerClient(resGroup, vaultName) - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - resp, err := client.Get(ctx, fabricName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - d.SetId("") - return nil - } - return fmt.Errorf("Error making Read request on site recovery protection container %s (fabric %s): %+v", name, fabricName, err) - } - - d.Set("name", resp.Name) - d.Set("resource_group_name", resGroup) - d.Set("recovery_vault_name", vaultName) - d.Set("recovery_fabric_name", fabricName) - return nil -} - -func resourceArmSiteRecoveryProtectionContainerDelete(d *schema.ResourceData, meta interface{}) error { - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - - resGroup := id.ResourceGroup - vaultName := id.Path["vaults"] - fabricName := id.Path["replicationFabrics"] - name := id.Path["replicationProtectionContainers"] - - client := meta.(*clients.Client).RecoveryServices.ProtectionContainerClient(resGroup, vaultName) - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - future, err := client.Delete(ctx, fabricName, name) - if err != nil { - return fmt.Errorf("Error deleting site recovery protection container %s (fabric %s): %+v", name, fabricName, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for deletion of site recovery protection container %s (fabric %s): %+v", name, fabricName, err) - } - - return nil -} diff --git a/azurerm/internal/services/recoveryservices/resource_arm_site_recovery_protection_container_mapping.go b/azurerm/internal/services/recoveryservices/resource_arm_site_recovery_protection_container_mapping.go deleted file mode 100644 index b341c1de26dd..000000000000 --- a/azurerm/internal/services/recoveryservices/resource_arm_site_recovery_protection_container_mapping.go +++ /dev/null @@ -1,201 +0,0 @@ -package recoveryservices - -import ( - "fmt" - "time" - - "github.com/Azure/azure-sdk-for-go/services/recoveryservices/mgmt/2018-01-10/siterecovery" - 
"github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmSiteRecoveryProtectionContainerMapping() *schema.Resource { - return &schema.Resource{ - Create: resourceArmSiteRecoveryContainerMappingCreate, - Read: resourceArmSiteRecoveryContainerMappingRead, - Update: nil, - Delete: resourceArmSiteRecoveryServicesContainerMappingDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - "resource_group_name": azure.SchemaResourceGroupName(), - - "recovery_vault_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateRecoveryServicesVaultName, - }, - "recovery_fabric_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - "recovery_replication_policy_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateResourceID, - DiffSuppressFunc: suppress.CaseDifference, - }, - "recovery_source_protection_container_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - "recovery_target_protection_container_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateResourceID, - DiffSuppressFunc: suppress.CaseDifference, - }, - }, - } -} - -func resourceArmSiteRecoveryContainerMappingCreate(d *schema.ResourceData, meta interface{}) error { - resGroup := d.Get("resource_group_name").(string) - vaultName := d.Get("recovery_vault_name").(string) - fabricName := d.Get("recovery_fabric_name").(string) - policyId := d.Get("recovery_replication_policy_id").(string) - protectionContainerName := d.Get("recovery_source_protection_container_name").(string) - targetContainerId := d.Get("recovery_target_protection_container_id").(string) - name := d.Get("name").(string) - - client := meta.(*clients.Client).RecoveryServices.ContainerMappingClient(resGroup, vaultName) - ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) - defer cancel() - - if d.IsNewResource() { - existing, err := client.Get(ctx, fabricName, protectionContainerName, name) - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing site recovery protection container mapping %s (fabric %s, container %s): %+v", name, fabricName, protectionContainerName, err) - } - } - - if existing.ID != nil && *existing.ID != "" { - return 
tf.ImportAsExistsError("azurerm_site_recovery_protection_container_mapping", azure.HandleAzureSdkForGoBug2824(*existing.ID)) - } - } - - var parameters = siterecovery.CreateProtectionContainerMappingInput{ - Properties: &siterecovery.CreateProtectionContainerMappingInputProperties{ - TargetProtectionContainerID: &targetContainerId, - PolicyID: &policyId, - ProviderSpecificInput: siterecovery.ReplicationProviderSpecificContainerMappingInput{}, - }, - } - future, err := client.Create(ctx, fabricName, protectionContainerName, name, parameters) - if err != nil { - return fmt.Errorf("Error creating site recovery protection container mapping %s (vault %s): %+v", name, vaultName, err) - } - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error creating site recovery protection container mapping %s (vault %s): %+v", name, vaultName, err) - } - - resp, err := client.Get(ctx, fabricName, protectionContainerName, name) - if err != nil { - return fmt.Errorf("Error retrieving site recovery protection container mapping %s (vault %s): %+v", name, vaultName, err) - } - - d.SetId(azure.HandleAzureSdkForGoBug2824(*resp.ID)) - - return resourceArmSiteRecoveryContainerMappingRead(d, meta) -} - -func resourceArmSiteRecoveryContainerMappingRead(d *schema.ResourceData, meta interface{}) error { - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - - resGroup := id.ResourceGroup - vaultName := id.Path["vaults"] - fabricName := id.Path["replicationFabrics"] - protectionContainerName := id.Path["replicationProtectionContainers"] - name := id.Path["replicationProtectionContainerMappings"] - - client := meta.(*clients.Client).RecoveryServices.ContainerMappingClient(resGroup, vaultName) - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - resp, err := client.Get(ctx, fabricName, protectionContainerName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - d.SetId("") - return nil - } - return fmt.Errorf("Error making Read request on site recovery protection container mapping %s (vault %s): %+v", name, vaultName, err) - } - - d.Set("resource_group_name", resGroup) - d.Set("recovery_vault_name", vaultName) - d.Set("recovery_fabric_name", fabricName) - d.Set("recovery_source_protection_container_name", resp.Properties.SourceProtectionContainerFriendlyName) - d.Set("name", resp.Name) - d.Set("recovery_replication_policy_id", resp.Properties.PolicyID) - d.Set("recovery_target_protection_container_id", resp.Properties.TargetProtectionContainerID) - return nil -} - -func resourceArmSiteRecoveryServicesContainerMappingDelete(d *schema.ResourceData, meta interface{}) error { - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - - resGroup := id.ResourceGroup - vaultName := id.Path["vaults"] - fabricName := id.Path["replicationFabrics"] - protectionContainerName := id.Path["replicationProtectionContainers"] - name := id.Path["replicationProtectionContainerMappings"] - instanceType := string(siterecovery.InstanceTypeBasicReplicationProviderSpecificContainerMappingInputInstanceTypeA2A) - - client := meta.(*clients.Client).RecoveryServices.ContainerMappingClient(resGroup, vaultName) - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - input := siterecovery.RemoveProtectionContainerMappingInput{ - Properties: &siterecovery.RemoveProtectionContainerMappingInputProperties{ - ProviderSpecificInput: 
&siterecovery.ReplicationProviderContainerUnmappingInput{ - InstanceType: &instanceType, - }, - }, - } - - future, err := client.Delete(ctx, fabricName, protectionContainerName, name, input) - if err != nil { - return fmt.Errorf("Error deleting site recovery protection container mapping %s (vault %s): %+v", name, vaultName, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for deletion of site recovery protection container mapping %s (vault %s): %+v", name, vaultName, err) - } - - return nil -} diff --git a/azurerm/internal/services/recoveryservices/resource_arm_site_recovery_replicated_vm.go b/azurerm/internal/services/recoveryservices/resource_arm_site_recovery_replicated_vm.go deleted file mode 100644 index 161268f030b2..000000000000 --- a/azurerm/internal/services/recoveryservices/resource_arm_site_recovery_replicated_vm.go +++ /dev/null @@ -1,588 +0,0 @@ -package recoveryservices - -import ( - "bytes" - "fmt" - "log" - "strings" - "time" - - "github.com/Azure/azure-sdk-for-go/services/compute/mgmt/2020-06-01/compute" - "github.com/Azure/azure-sdk-for-go/services/recoveryservices/mgmt/2018-01-10/siterecovery" - "github.com/hashicorp/terraform-plugin-sdk/helper/hashcode" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmSiteRecoveryReplicatedVM() *schema.Resource { - return &schema.Resource{ - Create: resourceArmSiteRecoveryReplicatedItemCreate, - Read: resourceArmSiteRecoveryReplicatedItemRead, - Update: resourceArmSiteRecoveryReplicatedItemUpdate, - Delete: resourceArmSiteRecoveryReplicatedItemDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(120 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(80 * time.Minute), - Delete: schema.DefaultTimeout(80 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - "resource_group_name": azure.SchemaResourceGroupName(), - - "recovery_vault_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateRecoveryServicesVaultName, - }, - "source_recovery_fabric_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - "source_vm_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateResourceID, - DiffSuppressFunc: suppress.CaseDifference, - }, - "target_recovery_fabric_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateResourceID, - DiffSuppressFunc: suppress.CaseDifference, - }, - "recovery_replication_policy_id": { - Type: schema.TypeString, - Required: 
true, - ForceNew: true, - ValidateFunc: azure.ValidateResourceID, - DiffSuppressFunc: suppress.CaseDifference, - }, - "source_recovery_protection_container_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - "target_recovery_protection_container_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateResourceID, - DiffSuppressFunc: suppress.CaseDifference, - }, - "target_resource_group_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateResourceID, - DiffSuppressFunc: suppress.CaseDifference, - }, - "target_availability_set_id": { - Type: schema.TypeString, - Optional: true, - ValidateFunc: azure.ValidateResourceID, - DiffSuppressFunc: suppress.CaseDifference, - }, - "target_network_id": { - Type: schema.TypeString, - Computed: true, - Optional: true, - ValidateFunc: azure.ValidateResourceID, - }, - "managed_disk": { - Type: schema.TypeSet, - ConfigMode: schema.SchemaConfigModeAttr, - Optional: true, - ForceNew: true, - Set: resourceArmSiteRecoveryReplicatedVMDiskHash, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "disk_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - DiffSuppressFunc: suppress.CaseDifference, - }, - "staging_storage_account_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateResourceID, - DiffSuppressFunc: suppress.CaseDifference, - }, - "target_resource_group_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateResourceID, - DiffSuppressFunc: suppress.CaseDifference, - }, - "target_disk_type": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringInSlice([]string{ - string(compute.StandardLRS), - string(compute.PremiumLRS), - string(compute.StandardSSDLRS), - string(compute.UltraSSDLRS), - }, true), - DiffSuppressFunc: suppress.CaseDifference, - }, - "target_replica_disk_type": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringInSlice([]string{ - string(compute.StandardLRS), - string(compute.PremiumLRS), - string(compute.StandardSSDLRS), - string(compute.UltraSSDLRS), - }, true), - DiffSuppressFunc: suppress.CaseDifference, - }, - }, - }, - }, - "network_interface": { - Type: schema.TypeSet, - ConfigMode: schema.SchemaConfigModeAttr, - Computed: true, - Optional: true, - Elem: networkInterfaceResource(), - }, - }, - } -} - -func networkInterfaceResource() *schema.Resource { - return &schema.Resource{ - Schema: map[string]*schema.Schema{ - "source_network_interface_id": { - Type: schema.TypeString, - Computed: true, - Optional: true, - ValidateFunc: azure.ValidateResourceID, - }, - "target_static_ip": { - Type: schema.TypeString, - Optional: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - "target_subnet_name": { - Type: schema.TypeString, - Optional: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - }, - } -} - -func resourceArmSiteRecoveryReplicatedItemCreate(d *schema.ResourceData, meta interface{}) error { - resGroup := d.Get("resource_group_name").(string) - vaultName := d.Get("recovery_vault_name").(string) - client := meta.(*clients.Client).RecoveryServices.ReplicationMigrationItemsClient(resGroup, vaultName) - name := d.Get("name").(string) - fabricName := d.Get("source_recovery_fabric_name").(string) - sourceVmId := 
d.Get("source_vm_id").(string) - policyId := d.Get("recovery_replication_policy_id").(string) - sourceProtectionContainerName := d.Get("source_recovery_protection_container_name").(string) - targetProtectionContainerId := d.Get("target_recovery_protection_container_id").(string) - targetResourceGroupId := d.Get("target_resource_group_id").(string) - - var targetAvailabilitySetID *string - if id, isSet := d.GetOk("target_availability_set_id"); isSet { - targetAvailabilitySetID = utils.String(id.(string)) - } else { - targetAvailabilitySetID = nil - } - - ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) - defer cancel() - - if d.IsNewResource() { - existing, err := client.Get(ctx, fabricName, sourceProtectionContainerName, name) - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing site recovery replicated vm %s (vault %s): %+v", name, vaultName, err) - } - } - - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_site_recovery_replicated_vm", azure.HandleAzureSdkForGoBug2824(*existing.ID)) - } - } - - managedDisks := []siterecovery.A2AVMManagedDiskInputDetails{} - - for _, raw := range d.Get("managed_disk").(*schema.Set).List() { - diskInput := raw.(map[string]interface{}) - diskId := diskInput["disk_id"].(string) - primaryStagingAzureStorageAccountID := diskInput["staging_storage_account_id"].(string) - recoveryResourceGroupId := diskInput["target_resource_group_id"].(string) - targetReplicaDiskType := diskInput["target_replica_disk_type"].(string) - targetDiskType := diskInput["target_disk_type"].(string) - - managedDisks = append(managedDisks, siterecovery.A2AVMManagedDiskInputDetails{ - DiskID: &diskId, - PrimaryStagingAzureStorageAccountID: &primaryStagingAzureStorageAccountID, - RecoveryResourceGroupID: &recoveryResourceGroupId, - RecoveryReplicaDiskAccountType: &targetReplicaDiskType, - RecoveryTargetDiskAccountType: &targetDiskType, - }) - } - - var parameters = siterecovery.EnableProtectionInput{ - Properties: &siterecovery.EnableProtectionInputProperties{ - PolicyID: &policyId, - ProviderSpecificDetails: siterecovery.A2AEnableProtectionInput{ - FabricObjectID: &sourceVmId, - RecoveryContainerID: &targetProtectionContainerId, - RecoveryResourceGroupID: &targetResourceGroupId, - RecoveryAvailabilitySetID: targetAvailabilitySetID, - VMManagedDisks: &managedDisks, - }, - }, - } - future, err := client.Create(ctx, fabricName, sourceProtectionContainerName, name, parameters) - if err != nil { - return fmt.Errorf("Error creating replicated vm %s (vault %s): %+v", name, vaultName, err) - } - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error creating replicated vm %s (vault %s): %+v", name, vaultName, err) - } - - resp, err := client.Get(ctx, fabricName, sourceProtectionContainerName, name) - if err != nil { - return fmt.Errorf("Error retrieving replicated vm %s (vault %s): %+v", name, vaultName, err) - } - - d.SetId(azure.HandleAzureSdkForGoBug2824(*resp.ID)) - - // We are not allowed to configure the NIC on the initial setup, and the VM has to be replicated before - // we can reconfigure. Hence this call to update when we create. 
- return resourceArmSiteRecoveryReplicatedItemUpdate(d, meta) -} - -func resourceArmSiteRecoveryReplicatedItemUpdate(d *schema.ResourceData, meta interface{}) error { - resGroup := d.Get("resource_group_name").(string) - vaultName := d.Get("recovery_vault_name").(string) - client := meta.(*clients.Client).RecoveryServices.ReplicationMigrationItemsClient(resGroup, vaultName) - - // We are only allowed to update the configuration once the VM is fully protected - state, err := waitForReplicationToBeHealthy(d, meta) - if err != nil { - return err - } - - name := d.Get("name").(string) - fabricName := d.Get("source_recovery_fabric_name").(string) - sourceProtectionContainerName := d.Get("source_recovery_protection_container_name").(string) - targetNetworkId := d.Get("target_network_id").(string) - - var targetAvailabilitySetID *string - if id, isSet := d.GetOk("target_availability_set_id"); isSet { - tmp := id.(string) - targetAvailabilitySetID = &tmp - } else { - targetAvailabilitySetID = nil - } - - ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - - vmNics := []siterecovery.VMNicInputDetails{} - for _, raw := range d.Get("network_interface").(*schema.Set).List() { - vmNicInput := raw.(map[string]interface{}) - sourceNicId := vmNicInput["source_network_interface_id"].(string) - targetStaticIp := vmNicInput["target_static_ip"].(string) - targetSubnetName := vmNicInput["target_subnet_name"].(string) - - nicId := findNicId(state, sourceNicId) - if nicId == nil { - return fmt.Errorf("Error updating replicated vm %s (vault %s): Trying to update NIC that is not known by Azure %s", name, vaultName, sourceNicId) - } - vmNics = append(vmNics, siterecovery.VMNicInputDetails{ - NicID: nicId, - RecoveryVMSubnetName: &targetSubnetName, - ReplicaNicStaticIPAddress: &targetStaticIp, - }) - } - - managedDisks := []siterecovery.A2AVMManagedDiskUpdateDetails{} - for _, raw := range d.Get("managed_disk").(*schema.Set).List() { - diskInput := raw.(map[string]interface{}) - diskId := diskInput["disk_id"].(string) - targetReplicaDiskType := diskInput["target_replica_disk_type"].(string) - targetDiskType := diskInput["target_disk_type"].(string) - - managedDisks = append(managedDisks, siterecovery.A2AVMManagedDiskUpdateDetails{ - DiskID: &diskId, - RecoveryReplicaDiskAccountType: &targetReplicaDiskType, - RecoveryTargetDiskAccountType: &targetDiskType, - }) - } - - if targetNetworkId == "" { - // No target network id was specified, so we want to preserve what was selected - if a2aDetails, isA2a := state.Properties.ProviderSpecificDetails.AsA2AReplicationDetails(); isA2a { - if a2aDetails.SelectedRecoveryAzureNetworkID != nil { - targetNetworkId = *a2aDetails.SelectedRecoveryAzureNetworkID - } else { - return fmt.Errorf("target_network_id must be set when a network_interface is configured") - } - } else { - return fmt.Errorf("target_network_id must be set when a network_interface is configured") - } - } - - var parameters = siterecovery.UpdateReplicationProtectedItemInput{ - Properties: &siterecovery.UpdateReplicationProtectedItemInputProperties{ - RecoveryAzureVMName: &name, - SelectedRecoveryAzureNetworkID: &targetNetworkId, - VMNics: &vmNics, - RecoveryAvailabilitySetID: targetAvailabilitySetID, - ProviderSpecificDetails: siterecovery.A2AUpdateReplicationProtectedItemInput{ - ManagedDiskUpdateDetails: &managedDisks, - }, - }, - } - - future, err := client.Update(ctx, fabricName, sourceProtectionContainerName, name, parameters) - if err != nil { - return fmt.Errorf("Error 
updating replicated vm %s (vault %s): %+v", name, vaultName, err) - } - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error updating replicated vm %s (vault %s): %+v", name, vaultName, err) - } - - return resourceArmSiteRecoveryReplicatedItemRead(d, meta) -} - -func findNicId(state *siterecovery.ReplicationProtectedItem, sourceNicId string) *string { - if a2aDetails, isA2a := state.Properties.ProviderSpecificDetails.AsA2AReplicationDetails(); isA2a { - if a2aDetails.VMNics != nil { - for _, nic := range *a2aDetails.VMNics { - if nic.SourceNicArmID != nil && *nic.SourceNicArmID == sourceNicId { - return nic.NicID - } - } - } - } - return nil -} - -func resourceArmSiteRecoveryReplicatedItemRead(d *schema.ResourceData, meta interface{}) error { - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - - resGroup := id.ResourceGroup - vaultName := id.Path["vaults"] - client := meta.(*clients.Client).RecoveryServices.ReplicationMigrationItemsClient(resGroup, vaultName) - fabricName := id.Path["replicationFabrics"] - protectionContainerName := id.Path["replicationProtectionContainers"] - name := id.Path["replicationProtectedItems"] - - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - resp, err := client.Get(ctx, fabricName, protectionContainerName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - d.SetId("") - return nil - } - return fmt.Errorf("Error making Read request on site recovery replicated vm %s (vault %s): %+v", name, vaultName, err) - } - - d.Set("name", name) - d.Set("resource_group_name", resGroup) - d.Set("recovery_vault_name", vaultName) - d.Set("source_recovery_fabric_name", fabricName) - d.Set("target_recovery_fabric_id", resp.Properties.RecoveryFabricID) - d.Set("recovery_replication_policy_id", resp.Properties.PolicyID) - d.Set("source_recovery_protection_container_name", protectionContainerName) - d.Set("target_recovery_protection_container_id", resp.Properties.RecoveryContainerID) - - if a2aDetails, isA2a := resp.Properties.ProviderSpecificDetails.AsA2AReplicationDetails(); isA2a { - d.Set("source_vm_id", a2aDetails.FabricObjectID) - d.Set("target_resource_group_id", a2aDetails.RecoveryAzureResourceGroupID) - d.Set("target_availability_set_id", a2aDetails.RecoveryAvailabilitySet) - d.Set("target_network_id", a2aDetails.SelectedRecoveryAzureNetworkID) - if a2aDetails.ProtectedManagedDisks != nil { - disksOutput := make([]interface{}, 0) - for _, disk := range *a2aDetails.ProtectedManagedDisks { - diskOutput := make(map[string]interface{}) - diskOutput["disk_id"] = *disk.DiskID - diskOutput["staging_storage_account_id"] = *disk.PrimaryStagingAzureStorageAccountID - diskOutput["target_resource_group_id"] = *disk.RecoveryResourceGroupID - diskOutput["target_replica_disk_type"] = *disk.RecoveryReplicaDiskAccountType - diskOutput["target_disk_type"] = *disk.RecoveryTargetDiskAccountType - - disksOutput = append(disksOutput, diskOutput) - } - d.Set("managed_disk", schema.NewSet(resourceArmSiteRecoveryReplicatedVMDiskHash, disksOutput)) - } - - if a2aDetails.VMNics != nil { - nicsOutput := make([]interface{}, 0) - for _, nic := range *a2aDetails.VMNics { - nicOutput := make(map[string]interface{}) - if nic.SourceNicArmID != nil { - nicOutput["source_network_interface_id"] = *nic.SourceNicArmID - } - if nic.ReplicaNicStaticIPAddress != nil { - nicOutput["target_static_ip"] = *nic.ReplicaNicStaticIPAddress - } - if nic.RecoveryVMSubnetName != nil 
{ - nicOutput["target_subnet_name"] = *nic.RecoveryVMSubnetName - } - nicsOutput = append(nicsOutput, nicOutput) - } - d.Set("network_interface", schema.NewSet(schema.HashResource(networkInterfaceResource()), nicsOutput)) - } - } - - return nil -} - -func resourceArmSiteRecoveryReplicatedItemDelete(d *schema.ResourceData, meta interface{}) error { - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - - resGroup := id.ResourceGroup - vaultName := id.Path["vaults"] - client := meta.(*clients.Client).RecoveryServices.ReplicationMigrationItemsClient(resGroup, vaultName) - fabricName := id.Path["replicationFabrics"] - protectionContainerName := id.Path["replicationProtectionContainers"] - name := id.Path["replicationProtectedItems"] - - disableProtectionInput := siterecovery.DisableProtectionInput{ - Properties: &siterecovery.DisableProtectionInputProperties{ - DisableProtectionReason: siterecovery.NotSpecified, - ReplicationProviderInput: siterecovery.DisableProtectionProviderSpecificInput{}, - }, - } - - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - future, err := client.Delete(ctx, fabricName, protectionContainerName, name, disableProtectionInput) - if err != nil { - return fmt.Errorf("Error deleting site recovery replicated vm %s (vault %s): %+v", name, vaultName, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for deletion of site recovery replicated vm %s (vault %s): %+v", name, vaultName, err) - } - return nil -} - -func resourceArmSiteRecoveryReplicatedVMDiskHash(v interface{}) int { - var buf bytes.Buffer - - if m, ok := v.(map[string]interface{}); ok { - if v, ok := m["disk_id"]; ok { - buf.WriteString(strings.ToLower(v.(string))) - } - } - - return hashcode.String(buf.String()) -} - -func waitForReplicationToBeHealthy(d *schema.ResourceData, meta interface{}) (*siterecovery.ReplicationProtectedItem, error) { - log.Printf("Waiting for Site Recover to replicate VM.") - stateConf := &resource.StateChangeConf{ - Target: []string{"Protected"}, - Refresh: waitForReplicationToBeHealthyRefreshFunc(d, meta), - PollInterval: time.Minute, - } - - stateConf.Timeout = d.Timeout(schema.TimeoutUpdate) - - result, err := stateConf.WaitForState() - if err != nil { - return nil, fmt.Errorf("Error waiting for site recovery to replicate vm: %+v", err) - } - - protectedItem, ok := result.(siterecovery.ReplicationProtectedItem) - if ok { - return &protectedItem, nil - } else { - return nil, fmt.Errorf("Error waiting for site recovery return incompatible tyupe") - } -} - -func waitForReplicationToBeHealthyRefreshFunc(d *schema.ResourceData, meta interface{}) resource.StateRefreshFunc { - return func() (interface{}, string, error) { - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return nil, "", err - } - - resGroup := id.ResourceGroup - vaultName := id.Path["vaults"] - client := meta.(*clients.Client).RecoveryServices.ReplicationMigrationItemsClient(resGroup, vaultName) - fabricName := id.Path["replicationFabrics"] - protectionContainerName := id.Path["replicationProtectionContainers"] - name := id.Path["replicationProtectedItems"] - - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - resp, err := client.Get(ctx, fabricName, protectionContainerName, name) - if err != nil { - return nil, "", fmt.Errorf("Error making Read request on site recovery replicated vm %s (vault %s): %+v", name, vaultName, err) 
- } - - if resp.Properties == nil { - return nil, "", fmt.Errorf("Missing Properties in response when making Read request on site recovery replicated vm %s (vault %s): %+v", name, vaultName, err) - } - - if resp.Properties.ProviderSpecificDetails == nil { - return nil, "", fmt.Errorf("Missing Properties.ProviderSpecificDetails in response when making Read request on site recovery replicated vm %s (vault %s): %+v", name, vaultName, err) - } - - // Find first disk that is not fully replicated yet - if a2aDetails, isA2a := resp.Properties.ProviderSpecificDetails.AsA2AReplicationDetails(); isA2a { - if a2aDetails.MonitoringPercentageCompletion != nil { - log.Printf("Waiting for Site Recover to replicate VM, %d%% complete.", *a2aDetails.MonitoringPercentageCompletion) - } - if a2aDetails.VMProtectionState != nil { - return resp, *a2aDetails.VMProtectionState, nil - } - } - - if resp.Properties.ReplicationHealth == nil { - return nil, "", fmt.Errorf("Missing ReplicationHealth in response when making Read request on site recovery replicated vm %s (vault %s): %+v", name, vaultName, err) - } - return resp, *resp.Properties.ReplicationHealth, nil - } -} diff --git a/azurerm/internal/services/recoveryservices/resource_arm_site_recovery_replication_policy.go b/azurerm/internal/services/recoveryservices/resource_arm_site_recovery_replication_policy.go deleted file mode 100644 index 1f0704015765..000000000000 --- a/azurerm/internal/services/recoveryservices/resource_arm_site_recovery_replication_policy.go +++ /dev/null @@ -1,213 +0,0 @@ -package recoveryservices - -import ( - "fmt" - "time" - - "github.com/Azure/azure-sdk-for-go/services/recoveryservices/mgmt/2018-01-10/siterecovery" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmSiteRecoveryReplicationPolicy() *schema.Resource { - return &schema.Resource{ - Create: resourceArmSiteRecoveryReplicationPolicyCreate, - Read: resourceArmSiteRecoveryReplicationPolicyRead, - Update: resourceArmSiteRecoveryReplicationPolicyUpdate, - Delete: resourceArmSiteRecoveryReplicationPolicyDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - "resource_group_name": azure.SchemaResourceGroupName(), - - "recovery_vault_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateRecoveryServicesVaultName, - }, - "recovery_point_retention_in_minutes": { - Type: schema.TypeInt, - Required: true, - ForceNew: false, - ValidateFunc: validation.IntBetween(1, 365*24*60), - }, - "application_consistent_snapshot_frequency_in_minutes": { - Type: schema.TypeInt, - Required: true, - ForceNew: false, 
- ValidateFunc: validation.IntBetween(1, 365*24*60), - }, - }, - } -} - -func resourceArmSiteRecoveryReplicationPolicyCreate(d *schema.ResourceData, meta interface{}) error { - resGroup := d.Get("resource_group_name").(string) - vaultName := d.Get("recovery_vault_name").(string) - name := d.Get("name").(string) - - client := meta.(*clients.Client).RecoveryServices.ReplicationPoliciesClient(resGroup, vaultName) - ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) - defer cancel() - - if d.IsNewResource() { - existing, err := client.Get(ctx, name) - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing site recovery replication policy %s: %+v", name, err) - } - } - - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_site_recovery_replication_policy", azure.HandleAzureSdkForGoBug2824(*existing.ID)) - } - } - - recoveryPoint := int32(d.Get("recovery_point_retention_in_minutes").(int)) - appConsitency := int32(d.Get("application_consistent_snapshot_frequency_in_minutes").(int)) - var parameters = siterecovery.CreatePolicyInput{ - Properties: &siterecovery.CreatePolicyInputProperties{ - ProviderSpecificInput: &siterecovery.A2APolicyCreationInput{ - RecoveryPointHistory: &recoveryPoint, - AppConsistentFrequencyInMinutes: &appConsitency, - MultiVMSyncStatus: siterecovery.Enable, - InstanceType: siterecovery.InstanceTypeBasicPolicyProviderSpecificInputInstanceTypeA2A, - }, - }, - } - future, err := client.Create(ctx, name, parameters) - if err != nil { - return fmt.Errorf("Error creating site recovery replication policy %s (vault %s): %+v", name, vaultName, err) - } - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error creating site recovery replication policy %s (vault %s): %+v", name, vaultName, err) - } - - resp, err := client.Get(ctx, name) - if err != nil { - return fmt.Errorf("Error retrieving site recovery replication policy %s (vault %s): %+v", name, vaultName, err) - } - - d.SetId(azure.HandleAzureSdkForGoBug2824(*resp.ID)) - - return resourceArmSiteRecoveryReplicationPolicyRead(d, meta) -} - -func resourceArmSiteRecoveryReplicationPolicyUpdate(d *schema.ResourceData, meta interface{}) error { - resGroup := d.Get("resource_group_name").(string) - vaultName := d.Get("recovery_vault_name").(string) - name := d.Get("name").(string) - - client := meta.(*clients.Client).RecoveryServices.ReplicationPoliciesClient(resGroup, vaultName) - ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - - recoveryPoint := int32(d.Get("recovery_point_retention_in_minutes").(int)) - appConsitency := int32(d.Get("application_consistent_snapshot_frequency_in_minutes").(int)) - var parameters = siterecovery.UpdatePolicyInput{ - Properties: &siterecovery.UpdatePolicyInputProperties{ - ReplicationProviderSettings: &siterecovery.A2APolicyCreationInput{ - RecoveryPointHistory: &recoveryPoint, - AppConsistentFrequencyInMinutes: &appConsitency, - MultiVMSyncStatus: siterecovery.Enable, - InstanceType: siterecovery.InstanceTypeBasicPolicyProviderSpecificInputInstanceTypeA2A, - }, - }, - } - future, err := client.Update(ctx, name, parameters) - if err != nil { - return fmt.Errorf("Error updating site recovery replication policy %s (vault %s): %+v", name, vaultName, err) - } - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error updating site recovery replication 
policy %s (vault %s): %+v", name, vaultName, err) - } - - resp, err := client.Get(ctx, name) - if err != nil { - return fmt.Errorf("Error retrieving site recovery replication policy %s (vault %s): %+v", name, vaultName, err) - } - - d.SetId(azure.HandleAzureSdkForGoBug2824(*resp.ID)) - - return resourceArmSiteRecoveryReplicationPolicyRead(d, meta) -} - -func resourceArmSiteRecoveryReplicationPolicyRead(d *schema.ResourceData, meta interface{}) error { - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - - resGroup := id.ResourceGroup - vaultName := id.Path["vaults"] - name := id.Path["replicationPolicies"] - - client := meta.(*clients.Client).RecoveryServices.ReplicationPoliciesClient(resGroup, vaultName) - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - resp, err := client.Get(ctx, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - d.SetId("") - return nil - } - return fmt.Errorf("Error making Read request on site recovery replication policy %s (vault %s): %+v", name, vaultName, err) - } - - d.Set("name", resp.Name) - d.Set("resource_group_name", resGroup) - d.Set("recovery_vault_name", vaultName) - if a2APolicyDetails, isA2A := resp.Properties.ProviderSpecificDetails.AsA2APolicyDetails(); isA2A { - d.Set("recovery_point_retention_in_minutes", a2APolicyDetails.RecoveryPointHistory) - d.Set("application_consistent_snapshot_frequency_in_minutes", a2APolicyDetails.AppConsistentFrequencyInMinutes) - } - return nil -} - -func resourceArmSiteRecoveryReplicationPolicyDelete(d *schema.ResourceData, meta interface{}) error { - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - - resGroup := id.ResourceGroup - vaultName := id.Path["vaults"] - name := id.Path["replicationPolicies"] - - client := meta.(*clients.Client).RecoveryServices.ReplicationPoliciesClient(resGroup, vaultName) - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - future, err := client.Delete(ctx, name) - if err != nil { - return fmt.Errorf("Error deleting site recovery replication policy %s (vault %s): %+v", name, vaultName, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for deletion of site recovery replication policy %s (vault %s): %+v", name, vaultName, err) - } - - return nil -} diff --git a/azurerm/internal/services/recoveryservices/site_recovery_fabric_resource.go b/azurerm/internal/services/recoveryservices/site_recovery_fabric_resource.go new file mode 100644 index 000000000000..872e4d5bd30e --- /dev/null +++ b/azurerm/internal/services/recoveryservices/site_recovery_fabric_resource.go @@ -0,0 +1,162 @@ +package recoveryservices + +import ( + "fmt" + "time" + + "github.com/Azure/azure-sdk-for-go/services/recoveryservices/mgmt/2018-01-10/siterecovery" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceSiteRecoveryFabric() *schema.Resource { + return &schema.Resource{ + Create: 
resourceSiteRecoveryFabricCreate, + Read: resourceSiteRecoveryFabricRead, + Update: nil, + Delete: resourceSiteRecoveryFabricDelete, + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "resource_group_name": azure.SchemaResourceGroupName(), + + "recovery_vault_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateRecoveryServicesVaultName, + }, + "location": azure.SchemaLocation(), + }, + } +} + +func resourceSiteRecoveryFabricCreate(d *schema.ResourceData, meta interface{}) error { + resGroup := d.Get("resource_group_name").(string) + vaultName := d.Get("recovery_vault_name").(string) + location := azure.NormalizeLocation(d.Get("location").(string)) + name := d.Get("name").(string) + + client := meta.(*clients.Client).RecoveryServices.FabricClient(resGroup, vaultName) + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + if d.IsNewResource() { + existing, err := client.Get(ctx, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("Error checking for presence of existing site recovery fabric %s (vault %s): %+v", name, vaultName, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_site_recovery_fabric", azure.HandleAzureSdkForGoBug2824(*existing.ID)) + } + } + + parameters := siterecovery.FabricCreationInput{ + Properties: &siterecovery.FabricCreationInputProperties{ + CustomDetails: siterecovery.AzureFabricCreationInput{ + InstanceType: "Azure", + Location: &location, + }, + }, + } + + future, err := client.Create(ctx, name, parameters) + if err != nil { + return fmt.Errorf("Error creating site recovery fabric %s (vault %s): %+v", name, vaultName, err) + } + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error creating site recovery fabric %s (vault %s): %+v", name, vaultName, err) + } + + resp, err := client.Get(ctx, name) + if err != nil { + return fmt.Errorf("Error retrieving site recovery fabric %s (vault %s): %+v", name, vaultName, err) + } + + d.SetId(azure.HandleAzureSdkForGoBug2824(*resp.ID)) + + return resourceSiteRecoveryFabricRead(d, meta) +} + +func resourceSiteRecoveryFabricRead(d *schema.ResourceData, meta interface{}) error { + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + + resGroup := id.ResourceGroup + vaultName := id.Path["vaults"] + name := id.Path["replicationFabrics"] + + client := meta.(*clients.Client).RecoveryServices.FabricClient(resGroup, vaultName) + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + resp, err := client.Get(ctx, name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + d.SetId("") + return nil + } + return fmt.Errorf("Error making read request on site recovery fabric %s (vault %s): %+v", name, vaultName, err) + } + + d.Set("name", resp.Name) + d.Set("resource_group_name", resGroup) + if props := resp.Properties; props != nil { + if azureDetails, isAzureDetails := 
props.CustomDetails.AsAzureFabricSpecificDetails(); isAzureDetails { + d.Set("location", azureDetails.Location) + } + } + d.Set("recovery_vault_name", vaultName) + return nil +} + +func resourceSiteRecoveryFabricDelete(d *schema.ResourceData, meta interface{}) error { + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + + resGroup := id.ResourceGroup + vaultName := id.Path["vaults"] + name := id.Path["replicationFabrics"] + + client := meta.(*clients.Client).RecoveryServices.FabricClient(resGroup, vaultName) + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + future, err := client.Delete(ctx, name) + if err != nil { + return fmt.Errorf("Error deleting site recovery fabric %s (vault %s): %+v", name, vaultName, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error waiting for deletion of site recovery fabric %s (vault %s): %+v", name, vaultName, err) + } + + return nil +} diff --git a/azurerm/internal/services/recoveryservices/site_recovery_fabric_resource_test.go b/azurerm/internal/services/recoveryservices/site_recovery_fabric_resource_test.go new file mode 100644 index 000000000000..21eca58e243c --- /dev/null +++ b/azurerm/internal/services/recoveryservices/site_recovery_fabric_resource_test.go @@ -0,0 +1,81 @@ +package recoveryservices_test + +import ( + "context" + "fmt" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" +) + +type SiteRecoveryFabricResource struct { +} + +func TestAccSiteRecoveryFabric_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_site_recovery_fabric", "test") + r := SiteRecoveryFabricResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (SiteRecoveryFabricResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-recovery-%d" + location = "%s" +} + +resource "azurerm_recovery_services_vault" "test" { + name = "acctest-vault-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + + soft_delete_enabled = false +} + +resource "azurerm_site_recovery_fabric" "test" { + resource_group_name = azurerm_resource_group.test.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + name = "acctest-fabric-%d" + location = azurerm_resource_group.test.location +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (t SiteRecoveryFabricResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + resGroup := id.ResourceGroup + vaultName := id.Path["vaults"] + 
name := id.Path["replicationFabrics"] + + resp, err := clients.RecoveryServices.FabricClient(resGroup, vaultName).Get(ctx, name) + if err != nil { + return nil, fmt.Errorf("reading Recovery Service Vault (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} diff --git a/azurerm/internal/services/recoveryservices/site_recovery_network_mapping_resource.go b/azurerm/internal/services/recoveryservices/site_recovery_network_mapping_resource.go new file mode 100644 index 000000000000..e549988d58b6 --- /dev/null +++ b/azurerm/internal/services/recoveryservices/site_recovery_network_mapping_resource.go @@ -0,0 +1,218 @@ +package recoveryservices + +import ( + "fmt" + "net/http" + "time" + + "github.com/Azure/azure-sdk-for-go/services/recoveryservices/mgmt/2018-01-10/siterecovery" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceSiteRecoveryNetworkMapping() *schema.Resource { + return &schema.Resource{ + Create: resourceSiteRecoveryNetworkMappingCreate, + Read: resourceSiteRecoveryNetworkMappingRead, + Delete: resourceSiteRecoveryNetworkMappingDelete, + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "resource_group_name": azure.SchemaResourceGroupName(), + + "recovery_vault_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateRecoveryServicesVaultName, + }, + "source_recovery_fabric_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "target_recovery_fabric_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "source_network_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateResourceID, + DiffSuppressFunc: suppress.CaseDifference, + }, + "target_network_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateResourceID, + DiffSuppressFunc: suppress.CaseDifference, + }, + }, + } +} + +func resourceSiteRecoveryNetworkMappingCreate(d *schema.ResourceData, meta interface{}) error { + resGroup := d.Get("resource_group_name").(string) + vaultName := d.Get("recovery_vault_name").(string) + fabricName := d.Get("source_recovery_fabric_name").(string) + targetFabricName := d.Get("target_recovery_fabric_name").(string) + sourceNetworkId := d.Get("source_network_id").(string) + targetNetworkId := d.Get("target_network_id").(string) + name := d.Get("name").(string) + + client := 
meta.(*clients.Client).RecoveryServices.NetworkMappingClient(resGroup, vaultName) + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + // get network name from id + parsedSourceNetworkId, err := azure.ParseAzureResourceID(sourceNetworkId) + if err != nil { + return fmt.Errorf("[ERROR] Unable to parse source_network_id '%s' (network mapping %s): %+v", sourceNetworkId, name, err) + } + sourceNetworkName, hasName := parsedSourceNetworkId.Path["virtualNetworks"] + if !hasName { + sourceNetworkName, hasName = parsedSourceNetworkId.Path["virtualnetworks"] // Handle that different APIs return different ID casings + if !hasName { + return fmt.Errorf("[ERROR] parsed source_network_id '%s' doesn't contain 'virtualnetworks'", parsedSourceNetworkId) + } + } + + if d.IsNewResource() { + existing, err := client.Get(ctx, fabricName, sourceNetworkName, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) && + // todo this workaround can be removed when this bug is fixed + // https://github.com/Azure/azure-sdk-for-go/issues/8705 + !utils.ResponseWasStatusCode(existing.Response, http.StatusBadRequest) { + return fmt.Errorf("Error checking for presence of existing site recovery network mapping %s (vault %s): %+v", name, vaultName, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_site_recovery_network_mapping", azure.HandleAzureSdkForGoBug2824(*existing.ID)) + } + } + + parameters := siterecovery.CreateNetworkMappingInput{ + Properties: &siterecovery.CreateNetworkMappingInputProperties{ + RecoveryNetworkID: &targetNetworkId, + RecoveryFabricName: &targetFabricName, + FabricSpecificDetails: siterecovery.AzureToAzureCreateNetworkMappingInput{ + PrimaryNetworkID: &sourceNetworkId, + }, + }, + } + future, err := client.Create(ctx, fabricName, sourceNetworkName, name, parameters) + if err != nil { + return fmt.Errorf("Error creating site recovery network mapping %s (vault %s): %+v", name, vaultName, err) + } + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error creating site recovery network mapping %s (vault %s): %+v", name, vaultName, err) + } + + resp, err := client.Get(ctx, fabricName, sourceNetworkName, name) + if err != nil { + return fmt.Errorf("Error retrieving site recovery network mapping %s (vault %s): %+v", name, vaultName, err) + } + + d.SetId(azure.HandleAzureSdkForGoBug2824(*resp.ID)) + + return resourceSiteRecoveryNetworkMappingRead(d, meta) +} + +func resourceSiteRecoveryNetworkMappingRead(d *schema.ResourceData, meta interface{}) error { + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + + resGroup := id.ResourceGroup + vaultName := id.Path["vaults"] + fabricName := id.Path["replicationFabrics"] + networkName := id.Path["replicationNetworks"] + name := id.Path["replicationNetworkMappings"] + + client := meta.(*clients.Client).RecoveryServices.NetworkMappingClient(resGroup, vaultName) + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + resp, err := client.Get(ctx, fabricName, networkName, name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + d.SetId("") + return nil + } + return fmt.Errorf("Error making Read request on site recovery network mapping %s (vault %s): %+v", name, vaultName, err) + } + + d.Set("resource_group_name", resGroup) + d.Set("recovery_vault_name", vaultName) + d.Set("source_recovery_fabric_name", fabricName) + 
d.Set("name", resp.Name) + if props := resp.Properties; props != nil { + d.Set("source_network_id", props.PrimaryNetworkID) + d.Set("target_network_id", props.RecoveryNetworkID) + + targetFabricId, err := azure.ParseAzureResourceID(azure.HandleAzureSdkForGoBug2824(*resp.Properties.RecoveryFabricArmID)) + if err != nil { + return err + } + d.Set("target_recovery_fabric_name", targetFabricId.Path["replicationFabrics"]) + } + + return nil +} + +func resourceSiteRecoveryNetworkMappingDelete(d *schema.ResourceData, meta interface{}) error { + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + + resGroup := id.ResourceGroup + vaultName := id.Path["vaults"] + fabricName := id.Path["replicationFabrics"] + networkName := id.Path["replicationNetworks"] + name := id.Path["replicationNetworkMappings"] + + client := meta.(*clients.Client).RecoveryServices.NetworkMappingClient(resGroup, vaultName) + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + future, err := client.Delete(ctx, fabricName, networkName, name) + if err != nil { + return fmt.Errorf("Error deleting site recovery network mapping %s (vault %s): %+v", name, vaultName, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error waiting for deletion of site recovery network mapping %s (vault %s): %+v", name, vaultName, err) + } + + return nil +} diff --git a/azurerm/internal/services/recoveryservices/site_recovery_network_mapping_resource_test.go b/azurerm/internal/services/recoveryservices/site_recovery_network_mapping_resource_test.go new file mode 100644 index 000000000000..cf804028887f --- /dev/null +++ b/azurerm/internal/services/recoveryservices/site_recovery_network_mapping_resource_test.go @@ -0,0 +1,114 @@ +package recoveryservices_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type SiteRecoveryNetworkMappingResource struct { +} + +func TestAccSiteRecoveryNetworkMapping_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_site_recovery_network_mapping", "test") + r := SiteRecoveryNetworkMappingResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (SiteRecoveryNetworkMappingResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-recovery-%d-1" + location = "%s" +} + +resource "azurerm_recovery_services_vault" "test" { + name = "acctest-vault-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + + soft_delete_enabled = false +} + +resource "azurerm_site_recovery_fabric" "test1" { + resource_group_name = azurerm_resource_group.test.name + recovery_vault_name = 
azurerm_recovery_services_vault.test.name + name = "acctest-fabric1-%d" + location = azurerm_resource_group.test.location +} + +resource "azurerm_site_recovery_fabric" "test2" { + resource_group_name = azurerm_resource_group.test.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + name = "acctest-fabric2-%d" + location = "%s" + depends_on = [azurerm_site_recovery_fabric.test1] +} + +resource "azurerm_virtual_network" "test1" { + name = "network1-%d" + resource_group_name = azurerm_resource_group.test.name + address_space = ["192.168.1.0/24"] + location = azurerm_site_recovery_fabric.test1.location +} + +resource "azurerm_virtual_network" "test2" { + name = "network2-%d" + resource_group_name = azurerm_resource_group.test.name + address_space = ["192.168.2.0/24"] + location = azurerm_site_recovery_fabric.test2.location +} + +resource "azurerm_site_recovery_network_mapping" "test" { + resource_group_name = azurerm_resource_group.test.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + name = "mapping-%d" + source_recovery_fabric_name = azurerm_site_recovery_fabric.test1.name + target_recovery_fabric_name = azurerm_site_recovery_fabric.test2.name + source_network_id = azurerm_virtual_network.test1.id + target_network_id = azurerm_virtual_network.test2.id +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.Locations.Secondary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (t SiteRecoveryNetworkMappingResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + resGroup := id.ResourceGroup + vaultName := id.Path["vaults"] + fabricName := id.Path["replicationFabrics"] + networkName := id.Path["replicationNetworks"] + name := id.Path["replicationNetworkMappings"] + + resp, err := clients.RecoveryServices.NetworkMappingClient(resGroup, vaultName).Get(ctx, fabricName, networkName, name) + if err != nil { + return nil, fmt.Errorf("reading Recovery Service Vault (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} diff --git a/azurerm/internal/services/recoveryservices/site_recovery_protection_container_mapping_resource.go b/azurerm/internal/services/recoveryservices/site_recovery_protection_container_mapping_resource.go new file mode 100644 index 000000000000..736c958a142c --- /dev/null +++ b/azurerm/internal/services/recoveryservices/site_recovery_protection_container_mapping_resource.go @@ -0,0 +1,201 @@ +package recoveryservices + +import ( + "fmt" + "time" + + "github.com/Azure/azure-sdk-for-go/services/recoveryservices/mgmt/2018-01-10/siterecovery" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceSiteRecoveryProtectionContainerMapping() *schema.Resource { + return &schema.Resource{ + Create: 
resourceSiteRecoveryContainerMappingCreate, + Read: resourceSiteRecoveryContainerMappingRead, + Update: nil, + Delete: resourceSiteRecoveryServicesContainerMappingDelete, + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "resource_group_name": azure.SchemaResourceGroupName(), + + "recovery_vault_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateRecoveryServicesVaultName, + }, + "recovery_fabric_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "recovery_replication_policy_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateResourceID, + DiffSuppressFunc: suppress.CaseDifference, + }, + "recovery_source_protection_container_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "recovery_target_protection_container_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateResourceID, + DiffSuppressFunc: suppress.CaseDifference, + }, + }, + } +} + +func resourceSiteRecoveryContainerMappingCreate(d *schema.ResourceData, meta interface{}) error { + resGroup := d.Get("resource_group_name").(string) + vaultName := d.Get("recovery_vault_name").(string) + fabricName := d.Get("recovery_fabric_name").(string) + policyId := d.Get("recovery_replication_policy_id").(string) + protectionContainerName := d.Get("recovery_source_protection_container_name").(string) + targetContainerId := d.Get("recovery_target_protection_container_id").(string) + name := d.Get("name").(string) + + client := meta.(*clients.Client).RecoveryServices.ContainerMappingClient(resGroup, vaultName) + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + if d.IsNewResource() { + existing, err := client.Get(ctx, fabricName, protectionContainerName, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("Error checking for presence of existing site recovery protection container mapping %s (fabric %s, container %s): %+v", name, fabricName, protectionContainerName, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_site_recovery_protection_container_mapping", azure.HandleAzureSdkForGoBug2824(*existing.ID)) + } + } + + parameters := siterecovery.CreateProtectionContainerMappingInput{ + Properties: &siterecovery.CreateProtectionContainerMappingInputProperties{ + TargetProtectionContainerID: &targetContainerId, + PolicyID: &policyId, + ProviderSpecificInput: siterecovery.ReplicationProviderSpecificContainerMappingInput{}, + }, + } + future, err := client.Create(ctx, fabricName, protectionContainerName, name, parameters) + if err != nil { + return fmt.Errorf("Error creating site recovery protection container mapping %s (vault %s): %+v", name, vaultName, err) + } + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error creating site recovery protection 
container mapping %s (vault %s): %+v", name, vaultName, err) + } + + resp, err := client.Get(ctx, fabricName, protectionContainerName, name) + if err != nil { + return fmt.Errorf("Error retrieving site recovery protection container mapping %s (vault %s): %+v", name, vaultName, err) + } + + d.SetId(azure.HandleAzureSdkForGoBug2824(*resp.ID)) + + return resourceSiteRecoveryContainerMappingRead(d, meta) +} + +func resourceSiteRecoveryContainerMappingRead(d *schema.ResourceData, meta interface{}) error { + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + + resGroup := id.ResourceGroup + vaultName := id.Path["vaults"] + fabricName := id.Path["replicationFabrics"] + protectionContainerName := id.Path["replicationProtectionContainers"] + name := id.Path["replicationProtectionContainerMappings"] + + client := meta.(*clients.Client).RecoveryServices.ContainerMappingClient(resGroup, vaultName) + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + resp, err := client.Get(ctx, fabricName, protectionContainerName, name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + d.SetId("") + return nil + } + return fmt.Errorf("Error making Read request on site recovery protection container mapping %s (vault %s): %+v", name, vaultName, err) + } + + d.Set("resource_group_name", resGroup) + d.Set("recovery_vault_name", vaultName) + d.Set("recovery_fabric_name", fabricName) + d.Set("recovery_source_protection_container_name", resp.Properties.SourceProtectionContainerFriendlyName) + d.Set("name", resp.Name) + d.Set("recovery_replication_policy_id", resp.Properties.PolicyID) + d.Set("recovery_target_protection_container_id", resp.Properties.TargetProtectionContainerID) + return nil +} + +func resourceSiteRecoveryServicesContainerMappingDelete(d *schema.ResourceData, meta interface{}) error { + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + + resGroup := id.ResourceGroup + vaultName := id.Path["vaults"] + fabricName := id.Path["replicationFabrics"] + protectionContainerName := id.Path["replicationProtectionContainers"] + name := id.Path["replicationProtectionContainerMappings"] + instanceType := string(siterecovery.InstanceTypeBasicReplicationProviderSpecificContainerMappingInputInstanceTypeA2A) + + client := meta.(*clients.Client).RecoveryServices.ContainerMappingClient(resGroup, vaultName) + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + input := siterecovery.RemoveProtectionContainerMappingInput{ + Properties: &siterecovery.RemoveProtectionContainerMappingInputProperties{ + ProviderSpecificInput: &siterecovery.ReplicationProviderContainerUnmappingInput{ + InstanceType: &instanceType, + }, + }, + } + + future, err := client.Delete(ctx, fabricName, protectionContainerName, name, input) + if err != nil { + return fmt.Errorf("Error deleting site recovery protection container mapping %s (vault %s): %+v", name, vaultName, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error waiting for deletion of site recovery protection container mapping %s (vault %s): %+v", name, vaultName, err) + } + + return nil +} diff --git a/azurerm/internal/services/recoveryservices/site_recovery_protection_container_mapping_resource_test.go b/azurerm/internal/services/recoveryservices/site_recovery_protection_container_mapping_resource_test.go new file mode 100644 index 000000000000..8fa453caf7de --- /dev/null +++ 
b/azurerm/internal/services/recoveryservices/site_recovery_protection_container_mapping_resource_test.go @@ -0,0 +1,123 @@ +package recoveryservices_test + +import ( + "context" + "fmt" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" +) + +type SiteRecoveryProtectionContainerMappingResource struct { +} + +func TestAccSiteRecoveryProtectionContainerMapping_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_site_recovery_protection_container_mapping", "test") + r := SiteRecoveryProtectionContainerMappingResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (SiteRecoveryProtectionContainerMappingResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test1" { + name = "acctestRG-recovery-%d-1" + location = "%s" +} + +resource "azurerm_recovery_services_vault" "test" { + name = "acctest-vault-%d" + location = azurerm_resource_group.test1.location + resource_group_name = azurerm_resource_group.test1.name + sku = "Standard" + + soft_delete_enabled = false +} + +resource "azurerm_site_recovery_fabric" "test1" { + resource_group_name = azurerm_resource_group.test1.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + name = "acctest-fabric1-%d" + location = azurerm_resource_group.test1.location +} + +resource "azurerm_site_recovery_fabric" "test2" { + resource_group_name = azurerm_resource_group.test1.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + name = "acctest-fabric2-%d" + location = "%s" + depends_on = [azurerm_site_recovery_fabric.test1] +} + +resource "azurerm_site_recovery_protection_container" "test1" { + resource_group_name = azurerm_resource_group.test1.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + recovery_fabric_name = azurerm_site_recovery_fabric.test1.name + name = "acctest-protection-cont1-%d" +} + +resource "azurerm_site_recovery_protection_container" "test2" { + resource_group_name = azurerm_resource_group.test1.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + recovery_fabric_name = azurerm_site_recovery_fabric.test2.name + name = "acctest-protection-cont2-%d" +} + +resource "azurerm_site_recovery_replication_policy" "test" { + resource_group_name = azurerm_resource_group.test1.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + name = "acctest-policy-%d" + recovery_point_retention_in_minutes = 24 * 60 + application_consistent_snapshot_frequency_in_minutes = 4 * 60 +} + +resource "azurerm_site_recovery_protection_container_mapping" "test" { + resource_group_name = azurerm_resource_group.test1.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + recovery_fabric_name = azurerm_site_recovery_fabric.test1.name + recovery_source_protection_container_name = 
azurerm_site_recovery_protection_container.test1.name + recovery_target_protection_container_id = azurerm_site_recovery_protection_container.test2.id + recovery_replication_policy_id = azurerm_site_recovery_replication_policy.test.id + name = "mapping-%d" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.Locations.Secondary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (t SiteRecoveryProtectionContainerMappingResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + resGroup := id.ResourceGroup + vaultName := id.Path["vaults"] + fabricName := id.Path["replicationFabrics"] + protectionContainerName := id.Path["replicationProtectionContainers"] + name := id.Path["replicationProtectionContainerMappings"] + + resp, err := clients.RecoveryServices.ContainerMappingClient(resGroup, vaultName).Get(ctx, fabricName, protectionContainerName, name) + if err != nil { + return nil, fmt.Errorf("reading site recovery protection container mapping (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} diff --git a/azurerm/internal/services/recoveryservices/site_recovery_protection_container_resource.go b/azurerm/internal/services/recoveryservices/site_recovery_protection_container_resource.go new file mode 100644 index 000000000000..4cb435456f48 --- /dev/null +++ b/azurerm/internal/services/recoveryservices/site_recovery_protection_container_resource.go @@ -0,0 +1,160 @@ +package recoveryservices + +import ( + "fmt" + "time" + + "github.com/Azure/azure-sdk-for-go/services/recoveryservices/mgmt/2018-01-10/siterecovery" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceSiteRecoveryProtectionContainer() *schema.Resource { + return &schema.Resource{ + Create: resourceSiteRecoveryProtectionContainerCreate, + Read: resourceSiteRecoveryProtectionContainerRead, + Update: nil, + Delete: resourceSiteRecoveryProtectionContainerDelete, + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "resource_group_name": azure.SchemaResourceGroupName(), + + "recovery_vault_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateRecoveryServicesVaultName, + }, + "recovery_fabric_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + } +} + +func resourceSiteRecoveryProtectionContainerCreate(d 
*schema.ResourceData, meta interface{}) error { + resGroup := d.Get("resource_group_name").(string) + vaultName := d.Get("recovery_vault_name").(string) + fabricName := d.Get("recovery_fabric_name").(string) + name := d.Get("name").(string) + + client := meta.(*clients.Client).RecoveryServices.ProtectionContainerClient(resGroup, vaultName) + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + if d.IsNewResource() { + existing, err := client.Get(ctx, fabricName, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("Error checking for presence of existing site recovery protection container %s (fabric %s): %+v", name, fabricName, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_site_recovery_protection_container", azure.HandleAzureSdkForGoBug2824(*existing.ID)) + } + } + + parameters := siterecovery.CreateProtectionContainerInput{ + Properties: &siterecovery.CreateProtectionContainerInputProperties{}, + } + + future, err := client.Create(ctx, fabricName, name, parameters) + if err != nil { + return fmt.Errorf("Error creating site recovery protection container %s (fabric %s): %+v", name, fabricName, err) + } + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error creating site recovery protection container %s (fabric %s): %+v", name, fabricName, err) + } + + resp, err := client.Get(ctx, fabricName, name) + if err != nil { + return fmt.Errorf("Error retrieving site recovery protection container %s (fabric %s): %+v", name, fabricName, err) + } + + d.SetId(azure.HandleAzureSdkForGoBug2824(*resp.ID)) + + return resourceSiteRecoveryProtectionContainerRead(d, meta) +} + +func resourceSiteRecoveryProtectionContainerRead(d *schema.ResourceData, meta interface{}) error { + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + + resGroup := id.ResourceGroup + vaultName := id.Path["vaults"] + fabricName := id.Path["replicationFabrics"] + name := id.Path["replicationProtectionContainers"] + + client := meta.(*clients.Client).RecoveryServices.ProtectionContainerClient(resGroup, vaultName) + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + resp, err := client.Get(ctx, fabricName, name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + d.SetId("") + return nil + } + return fmt.Errorf("Error making Read request on site recovery protection container %s (fabric %s): %+v", name, fabricName, err) + } + + d.Set("name", resp.Name) + d.Set("resource_group_name", resGroup) + d.Set("recovery_vault_name", vaultName) + d.Set("recovery_fabric_name", fabricName) + return nil +} + +func resourceSiteRecoveryProtectionContainerDelete(d *schema.ResourceData, meta interface{}) error { + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + + resGroup := id.ResourceGroup + vaultName := id.Path["vaults"] + fabricName := id.Path["replicationFabrics"] + name := id.Path["replicationProtectionContainers"] + + client := meta.(*clients.Client).RecoveryServices.ProtectionContainerClient(resGroup, vaultName) + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + future, err := client.Delete(ctx, fabricName, name) + if err != nil { + return fmt.Errorf("Error deleting site recovery protection container %s (fabric %s): %+v", name, fabricName, err) + } + + if err = future.WaitForCompletionRef(ctx, 
client.Client); err != nil { + return fmt.Errorf("Error waiting for deletion of site recovery protection container %s (fabric %s): %+v", name, fabricName, err) + } + + return nil +} diff --git a/azurerm/internal/services/recoveryservices/site_recovery_protection_container_resource_test.go b/azurerm/internal/services/recoveryservices/site_recovery_protection_container_resource_test.go new file mode 100644 index 000000000000..200c1d49c497 --- /dev/null +++ b/azurerm/internal/services/recoveryservices/site_recovery_protection_container_resource_test.go @@ -0,0 +1,88 @@ +package recoveryservices_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type SiteRecoveryProtectionContainerResource struct { +} + +func TestAccSiteRecoveryProtectionContainer_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_site_recovery_protection_container", "test") + r := SiteRecoveryProtectionContainerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (SiteRecoveryProtectionContainerResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-recovery-%d" + location = "%s" +} + +resource "azurerm_recovery_services_vault" "test" { + name = "acctest-vault-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + + soft_delete_enabled = false +} + +resource "azurerm_site_recovery_fabric" "test" { + resource_group_name = azurerm_resource_group.test.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + name = "acctest-fabric-%d" + location = azurerm_resource_group.test.location +} + +resource "azurerm_site_recovery_protection_container" "test" { + resource_group_name = azurerm_resource_group.test.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + recovery_fabric_name = azurerm_site_recovery_fabric.test.name + name = "acctest-protection-cont-%d" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (t SiteRecoveryProtectionContainerResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + resGroup := id.ResourceGroup + vaultName := id.Path["vaults"] + fabricName := id.Path["replicationFabrics"] + name := id.Path["replicationProtectionContainers"] + + resp, err := clients.RecoveryServices.ProtectionContainerClient(resGroup, vaultName).Get(ctx, fabricName, name) + if err != nil { + return nil, fmt.Errorf("reading site recovery protection container (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} diff --git 
a/azurerm/internal/services/recoveryservices/site_recovery_replicated_vm_resource.go b/azurerm/internal/services/recoveryservices/site_recovery_replicated_vm_resource.go new file mode 100644 index 000000000000..07001bb0a367 --- /dev/null +++ b/azurerm/internal/services/recoveryservices/site_recovery_replicated_vm_resource.go @@ -0,0 +1,588 @@ +package recoveryservices + +import ( + "bytes" + "fmt" + "log" + "strings" + "time" + + "github.com/Azure/azure-sdk-for-go/services/compute/mgmt/2020-06-01/compute" + "github.com/Azure/azure-sdk-for-go/services/recoveryservices/mgmt/2018-01-10/siterecovery" + "github.com/hashicorp/terraform-plugin-sdk/helper/hashcode" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceSiteRecoveryReplicatedVM() *schema.Resource { + return &schema.Resource{ + Create: resourceSiteRecoveryReplicatedItemCreate, + Read: resourceSiteRecoveryReplicatedItemRead, + Update: resourceSiteRecoveryReplicatedItemUpdate, + Delete: resourceSiteRecoveryReplicatedItemDelete, + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(120 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(80 * time.Minute), + Delete: schema.DefaultTimeout(80 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "resource_group_name": azure.SchemaResourceGroupName(), + + "recovery_vault_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateRecoveryServicesVaultName, + }, + "source_recovery_fabric_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "source_vm_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateResourceID, + DiffSuppressFunc: suppress.CaseDifference, + }, + "target_recovery_fabric_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateResourceID, + DiffSuppressFunc: suppress.CaseDifference, + }, + "recovery_replication_policy_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateResourceID, + DiffSuppressFunc: suppress.CaseDifference, + }, + "source_recovery_protection_container_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "target_recovery_protection_container_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateResourceID, + DiffSuppressFunc: suppress.CaseDifference, + }, + "target_resource_group_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: 
azure.ValidateResourceID, + DiffSuppressFunc: suppress.CaseDifference, + }, + "target_availability_set_id": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: azure.ValidateResourceID, + DiffSuppressFunc: suppress.CaseDifference, + }, + "target_network_id": { + Type: schema.TypeString, + Computed: true, + Optional: true, + ValidateFunc: azure.ValidateResourceID, + }, + "managed_disk": { + Type: schema.TypeSet, + ConfigMode: schema.SchemaConfigModeAttr, + Optional: true, + ForceNew: true, + Set: resourceSiteRecoveryReplicatedVMDiskHash, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "disk_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + DiffSuppressFunc: suppress.CaseDifference, + }, + "staging_storage_account_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateResourceID, + DiffSuppressFunc: suppress.CaseDifference, + }, + "target_resource_group_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateResourceID, + DiffSuppressFunc: suppress.CaseDifference, + }, + "target_disk_type": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringInSlice([]string{ + string(compute.StandardLRS), + string(compute.PremiumLRS), + string(compute.StandardSSDLRS), + string(compute.UltraSSDLRS), + }, true), + DiffSuppressFunc: suppress.CaseDifference, + }, + "target_replica_disk_type": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringInSlice([]string{ + string(compute.StandardLRS), + string(compute.PremiumLRS), + string(compute.StandardSSDLRS), + string(compute.UltraSSDLRS), + }, true), + DiffSuppressFunc: suppress.CaseDifference, + }, + }, + }, + }, + "network_interface": { + Type: schema.TypeSet, + ConfigMode: schema.SchemaConfigModeAttr, + Computed: true, + Optional: true, + Elem: networkInterfaceResource(), + }, + }, + } +} + +func networkInterfaceResource() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + "source_network_interface_id": { + Type: schema.TypeString, + Computed: true, + Optional: true, + ValidateFunc: azure.ValidateResourceID, + }, + "target_static_ip": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "target_subnet_name": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + } +} + +func resourceSiteRecoveryReplicatedItemCreate(d *schema.ResourceData, meta interface{}) error { + resGroup := d.Get("resource_group_name").(string) + vaultName := d.Get("recovery_vault_name").(string) + client := meta.(*clients.Client).RecoveryServices.ReplicationMigrationItemsClient(resGroup, vaultName) + name := d.Get("name").(string) + fabricName := d.Get("source_recovery_fabric_name").(string) + sourceVmId := d.Get("source_vm_id").(string) + policyId := d.Get("recovery_replication_policy_id").(string) + sourceProtectionContainerName := d.Get("source_recovery_protection_container_name").(string) + targetProtectionContainerId := d.Get("target_recovery_protection_container_id").(string) + targetResourceGroupId := d.Get("target_resource_group_id").(string) + + var targetAvailabilitySetID *string + if id, isSet := d.GetOk("target_availability_set_id"); isSet { + targetAvailabilitySetID = utils.String(id.(string)) + } else { + targetAvailabilitySetID = nil + } + + ctx, cancel := 
timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + if d.IsNewResource() { + existing, err := client.Get(ctx, fabricName, sourceProtectionContainerName, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("Error checking for presence of existing site recovery replicated vm %s (vault %s): %+v", name, vaultName, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_site_recovery_replicated_vm", azure.HandleAzureSdkForGoBug2824(*existing.ID)) + } + } + + managedDisks := []siterecovery.A2AVMManagedDiskInputDetails{} + + for _, raw := range d.Get("managed_disk").(*schema.Set).List() { + diskInput := raw.(map[string]interface{}) + diskId := diskInput["disk_id"].(string) + primaryStagingAzureStorageAccountID := diskInput["staging_storage_account_id"].(string) + recoveryResourceGroupId := diskInput["target_resource_group_id"].(string) + targetReplicaDiskType := diskInput["target_replica_disk_type"].(string) + targetDiskType := diskInput["target_disk_type"].(string) + + managedDisks = append(managedDisks, siterecovery.A2AVMManagedDiskInputDetails{ + DiskID: &diskId, + PrimaryStagingAzureStorageAccountID: &primaryStagingAzureStorageAccountID, + RecoveryResourceGroupID: &recoveryResourceGroupId, + RecoveryReplicaDiskAccountType: &targetReplicaDiskType, + RecoveryTargetDiskAccountType: &targetDiskType, + }) + } + + parameters := siterecovery.EnableProtectionInput{ + Properties: &siterecovery.EnableProtectionInputProperties{ + PolicyID: &policyId, + ProviderSpecificDetails: siterecovery.A2AEnableProtectionInput{ + FabricObjectID: &sourceVmId, + RecoveryContainerID: &targetProtectionContainerId, + RecoveryResourceGroupID: &targetResourceGroupId, + RecoveryAvailabilitySetID: targetAvailabilitySetID, + VMManagedDisks: &managedDisks, + }, + }, + } + future, err := client.Create(ctx, fabricName, sourceProtectionContainerName, name, parameters) + if err != nil { + return fmt.Errorf("Error creating replicated vm %s (vault %s): %+v", name, vaultName, err) + } + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error creating replicated vm %s (vault %s): %+v", name, vaultName, err) + } + + resp, err := client.Get(ctx, fabricName, sourceProtectionContainerName, name) + if err != nil { + return fmt.Errorf("Error retrieving replicated vm %s (vault %s): %+v", name, vaultName, err) + } + + d.SetId(azure.HandleAzureSdkForGoBug2824(*resp.ID)) + + // We are not allowed to configure the NIC on the initial setup, and the VM has to be replicated before + // we can reconfigure. Hence this call to update when we create. 
+ return resourceSiteRecoveryReplicatedItemUpdate(d, meta) +} + +func resourceSiteRecoveryReplicatedItemUpdate(d *schema.ResourceData, meta interface{}) error { + resGroup := d.Get("resource_group_name").(string) + vaultName := d.Get("recovery_vault_name").(string) + client := meta.(*clients.Client).RecoveryServices.ReplicationMigrationItemsClient(resGroup, vaultName) + + // We are only allowed to update the configuration once the VM is fully protected + state, err := waitForReplicationToBeHealthy(d, meta) + if err != nil { + return err + } + + name := d.Get("name").(string) + fabricName := d.Get("source_recovery_fabric_name").(string) + sourceProtectionContainerName := d.Get("source_recovery_protection_container_name").(string) + targetNetworkId := d.Get("target_network_id").(string) + + var targetAvailabilitySetID *string + if id, isSet := d.GetOk("target_availability_set_id"); isSet { + tmp := id.(string) + targetAvailabilitySetID = &tmp + } else { + targetAvailabilitySetID = nil + } + + ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + vmNics := []siterecovery.VMNicInputDetails{} + for _, raw := range d.Get("network_interface").(*schema.Set).List() { + vmNicInput := raw.(map[string]interface{}) + sourceNicId := vmNicInput["source_network_interface_id"].(string) + targetStaticIp := vmNicInput["target_static_ip"].(string) + targetSubnetName := vmNicInput["target_subnet_name"].(string) + + nicId := findNicId(state, sourceNicId) + if nicId == nil { + return fmt.Errorf("Error updating replicated vm %s (vault %s): Trying to update NIC that is not known by Azure %s", name, vaultName, sourceNicId) + } + vmNics = append(vmNics, siterecovery.VMNicInputDetails{ + NicID: nicId, + RecoveryVMSubnetName: &targetSubnetName, + ReplicaNicStaticIPAddress: &targetStaticIp, + }) + } + + managedDisks := []siterecovery.A2AVMManagedDiskUpdateDetails{} + for _, raw := range d.Get("managed_disk").(*schema.Set).List() { + diskInput := raw.(map[string]interface{}) + diskId := diskInput["disk_id"].(string) + targetReplicaDiskType := diskInput["target_replica_disk_type"].(string) + targetDiskType := diskInput["target_disk_type"].(string) + + managedDisks = append(managedDisks, siterecovery.A2AVMManagedDiskUpdateDetails{ + DiskID: &diskId, + RecoveryReplicaDiskAccountType: &targetReplicaDiskType, + RecoveryTargetDiskAccountType: &targetDiskType, + }) + } + + if targetNetworkId == "" { + // No target network id was specified, so we want to preserve what was selected + if a2aDetails, isA2a := state.Properties.ProviderSpecificDetails.AsA2AReplicationDetails(); isA2a { + if a2aDetails.SelectedRecoveryAzureNetworkID != nil { + targetNetworkId = *a2aDetails.SelectedRecoveryAzureNetworkID + } else { + return fmt.Errorf("target_network_id must be set when a network_interface is configured") + } + } else { + return fmt.Errorf("target_network_id must be set when a network_interface is configured") + } + } + + parameters := siterecovery.UpdateReplicationProtectedItemInput{ + Properties: &siterecovery.UpdateReplicationProtectedItemInputProperties{ + RecoveryAzureVMName: &name, + SelectedRecoveryAzureNetworkID: &targetNetworkId, + VMNics: &vmNics, + RecoveryAvailabilitySetID: targetAvailabilitySetID, + ProviderSpecificDetails: siterecovery.A2AUpdateReplicationProtectedItemInput{ + ManagedDiskUpdateDetails: &managedDisks, + }, + }, + } + + future, err := client.Update(ctx, fabricName, sourceProtectionContainerName, name, parameters) + if err != nil { + return fmt.Errorf("Error updating 
replicated vm %s (vault %s): %+v", name, vaultName, err) + } + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error updating replicated vm %s (vault %s): %+v", name, vaultName, err) + } + + return resourceSiteRecoveryReplicatedItemRead(d, meta) +} + +func findNicId(state *siterecovery.ReplicationProtectedItem, sourceNicId string) *string { + if a2aDetails, isA2a := state.Properties.ProviderSpecificDetails.AsA2AReplicationDetails(); isA2a { + if a2aDetails.VMNics != nil { + for _, nic := range *a2aDetails.VMNics { + if nic.SourceNicArmID != nil && *nic.SourceNicArmID == sourceNicId { + return nic.NicID + } + } + } + } + return nil +} + +func resourceSiteRecoveryReplicatedItemRead(d *schema.ResourceData, meta interface{}) error { + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + + resGroup := id.ResourceGroup + vaultName := id.Path["vaults"] + client := meta.(*clients.Client).RecoveryServices.ReplicationMigrationItemsClient(resGroup, vaultName) + fabricName := id.Path["replicationFabrics"] + protectionContainerName := id.Path["replicationProtectionContainers"] + name := id.Path["replicationProtectedItems"] + + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + resp, err := client.Get(ctx, fabricName, protectionContainerName, name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + d.SetId("") + return nil + } + return fmt.Errorf("Error making Read request on site recovery replicated vm %s (vault %s): %+v", name, vaultName, err) + } + + d.Set("name", name) + d.Set("resource_group_name", resGroup) + d.Set("recovery_vault_name", vaultName) + d.Set("source_recovery_fabric_name", fabricName) + d.Set("target_recovery_fabric_id", resp.Properties.RecoveryFabricID) + d.Set("recovery_replication_policy_id", resp.Properties.PolicyID) + d.Set("source_recovery_protection_container_name", protectionContainerName) + d.Set("target_recovery_protection_container_id", resp.Properties.RecoveryContainerID) + + if a2aDetails, isA2a := resp.Properties.ProviderSpecificDetails.AsA2AReplicationDetails(); isA2a { + d.Set("source_vm_id", a2aDetails.FabricObjectID) + d.Set("target_resource_group_id", a2aDetails.RecoveryAzureResourceGroupID) + d.Set("target_availability_set_id", a2aDetails.RecoveryAvailabilitySet) + d.Set("target_network_id", a2aDetails.SelectedRecoveryAzureNetworkID) + if a2aDetails.ProtectedManagedDisks != nil { + disksOutput := make([]interface{}, 0) + for _, disk := range *a2aDetails.ProtectedManagedDisks { + diskOutput := make(map[string]interface{}) + diskOutput["disk_id"] = *disk.DiskID + diskOutput["staging_storage_account_id"] = *disk.PrimaryStagingAzureStorageAccountID + diskOutput["target_resource_group_id"] = *disk.RecoveryResourceGroupID + diskOutput["target_replica_disk_type"] = *disk.RecoveryReplicaDiskAccountType + diskOutput["target_disk_type"] = *disk.RecoveryTargetDiskAccountType + + disksOutput = append(disksOutput, diskOutput) + } + d.Set("managed_disk", schema.NewSet(resourceSiteRecoveryReplicatedVMDiskHash, disksOutput)) + } + + if a2aDetails.VMNics != nil { + nicsOutput := make([]interface{}, 0) + for _, nic := range *a2aDetails.VMNics { + nicOutput := make(map[string]interface{}) + if nic.SourceNicArmID != nil { + nicOutput["source_network_interface_id"] = *nic.SourceNicArmID + } + if nic.ReplicaNicStaticIPAddress != nil { + nicOutput["target_static_ip"] = *nic.ReplicaNicStaticIPAddress + } + if nic.RecoveryVMSubnetName != nil { + 
nicOutput["target_subnet_name"] = *nic.RecoveryVMSubnetName + } + nicsOutput = append(nicsOutput, nicOutput) + } + d.Set("network_interface", schema.NewSet(schema.HashResource(networkInterfaceResource()), nicsOutput)) + } + } + + return nil +} + +func resourceSiteRecoveryReplicatedItemDelete(d *schema.ResourceData, meta interface{}) error { + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + + resGroup := id.ResourceGroup + vaultName := id.Path["vaults"] + client := meta.(*clients.Client).RecoveryServices.ReplicationMigrationItemsClient(resGroup, vaultName) + fabricName := id.Path["replicationFabrics"] + protectionContainerName := id.Path["replicationProtectionContainers"] + name := id.Path["replicationProtectedItems"] + + disableProtectionInput := siterecovery.DisableProtectionInput{ + Properties: &siterecovery.DisableProtectionInputProperties{ + DisableProtectionReason: siterecovery.NotSpecified, + ReplicationProviderInput: siterecovery.DisableProtectionProviderSpecificInput{}, + }, + } + + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + future, err := client.Delete(ctx, fabricName, protectionContainerName, name, disableProtectionInput) + if err != nil { + return fmt.Errorf("Error deleting site recovery replicated vm %s (vault %s): %+v", name, vaultName, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error waiting for deletion of site recovery replicated vm %s (vault %s): %+v", name, vaultName, err) + } + return nil +} + +func resourceSiteRecoveryReplicatedVMDiskHash(v interface{}) int { + var buf bytes.Buffer + + if m, ok := v.(map[string]interface{}); ok { + if v, ok := m["disk_id"]; ok { + buf.WriteString(strings.ToLower(v.(string))) + } + } + + return hashcode.String(buf.String()) +} + +func waitForReplicationToBeHealthy(d *schema.ResourceData, meta interface{}) (*siterecovery.ReplicationProtectedItem, error) { + log.Printf("Waiting for Site Recover to replicate VM.") + stateConf := &resource.StateChangeConf{ + Target: []string{"Protected"}, + Refresh: waitForReplicationToBeHealthyRefreshFunc(d, meta), + PollInterval: time.Minute, + } + + stateConf.Timeout = d.Timeout(schema.TimeoutUpdate) + + result, err := stateConf.WaitForState() + if err != nil { + return nil, fmt.Errorf("Error waiting for site recovery to replicate vm: %+v", err) + } + + protectedItem, ok := result.(siterecovery.ReplicationProtectedItem) + if ok { + return &protectedItem, nil + } else { + return nil, fmt.Errorf("Error waiting for site recovery return incompatible tyupe") + } +} + +func waitForReplicationToBeHealthyRefreshFunc(d *schema.ResourceData, meta interface{}) resource.StateRefreshFunc { + return func() (interface{}, string, error) { + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return nil, "", err + } + + resGroup := id.ResourceGroup + vaultName := id.Path["vaults"] + client := meta.(*clients.Client).RecoveryServices.ReplicationMigrationItemsClient(resGroup, vaultName) + fabricName := id.Path["replicationFabrics"] + protectionContainerName := id.Path["replicationProtectionContainers"] + name := id.Path["replicationProtectedItems"] + + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + resp, err := client.Get(ctx, fabricName, protectionContainerName, name) + if err != nil { + return nil, "", fmt.Errorf("Error making Read request on site recovery replicated vm %s (vault %s): %+v", name, vaultName, err) + } + + 
+        if resp.Properties == nil {
+            return nil, "", fmt.Errorf("Missing Properties in response when making Read request on site recovery replicated vm %s (vault %s): %+v", name, vaultName, err)
+        }
+
+        if resp.Properties.ProviderSpecificDetails == nil {
+            return nil, "", fmt.Errorf("Missing Properties.ProviderSpecificDetails in response when making Read request on site recovery replicated vm %s (vault %s): %+v", name, vaultName, err)
+        }
+
+        // Report replication progress and return the current protection state for A2A replicated items
+        if a2aDetails, isA2a := resp.Properties.ProviderSpecificDetails.AsA2AReplicationDetails(); isA2a {
+            if a2aDetails.MonitoringPercentageCompletion != nil {
+                log.Printf("Waiting for Site Recovery to replicate VM, %d%% complete.", *a2aDetails.MonitoringPercentageCompletion)
+            }
+            if a2aDetails.VMProtectionState != nil {
+                return resp, *a2aDetails.VMProtectionState, nil
+            }
+        }
+
+        if resp.Properties.ReplicationHealth == nil {
+            return nil, "", fmt.Errorf("Missing ReplicationHealth in response when making Read request on site recovery replicated vm %s (vault %s): %+v", name, vaultName, err)
+        }
+        return resp, *resp.Properties.ReplicationHealth, nil
+    }
+}
diff --git a/azurerm/internal/services/recoveryservices/site_recovery_replicated_vm_resource_test.go b/azurerm/internal/services/recoveryservices/site_recovery_replicated_vm_resource_test.go new file mode 100644 index 000000000000..4fd494865ab5 --- /dev/null +++ b/azurerm/internal/services/recoveryservices/site_recovery_replicated_vm_resource_test.go @@ -0,0 +1,266 @@
+package recoveryservices_test
+
+import (
+    "context"
+    "fmt"
+    "testing"
+
+    "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure"
+    "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
+
+    "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+    "github.com/hashicorp/terraform-plugin-sdk/terraform"
+    "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance"
+    "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check"
+    "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
+)
+
+type SiteRecoveryReplicatedVmResource struct {
+}
+
+func TestAccSiteRecoveryReplicatedVm_basic(t *testing.T) {
+    data := acceptance.BuildTestData(t, "azurerm_site_recovery_replicated_vm", "test")
+    r := SiteRecoveryReplicatedVmResource{}
+
+    data.ResourceTest(t, r, []resource.TestStep{
+        {
+            Config: r.basic(data),
+            Check: resource.ComposeTestCheckFunc(
+                check.That(data.ResourceName).ExistsInAzure(r),
+            ),
+        },
+        data.ImportStep(),
+    })
+}
+
+func (SiteRecoveryReplicatedVmResource) basic(data acceptance.TestData) string {
+    return fmt.Sprintf(`
+provider "azurerm" {
+  features {}
+}
+
+resource "azurerm_resource_group" "test" {
+  name     = "acctestRG-recovery-%[1]d-1"
+  location = "%[2]s"
+}
+
+resource "azurerm_resource_group" "test2" {
+  name     = "acctestRG-recovery-%[1]d-2"
+  location = "%[3]s"
+}
+
+resource "azurerm_recovery_services_vault" "test" {
+  name                = "acctest-vault-%[1]d"
+  location            = azurerm_resource_group.test2.location
+  resource_group_name = azurerm_resource_group.test2.name
+  sku                 = "Standard"
+
+  soft_delete_enabled = false
+}
+
+resource "azurerm_site_recovery_fabric" "test1" {
+  resource_group_name = azurerm_resource_group.test2.name
+  recovery_vault_name = azurerm_recovery_services_vault.test.name
+  name                = "acctest-fabric1-%[1]d"
+  location            = azurerm_resource_group.test.location
+}
+
+resource "azurerm_site_recovery_fabric" "test2" {
+
resource_group_name = azurerm_resource_group.test2.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + name = "acctest-fabric2-%[1]d" + location = azurerm_resource_group.test2.location + depends_on = [azurerm_site_recovery_fabric.test1] +} + +resource "azurerm_site_recovery_protection_container" "test1" { + resource_group_name = azurerm_resource_group.test2.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + recovery_fabric_name = azurerm_site_recovery_fabric.test1.name + name = "acctest-protection-cont1-%[1]d" +} + +resource "azurerm_site_recovery_protection_container" "test2" { + resource_group_name = azurerm_resource_group.test2.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + recovery_fabric_name = azurerm_site_recovery_fabric.test2.name + name = "acctest-protection-cont2-%[1]d" +} + +resource "azurerm_site_recovery_replication_policy" "test" { + resource_group_name = azurerm_resource_group.test2.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + name = "acctest-policy-%[1]d" + recovery_point_retention_in_minutes = 24 * 60 + application_consistent_snapshot_frequency_in_minutes = 4 * 60 +} + +resource "azurerm_site_recovery_protection_container_mapping" "test" { + resource_group_name = azurerm_resource_group.test2.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + recovery_fabric_name = azurerm_site_recovery_fabric.test1.name + recovery_source_protection_container_name = azurerm_site_recovery_protection_container.test1.name + recovery_target_protection_container_id = azurerm_site_recovery_protection_container.test2.id + recovery_replication_policy_id = azurerm_site_recovery_replication_policy.test.id + name = "mapping-%[1]d" +} + +resource "azurerm_virtual_network" "test1" { + name = "net-%[1]d" + resource_group_name = azurerm_resource_group.test.name + address_space = ["192.168.1.0/24"] + location = azurerm_site_recovery_fabric.test1.location +} + +resource "azurerm_subnet" "test1" { + name = "snet-%[1]d" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test1.name + address_prefix = "192.168.1.0/24" +} + +resource "azurerm_virtual_network" "test2" { + name = "net-%[1]d" + resource_group_name = azurerm_resource_group.test2.name + address_space = ["192.168.2.0/24"] + location = azurerm_site_recovery_fabric.test2.location +} + +resource "azurerm_subnet" "test2_1" { + name = "acctest-snet-%[1]d_1" + resource_group_name = "${azurerm_resource_group.test2.name}" + virtual_network_name = "${azurerm_virtual_network.test2.name}" + address_prefix = "192.168.2.0/27" +} + +resource "azurerm_subnet" "test2_2" { + name = "snet-%[1]d_2" + resource_group_name = "${azurerm_resource_group.test2.name}" + virtual_network_name = "${azurerm_virtual_network.test2.name}" + address_prefix = "192.168.2.32/27" +} + +resource "azurerm_subnet" "test2_3" { + name = "snet-%[1]d_3" + resource_group_name = "${azurerm_resource_group.test2.name}" + virtual_network_name = "${azurerm_virtual_network.test2.name}" + address_prefix = "192.168.2.64/27" +} + +resource "azurerm_site_recovery_network_mapping" "test" { + resource_group_name = azurerm_resource_group.test2.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + name = "mapping-%[1]d" + source_recovery_fabric_name = azurerm_site_recovery_fabric.test1.name + target_recovery_fabric_name = azurerm_site_recovery_fabric.test2.name + source_network_id = azurerm_virtual_network.test1.id + 
target_network_id = azurerm_virtual_network.test2.id +} + +resource "azurerm_network_interface" "test" { + name = "vm-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + ip_configuration { + name = "vm-%[1]d" + subnet_id = azurerm_subnet.test1.id + private_ip_address_allocation = "Dynamic" + } +} + +resource "azurerm_virtual_machine" "test" { + name = "vm-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + vm_size = "Standard_B1s" + + storage_image_reference { + publisher = "OpenLogic" + offer = "CentOS" + sku = "7.5" + version = "latest" + } + + storage_os_disk { + name = "disk-%[1]d" + os_type = "Linux" + caching = "ReadWrite" + create_option = "FromImage" + managed_disk_type = "Premium_LRS" + } + + os_profile { + admin_username = "testadmin" + admin_password = "Password1234!" + computer_name = "vm-%[1]d" + } + + os_profile_linux_config { + disable_password_authentication = false + } + network_interface_ids = [azurerm_network_interface.test.id] +} + +resource "azurerm_storage_account" "test" { + name = "acct%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_site_recovery_replicated_vm" "test" { + name = "repl-%[1]d" + resource_group_name = azurerm_resource_group.test2.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + source_vm_id = azurerm_virtual_machine.test.id + source_recovery_fabric_name = azurerm_site_recovery_fabric.test1.name + recovery_replication_policy_id = azurerm_site_recovery_replication_policy.test.id + source_recovery_protection_container_name = azurerm_site_recovery_protection_container.test1.name + + target_resource_group_id = azurerm_resource_group.test2.id + target_recovery_fabric_id = azurerm_site_recovery_fabric.test2.id + target_recovery_protection_container_id = azurerm_site_recovery_protection_container.test2.id + + managed_disk { + disk_id = azurerm_virtual_machine.test.storage_os_disk[0].managed_disk_id + staging_storage_account_id = azurerm_storage_account.test.id + target_resource_group_id = azurerm_resource_group.test2.id + target_disk_type = "Premium_LRS" + target_replica_disk_type = "Premium_LRS" + } + + network_interface { + source_network_interface_id = azurerm_network_interface.test.id + target_subnet_name = "snet-%[1]d_2" + } + + depends_on = [ + azurerm_site_recovery_protection_container_mapping.test, + azurerm_site_recovery_network_mapping.test, + ] +} +`, data.RandomInteger, data.Locations.Primary, data.Locations.Secondary) +} + +func (t SiteRecoveryReplicatedVmResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + resGroup := id.ResourceGroup + vaultName := id.Path["vaults"] + fabricName := id.Path["replicationFabrics"] + protectionContainerName := id.Path["replicationProtectionContainers"] + name := id.Path["replicationProtectedItems"] + + resp, err := clients.RecoveryServices.ReplicationMigrationItemsClient(resGroup, vaultName).Get(ctx, fabricName, protectionContainerName, name) + if err != nil { + return nil, fmt.Errorf("reading site recovery replicated vm (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} diff --git 
a/azurerm/internal/services/recoveryservices/site_recovery_replication_policy_resource.go b/azurerm/internal/services/recoveryservices/site_recovery_replication_policy_resource.go new file mode 100644 index 000000000000..11ebd8737307 --- /dev/null +++ b/azurerm/internal/services/recoveryservices/site_recovery_replication_policy_resource.go @@ -0,0 +1,213 @@ +package recoveryservices + +import ( + "fmt" + "time" + + "github.com/Azure/azure-sdk-for-go/services/recoveryservices/mgmt/2018-01-10/siterecovery" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceSiteRecoveryReplicationPolicy() *schema.Resource { + return &schema.Resource{ + Create: resourceSiteRecoveryReplicationPolicyCreate, + Read: resourceSiteRecoveryReplicationPolicyRead, + Update: resourceSiteRecoveryReplicationPolicyUpdate, + Delete: resourceSiteRecoveryReplicationPolicyDelete, + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "resource_group_name": azure.SchemaResourceGroupName(), + + "recovery_vault_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azure.ValidateRecoveryServicesVaultName, + }, + "recovery_point_retention_in_minutes": { + Type: schema.TypeInt, + Required: true, + ForceNew: false, + ValidateFunc: validation.IntBetween(1, 365*24*60), + }, + "application_consistent_snapshot_frequency_in_minutes": { + Type: schema.TypeInt, + Required: true, + ForceNew: false, + ValidateFunc: validation.IntBetween(1, 365*24*60), + }, + }, + } +} + +func resourceSiteRecoveryReplicationPolicyCreate(d *schema.ResourceData, meta interface{}) error { + resGroup := d.Get("resource_group_name").(string) + vaultName := d.Get("recovery_vault_name").(string) + name := d.Get("name").(string) + + client := meta.(*clients.Client).RecoveryServices.ReplicationPoliciesClient(resGroup, vaultName) + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + if d.IsNewResource() { + existing, err := client.Get(ctx, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("Error checking for presence of existing site recovery replication policy %s: %+v", name, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_site_recovery_replication_policy", azure.HandleAzureSdkForGoBug2824(*existing.ID)) + } + } + + recoveryPoint := int32(d.Get("recovery_point_retention_in_minutes").(int)) + appConsitency := int32(d.Get("application_consistent_snapshot_frequency_in_minutes").(int)) + parameters := siterecovery.CreatePolicyInput{ + Properties: 
&siterecovery.CreatePolicyInputProperties{ + ProviderSpecificInput: &siterecovery.A2APolicyCreationInput{ + RecoveryPointHistory: &recoveryPoint, + AppConsistentFrequencyInMinutes: &appConsitency, + MultiVMSyncStatus: siterecovery.Enable, + InstanceType: siterecovery.InstanceTypeBasicPolicyProviderSpecificInputInstanceTypeA2A, + }, + }, + } + future, err := client.Create(ctx, name, parameters) + if err != nil { + return fmt.Errorf("Error creating site recovery replication policy %s (vault %s): %+v", name, vaultName, err) + } + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error creating site recovery replication policy %s (vault %s): %+v", name, vaultName, err) + } + + resp, err := client.Get(ctx, name) + if err != nil { + return fmt.Errorf("Error retrieving site recovery replication policy %s (vault %s): %+v", name, vaultName, err) + } + + d.SetId(azure.HandleAzureSdkForGoBug2824(*resp.ID)) + + return resourceSiteRecoveryReplicationPolicyRead(d, meta) +} + +func resourceSiteRecoveryReplicationPolicyUpdate(d *schema.ResourceData, meta interface{}) error { + resGroup := d.Get("resource_group_name").(string) + vaultName := d.Get("recovery_vault_name").(string) + name := d.Get("name").(string) + + client := meta.(*clients.Client).RecoveryServices.ReplicationPoliciesClient(resGroup, vaultName) + ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + recoveryPoint := int32(d.Get("recovery_point_retention_in_minutes").(int)) + appConsitency := int32(d.Get("application_consistent_snapshot_frequency_in_minutes").(int)) + parameters := siterecovery.UpdatePolicyInput{ + Properties: &siterecovery.UpdatePolicyInputProperties{ + ReplicationProviderSettings: &siterecovery.A2APolicyCreationInput{ + RecoveryPointHistory: &recoveryPoint, + AppConsistentFrequencyInMinutes: &appConsitency, + MultiVMSyncStatus: siterecovery.Enable, + InstanceType: siterecovery.InstanceTypeBasicPolicyProviderSpecificInputInstanceTypeA2A, + }, + }, + } + future, err := client.Update(ctx, name, parameters) + if err != nil { + return fmt.Errorf("Error updating site recovery replication policy %s (vault %s): %+v", name, vaultName, err) + } + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error updating site recovery replication policy %s (vault %s): %+v", name, vaultName, err) + } + + resp, err := client.Get(ctx, name) + if err != nil { + return fmt.Errorf("Error retrieving site recovery replication policy %s (vault %s): %+v", name, vaultName, err) + } + + d.SetId(azure.HandleAzureSdkForGoBug2824(*resp.ID)) + + return resourceSiteRecoveryReplicationPolicyRead(d, meta) +} + +func resourceSiteRecoveryReplicationPolicyRead(d *schema.ResourceData, meta interface{}) error { + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + + resGroup := id.ResourceGroup + vaultName := id.Path["vaults"] + name := id.Path["replicationPolicies"] + + client := meta.(*clients.Client).RecoveryServices.ReplicationPoliciesClient(resGroup, vaultName) + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + resp, err := client.Get(ctx, name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + d.SetId("") + return nil + } + return fmt.Errorf("Error making Read request on site recovery replication policy %s (vault %s): %+v", name, vaultName, err) + } + + d.Set("name", resp.Name) + d.Set("resource_group_name", resGroup) + d.Set("recovery_vault_name", 
vaultName) + if a2APolicyDetails, isA2A := resp.Properties.ProviderSpecificDetails.AsA2APolicyDetails(); isA2A { + d.Set("recovery_point_retention_in_minutes", a2APolicyDetails.RecoveryPointHistory) + d.Set("application_consistent_snapshot_frequency_in_minutes", a2APolicyDetails.AppConsistentFrequencyInMinutes) + } + return nil +} + +func resourceSiteRecoveryReplicationPolicyDelete(d *schema.ResourceData, meta interface{}) error { + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + + resGroup := id.ResourceGroup + vaultName := id.Path["vaults"] + name := id.Path["replicationPolicies"] + + client := meta.(*clients.Client).RecoveryServices.ReplicationPoliciesClient(resGroup, vaultName) + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + future, err := client.Delete(ctx, name) + if err != nil { + return fmt.Errorf("Error deleting site recovery replication policy %s (vault %s): %+v", name, vaultName, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("Error waiting for deletion of site recovery replication policy %s (vault %s): %+v", name, vaultName, err) + } + + return nil +} diff --git a/azurerm/internal/services/recoveryservices/site_recovery_replication_policy_resource_test.go b/azurerm/internal/services/recoveryservices/site_recovery_replication_policy_resource_test.go new file mode 100644 index 000000000000..e543d8127c50 --- /dev/null +++ b/azurerm/internal/services/recoveryservices/site_recovery_replication_policy_resource_test.go @@ -0,0 +1,82 @@ +package recoveryservices_test + +import ( + "context" + "fmt" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" +) + +type SiteRecoveryReplicationPolicyResource struct { +} + +func TestAccSiteRecoveryReplicationPolicy_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_site_recovery_replication_policy", "test") + r := SiteRecoveryReplicationPolicyResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (SiteRecoveryReplicationPolicyResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-recovery-%d" + location = "%s" +} + +resource "azurerm_recovery_services_vault" "test" { + name = "acctest-vault-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + + soft_delete_enabled = false +} + +resource "azurerm_site_recovery_replication_policy" "test" { + resource_group_name = azurerm_resource_group.test.name + recovery_vault_name = azurerm_recovery_services_vault.test.name + name = "acctest-policy-%d" + recovery_point_retention_in_minutes = 24 * 60 + application_consistent_snapshot_frequency_in_minutes = 4 * 60 +} +`, 
data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (t SiteRecoveryReplicationPolicyResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + + resGroup := id.ResourceGroup + vaultName := id.Path["vaults"] + name := id.Path["replicationPolicies"] + + resp, err := clients.RecoveryServices.ReplicationPoliciesClient(resGroup, vaultName).Get(ctx, name) + if err != nil { + return nil, fmt.Errorf("reading site recovery replication policy (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} diff --git a/azurerm/internal/services/recoveryservices/tests/data_source_backup_policy_vm_test.go b/azurerm/internal/services/recoveryservices/tests/data_source_backup_policy_vm_test.go deleted file mode 100644 index 292fcaa616a2..000000000000 --- a/azurerm/internal/services/recoveryservices/tests/data_source_backup_policy_vm_test.go +++ /dev/null @@ -1,43 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMBackupPolicyVm_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_backup_policy_vm", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceBackupPolicyVm_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMBackupProtectionPolicyVmExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "recovery_vault_name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "resource_group_name"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - ), - }, - }, - }) -} - -func testAccDataSourceBackupPolicyVm_basic(data acceptance.TestData) string { - template := testAccAzureRMBackupProtectionPolicyVM_basicDaily(data) - return fmt.Sprintf(` -%s - -data "azurerm_backup_policy_vm" "test" { - name = azurerm_backup_policy_vm.test.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, template) -} diff --git a/azurerm/internal/services/recoveryservices/tests/data_source_recovery_services_vault_test.go b/azurerm/internal/services/recoveryservices/tests/data_source_recovery_services_vault_test.go deleted file mode 100644 index 970017132c73..000000000000 --- a/azurerm/internal/services/recoveryservices/tests/data_source_recovery_services_vault_test.go +++ /dev/null @@ -1,43 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMRecoveryServicesVault_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_recovery_services_vault", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceRecoveryServicesVault_basic(data), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMRecoveryServicesVaultExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "location"), - resource.TestCheckResourceAttrSet(data.ResourceName, "resource_group_name"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "sku", "Standard"), - ), - }, - }, - }) -} - -func testAccDataSourceRecoveryServicesVault_basic(data acceptance.TestData) string { - template := testAccAzureRMRecoveryServicesVault_basic(data) - return fmt.Sprintf(` -%s - -data "azurerm_recovery_services_vault" "test" { - name = azurerm_recovery_services_vault.test.name - resource_group_name = azurerm_resource_group.test.name -} -`, template) -} diff --git a/azurerm/internal/services/recoveryservices/tests/resource_arm_backup_container_storage_account_test.go b/azurerm/internal/services/recoveryservices/tests/resource_arm_backup_container_storage_account_test.go deleted file mode 100644 index 9cb29db4d132..000000000000 --- a/azurerm/internal/services/recoveryservices/tests/resource_arm_backup_container_storage_account_test.go +++ /dev/null @@ -1,147 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMBackupProtectionContainerStorageAccount_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_backup_container_storage_account", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMBackupProtectionContainerStorageAccountDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMBackupProtectionContainerStorageAccount_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMBackupProtectionContainerStorageAccountExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testAccAzureRMBackupProtectionContainerStorageAccount_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-backup-%d" - location = "%s" -} - -resource "azurerm_recovery_services_vault" "testvlt" { - name = "acctest-vault-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - - soft_delete_enabled = false -} - -resource "azurerm_storage_account" "test" { - name = "unlikely23exst2acct%s" - resource_group_name = azurerm_resource_group.test.name - - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_backup_container_storage_account" "test" { - resource_group_name = azurerm_resource_group.test.name - recovery_vault_name = azurerm_recovery_services_vault.testvlt.name - storage_account_id = azurerm_storage_account.test.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomString) -} - -func testCheckAzureRMBackupProtectionContainerStorageAccountExists(resourceName string) resource.TestCheckFunc { - 
return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).RecoveryServices.BackupProtectionContainersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - state, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - resourceGroupName := state.Primary.Attributes["resource_group_name"] - vaultName := state.Primary.Attributes["recovery_vault_name"] - storageAccountID := state.Primary.Attributes["storage_account_id"] - - parsedStorageAccountID, err := azure.ParseAzureResourceID(storageAccountID) - if err != nil { - return fmt.Errorf("Bad: Unable to parse storage_account_id '%s': %+v", storageAccountID, err) - } - accountName, hasName := parsedStorageAccountID.Path["storageAccounts"] - if !hasName { - return fmt.Errorf("Bad: Parsed storage_account_id '%s' doesn't contain 'storageAccounts'", storageAccountID) - } - - containerName := fmt.Sprintf("StorageContainer;storage;%s;%s", parsedStorageAccountID.ResourceGroup, accountName) - - // Ensure container exists in API - resp, err := client.Get(ctx, vaultName, resourceGroupName, "Azure", containerName) - if err != nil { - return fmt.Errorf("Bad: Get on protection container: %+v", err) - } - - if resp.Response.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: container: %q does not exist", containerName) - } - - return nil - } -} - -func testCheckAzureRMBackupProtectionContainerStorageAccountDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).RecoveryServices.BackupProtectionContainersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_backup_container_storage_account" { - continue - } - - resourceGroupName := rs.Primary.Attributes["resource_group_name"] - vaultName := rs.Primary.Attributes["recovery_vault_name"] - storageAccountID := rs.Primary.Attributes["storage_account_id"] - - parsedStorageAccountID, err := azure.ParseAzureResourceID(storageAccountID) - if err != nil { - return fmt.Errorf("Bad: Unable to parse storage_account_id '%s': %+v", storageAccountID, err) - } - accountName, hasName := parsedStorageAccountID.Path["storageAccounts"] - if !hasName { - return fmt.Errorf("Bad: Parsed storage_account_id '%s' doesn't contain 'storageAccounts'", storageAccountID) - } - - containerName := fmt.Sprintf("StorageContainer;storage;%s;%s", parsedStorageAccountID.ResourceGroup, accountName) - - // Ensure container exists in API - resp, err := client.Get(ctx, vaultName, resourceGroupName, "Azure", containerName) - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Backup Container Storage Account still exists:\n%#v", resp.Properties) - } - } - - return nil -} diff --git a/azurerm/internal/services/recoveryservices/tests/resource_arm_backup_policy_file_share_test.go b/azurerm/internal/services/recoveryservices/tests/resource_arm_backup_policy_file_share_test.go deleted file mode 100644 index 4c25608070d5..000000000000 --- a/azurerm/internal/services/recoveryservices/tests/resource_arm_backup_policy_file_share_test.go +++ /dev/null @@ -1,239 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMBackupProtectionPolicyFileShare_basicDaily(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_backup_policy_file_share", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMBackupProtectionPolicyFileShareDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMBackupProtectionPolicyFileShare_basicDaily(data), - Check: checkAccAzureRMBackupProtectionPolicyFileShare_basicDaily(data.ResourceName, data.RandomInteger), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMBackupProtectionPolicyFileShare_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_backup_policy_file_share", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMBackupProtectionPolicyFileShareDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMBackupProtectionPolicyFileShare_basicDaily(data), - Check: checkAccAzureRMBackupProtectionPolicyFileShare_basicDaily(data.ResourceName, data.RandomInteger), - }, - data.RequiresImportErrorStep(testAccAzureRMBackupProtectionPolicyFileShare_requiresImport), - }, - }) -} - -func TestAccAzureRMBackupProtectionPolicyFileShare_updateDaily(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_backup_policy_file_share", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMBackupProtectionPolicyFileShareDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMBackupProtectionPolicyFileShare_basicDaily(data), - Check: checkAccAzureRMBackupProtectionPolicyFileShare_basicDaily(data.ResourceName, data.RandomInteger), - }, - data.ImportStep(), - { - Config: testAccAzureRMBackupProtectionPolicyFileShare_updateDaily(data), - Check: checkAccAzureRMBackupProtectionPolicyFileShare_updateDaily(data.ResourceName, data.RandomInteger), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMBackupProtectionPolicyFileShareDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).RecoveryServices.ProtectionPoliciesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_backup_policy_file_share" { - continue - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - vaultName := rs.Primary.Attributes["recovery_vault_name"] - policyName := rs.Primary.Attributes["name"] - - resp, err := client.Get(ctx, vaultName, resourceGroup, policyName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - - return fmt.Errorf("Recovery Services Vault Policy still exists:\n%#v", resp) - } - - return nil -} - -func testCheckAzureRMBackupProtectionPolicyFileShareExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).RecoveryServices.ProtectionPoliciesClient - ctx := 
acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %q", resourceName) - } - - vaultName := rs.Primary.Attributes["recovery_vault_name"] - policyName := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Recovery Services Vault %q Policy: %q", vaultName, policyName) - } - - resp, err := client.Get(ctx, vaultName, resourceGroup, policyName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Recovery Services Vault Policy %q (resource group: %q) was not found: %+v", policyName, resourceGroup, err) - } - - return fmt.Errorf("Bad: Get on recoveryServicesVaultsClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMBackupProtectionPolicyFileShare_base(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-backup-%d" - location = "%s" -} - -resource "azurerm_recovery_services_vault" "test" { - name = "acctest-RSV-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - - soft_delete_enabled = false -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMBackupProtectionPolicyFileShare_basicDaily(data acceptance.TestData) string { - template := testAccAzureRMBackupProtectionPolicyFileShare_base(data) - return fmt.Sprintf(` -%s - -resource "azurerm_backup_policy_file_share" "test" { - name = "acctest-PFS-%d" - resource_group_name = azurerm_resource_group.test.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - - backup { - frequency = "Daily" - time = "23:00" - } - - retention_daily { - count = 10 - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMBackupProtectionPolicyFileShare_updateDaily(data acceptance.TestData) string { - template := testAccAzureRMBackupProtectionPolicyFileShare_base(data) - return fmt.Sprintf(` -%s - -resource "azurerm_backup_policy_file_share" "test" { - name = "acctest-PFS-%d" - resource_group_name = azurerm_resource_group.test.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - - backup { - frequency = "Daily" - time = "23:30" - } - - retention_daily { - count = 180 - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMBackupProtectionPolicyFileShare_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMBackupProtectionPolicyFileShare_basicDaily(data) - return fmt.Sprintf(` -%s - -resource "azurerm_backup_policy_file_share" "import" { - name = azurerm_backup_policy_file_share.test.name - resource_group_name = azurerm_backup_policy_file_share.test.resource_group_name - recovery_vault_name = azurerm_backup_policy_file_share.test.recovery_vault_name - - backup { - frequency = "Daily" - time = "23:00" - } - - retention_daily { - count = 10 - } -} -`, template) -} - -func checkAccAzureRMBackupProtectionPolicyFileShare_basicDaily(resourceName string, ri int) resource.TestCheckFunc { - return resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMBackupProtectionPolicyFileShareExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", fmt.Sprintf("acctest-PFS-%d", ri)), - 
resource.TestCheckResourceAttr(resourceName, "resource_group_name", fmt.Sprintf("acctestRG-backup-%d", ri)), - resource.TestCheckResourceAttr(resourceName, "recovery_vault_name", fmt.Sprintf("acctest-RSV-%d", ri)), - resource.TestCheckResourceAttr(resourceName, "backup.0.frequency", "Daily"), - resource.TestCheckResourceAttr(resourceName, "backup.0.time", "23:00"), - resource.TestCheckResourceAttr(resourceName, "retention_daily.0.count", "10"), - ) -} - -func checkAccAzureRMBackupProtectionPolicyFileShare_updateDaily(resourceName string, ri int) resource.TestCheckFunc { - return resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMBackupProtectionPolicyFileShareExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", fmt.Sprintf("acctest-PFS-%d", ri)), - resource.TestCheckResourceAttr(resourceName, "resource_group_name", fmt.Sprintf("acctestRG-backup-%d", ri)), - resource.TestCheckResourceAttr(resourceName, "recovery_vault_name", fmt.Sprintf("acctest-RSV-%d", ri)), - resource.TestCheckResourceAttr(resourceName, "backup.0.frequency", "Daily"), - resource.TestCheckResourceAttr(resourceName, "backup.0.time", "23:30"), - resource.TestCheckResourceAttr(resourceName, "retention_daily.0.count", "180"), - ) -} diff --git a/azurerm/internal/services/recoveryservices/tests/resource_arm_backup_policy_vm_test.go b/azurerm/internal/services/recoveryservices/tests/resource_arm_backup_policy_vm_test.go deleted file mode 100644 index e1f753269d41..000000000000 --- a/azurerm/internal/services/recoveryservices/tests/resource_arm_backup_policy_vm_test.go +++ /dev/null @@ -1,537 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMBackupProtectionPolicyVM_basicDaily(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_backup_policy_vm", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMBackupProtectionPolicyVmDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMBackupProtectionPolicyVM_basicDaily(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMBackupProtectionPolicyVmExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "backup.0.frequency", "Daily"), - resource.TestCheckResourceAttr(data.ResourceName, "backup.0.time", "23:00"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_daily.0.count", "10"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMBackupProtectionPolicyVM_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_backup_policy_vm", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMBackupProtectionPolicyVmDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMBackupProtectionPolicyVM_basicDaily(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMBackupProtectionPolicyVmExists(data.ResourceName), - ), - }, - 
data.RequiresImportErrorStep(testAccAzureRMBackupProtectionPolicyVM_requiresImport), - }, - }) -} - -func TestAccAzureRMBackupProtectionPolicyVM_basicWeekly(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_backup_policy_vm", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMBackupProtectionPolicyVmDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMBackupProtectionPolicyVM_basicWeekly(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMBackupProtectionPolicyVmExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMBackupProtectionPolicyVM_completeDaily(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_backup_policy_vm", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMBackupProtectionPolicyVmDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMBackupProtectionPolicyVM_completeDaily(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMBackupProtectionPolicyVmExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMBackupProtectionPolicyVM_completeWeekly(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_backup_policy_vm", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMBackupProtectionPolicyVmDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMBackupProtectionPolicyVM_completeWeekly(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMBackupProtectionPolicyVmExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "backup.0.frequency", "Weekly"), - resource.TestCheckResourceAttr(data.ResourceName, "backup.0.time", "23:00"), - resource.TestCheckResourceAttr(data.ResourceName, "backup.0.weekdays.#", "4"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_weekly.0.count", "42"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_weekly.0.weekdays.#", "4"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_monthly.0.count", "7"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_monthly.0.weekdays.#", "4"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_monthly.0.weeks.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_yearly.0.count", "77"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_yearly.0.weekdays.#", "4"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_yearly.0.weeks.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_yearly.0.months.#", "2"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMBackupProtectionPolicyVM_updateDaily(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_backup_policy_vm", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMBackupProtectionPolicyVmDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMBackupProtectionPolicyVM_basicDaily(data), - Check: resource.ComposeAggregateTestCheckFunc( - 
testCheckAzureRMBackupProtectionPolicyVmExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMBackupProtectionPolicyVM_completeDaily(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMBackupProtectionPolicyVmExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "backup.0.frequency", "Daily"), - resource.TestCheckResourceAttr(data.ResourceName, "backup.0.time", "23:00"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_daily.0.count", "10"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_weekly.0.count", "42"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_weekly.0.weekdays.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_monthly.0.count", "7"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_monthly.0.weekdays.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_monthly.0.weeks.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_yearly.0.count", "77"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_yearly.0.weekdays.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_yearly.0.weeks.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_yearly.0.months.#", "2"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMBackupProtectionPolicyVM_updateWeekly(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_backup_policy_vm", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMBackupProtectionPolicyVmDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMBackupProtectionPolicyVM_basicWeekly(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMBackupProtectionPolicyVmExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMBackupProtectionPolicyVM_completeWeekly(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMBackupProtectionPolicyVmExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "backup.0.frequency", "Weekly"), - resource.TestCheckResourceAttr(data.ResourceName, "backup.0.time", "23:00"), - resource.TestCheckResourceAttr(data.ResourceName, "backup.0.weekdays.#", "4"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_weekly.0.count", "42"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_weekly.0.weekdays.#", "4"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_monthly.0.count", "7"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_monthly.0.weekdays.#", "4"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_monthly.0.weeks.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_yearly.0.count", "77"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_yearly.0.weekdays.#", "4"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_yearly.0.weeks.#", "2"), - resource.TestCheckResourceAttr(data.ResourceName, "retention_yearly.0.months.#", "2"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMBackupProtectionPolicyVM_updateDailyToWeekly(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_backup_policy_vm", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: 
testCheckAzureRMBackupProtectionPolicyVmDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMBackupProtectionPolicyVM_basicDaily(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMBackupProtectionPolicyVmExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMBackupProtectionPolicyVM_basicWeekly(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMBackupProtectionPolicyVmExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMBackupProtectionPolicyVM_updateWeeklyToDaily(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_backup_policy_vm", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMBackupProtectionPolicyVmDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMBackupProtectionPolicyVM_basicWeekly(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMBackupProtectionPolicyVmExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMBackupProtectionPolicyVM_basicDaily(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMBackupProtectionPolicyVmExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMBackupProtectionPolicyVM_updateWeeklyToPartial(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_backup_policy_vm", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMBackupProtectionPolicyVmDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMBackupProtectionPolicyVM_completeWeekly(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMBackupProtectionPolicyVmExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMBackupProtectionPolicyVM_completeWeeklyPartial(data), - Check: resource.ComposeAggregateTestCheckFunc( - testCheckAzureRMBackupProtectionPolicyVmExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMBackupProtectionPolicyVmDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).RecoveryServices.ProtectionPoliciesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_backup_policy_vm" { - continue - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - vaultName := rs.Primary.Attributes["recovery_vault_name"] - policyName := rs.Primary.Attributes["name"] - - resp, err := client.Get(ctx, vaultName, resourceGroup, policyName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - - return fmt.Errorf("Recovery Services Vault Policy still exists:\n%#v", resp) - } - - return nil -} - -func testCheckAzureRMBackupProtectionPolicyVmExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).RecoveryServices.ProtectionPoliciesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %q", resourceName) 
- } - - vaultName := rs.Primary.Attributes["recovery_vault_name"] - policyName := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Recovery Services Vault %q Policy: %q", vaultName, policyName) - } - - resp, err := client.Get(ctx, vaultName, resourceGroup, policyName) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Recovery Services Vault Policy %q (resource group: %q) was not found: %+v", policyName, resourceGroup, err) - } - - return fmt.Errorf("Bad: Get on recoveryServicesVaultsClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMBackupProtectionPolicyVM_template(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-backup-%d" - location = "%s" -} - -resource "azurerm_recovery_services_vault" "test" { - name = "acctest-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - - soft_delete_enabled = false -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMBackupProtectionPolicyVM_basicDaily(data acceptance.TestData) string { - template := testAccAzureRMBackupProtectionPolicyVM_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_backup_policy_vm" "test" { - name = "acctest-%d" - resource_group_name = azurerm_resource_group.test.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - - backup { - frequency = "Daily" - time = "23:00" - } - - retention_daily { - count = 10 - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMBackupProtectionPolicyVM_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMBackupProtectionPolicyVM_basicDaily(data) - return fmt.Sprintf(` -%s - -resource "azurerm_backup_policy_vm" "import" { - name = azurerm_backup_policy_vm.test.name - resource_group_name = azurerm_backup_policy_vm.test.resource_group_name - recovery_vault_name = azurerm_backup_policy_vm.test.recovery_vault_name - - backup { - frequency = "Daily" - time = "23:00" - } - - retention_daily { - count = 10 - } -} -`, template) -} - -func testAccAzureRMBackupProtectionPolicyVM_basicWeekly(data acceptance.TestData) string { - template := testAccAzureRMBackupProtectionPolicyVM_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_backup_policy_vm" "test" { - name = "acctest-%d" - resource_group_name = azurerm_resource_group.test.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - - backup { - frequency = "Weekly" - time = "23:00" - weekdays = ["Sunday", "Wednesday"] - } - - retention_weekly { - count = 42 - weekdays = ["Sunday", "Wednesday"] - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMBackupProtectionPolicyVM_completeDaily(data acceptance.TestData) string { - template := testAccAzureRMBackupProtectionPolicyVM_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_backup_policy_vm" "test" { - name = "acctest-%d" - resource_group_name = azurerm_resource_group.test.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - - backup { - frequency = "Daily" - time = "23:00" - } - - retention_daily { - count = 10 - } - - retention_weekly { - count = 42 - weekdays = ["Sunday", "Wednesday"] - } - - retention_monthly { - count = 7 - weekdays = 
["Sunday", "Wednesday"] - weeks = ["First", "Last"] - } - - retention_yearly { - count = 77 - weekdays = ["Sunday", "Wednesday"] - weeks = ["First", "Last"] - months = ["January", "July"] - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMBackupProtectionPolicyVM_completeWeekly(data acceptance.TestData) string { - template := testAccAzureRMBackupProtectionPolicyVM_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_backup_policy_vm" "test" { - name = "acctest-%d" - resource_group_name = azurerm_resource_group.test.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - - backup { - frequency = "Weekly" - time = "23:00" - weekdays = ["Sunday", "Wednesday", "Friday", "Saturday"] - } - - retention_weekly { - count = 42 - weekdays = ["Sunday", "Wednesday", "Friday", "Saturday"] - } - - retention_monthly { - count = 7 - weekdays = ["Sunday", "Wednesday", "Friday", "Saturday"] - weeks = ["First", "Last"] - } - - retention_yearly { - count = 77 - weekdays = ["Sunday", "Wednesday", "Friday", "Saturday"] - weeks = ["First", "Last"] - months = ["January", "July"] - } -} -`, template, data.RandomInteger) -} - -func testAccAzureRMBackupProtectionPolicyVM_completeWeeklyPartial(data acceptance.TestData) string { - template := testAccAzureRMBackupProtectionPolicyVM_template(data) - return fmt.Sprintf(` -%s - -resource "azurerm_backup_policy_vm" "test" { - name = "acctest-%d" - resource_group_name = azurerm_resource_group.test.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - - backup { - frequency = "Weekly" - time = "23:00" - weekdays = ["Sunday", "Wednesday", "Friday"] - } - - retention_weekly { - count = 42 - weekdays = ["Sunday", "Wednesday", "Friday"] - } - - retention_monthly { - count = 7 - weekdays = ["Sunday", "Wednesday"] - weeks = ["First", "Last"] - } - - retention_yearly { - count = 77 - weekdays = ["Sunday"] - weeks = ["Last"] - months = ["January"] - } -} -`, template, data.RandomInteger) -} diff --git a/azurerm/internal/services/recoveryservices/tests/resource_arm_backup_protected_file_share_test.go b/azurerm/internal/services/recoveryservices/tests/resource_arm_backup_protected_file_share_test.go deleted file mode 100644 index d73a318881a1..000000000000 --- a/azurerm/internal/services/recoveryservices/tests/resource_arm_backup_protected_file_share_test.go +++ /dev/null @@ -1,309 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -// TODO: These tests fail because enabling backup on file shares with no content -func TestAccAzureRMBackupProtectedFileShare_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_backup_protected_file_share", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMBackupProtectedFileShareDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMBackupProtectedFileShare_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMBackupProtectedFileShareExists(data.ResourceName), - 
resource.TestCheckResourceAttrSet(data.ResourceName, "resource_group_name"), - ), - }, - data.ImportStep(), - { - // vault cannot be deleted unless we unregister all backups - Config: testAccAzureRMBackupProtectedFileShare_base(data), - }, - }, - }) -} - -func TestAccAzureRMBackupProtectedFileShare_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_backup_protected_file_share", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMBackupProtectedFileShareDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMBackupProtectedFileShare_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMBackupProtectedFileShareExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "resource_group_name"), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMBackupProtectedFileShare_requiresImport), - { - // vault cannot be deleted unless we unregister all backups - Config: testAccAzureRMBackupProtectedFileShare_base(data), - }, - }, - }) -} - -func TestAccAzureRMBackupProtectedFileShare_updateBackupPolicyId(t *testing.T) { - fBackupPolicyResourceName := "azurerm_backup_policy_file_share.test1" - sBackupPolicyResourceName := "azurerm_backup_policy_file_share.test2" - - data := acceptance.BuildTestData(t, "azurerm_backup_protected_file_share", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMBackupProtectedFileShareDestroy, - Steps: []resource.TestStep{ - { - // Create resources and link first backup policy id - Config: testAccAzureRMBackupProtectedFileShare_basic(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrPair(data.ResourceName, "backup_policy_id", fBackupPolicyResourceName, "id"), - ), - }, - { - // Modify backup policy id to the second one - // Set Destroy false to prevent error from cleaning up dangling resource - Config: testAccAzureRMBackupProtectedFileShare_updatePolicy(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrPair(data.ResourceName, "backup_policy_id", sBackupPolicyResourceName, "id"), - ), - }, - { - // Remove protected items first before the associated policies are deleted - Config: testAccAzureRMBackupProtectedFileShare_base(data), - }, - }, - }) -} - -func testCheckAzureRMBackupProtectedFileShareDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).RecoveryServices.ProtectedItemsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_backup_protected_file_share" { - continue - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - vaultName := rs.Primary.Attributes["recovery_vault_name"] - storageID := rs.Primary.Attributes["source_storage_account_id"] - fileShareName := rs.Primary.Attributes["source_file_share_name"] - - parsedStorageID, err := azure.ParseAzureResourceID(storageID) - if err != nil { - return fmt.Errorf("[ERROR] Unable to parse source_storage_account_id '%s': %+v", storageID, err) - } - accountName, hasName := parsedStorageID.Path["storageAccounts"] - if !hasName { - return fmt.Errorf("[ERROR] parsed source_storage_account_id '%s' doesn't contain 'storageAccounts'", storageID) - } - - protectedItemName := 
fmt.Sprintf("AzureFileShare;%s", fileShareName) - containerName := fmt.Sprintf("StorageContainer;storage;%s;%s", parsedStorageID.ResourceGroup, accountName) - - resp, err := client.Get(ctx, vaultName, resourceGroup, "Azure", containerName, protectedItemName, "") - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - - return fmt.Errorf("Azure Backup Protected File Share still exists:\n%#v", resp) - } - - return nil -} - -func testCheckAzureRMBackupProtectedFileShareExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).RecoveryServices.ProtectedItemsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %q", resourceName) - } - - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Azure Backup Protected File Share: %q", resourceName) - } - - vaultName := rs.Primary.Attributes["recovery_vault_name"] - storageID := rs.Primary.Attributes["source_storage_account_id"] - fileShareName := rs.Primary.Attributes["source_file_share_name"] - - parsedStorageID, err := azure.ParseAzureResourceID(storageID) - if err != nil { - return fmt.Errorf("[ERROR] Unable to parse source_storage_account_id '%s': %+v", storageID, err) - } - accountName, hasName := parsedStorageID.Path["storageAccounts"] - if !hasName { - return fmt.Errorf("[ERROR] parsed source_storage_account_id '%s' doesn't contain 'storageAccounts'", storageID) - } - - protectedItemName := fmt.Sprintf("AzureFileShare;%s", fileShareName) - containerName := fmt.Sprintf("StorageContainer;storage;%s;%s", parsedStorageID.ResourceGroup, accountName) - - resp, err := client.Get(ctx, vaultName, resourceGroup, "Azure", containerName, protectedItemName, "") - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Azure Backup Protected File Share %q (resource group: %q) was not found: %+v", protectedItemName, resourceGroup, err) - } - - return fmt.Errorf("Bad: Get on recoveryServicesVaultsClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMBackupProtectedFileShare_base(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-backup-%[1]d" - location = "%[2]s" -} - -resource "azurerm_storage_account" "test" { - name = "acctest%[3]s" - location = "${azurerm_resource_group.test.location}" - resource_group_name = "${azurerm_resource_group.test.name}" - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_storage_share" "test" { - name = "acctest-ss-%[1]d" - storage_account_name = "${azurerm_storage_account.test.name}" - metadata = {} - - lifecycle { - ignore_changes = [metadata] // Ignore changes Azure Backup makes to the metadata - } -} - -resource "azurerm_recovery_services_vault" "test" { - name = "acctest-VAULT-%[1]d" - location = "${azurerm_resource_group.test.location}" - resource_group_name = "${azurerm_resource_group.test.name}" - sku = "Standard" - - soft_delete_enabled = false -} - -resource "azurerm_backup_policy_file_share" "test1" { - name = "acctest-PFS-%[1]d" - resource_group_name = "${azurerm_resource_group.test.name}" - 
recovery_vault_name = "${azurerm_recovery_services_vault.test.name}" - - backup { - frequency = "Daily" - time = "23:00" - } - - retention_daily { - count = 10 - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString) -} - -func testAccAzureRMBackupProtectedFileShare_basic(data acceptance.TestData) string { - template := testAccAzureRMBackupProtectedFileShare_base(data) - return fmt.Sprintf(` -%s - -resource "azurerm_backup_container_storage_account" "test" { - resource_group_name = azurerm_resource_group.test.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - storage_account_id = azurerm_storage_account.test.id -} - -resource "azurerm_backup_protected_file_share" "test" { - resource_group_name = azurerm_resource_group.test.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - source_storage_account_id = azurerm_backup_container_storage_account.test.storage_account_id - source_file_share_name = azurerm_storage_share.test.name - backup_policy_id = azurerm_backup_policy_file_share.test1.id -} -`, template) -} - -func testAccAzureRMBackupProtectedFileShare_updatePolicy(data acceptance.TestData) string { - template := testAccAzureRMBackupProtectedFileShare_base(data) - return fmt.Sprintf(` -%s - -resource "azurerm_backup_policy_file_share" "test2" { - name = "acctest-%d-Secondary" - resource_group_name = azurerm_resource_group.test.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - - backup { - frequency = "Daily" - time = "23:00" - } - - retention_daily { - count = 10 - } -} - -resource "azurerm_backup_container_storage_account" "test" { - resource_group_name = azurerm_resource_group.test.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - storage_account_id = azurerm_storage_account.test.id -} - -resource "azurerm_backup_protected_file_share" "test" { - resource_group_name = azurerm_resource_group.test.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - source_storage_account_id = azurerm_backup_container_storage_account.test.storage_account_id - source_file_share_name = azurerm_storage_share.test.name - backup_policy_id = azurerm_backup_policy_file_share.test2.id -} -`, template, data.RandomInteger) -} - -func testAccAzureRMBackupProtectedFileShare_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMBackupProtectedFileShare_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_backup_protected_file_share" "test_import" { - resource_group_name = azurerm_resource_group.test.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - source_storage_account_id = azurerm_storage_account.test.id - source_file_share_name = azurerm_storage_share.test.name - backup_policy_id = azurerm_backup_policy_file_share.test1.id -} -`, template) -} diff --git a/azurerm/internal/services/recoveryservices/tests/resource_arm_backup_protected_vm_test.go b/azurerm/internal/services/recoveryservices/tests/resource_arm_backup_protected_vm_test.go deleted file mode 100644 index d2ba87369cec..000000000000 --- a/azurerm/internal/services/recoveryservices/tests/resource_arm_backup_protected_vm_test.go +++ /dev/null @@ -1,644 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMBackupProtectedVm_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_backup_protected_vm", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMBackupProtectedVmDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMBackupProtectedVm_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMBackupProtectedVmExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "resource_group_name"), - ), - }, - data.ImportStep(), - { - // vault cannot be deleted unless we unregister all backups - Config: testAccAzureRMBackupProtectedVm_base(data), - }, - }, - }) -} - -func TestAccAzureRMBackupProtectedVm_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_backup_protected_vm", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMBackupProtectedVmDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMBackupProtectedVm_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMBackupProtectedVmExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "resource_group_name"), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMBackupProtectedVm_requiresImport), - { - // vault cannot be deleted unless we unregister all backups - Config: testAccAzureRMBackupProtectedVm_base(data), - }, - }, - }) -} - -func TestAccAzureRMBackupProtectedVm_separateResourceGroups(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_backup_protected_vm", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMBackupProtectedVmDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMBackupProtectedVm_separateResourceGroups(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMBackupProtectedVmExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "resource_group_name"), - ), - }, - data.ImportStep(), - { - // vault cannot be deleted unless we unregister all backups - Config: testAccAzureRMBackupProtectedVm_additionalVault(data), - }, - }, - }) -} - -func TestAccAzureRMBackupProtectedVm_updateBackupPolicyId(t *testing.T) { - virtualMachine := "azurerm_virtual_machine.test" - fBackupPolicyResourceName := "azurerm_backup_policy_vm.test" - sBackupPolicyResourceName := "azurerm_backup_policy_vm.test_change_backup" - data := acceptance.BuildTestData(t, "azurerm_backup_protected_vm", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMBackupProtectedVmDestroy, - Steps: []resource.TestStep{ - { // Create resources and link first backup policy id - ResourceName: fBackupPolicyResourceName, - Config: testAccAzureRMBackupProtectedVm_linkFirstBackupPolicy(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrPair(data.ResourceName, "backup_policy_id", 
fBackupPolicyResourceName, "id"), - ), - }, - { // Modify backup policy id to the second one - // Set Destroy false to prevent error from cleaning up dangling resource - ResourceName: sBackupPolicyResourceName, - Config: testAccAzureRMBackupProtectedVm_linkSecondBackupPolicy(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrPair(data.ResourceName, "backup_policy_id", sBackupPolicyResourceName, "id"), - ), - }, - { - // Remove backup policy link - // Backup policy link will need to be removed first so the VM's backup policy subsequently reverts to Default - // Azure API is quite sensitive, adding the step to control resource cleanup order - ResourceName: fBackupPolicyResourceName, - Config: testAccAzureRMBackupProtectedVm_withVM(data), - }, - { - // Then VM can be removed - ResourceName: virtualMachine, - Config: testAccAzureRMBackupProtectedVm_withSecondPolicy(data), - }, - { - // Remove backup policies and vault - ResourceName: data.ResourceName, - Config: testAccAzureRMBackupProtectedVm_basePolicyTest(data), - }, - }, - }) -} - -func testCheckAzureRMBackupProtectedVmDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).RecoveryServices.ProtectedItemsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_backup_protected_vm" { - continue - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - vaultName := rs.Primary.Attributes["recovery_vault_name"] - vmId := rs.Primary.Attributes["source_vm_id"] - - parsedVmId, err := azure.ParseAzureResourceID(vmId) - if err != nil { - return fmt.Errorf("[ERROR] Unable to parse source_vm_id '%s': %+v", vmId, err) - } - vmName, hasName := parsedVmId.Path["virtualMachines"] - if !hasName { - return fmt.Errorf("[ERROR] parsed source_vm_id '%s' doesn't contain 'virtualMachines'", vmId) - } - - protectedItemName := fmt.Sprintf("VM;iaasvmcontainerv2;%s;%s", parsedVmId.ResourceGroup, vmName) - containerName := fmt.Sprintf("iaasvmcontainer;iaasvmcontainerv2;%s;%s", parsedVmId.ResourceGroup, vmName) - - resp, err := client.Get(ctx, vaultName, resourceGroup, "Azure", containerName, protectedItemName, "") - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - - return fmt.Errorf("Recovery Services Protected VM still exists:\n%#v", resp) - } - - return nil -} - -func testCheckAzureRMBackupProtectedVmExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).RecoveryServices.ProtectedItemsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %q", resourceName) - } - - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Recovery Services Protected VM: %q", resourceName) - } - - vaultName := rs.Primary.Attributes["recovery_vault_name"] - vmId := rs.Primary.Attributes["source_vm_id"] - - // get VM name from id - parsedVmId, err := azure.ParseAzureResourceID(vmId) - if err != nil { - return fmt.Errorf("[ERROR] Unable to parse source_vm_id '%s': %+v", vmId, err) - } - vmName, hasName := parsedVmId.Path["virtualMachines"] - if !hasName { - return 
fmt.Errorf("[ERROR] parsed source_vm_id '%s' doesn't contain 'virtualMachines'", vmId) - } - - protectedItemName := fmt.Sprintf("VM;iaasvmcontainerv2;%s;%s", parsedVmId.ResourceGroup, vmName) - containerName := fmt.Sprintf("iaasvmcontainer;iaasvmcontainerv2;%s;%s", parsedVmId.ResourceGroup, vmName) - - resp, err := client.Get(ctx, vaultName, resourceGroup, "Azure", containerName, protectedItemName, "") - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Recovery Services Protected VM %q (resource group: %q) was not found: %+v", protectedItemName, resourceGroup, err) - } - - return fmt.Errorf("Bad: Get on recoveryServicesVaultsClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMBackupProtectedVm_base(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-backup-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "vnet" - location = azurerm_resource_group.test.location - address_space = ["10.0.0.0/16"] - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctest_subnet" - virtual_network_name = azurerm_virtual_network.test.name - resource_group_name = azurerm_resource_group.test.name - address_prefix = "10.0.10.0/24" -} - -resource "azurerm_network_interface" "test" { - name = "acctest_nic" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - ip_configuration { - name = "acctestipconfig" - subnet_id = azurerm_subnet.test.id - private_ip_address_allocation = "Dynamic" - public_ip_address_id = azurerm_public_ip.test.id - } -} - -resource "azurerm_public_ip" "test" { - name = "acctest-ip" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Dynamic" - domain_name_label = "acctestip%d" -} - -resource "azurerm_storage_account" "test" { - name = "acctest%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_managed_disk" "test" { - name = "acctest-datadisk" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - storage_account_type = "Standard_LRS" - create_option = "Empty" - disk_size_gb = "1023" -} - -resource "azurerm_virtual_machine" "test" { - name = "acctestvm" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - vm_size = "Standard_A0" - network_interface_ids = [azurerm_network_interface.test.id] - - storage_image_reference { - publisher = "Canonical" - offer = "UbuntuServer" - sku = "16.04-LTS" - version = "latest" - } - - storage_os_disk { - name = "acctest-osdisk" - managed_disk_type = "Standard_LRS" - caching = "ReadWrite" - create_option = "FromImage" - } - - storage_data_disk { - name = "acctest-datadisk" - managed_disk_id = azurerm_managed_disk.test.id - managed_disk_type = "Standard_LRS" - disk_size_gb = azurerm_managed_disk.test.disk_size_gb - create_option = "Attach" - lun = 0 - } - - os_profile { - computer_name = "acctest" - admin_username = "vmadmin" - admin_password = "Password123!@#" - } - - os_profile_linux_config { - disable_password_authentication = false - } - - boot_diagnostics { - enabled = true - storage_uri = 
azurerm_storage_account.test.primary_blob_endpoint - } -} - -resource "azurerm_recovery_services_vault" "test" { - name = "acctest-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - - soft_delete_enabled = false -} - -resource "azurerm_backup_policy_vm" "test" { - name = "acctest-%d" - resource_group_name = azurerm_resource_group.test.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - - backup { - frequency = "Daily" - time = "23:00" - } - - retention_daily { - count = 10 - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomString, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMBackupProtectedVm_basic(data acceptance.TestData) string { - template := testAccAzureRMBackupProtectedVm_base(data) - return fmt.Sprintf(` -%s - -resource "azurerm_backup_protected_vm" "test" { - resource_group_name = azurerm_resource_group.test.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - source_vm_id = azurerm_virtual_machine.test.id - backup_policy_id = azurerm_backup_policy_vm.test.id -} -`, template) -} - -// For update backup policy id test -func testAccAzureRMBackupProtectedVm_basePolicyTest(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-backup-%d-1" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "vnet" - location = azurerm_resource_group.test.location - address_space = ["10.0.0.0/16"] - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "acctest_subnet" - virtual_network_name = azurerm_virtual_network.test.name - resource_group_name = azurerm_resource_group.test.name - address_prefix = "10.0.10.0/24" -} - -resource "azurerm_network_interface" "test" { - name = "acctest_nic" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - ip_configuration { - name = "acctestipconfig" - subnet_id = azurerm_subnet.test.id - private_ip_address_allocation = "Dynamic" - public_ip_address_id = azurerm_public_ip.test.id - } -} - -resource "azurerm_public_ip" "test" { - name = "acctest-ip" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - allocation_method = "Dynamic" - domain_name_label = "acctestip%d" -} - -resource "azurerm_storage_account" "test" { - name = "acctest%s" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_managed_disk" "test" { - name = "acctest-datadisk" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - storage_account_type = "Standard_LRS" - create_option = "Empty" - disk_size_gb = "1023" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomString) -} - -// For update backup policy id test -func testAccAzureRMBackupProtectedVm_withVault(data acceptance.TestData) string { - template := testAccAzureRMBackupProtectedVm_basePolicyTest(data) - return fmt.Sprintf(` -%s - -resource "azurerm_recovery_services_vault" "test" { - name = "acctest-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - - soft_delete_enabled = false 
-} -`, template, data.RandomInteger) -} - -// For update backup policy id test -func testAccAzureRMBackupProtectedVm_withFirstPolicy(data acceptance.TestData) string { - template := testAccAzureRMBackupProtectedVm_withVault(data) - return fmt.Sprintf(` -%s - -resource "azurerm_backup_policy_vm" "test" { - name = "acctest-%d" - resource_group_name = azurerm_resource_group.test.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - - backup { - frequency = "Daily" - time = "23:00" - } - - retention_daily { - count = 10 - } -} -`, template, data.RandomInteger) -} - -// For update backup policy id test -func testAccAzureRMBackupProtectedVm_withSecondPolicy(data acceptance.TestData) string { - template := testAccAzureRMBackupProtectedVm_withFirstPolicy(data) - return fmt.Sprintf(` -%s - -resource "azurerm_backup_policy_vm" "test_change_backup" { - name = "acctest2-%d" - resource_group_name = azurerm_resource_group.test.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - - backup { - frequency = "Daily" - time = "23:00" - } - - retention_daily { - count = 15 - } -} -`, template, data.RandomInteger) -} - -// For update backup policy id test -func testAccAzureRMBackupProtectedVm_withVM(data acceptance.TestData) string { - template := testAccAzureRMBackupProtectedVm_withSecondPolicy(data) - return fmt.Sprintf(` -%s - -resource "azurerm_virtual_machine" "test" { - name = "acctestvm-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - vm_size = "Standard_A0" - network_interface_ids = [azurerm_network_interface.test.id] - delete_os_disk_on_termination = true - - storage_image_reference { - publisher = "Canonical" - offer = "UbuntuServer" - sku = "16.04-LTS" - version = "latest" - } - - storage_os_disk { - name = "acctest-osdisk" - managed_disk_type = "Standard_LRS" - caching = "ReadWrite" - create_option = "FromImage" - } - - storage_data_disk { - name = "acctest-datadisk" - managed_disk_id = azurerm_managed_disk.test.id - managed_disk_type = "Standard_LRS" - disk_size_gb = azurerm_managed_disk.test.disk_size_gb - create_option = "Attach" - lun = 0 - } - - os_profile { - computer_name = "acctest" - admin_username = "vmadmin" - admin_password = "Password123!@#" - } - - os_profile_linux_config { - disable_password_authentication = false - } - - boot_diagnostics { - enabled = true - storage_uri = azurerm_storage_account.test.primary_blob_endpoint - } -} -`, template, data.RandomInteger) -} - -// For update backup policy id test -func testAccAzureRMBackupProtectedVm_linkFirstBackupPolicy(data acceptance.TestData) string { - template := testAccAzureRMBackupProtectedVm_withVM(data) - return fmt.Sprintf(` -%s - -resource "azurerm_backup_protected_vm" "test" { - resource_group_name = azurerm_resource_group.test.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - source_vm_id = azurerm_virtual_machine.test.id - backup_policy_id = azurerm_backup_policy_vm.test.id -} -`, template) -} - -// For update backup policy id test -func testAccAzureRMBackupProtectedVm_linkSecondBackupPolicy(data acceptance.TestData) string { - template := testAccAzureRMBackupProtectedVm_withVM(data) - return fmt.Sprintf(` -%s - -resource "azurerm_backup_protected_vm" "test" { - resource_group_name = azurerm_resource_group.test.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - source_vm_id = azurerm_virtual_machine.test.id - backup_policy_id = azurerm_backup_policy_vm.test_change_backup.id -} -`, 
template) -} - -func testAccAzureRMBackupProtectedVm_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMBackupProtectedVm_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_backup_protected_vm" "import" { - resource_group_name = azurerm_backup_protected_vm.test.resource_group_name - recovery_vault_name = azurerm_backup_protected_vm.test.recovery_vault_name - source_vm_id = azurerm_backup_protected_vm.test.source_vm_id - backup_policy_id = azurerm_backup_protected_vm.test.backup_policy_id -} -`, template) -} - -func testAccAzureRMBackupProtectedVm_additionalVault(data acceptance.TestData) string { - template := testAccAzureRMBackupProtectedVm_base(data) - return fmt.Sprintf(` -%s - -resource "azurerm_resource_group" "test2" { - name = "acctestRG-backup-%d-2" - location = "%s" -} - -resource "azurerm_recovery_services_vault" "test2" { - name = "acctest2-%d" - location = azurerm_resource_group.test2.location - resource_group_name = azurerm_resource_group.test2.name - sku = "Standard" - - soft_delete_enabled = false -} - -resource "azurerm_backup_policy_vm" "test2" { - name = "acctest2-%d" - resource_group_name = azurerm_resource_group.test2.name - recovery_vault_name = azurerm_recovery_services_vault.test2.name - - backup { - frequency = "Daily" - time = "23:00" - } - - retention_daily { - count = 10 - } -} -`, template, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMBackupProtectedVm_separateResourceGroups(data acceptance.TestData) string { - template := testAccAzureRMBackupProtectedVm_additionalVault(data) - return fmt.Sprintf(` -%s - -resource "azurerm_backup_protected_vm" "test" { - resource_group_name = azurerm_resource_group.test2.name - recovery_vault_name = azurerm_recovery_services_vault.test2.name - backup_policy_id = azurerm_backup_policy_vm.test2.id - source_vm_id = azurerm_virtual_machine.test.id -} -`, template) -} diff --git a/azurerm/internal/services/recoveryservices/tests/resource_arm_recovery_services_vault_test.go b/azurerm/internal/services/recoveryservices/tests/resource_arm_recovery_services_vault_test.go deleted file mode 100644 index 3fc5641d0f08..000000000000 --- a/azurerm/internal/services/recoveryservices/tests/resource_arm_recovery_services_vault_test.go +++ /dev/null @@ -1,222 +0,0 @@ -package tests - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMRecoveryServicesVault_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_recovery_services_vault", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRecoveryServicesVaultDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRecoveryServicesVault_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRecoveryServicesVaultExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMRecoveryServicesVault_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_recovery_services_vault", "test") - - resource.ParallelTest(t, 
resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRecoveryServicesVaultDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRecoveryServicesVault_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRecoveryServicesVaultExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMRecoveryServicesVault_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_recovery_services_vault", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRecoveryServicesVaultDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRecoveryServicesVault_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRecoveryServicesVaultExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMRecoveryServicesVault_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRecoveryServicesVaultExists(data.ResourceName), - ), - }, - data.ImportStep(), - { - Config: testAccAzureRMRecoveryServicesVault_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRecoveryServicesVaultExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMRecoveryServicesVault_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_recovery_services_vault", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRecoveryServicesVaultDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRecoveryServicesVault_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRecoveryServicesVaultExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "name"), - resource.TestCheckResourceAttrSet(data.ResourceName, "location"), - resource.TestCheckResourceAttrSet(data.ResourceName, "resource_group_name"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "0"), - resource.TestCheckResourceAttr(data.ResourceName, "sku", "Standard"), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMRecoveryServicesVault_requiresImport), - }, - }) -} - -func testCheckAzureRMRecoveryServicesVaultDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).RecoveryServices.VaultsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_recovery_services_vault" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return nil - } - - return err - } - - return fmt.Errorf("Recovery Services Vault still exists:\n%#v", resp) - } - - return nil -} - -func testCheckAzureRMRecoveryServicesVaultExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).RecoveryServices.VaultsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := 
s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %q", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Recovery Services Vault: %q", name) - } - - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Recovery Services Vault %q (resource group: %q) was not found: %+v", name, resourceGroup, err) - } - - return fmt.Errorf("Bad: Get on recoveryServicesVaultsClient: %+v", err) - } - - return nil - } -} - -func testAccAzureRMRecoveryServicesVault_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-recovery-%d" - location = "%s" -} - -resource "azurerm_recovery_services_vault" "test" { - name = "acctest-Vault-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - - soft_delete_enabled = false -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMRecoveryServicesVault_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-recovery-%d" - location = "%s" -} - -resource "azurerm_recovery_services_vault" "test" { - name = "acctest-Vault-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - - soft_delete_enabled = false -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMRecoveryServicesVault_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMRecoveryServicesVault_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_recovery_services_vault" "import" { - name = azurerm_recovery_services_vault.test.name - location = azurerm_recovery_services_vault.test.location - resource_group_name = azurerm_recovery_services_vault.test.resource_group_name - sku = azurerm_recovery_services_vault.test.sku -} -`, template) -} diff --git a/azurerm/internal/services/recoveryservices/tests/resource_arm_site_recovery_fabric_test.go b/azurerm/internal/services/recoveryservices/tests/resource_arm_site_recovery_fabric_test.go deleted file mode 100644 index fcb195318dd8..000000000000 --- a/azurerm/internal/services/recoveryservices/tests/resource_arm_site_recovery_fabric_test.go +++ /dev/null @@ -1,116 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMSiteRecoveryFabric_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_site_recovery_fabric", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMSiteRecoveryFabricDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMSiteRecoveryFabric_basic(data), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMSiteRecoveryFabricExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testAccAzureRMSiteRecoveryFabric_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-recovery-%d" - location = "%s" -} - -resource "azurerm_recovery_services_vault" "test" { - name = "acctest-vault-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - - soft_delete_enabled = false -} - -resource "azurerm_site_recovery_fabric" "test" { - resource_group_name = azurerm_resource_group.test.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - name = "acctest-fabric-%d" - location = azurerm_resource_group.test.location -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testCheckAzureRMSiteRecoveryFabricExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - state, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - resourceGroupName := state.Primary.Attributes["resource_group_name"] - vaultName := state.Primary.Attributes["recovery_vault_name"] - fabricName := state.Primary.Attributes["name"] - - client := acceptance.AzureProvider.Meta().(*clients.Client).RecoveryServices.FabricClient(resourceGroupName, vaultName) - - resp, err := client.Get(ctx, fabricName) - if err != nil { - return fmt.Errorf("Bad: Get on fabricClient: %+v", err) - } - - if resp.Response.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: fabric: %q does not exist", fabricName) - } - - return nil - } -} - -func testCheckAzureRMSiteRecoveryFabricDestroy(s *terraform.State) error { - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_site_recovery_fabric" { - continue - } - - resourceGroupName := rs.Primary.Attributes["resource_group_name"] - vaultName := rs.Primary.Attributes["recovery_vault_name"] - fabricName := rs.Primary.Attributes["name"] - - client := acceptance.AzureProvider.Meta().(*clients.Client).RecoveryServices.FabricClient(resourceGroupName, vaultName) - - resp, err := client.Get(ctx, fabricName) - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Fabric still exists:\n%#v", resp.Properties) - } - } - - return nil -} diff --git a/azurerm/internal/services/recoveryservices/tests/resource_arm_site_recovery_network_mapping_test.go b/azurerm/internal/services/recoveryservices/tests/resource_arm_site_recovery_network_mapping_test.go deleted file mode 100644 index 5a1d1ee2a303..000000000000 --- a/azurerm/internal/services/recoveryservices/tests/resource_arm_site_recovery_network_mapping_test.go +++ /dev/null @@ -1,166 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" 
-) - -func TestAccAzureRMSiteRecoveryNetworkMapping_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_site_recovery_network_mapping", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMSiteRecoveryNetworkMappingDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMSiteRecoveryNetworkMapping_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMSiteRecoveryNetworkMappingExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testAccAzureRMSiteRecoveryNetworkMapping_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-recovery-%d-1" - location = "%s" -} - -resource "azurerm_recovery_services_vault" "test" { - name = "acctest-vault-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - - soft_delete_enabled = false -} - -resource "azurerm_site_recovery_fabric" "test1" { - resource_group_name = azurerm_resource_group.test.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - name = "acctest-fabric1-%d" - location = azurerm_resource_group.test.location -} - -resource "azurerm_site_recovery_fabric" "test2" { - resource_group_name = azurerm_resource_group.test.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - name = "acctest-fabric2-%d" - location = "%s" - depends_on = [azurerm_site_recovery_fabric.test1] -} - -resource "azurerm_virtual_network" "test1" { - name = "network1-%d" - resource_group_name = azurerm_resource_group.test.name - address_space = ["192.168.1.0/24"] - location = azurerm_site_recovery_fabric.test1.location -} - -resource "azurerm_virtual_network" "test2" { - name = "network2-%d" - resource_group_name = azurerm_resource_group.test.name - address_space = ["192.168.2.0/24"] - location = azurerm_site_recovery_fabric.test2.location -} - -resource "azurerm_site_recovery_network_mapping" "test" { - resource_group_name = azurerm_resource_group.test.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - name = "mapping-%d" - source_recovery_fabric_name = azurerm_site_recovery_fabric.test1.name - target_recovery_fabric_name = azurerm_site_recovery_fabric.test2.name - source_network_id = azurerm_virtual_network.test1.id - target_network_id = azurerm_virtual_network.test2.id -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.Locations.Secondary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testCheckAzureRMSiteRecoveryNetworkMappingExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - state, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - resourceGroupName := state.Primary.Attributes["resource_group_name"] - vaultName := state.Primary.Attributes["recovery_vault_name"] - fabricName := state.Primary.Attributes["source_recovery_fabric_name"] - networkId := state.Primary.Attributes["source_network_id"] - mappingName := state.Primary.Attributes["name"] - - id, err := azure.ParseAzureResourceID(networkId) - if err != 
nil { - return err - } - networkName := id.Path["virtualnetworks"] - - client := acceptance.AzureProvider.Meta().(*clients.Client).RecoveryServices.NetworkMappingClient(resourceGroupName, vaultName) - - // TODO Fix Bad: networkMapping error - resp, err := client.Get(ctx, fabricName, networkName, mappingName) - if err != nil { - if resp.Response.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: networkMapping %q (network %q, %q) does not exist", mappingName, networkName, networkId) - } - - return fmt.Errorf("Bad: Get on networkMappingClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMSiteRecoveryNetworkMappingDestroy(s *terraform.State) error { - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_site_recovery_network_mapping" { - continue - } - - resourceGroupName := rs.Primary.Attributes["resource_group_name"] - vaultName := rs.Primary.Attributes["recovery_vault_name"] - fabricName := rs.Primary.Attributes["source_recovery_fabric_name"] - networkId := rs.Primary.Attributes["source_network_id"] - mappingName := rs.Primary.Attributes["name"] - - id, err := azure.ParseAzureResourceID(networkId) - if err != nil { - return err - } - networkName := id.Path["virtualNetworks"] - - client := acceptance.AzureProvider.Meta().(*clients.Client).RecoveryServices.NetworkMappingClient(resourceGroupName, vaultName) - - resp, err := client.Get(ctx, fabricName, networkName, mappingName) - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Network Mapping still exists:\n%#v", resp.Properties) - } - } - - return nil -} diff --git a/azurerm/internal/services/recoveryservices/tests/resource_arm_site_recovery_protection_container_mapping_test.go b/azurerm/internal/services/recoveryservices/tests/resource_arm_site_recovery_protection_container_mapping_test.go deleted file mode 100644 index ffcbd8315ad0..000000000000 --- a/azurerm/internal/services/recoveryservices/tests/resource_arm_site_recovery_protection_container_mapping_test.go +++ /dev/null @@ -1,160 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMSiteRecoveryProtectionContainerMapping_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_site_recovery_protection_container_mapping", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMSiteRecoveryProtectionContainerMappingDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMSiteRecoveryProtectionContainerMapping_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMSiteRecoveryProtectionContainerMappingExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testAccAzureRMSiteRecoveryProtectionContainerMapping_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test1" { - name = "acctestRG-recovery-%d-1" - location = "%s" -} - -resource "azurerm_recovery_services_vault" "test" { - name = "acctest-vault-%d" - location = 
azurerm_resource_group.test1.location - resource_group_name = azurerm_resource_group.test1.name - sku = "Standard" - - soft_delete_enabled = false -} - -resource "azurerm_site_recovery_fabric" "test1" { - resource_group_name = azurerm_resource_group.test1.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - name = "acctest-fabric1-%d" - location = azurerm_resource_group.test1.location -} - -resource "azurerm_site_recovery_fabric" "test2" { - resource_group_name = azurerm_resource_group.test1.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - name = "acctest-fabric2-%d" - location = "%s" - depends_on = [azurerm_site_recovery_fabric.test1] -} - -resource "azurerm_site_recovery_protection_container" "test1" { - resource_group_name = azurerm_resource_group.test1.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - recovery_fabric_name = azurerm_site_recovery_fabric.test1.name - name = "acctest-protection-cont1-%d" -} - -resource "azurerm_site_recovery_protection_container" "test2" { - resource_group_name = azurerm_resource_group.test1.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - recovery_fabric_name = azurerm_site_recovery_fabric.test2.name - name = "acctest-protection-cont2-%d" -} - -resource "azurerm_site_recovery_replication_policy" "test" { - resource_group_name = azurerm_resource_group.test1.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - name = "acctest-policy-%d" - recovery_point_retention_in_minutes = 24 * 60 - application_consistent_snapshot_frequency_in_minutes = 4 * 60 -} - -resource "azurerm_site_recovery_protection_container_mapping" "test" { - resource_group_name = azurerm_resource_group.test1.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - recovery_fabric_name = azurerm_site_recovery_fabric.test1.name - recovery_source_protection_container_name = azurerm_site_recovery_protection_container.test1.name - recovery_target_protection_container_id = azurerm_site_recovery_protection_container.test2.id - recovery_replication_policy_id = azurerm_site_recovery_replication_policy.test.id - name = "mapping-%d" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.Locations.Secondary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testCheckAzureRMSiteRecoveryProtectionContainerMappingExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - state, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - resourceGroupName := state.Primary.Attributes["resource_group_name"] - vaultName := state.Primary.Attributes["recovery_vault_name"] - fabricName := state.Primary.Attributes["recovery_fabric_name"] - protectionContainerName := state.Primary.Attributes["recovery_source_protection_container_name"] - mappingName := state.Primary.Attributes["name"] - - client := acceptance.AzureProvider.Meta().(*clients.Client).RecoveryServices.ContainerMappingClient(resourceGroupName, vaultName) - - resp, err := client.Get(ctx, fabricName, protectionContainerName, mappingName) - if err != nil { - return fmt.Errorf("Bad: Get on fabricClient: %+v", err) - } - - if resp.Response.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: 
fabric: %q does not exist", fabricName) - } - - return nil - } -} - -func testCheckAzureRMSiteRecoveryProtectionContainerMappingDestroy(s *terraform.State) error { - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_site_recovery_protection_container_mapping" { - continue - } - - resourceGroupName := rs.Primary.Attributes["resource_group_name"] - vaultName := rs.Primary.Attributes["recovery_vault_name"] - fabricName := rs.Primary.Attributes["recovery_fabric_name"] - protectionContainerName := rs.Primary.Attributes["recovery_source_protection_container_name"] - mappingName := rs.Primary.Attributes["name"] - - client := acceptance.AzureProvider.Meta().(*clients.Client).RecoveryServices.ContainerMappingClient(resourceGroupName, vaultName) - - resp, err := client.Get(ctx, fabricName, protectionContainerName, mappingName) - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Container Mapping still exists:\n%#v", resp.Properties) - } - } - - return nil -} diff --git a/azurerm/internal/services/recoveryservices/tests/resource_arm_site_recovery_protection_container_test.go b/azurerm/internal/services/recoveryservices/tests/resource_arm_site_recovery_protection_container_test.go deleted file mode 100644 index 5d98f7a0f1a3..000000000000 --- a/azurerm/internal/services/recoveryservices/tests/resource_arm_site_recovery_protection_container_test.go +++ /dev/null @@ -1,125 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMSiteRecoveryProtectionContainer_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_site_recovery_protection_container", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMSiteRecoveryProtectionContainerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMSiteRecoveryProtectionContainer_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMSiteRecoveryProtectionContainerExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testAccAzureRMSiteRecoveryProtectionContainer_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-recovery-%d" - location = "%s" -} - -resource "azurerm_recovery_services_vault" "test" { - name = "acctest-vault-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - - soft_delete_enabled = false -} - -resource "azurerm_site_recovery_fabric" "test" { - resource_group_name = azurerm_resource_group.test.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - name = "acctest-fabric-%d" - location = azurerm_resource_group.test.location -} - -resource "azurerm_site_recovery_protection_container" "test" { - resource_group_name = azurerm_resource_group.test.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - recovery_fabric_name = azurerm_site_recovery_fabric.test.name - name 
= "acctest-protection-cont-%d" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) -} - -func testCheckAzureRMSiteRecoveryProtectionContainerExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - state, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - resourceGroupName := state.Primary.Attributes["resource_group_name"] - vaultName := state.Primary.Attributes["recovery_vault_name"] - fabricName := state.Primary.Attributes["recovery_fabric_name"] - protectionContainerName := state.Primary.Attributes["name"] - - client := acceptance.AzureProvider.Meta().(*clients.Client).RecoveryServices.ProtectionContainerClient(resourceGroupName, vaultName) - - resp, err := client.Get(ctx, fabricName, protectionContainerName) - if err != nil { - return fmt.Errorf("Bad: Get on RecoveryServices.ProtectionContainerClient: %+v", err) - } - - if resp.Response.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Protection Container: %q does not exist", fabricName) - } - - return nil - } -} - -func testCheckAzureRMSiteRecoveryProtectionContainerDestroy(s *terraform.State) error { - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_site_recovery_protection_container" { - continue - } - - resourceGroupName := rs.Primary.Attributes["resource_group_name"] - vaultName := rs.Primary.Attributes["recovery_vault_name"] - fabricName := rs.Primary.Attributes["recovery_fabric_name"] - protectionContainerName := rs.Primary.Attributes["name"] - - client := acceptance.AzureProvider.Meta().(*clients.Client).RecoveryServices.ProtectionContainerClient(resourceGroupName, vaultName) - - resp, err := client.Get(ctx, fabricName, protectionContainerName) - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Protection Container still exists:\n%#v", resp.Properties) - } - } - - return nil -} diff --git a/azurerm/internal/services/recoveryservices/tests/resource_arm_site_recovery_replicated_vm_test.go b/azurerm/internal/services/recoveryservices/tests/resource_arm_site_recovery_replicated_vm_test.go deleted file mode 100644 index 4b8b1ce2fe19..000000000000 --- a/azurerm/internal/services/recoveryservices/tests/resource_arm_site_recovery_replicated_vm_test.go +++ /dev/null @@ -1,303 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMSiteRecoveryReplicatedVm_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_site_recovery_replicated_vm", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMSiteRecoveryReplicatedVmDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMSiteRecoveryReplicatedVm_basic(data), - Check: resource.ComposeTestCheckFunc( - 
testCheckAzureRMSiteRecoveryReplicatedVmExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testAccAzureRMSiteRecoveryReplicatedVm_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-recovery-%[1]d-1" - location = "%[2]s" -} - -resource "azurerm_resource_group" "test2" { - name = "acctestRG-recovery-%[1]d-2" - location = "%[3]s" -} - -resource "azurerm_recovery_services_vault" "test" { - name = "acctest-vault-%[1]d" - location = azurerm_resource_group.test2.location - resource_group_name = azurerm_resource_group.test2.name - sku = "Standard" - - soft_delete_enabled = false -} - -resource "azurerm_site_recovery_fabric" "test1" { - resource_group_name = azurerm_resource_group.test2.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - name = "acctest-fabric1-%[1]d" - location = azurerm_resource_group.test.location -} - -resource "azurerm_site_recovery_fabric" "test2" { - resource_group_name = azurerm_resource_group.test2.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - name = "acctest-fabric2-%[1]d" - location = azurerm_resource_group.test2.location - depends_on = [azurerm_site_recovery_fabric.test1] -} - -resource "azurerm_site_recovery_protection_container" "test1" { - resource_group_name = azurerm_resource_group.test2.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - recovery_fabric_name = azurerm_site_recovery_fabric.test1.name - name = "acctest-protection-cont1-%[1]d" -} - -resource "azurerm_site_recovery_protection_container" "test2" { - resource_group_name = azurerm_resource_group.test2.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - recovery_fabric_name = azurerm_site_recovery_fabric.test2.name - name = "acctest-protection-cont2-%[1]d" -} - -resource "azurerm_site_recovery_replication_policy" "test" { - resource_group_name = azurerm_resource_group.test2.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - name = "acctest-policy-%[1]d" - recovery_point_retention_in_minutes = 24 * 60 - application_consistent_snapshot_frequency_in_minutes = 4 * 60 -} - -resource "azurerm_site_recovery_protection_container_mapping" "test" { - resource_group_name = azurerm_resource_group.test2.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - recovery_fabric_name = azurerm_site_recovery_fabric.test1.name - recovery_source_protection_container_name = azurerm_site_recovery_protection_container.test1.name - recovery_target_protection_container_id = azurerm_site_recovery_protection_container.test2.id - recovery_replication_policy_id = azurerm_site_recovery_replication_policy.test.id - name = "mapping-%[1]d" -} - -resource "azurerm_virtual_network" "test1" { - name = "net-%[1]d" - resource_group_name = azurerm_resource_group.test.name - address_space = ["192.168.1.0/24"] - location = azurerm_site_recovery_fabric.test1.location -} - -resource "azurerm_subnet" "test1" { - name = "snet-%[1]d" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test1.name - address_prefix = "192.168.1.0/24" -} - -resource "azurerm_virtual_network" "test2" { - name = "net-%[1]d" - resource_group_name = azurerm_resource_group.test2.name - address_space = ["192.168.2.0/24"] - location = azurerm_site_recovery_fabric.test2.location -} - -resource "azurerm_subnet" "test2_1" { - name = "acctest-snet-%[1]d_1" - 
resource_group_name = "${azurerm_resource_group.test2.name}" - virtual_network_name = "${azurerm_virtual_network.test2.name}" - address_prefix = "192.168.2.0/27" -} - -resource "azurerm_subnet" "test2_2" { - name = "snet-%[1]d_2" - resource_group_name = "${azurerm_resource_group.test2.name}" - virtual_network_name = "${azurerm_virtual_network.test2.name}" - address_prefix = "192.168.2.32/27" -} - -resource "azurerm_subnet" "test2_3" { - name = "snet-%[1]d_3" - resource_group_name = "${azurerm_resource_group.test2.name}" - virtual_network_name = "${azurerm_virtual_network.test2.name}" - address_prefix = "192.168.2.64/27" -} - -resource "azurerm_site_recovery_network_mapping" "test" { - resource_group_name = azurerm_resource_group.test2.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - name = "mapping-%[1]d" - source_recovery_fabric_name = azurerm_site_recovery_fabric.test1.name - target_recovery_fabric_name = azurerm_site_recovery_fabric.test2.name - source_network_id = azurerm_virtual_network.test1.id - target_network_id = azurerm_virtual_network.test2.id -} - -resource "azurerm_network_interface" "test" { - name = "vm-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - ip_configuration { - name = "vm-%[1]d" - subnet_id = azurerm_subnet.test1.id - private_ip_address_allocation = "Dynamic" - } -} - -resource "azurerm_virtual_machine" "test" { - name = "vm-%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - vm_size = "Standard_B1s" - - storage_image_reference { - publisher = "OpenLogic" - offer = "CentOS" - sku = "7.5" - version = "latest" - } - - storage_os_disk { - name = "disk-%[1]d" - os_type = "Linux" - caching = "ReadWrite" - create_option = "FromImage" - managed_disk_type = "Premium_LRS" - } - - os_profile { - admin_username = "testadmin" - admin_password = "Password1234!" 
- computer_name = "vm-%[1]d" - } - - os_profile_linux_config { - disable_password_authentication = false - } - network_interface_ids = [azurerm_network_interface.test.id] -} - -resource "azurerm_storage_account" "test" { - name = "acct%[1]d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - account_tier = "Standard" - account_replication_type = "LRS" -} - -resource "azurerm_site_recovery_replicated_vm" "test" { - name = "repl-%[1]d" - resource_group_name = azurerm_resource_group.test2.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - source_vm_id = azurerm_virtual_machine.test.id - source_recovery_fabric_name = azurerm_site_recovery_fabric.test1.name - recovery_replication_policy_id = azurerm_site_recovery_replication_policy.test.id - source_recovery_protection_container_name = azurerm_site_recovery_protection_container.test1.name - - target_resource_group_id = azurerm_resource_group.test2.id - target_recovery_fabric_id = azurerm_site_recovery_fabric.test2.id - target_recovery_protection_container_id = azurerm_site_recovery_protection_container.test2.id - - managed_disk { - disk_id = azurerm_virtual_machine.test.storage_os_disk[0].managed_disk_id - staging_storage_account_id = azurerm_storage_account.test.id - target_resource_group_id = azurerm_resource_group.test2.id - target_disk_type = "Premium_LRS" - target_replica_disk_type = "Premium_LRS" - } - - network_interface { - source_network_interface_id = azurerm_network_interface.test.id - target_subnet_name = "snet-%[1]d_2" - } - - depends_on = [ - azurerm_site_recovery_protection_container_mapping.test, - azurerm_site_recovery_network_mapping.test, - ] -} -`, data.RandomInteger, data.Locations.Primary, data.Locations.Secondary) -} - -func testCheckAzureRMSiteRecoveryReplicatedVmExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - state, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - resourceGroupName := state.Primary.Attributes["resource_group_name"] - vaultName := state.Primary.Attributes["recovery_vault_name"] - fabricName := state.Primary.Attributes["source_recovery_fabric_name"] - protectionContainerName := state.Primary.Attributes["source_recovery_protection_container_name"] - replicationName := state.Primary.Attributes["name"] - - client := acceptance.AzureProvider.Meta().(*clients.Client).RecoveryServices.ReplicationMigrationItemsClient(resourceGroupName, vaultName) - - resp, err := client.Get(ctx, fabricName, protectionContainerName, replicationName) - if err != nil { - return fmt.Errorf("Bad: Get on replicationVmClient: %+v", err) - } - - if resp.Response.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: fabric: %q does not exist", fabricName) - } - - return nil - } -} - -func testCheckAzureRMSiteRecoveryReplicatedVmDestroy(s *terraform.State) error { - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_site_recovery_replicated_vm" { - continue - } - - resourceGroupName := rs.Primary.Attributes["resource_group_name"] - vaultName := rs.Primary.Attributes["recovery_vault_name"] - fabricName := rs.Primary.Attributes["source_recovery_fabric_name"] - protectionContainerName := 
rs.Primary.Attributes["source_recovery_protection_container_name"] - replicationName := rs.Primary.Attributes["name"] - - client := acceptance.AzureProvider.Meta().(*clients.Client).RecoveryServices.ReplicationMigrationItemsClient(resourceGroupName, vaultName) - - resp, err := client.Get(ctx, fabricName, protectionContainerName, replicationName) - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Replicated VM still exists:\n%#v", resp.Properties) - } - } - - return nil -} diff --git a/azurerm/internal/services/recoveryservices/tests/resource_arm_site_recovery_replication_policy_test.go b/azurerm/internal/services/recoveryservices/tests/resource_arm_site_recovery_replication_policy_test.go deleted file mode 100644 index eb624ad3f356..000000000000 --- a/azurerm/internal/services/recoveryservices/tests/resource_arm_site_recovery_replication_policy_test.go +++ /dev/null @@ -1,116 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMSiteRecoveryReplicationPolicy_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_site_recovery_replication_policy", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMSiteRecoveryReplicationPolicyDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMSiteRecoveryReplicationPolicy_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMSiteRecoveryReplicationPolicyExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func testAccAzureRMSiteRecoveryReplicationPolicy_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-recovery-%d" - location = "%s" -} - -resource "azurerm_recovery_services_vault" "test" { - name = "acctest-vault-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - - soft_delete_enabled = false -} - -resource "azurerm_site_recovery_replication_policy" "test" { - resource_group_name = azurerm_resource_group.test.name - recovery_vault_name = azurerm_recovery_services_vault.test.name - name = "acctest-policy-%d" - recovery_point_retention_in_minutes = 24 * 60 - application_consistent_snapshot_frequency_in_minutes = 4 * 60 -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testCheckAzureRMSiteRecoveryReplicationPolicyExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - state, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - resourceGroupName := state.Primary.Attributes["resource_group_name"] - vaultName := state.Primary.Attributes["recovery_vault_name"] - policyName := state.Primary.Attributes["name"] - - client := 
acceptance.AzureProvider.Meta().(*clients.Client).RecoveryServices.ReplicationPoliciesClient(resourceGroupName, vaultName) - - resp, err := client.Get(ctx, policyName) - if err != nil { - return fmt.Errorf("Bad: Get on RecoveryServices.ReplicationPoliciesClient: %+v", err) - } - - if resp.Response.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: replication policy: %q does not exist", policyName) - } - - return nil - } -} - -func testCheckAzureRMSiteRecoveryReplicationPolicyDestroy(s *terraform.State) error { - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_site_recovery_replication_policy" { - continue - } - - resourceGroupName := rs.Primary.Attributes["resource_group_name"] - vaultName := rs.Primary.Attributes["recovery_vault_name"] - policyName := rs.Primary.Attributes["name"] - - client := acceptance.AzureProvider.Meta().(*clients.Client).RecoveryServices.ReplicationPoliciesClient(resourceGroupName, vaultName) - resp, err := client.Get(ctx, policyName) - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Replication Policy still exists:\n%#v", resp.Properties) - } - } - - return nil -} diff --git a/azurerm/internal/services/redis/data_source_redis_cache.go b/azurerm/internal/services/redis/data_source_redis_cache.go deleted file mode 100644 index 8e183cd82623..000000000000 --- a/azurerm/internal/services/redis/data_source_redis_cache.go +++ /dev/null @@ -1,298 +0,0 @@ -package redis - -import ( - "fmt" - "time" - - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func dataSourceArmRedisCache() *schema.Resource { - return &schema.Resource{ - Read: dataSourceArmRedisCacheRead, - - Timeouts: &schema.ResourceTimeout{ - Read: schema.DefaultTimeout(5 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - }, - - "location": azure.SchemaLocationForDataSource(), - - "resource_group_name": azure.SchemaResourceGroupNameForDataSource(), - - "zones": azure.SchemaZonesComputed(), - - "capacity": { - Type: schema.TypeInt, - Computed: true, - }, - - "family": { - Type: schema.TypeString, - Computed: true, - }, - - "sku_name": { - Type: schema.TypeString, - Computed: true, - }, - - "minimum_tls_version": { - Type: schema.TypeString, - Computed: true, - }, - - "shard_count": { - Type: schema.TypeInt, - Computed: true, - }, - - "enable_non_ssl_port": { - Type: schema.TypeBool, - Computed: true, - }, - - "subnet_id": { - Type: schema.TypeString, - Computed: true, - }, - - "private_static_ip_address": { - Type: schema.TypeString, - Computed: true, - }, - - "redis_configuration": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "maxclients": { - Type: schema.TypeInt, - Computed: true, - }, - - "maxmemory_delta": { - Type: schema.TypeInt, - Computed: true, - }, - - "maxmemory_reserved": { - Type: schema.TypeInt, - Computed: true, - }, - - "maxmemory_policy": { - Type: schema.TypeString, - Computed: 
true, - }, - - "maxfragmentationmemory_reserved": { - Type: schema.TypeInt, - Computed: true, - }, - - "rdb_backup_enabled": { - Type: schema.TypeBool, - Computed: true, - }, - - "rdb_backup_frequency": { - Type: schema.TypeInt, - Computed: true, - }, - - "rdb_backup_max_snapshot_count": { - Type: schema.TypeInt, - Computed: true, - }, - - "rdb_storage_connection_string": { - Type: schema.TypeString, - Computed: true, - Sensitive: true, - }, - - "notify_keyspace_events": { - Type: schema.TypeString, - Computed: true, - }, - - "aof_backup_enabled": { - Type: schema.TypeBool, - Computed: true, - }, - - "aof_storage_connection_string_0": { - Type: schema.TypeString, - Computed: true, - Sensitive: true, - }, - - "aof_storage_connection_string_1": { - Type: schema.TypeString, - Computed: true, - Sensitive: true, - }, - "enable_authentication": { - Type: schema.TypeBool, - Computed: true, - }, - }, - }, - }, - - "patch_schedule": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "day_of_week": { - Type: schema.TypeString, - Computed: true, - }, - "start_hour_utc": { - Type: schema.TypeInt, - Computed: true, - }, - }, - }, - }, - - "hostname": { - Type: schema.TypeString, - Computed: true, - }, - - "port": { - Type: schema.TypeInt, - Computed: true, - }, - - "ssl_port": { - Type: schema.TypeInt, - Computed: true, - }, - - "primary_access_key": { - Type: schema.TypeString, - Computed: true, - Sensitive: true, - }, - - "secondary_access_key": { - Type: schema.TypeString, - Computed: true, - Sensitive: true, - }, - - "primary_connection_string": { - Type: schema.TypeString, - Computed: true, - Sensitive: true, - }, - - "secondary_connection_string": { - Type: schema.TypeString, - Computed: true, - Sensitive: true, - }, - - "tags": tags.SchemaDataSource(), - }, - } -} - -func dataSourceArmRedisCacheRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Redis.Client - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - resourceGroup := d.Get("resource_group_name").(string) - name := d.Get("name").(string) - - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Error: Redis instance %q (Resource group %q) was not found", name, resourceGroup) - } - return fmt.Errorf("Error reading the state of Redis instance %q: %+v", name, err) - } - - d.SetId(*resp.ID) - - if location := resp.Location; location != nil { - d.Set("location", azure.NormalizeLocation(*location)) - } - - if zones := resp.Zones; zones != nil { - d.Set("zones", zones) - } - - if sku := resp.Sku; sku != nil { - d.Set("capacity", sku.Capacity) - d.Set("family", sku.Family) - d.Set("sku_name", sku.Name) - } - - props := resp.Properties - if props != nil { - d.Set("ssl_port", props.SslPort) - d.Set("hostname", props.HostName) - d.Set("minimum_tls_version", string(props.MinimumTLSVersion)) - d.Set("port", props.Port) - d.Set("enable_non_ssl_port", props.EnableNonSslPort) - if props.ShardCount != nil { - d.Set("shard_count", props.ShardCount) - } - d.Set("private_static_ip_address", props.StaticIP) - d.Set("subnet_id", props.SubnetID) - } - - redisConfiguration, err := flattenRedisConfiguration(resp.RedisConfiguration) - - if err != nil { - return fmt.Errorf("Error flattening `redis_configuration`: %+v", err) - } - if err := d.Set("redis_configuration", redisConfiguration); err != nil { - return fmt.Errorf("Error setting 
`redis_configuration`: %+v", err) - } - - patchSchedulesClient := meta.(*clients.Client).Redis.PatchSchedulesClient - - schedule, err := patchSchedulesClient.Get(ctx, resourceGroup, name) - if err == nil { - patchSchedule := flattenRedisPatchSchedules(schedule) - if err = d.Set("patch_schedule", patchSchedule); err != nil { - return fmt.Errorf("Error setting `patch_schedule`: %+v", err) - } - } else { - d.Set("patch_schedule", []interface{}{}) - } - - keys, err := client.ListKeys(ctx, resourceGroup, name) - if err != nil { - return err - } - - d.Set("primary_access_key", keys.PrimaryKey) - d.Set("secondary_access_key", keys.SecondaryKey) - - if props != nil { - enableSslPort := !*props.EnableNonSslPort - d.Set("primary_connection_string", getRedisConnectionString(*props.HostName, *props.SslPort, *keys.PrimaryKey, enableSslPort)) - d.Set("secondary_connection_string", getRedisConnectionString(*props.HostName, *props.SslPort, *keys.SecondaryKey, enableSslPort)) - } - - return tags.FlattenAndSet(d, resp.Tags) -} diff --git a/azurerm/internal/services/redis/parse/cache.go b/azurerm/internal/services/redis/parse/cache.go new file mode 100644 index 000000000000..7b7fdc8a3c64 --- /dev/null +++ b/azurerm/internal/services/redis/parse/cache.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type CacheId struct { + SubscriptionId string + ResourceGroup string + RediName string +} + +func NewCacheID(subscriptionId, resourceGroup, rediName string) CacheId { + return CacheId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + RediName: rediName, + } +} + +func (id CacheId) String() string { + segments := []string{ + fmt.Sprintf("Redi Name %q", id.RediName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Cache", segmentsStr) +} + +func (id CacheId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Cache/Redis/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.RediName) +} + +// CacheID parses a Cache ID into an CacheId struct +func CacheID(input string) (*CacheId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := CacheId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.RediName, err = id.PopSegment("Redis"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/redis/parse/cache_test.go b/azurerm/internal/services/redis/parse/cache_test.go new file mode 100644 index 000000000000..b47ece61f2e2 --- /dev/null +++ b/azurerm/internal/services/redis/parse/cache_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = CacheId{} + +func TestCacheIDFormatter(t 
*testing.T) { + actual := NewCacheID("12345678-1234-9876-4563-123456789012", "resGroup1", "redis1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/Redis/redis1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestCacheID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *CacheId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing RediName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/", + Error: true, + }, + + { + // missing value for RediName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/Redis/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/Redis/redis1", + Expected: &CacheId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + RediName: "redis1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.CACHE/REDIS/REDIS1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := CacheID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.RediName != v.Expected.RediName { + t.Fatalf("Expected %q but got %q for RediName", v.Expected.RediName, actual.RediName) + } + } +} diff --git a/azurerm/internal/services/redis/parse/firewall_rule.go b/azurerm/internal/services/redis/parse/firewall_rule.go new file mode 100644 index 000000000000..2440409b3966 --- /dev/null +++ b/azurerm/internal/services/redis/parse/firewall_rule.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type FirewallRuleId struct { + SubscriptionId string + ResourceGroup string + RediName string + Name string +} + +func NewFirewallRuleID(subscriptionId, resourceGroup, rediName, name string) FirewallRuleId { + return FirewallRuleId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + RediName: rediName, + Name: name, + } +} + +func (id FirewallRuleId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Redi Name %q", id.RediName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := 
strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Firewall Rule", segmentsStr) +} + +func (id FirewallRuleId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Cache/Redis/%s/firewallRules/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.RediName, id.Name) +} + +// FirewallRuleID parses a FirewallRule ID into an FirewallRuleId struct +func FirewallRuleID(input string) (*FirewallRuleId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := FirewallRuleId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.RediName, err = id.PopSegment("Redis"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("firewallRules"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/redis/parse/firewall_rule_test.go b/azurerm/internal/services/redis/parse/firewall_rule_test.go new file mode 100644 index 000000000000..ed48464c9292 --- /dev/null +++ b/azurerm/internal/services/redis/parse/firewall_rule_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = FirewallRuleId{} + +func TestFirewallRuleIDFormatter(t *testing.T) { + actual := NewFirewallRuleID("12345678-1234-9876-4563-123456789012", "resGroup1", "redis1", "firewallRule1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/Redis/redis1/firewallRules/firewallRule1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestFirewallRuleID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *FirewallRuleId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing RediName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/", + Error: true, + }, + + { + // missing value for RediName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/Redis/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/Redis/redis1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/Redis/redis1/firewallRules/", + Error: true, + }, + + { + // valid + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/Redis/redis1/firewallRules/firewallRule1", + Expected: &FirewallRuleId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + RediName: "redis1", + Name: "firewallRule1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.CACHE/REDIS/REDIS1/FIREWALLRULES/FIREWALLRULE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := FirewallRuleID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.RediName != v.Expected.RediName { + t.Fatalf("Expected %q but got %q for RediName", v.Expected.RediName, actual.RediName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/redis/parse/linked_server.go b/azurerm/internal/services/redis/parse/linked_server.go new file mode 100644 index 000000000000..b3bbfcd1ef25 --- /dev/null +++ b/azurerm/internal/services/redis/parse/linked_server.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type LinkedServerId struct { + SubscriptionId string + ResourceGroup string + RediName string + Name string +} + +func NewLinkedServerID(subscriptionId, resourceGroup, rediName, name string) LinkedServerId { + return LinkedServerId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + RediName: rediName, + Name: name, + } +} + +func (id LinkedServerId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Redi Name %q", id.RediName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Linked Server", segmentsStr) +} + +func (id LinkedServerId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Cache/Redis/%s/linkedServers/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.RediName, id.Name) +} + +// LinkedServerID parses a LinkedServer ID into an LinkedServerId struct +func LinkedServerID(input string) (*LinkedServerId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := LinkedServerId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.RediName, err = id.PopSegment("Redis"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("linkedServers"); err != nil { + 
return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/redis/parse/linked_server_test.go b/azurerm/internal/services/redis/parse/linked_server_test.go new file mode 100644 index 000000000000..ab44789616f2 --- /dev/null +++ b/azurerm/internal/services/redis/parse/linked_server_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = LinkedServerId{} + +func TestLinkedServerIDFormatter(t *testing.T) { + actual := NewLinkedServerID("12345678-1234-9876-4563-123456789012", "resGroup1", "redis1", "linkedServer1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/Redis/redis1/linkedServers/linkedServer1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestLinkedServerID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *LinkedServerId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing RediName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/", + Error: true, + }, + + { + // missing value for RediName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/Redis/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/Redis/redis1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/Redis/redis1/linkedServers/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/Redis/redis1/linkedServers/linkedServer1", + Expected: &LinkedServerId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + RediName: "redis1", + Name: "linkedServer1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.CACHE/REDIS/REDIS1/LINKEDSERVERS/LINKEDSERVER1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := LinkedServerID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", 
v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.RediName != v.Expected.RediName { + t.Fatalf("Expected %q but got %q for RediName", v.Expected.RediName, actual.RediName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/redis/redis_cache_data_source.go b/azurerm/internal/services/redis/redis_cache_data_source.go new file mode 100644 index 000000000000..9e0a740e9f57 --- /dev/null +++ b/azurerm/internal/services/redis/redis_cache_data_source.go @@ -0,0 +1,294 @@ +package redis + +import ( + "fmt" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/redis/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func dataSourceRedisCache() *schema.Resource { + return &schema.Resource{ + Read: dataSourceRedisCacheRead, + + Timeouts: &schema.ResourceTimeout{ + Read: schema.DefaultTimeout(5 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + }, + + "location": azure.SchemaLocationForDataSource(), + + "resource_group_name": azure.SchemaResourceGroupNameForDataSource(), + + "zones": azure.SchemaZonesComputed(), + + "capacity": { + Type: schema.TypeInt, + Computed: true, + }, + + "family": { + Type: schema.TypeString, + Computed: true, + }, + + "sku_name": { + Type: schema.TypeString, + Computed: true, + }, + + "minimum_tls_version": { + Type: schema.TypeString, + Computed: true, + }, + + "shard_count": { + Type: schema.TypeInt, + Computed: true, + }, + + "enable_non_ssl_port": { + Type: schema.TypeBool, + Computed: true, + }, + + "subnet_id": { + Type: schema.TypeString, + Computed: true, + }, + + "private_static_ip_address": { + Type: schema.TypeString, + Computed: true, + }, + + "redis_configuration": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "maxclients": { + Type: schema.TypeInt, + Computed: true, + }, + + "maxmemory_delta": { + Type: schema.TypeInt, + Computed: true, + }, + + "maxmemory_reserved": { + Type: schema.TypeInt, + Computed: true, + }, + + "maxmemory_policy": { + Type: schema.TypeString, + Computed: true, + }, + + "maxfragmentationmemory_reserved": { + Type: schema.TypeInt, + Computed: true, + }, + + "rdb_backup_enabled": { + Type: schema.TypeBool, + Computed: true, + }, + + "rdb_backup_frequency": { + Type: schema.TypeInt, + Computed: true, + }, + + "rdb_backup_max_snapshot_count": { + Type: schema.TypeInt, + Computed: true, + }, + + "rdb_storage_connection_string": { + Type: schema.TypeString, + Computed: true, + Sensitive: true, + }, + + "notify_keyspace_events": { + Type: schema.TypeString, + Computed: true, + }, + + "aof_backup_enabled": { + Type: schema.TypeBool, + Computed: true, + }, + + "aof_storage_connection_string_0": { + Type: schema.TypeString, + Computed: true, + Sensitive: true, + }, + + "aof_storage_connection_string_1": { + Type: 
schema.TypeString, + Computed: true, + Sensitive: true, + }, + "enable_authentication": { + Type: schema.TypeBool, + Computed: true, + }, + }, + }, + }, + + "patch_schedule": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "day_of_week": { + Type: schema.TypeString, + Computed: true, + }, + "start_hour_utc": { + Type: schema.TypeInt, + Computed: true, + }, + }, + }, + }, + + "hostname": { + Type: schema.TypeString, + Computed: true, + }, + + "port": { + Type: schema.TypeInt, + Computed: true, + }, + + "ssl_port": { + Type: schema.TypeInt, + Computed: true, + }, + + "primary_access_key": { + Type: schema.TypeString, + Computed: true, + Sensitive: true, + }, + + "secondary_access_key": { + Type: schema.TypeString, + Computed: true, + Sensitive: true, + }, + + "primary_connection_string": { + Type: schema.TypeString, + Computed: true, + Sensitive: true, + }, + + "secondary_connection_string": { + Type: schema.TypeString, + Computed: true, + Sensitive: true, + }, + + "tags": tags.SchemaDataSource(), + }, + } +} + +func dataSourceRedisCacheRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Redis.Client + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + patchSchedulesClient := meta.(*clients.Client).Redis.PatchSchedulesClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id := parse.NewCacheID(subscriptionId, d.Get("resource_group_name").(string), d.Get("name").(string)) + resp, err := client.Get(ctx, id.ResourceGroup, id.RediName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Redis Cache %q (Resource Group %q) was not found", id.RediName, id.ResourceGroup) + } + return fmt.Errorf("retrieving Redis Cache %q (Resource Group %q): %+v", id.RediName, id.ResourceGroup, err) + } + + d.SetId(id.ID()) + d.Set("location", location.NormalizeNilable(resp.Location)) + + if zones := resp.Zones; zones != nil { + d.Set("zones", zones) + } + + if sku := resp.Sku; sku != nil { + d.Set("capacity", sku.Capacity) + d.Set("family", sku.Family) + d.Set("sku_name", sku.Name) + } + + props := resp.Properties + if props != nil { + d.Set("ssl_port", props.SslPort) + d.Set("hostname", props.HostName) + d.Set("minimum_tls_version", string(props.MinimumTLSVersion)) + d.Set("port", props.Port) + d.Set("enable_non_ssl_port", props.EnableNonSslPort) + if props.ShardCount != nil { + d.Set("shard_count", props.ShardCount) + } + d.Set("private_static_ip_address", props.StaticIP) + d.Set("subnet_id", props.SubnetID) + } + + redisConfiguration, err := flattenRedisConfiguration(resp.RedisConfiguration) + if err != nil { + return fmt.Errorf("flattening `redis_configuration`: %+v", err) + } + if err := d.Set("redis_configuration", redisConfiguration); err != nil { + return fmt.Errorf("setting `redis_configuration`: %+v", err) + } + + schedule, err := patchSchedulesClient.Get(ctx, id.ResourceGroup, id.RediName) + if err == nil { + patchSchedule := flattenRedisPatchSchedules(schedule) + if err = d.Set("patch_schedule", patchSchedule); err != nil { + return fmt.Errorf("setting `patch_schedule`: %+v", err) + } + } else { + d.Set("patch_schedule", []interface{}{}) + } + + keys, err := client.ListKeys(ctx, id.ResourceGroup, id.RediName) + if err != nil { + return err + } + + d.Set("primary_access_key", keys.PrimaryKey) + d.Set("secondary_access_key", keys.SecondaryKey) + + if props != nil { + enableSslPort := !*props.EnableNonSslPort + 
d.Set("primary_connection_string", getRedisConnectionString(*props.HostName, *props.SslPort, *keys.PrimaryKey, enableSslPort)) + d.Set("secondary_connection_string", getRedisConnectionString(*props.HostName, *props.SslPort, *keys.SecondaryKey, enableSslPort)) + } + + return tags.FlattenAndSet(d, resp.Tags) +} diff --git a/azurerm/internal/services/redis/redis_cache_data_source_test.go b/azurerm/internal/services/redis/redis_cache_data_source_test.go new file mode 100644 index 000000000000..9dfe7055a428 --- /dev/null +++ b/azurerm/internal/services/redis/redis_cache_data_source_test.go @@ -0,0 +1,46 @@ +package redis_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" +) + +type RedisCacheDataSource struct { +} + +func TestAccRedisCacheDataSource_standard(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_redis_cache", "test") + r := RedisCacheDataSource{} + + name := fmt.Sprintf("acctestRedis-%d", data.RandomInteger) + resourceGroupName := fmt.Sprintf("acctestRG-%d", data.RandomInteger) + + data.DataSourceTest(t, []resource.TestStep{ + { + Config: r.standardWithDataSource(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("name").HasValue(name), + check.That(data.ResourceName).Key("resource_group_name").HasValue(resourceGroupName), + check.That(data.ResourceName).Key("ssl_port").HasValue("6380"), + check.That(data.ResourceName).Key("tags.environment").HasValue("production"), + check.That(data.ResourceName).Key("primary_connection_string").Exists(), + check.That(data.ResourceName).Key("secondary_connection_string").Exists(), + ), + }, + }) +} + +func (r RedisCacheDataSource) standardWithDataSource(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +data "azurerm_redis_cache" "test" { + name = azurerm_redis_cache.test.name + resource_group_name = azurerm_redis_cache.test.resource_group_name +} +`, RedisCacheResource{}.standard(data)) +} diff --git a/azurerm/internal/services/redis/redis_cache_resource.go b/azurerm/internal/services/redis/redis_cache_resource.go new file mode 100644 index 000000000000..33346fb258c3 --- /dev/null +++ b/azurerm/internal/services/redis/redis_cache_resource.go @@ -0,0 +1,850 @@ +package redis + +import ( + "context" + "fmt" + "log" + "strconv" + "strings" + "time" + + "github.com/Azure/azure-sdk-for-go/services/redis/mgmt/2018-03-01/redis" + "github.com/hashicorp/go-azure-helpers/response" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network" + networkParse 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/parse" + networkValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/redis/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/redis/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceRedisCache() *schema.Resource { + return &schema.Resource{ + Create: resourceRedisCacheCreate, + Read: resourceRedisCacheRead, + Update: resourceRedisCacheUpdate, + Delete: resourceRedisCacheDelete, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.CacheID(id) + return err + }), + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(90 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(90 * time.Minute), + Delete: schema.DefaultTimeout(90 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + + "location": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + StateFunc: azure.NormalizeLocation, + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "zones": azure.SchemaSingleZone(), + + "capacity": { + Type: schema.TypeInt, + Required: true, + }, + + "family": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.CacheFamily, + DiffSuppressFunc: suppress.CaseDifference, + }, + + "sku_name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{ + string(redis.Basic), + string(redis.Standard), + string(redis.Premium), + }, true), + DiffSuppressFunc: suppress.CaseDifference, + }, + + "minimum_tls_version": { + Type: schema.TypeString, + Optional: true, + Default: redis.OneFullStopZero, + ValidateFunc: validation.StringInSlice([]string{ + string(redis.OneFullStopZero), + string(redis.OneFullStopOne), + string(redis.OneFullStopTwo), + }, false), + }, + + "shard_count": { + Type: schema.TypeInt, + Optional: true, + }, + + "enable_non_ssl_port": { + Type: schema.TypeBool, + Default: false, + Optional: true, + }, + + "subnet_id": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: networkValidate.SubnetID, + }, + + "private_static_ip_address": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + }, + + "redis_configuration": { + Type: schema.TypeList, + Optional: true, + Computed: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "maxclients": { + Type: schema.TypeInt, + Computed: true, + }, + + "maxmemory_delta": { + Type: schema.TypeInt, + Optional: true, + Computed: true, + }, + + "maxmemory_reserved": { + Type: schema.TypeInt, + Optional: true, + Computed: true, + }, + + "maxmemory_policy": { + Type: schema.TypeString, + Optional: true, + Default: "volatile-lru", + ValidateFunc: validate.MaxMemoryPolicy, + }, + + "maxfragmentationmemory_reserved": { + Type: schema.TypeInt, + Optional: true, + Computed: true, + }, + + "rdb_backup_enabled": { + Type: 
schema.TypeBool, + Optional: true, + }, + + "rdb_backup_frequency": { + Type: schema.TypeInt, + Optional: true, + ValidateFunc: validate.CacheBackupFrequency, + }, + + "rdb_backup_max_snapshot_count": { + Type: schema.TypeInt, + Optional: true, + }, + + "rdb_storage_connection_string": { + Type: schema.TypeString, + Optional: true, + Sensitive: true, + }, + + "notify_keyspace_events": { + Type: schema.TypeString, + Optional: true, + }, + + "aof_backup_enabled": { + Type: schema.TypeBool, + Optional: true, + }, + + "aof_storage_connection_string_0": { + Type: schema.TypeString, + Optional: true, + Sensitive: true, + }, + + "aof_storage_connection_string_1": { + Type: schema.TypeString, + Optional: true, + Sensitive: true, + }, + "enable_authentication": { + Type: schema.TypeBool, + Optional: true, + Default: true, + }, + }, + }, + }, + + "patch_schedule": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "day_of_week": { + Type: schema.TypeString, + Required: true, + DiffSuppressFunc: suppress.CaseDifference, + ValidateFunc: validation.IsDayOfTheWeek(true), + }, + "start_hour_utc": { + Type: schema.TypeInt, + Optional: true, + ValidateFunc: validation.IntBetween(0, 23), + }, + }, + }, + }, + + "hostname": { + Type: schema.TypeString, + Computed: true, + }, + + "port": { + Type: schema.TypeInt, + Computed: true, + }, + + "ssl_port": { + Type: schema.TypeInt, + Computed: true, + }, + + "primary_access_key": { + Type: schema.TypeString, + Computed: true, + Sensitive: true, + }, + + "secondary_access_key": { + Type: schema.TypeString, + Computed: true, + Sensitive: true, + }, + + "primary_connection_string": { + Type: schema.TypeString, + Computed: true, + Sensitive: true, + }, + + "secondary_connection_string": { + Type: schema.TypeString, + Computed: true, + Sensitive: true, + }, + + "tags": tags.Schema(), + }, + } +} + +func resourceRedisCacheCreate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Redis.Client + patchClient := meta.(*clients.Client).Redis.PatchSchedulesClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + log.Printf("[INFO] preparing arguments for Azure ARM Redis Cache creation.") + + location := azure.NormalizeLocation(d.Get("location").(string)) + enableNonSSLPort := d.Get("enable_non_ssl_port").(bool) + + capacity := int32(d.Get("capacity").(int)) + family := redis.SkuFamily(d.Get("family").(string)) + sku := redis.SkuName(d.Get("sku_name").(string)) + + t := d.Get("tags").(map[string]interface{}) + expandedTags := tags.Expand(t) + + id := parse.NewCacheID(subscriptionId, d.Get("resource_group_name").(string), d.Get("name").(string)) + existing, err := client.Get(ctx, id.ResourceGroup, id.RediName) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for presence of existing Redis Instance %s (resource group %q): %+v", id.RediName, id.ResourceGroup, err) + } + } + if !utils.ResponseWasNotFound(existing.Response) { + return tf.ImportAsExistsError("azurerm_redis_cache", id.ID()) + } + + patchSchedule := expandRedisPatchSchedule(d) + redisConfiguration, err := expandRedisConfiguration(d) + if err != nil { + return fmt.Errorf("parsing Redis Configuration: %+v", err) + } + + parameters := redis.CreateParameters{ + Location: utils.String(location), + CreateProperties: &redis.CreateProperties{ + EnableNonSslPort: 
utils.Bool(enableNonSSLPort),
+			Sku: &redis.Sku{
+				Capacity: utils.Int32(capacity),
+				Family:   family,
+				Name:     sku,
+			},
+			MinimumTLSVersion:  redis.TLSVersion(d.Get("minimum_tls_version").(string)),
+			RedisConfiguration: redisConfiguration,
+		},
+		Tags: expandedTags,
+	}
+
+	if v, ok := d.GetOk("shard_count"); ok {
+		shardCount := int32(v.(int))
+		parameters.ShardCount = &shardCount
+	}
+
+	if v, ok := d.GetOk("private_static_ip_address"); ok {
+		parameters.StaticIP = utils.String(v.(string))
+	}
+
+	if v, ok := d.GetOk("subnet_id"); ok {
+		parsed, parseErr := networkParse.SubnetID(v.(string))
+		if parseErr != nil {
+			return parseErr
+		}
+
+		locks.ByName(parsed.VirtualNetworkName, network.VirtualNetworkResourceName)
+		defer locks.UnlockByName(parsed.VirtualNetworkName, network.VirtualNetworkResourceName)
+
+		locks.ByName(parsed.Name, network.SubnetResourceName)
+		defer locks.UnlockByName(parsed.Name, network.SubnetResourceName)
+
+		parameters.SubnetID = utils.String(v.(string))
+	}
+
+	if v, ok := d.GetOk("zones"); ok {
+		parameters.Zones = azure.ExpandZones(v.([]interface{}))
+	}
+
+	future, err := client.Create(ctx, id.ResourceGroup, id.RediName, parameters)
+	if err != nil {
+		return fmt.Errorf("creating Redis Cache %q (Resource Group %q): %v", id.RediName, id.ResourceGroup, err)
+	}
+
+	if err = future.WaitForCompletionRef(ctx, client.Client); err != nil {
+		return fmt.Errorf("waiting for creation of Redis Cache %q (Resource Group %q): %v", id.RediName, id.ResourceGroup, err)
+	}
+
+	log.Printf("[DEBUG] Waiting for Redis Cache %q (Resource Group %q) to become available", id.RediName, id.ResourceGroup)
+	stateConf := &resource.StateChangeConf{
+		Pending:    []string{"Scaling", "Updating", "Creating"},
+		Target:     []string{"Succeeded"},
+		Refresh:    redisStateRefreshFunc(ctx, client, id.ResourceGroup, id.RediName),
+		MinTimeout: 15 * time.Second,
+		Timeout:    d.Timeout(schema.TimeoutCreate),
+	}
+
+	if _, err = stateConf.WaitForState(); err != nil {
+		return fmt.Errorf("waiting for Redis Cache %q (Resource Group %q) to become available: %s", id.RediName, id.ResourceGroup, err)
+	}
+
+	d.SetId(id.ID())
+
+	if patchSchedule != nil {
+		if _, err = patchClient.CreateOrUpdate(ctx, id.ResourceGroup, id.RediName, *patchSchedule); err != nil {
+			return fmt.Errorf("setting Redis Patch Schedule: %+v", err)
+		}
+	}
+
+	return resourceRedisCacheRead(d, meta)
+}
+
+func resourceRedisCacheUpdate(d *schema.ResourceData, meta interface{}) error {
+	client := meta.(*clients.Client).Redis.Client
+	patchClient := meta.(*clients.Client).Redis.PatchSchedulesClient
+	ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d)
+	defer cancel()
+	log.Printf("[INFO] preparing arguments for Azure ARM Redis Cache update.")
+
+	id, err := parse.CacheID(d.Id())
+	if err != nil {
+		return err
+	}
+
+	enableNonSSLPort := d.Get("enable_non_ssl_port").(bool)
+
+	capacity := int32(d.Get("capacity").(int))
+	family := redis.SkuFamily(d.Get("family").(string))
+	sku := redis.SkuName(d.Get("sku_name").(string))
+
+	t := d.Get("tags").(map[string]interface{})
+	expandedTags := tags.Expand(t)
+
+	parameters := redis.UpdateParameters{
+		UpdateProperties: &redis.UpdateProperties{
+			MinimumTLSVersion: redis.TLSVersion(d.Get("minimum_tls_version").(string)),
+			EnableNonSslPort:  utils.Bool(enableNonSSLPort),
+			Sku: &redis.Sku{
+				Capacity: utils.Int32(capacity),
+				Family:   family,
+				Name:     sku,
+			},
+		},
+		Tags: expandedTags,
+	}
+
+	if v, ok := d.GetOk("shard_count"); ok {
+		if d.HasChange("shard_count") {
+			shardCount := 
int32(v.(int)) + parameters.ShardCount = &shardCount + } + } + + if d.HasChange("redis_configuration") { + redisConfiguration, err := expandRedisConfiguration(d) + if err != nil { + return fmt.Errorf("parsing Redis Configuration: %+v", err) + } + parameters.RedisConfiguration = redisConfiguration + } + + if _, err := client.Update(ctx, id.ResourceGroup, id.RediName, parameters); err != nil { + return fmt.Errorf("updating Redis Cache %q (Resource Group %q): %+v", id.RediName, id.ResourceGroup, err) + } + + log.Printf("[DEBUG] Waiting for Redis Cache %q (Resource Group %q) to become available", id.RediName, id.ResourceGroup) + stateConf := &resource.StateChangeConf{ + Pending: []string{"Scaling", "Updating", "Creating"}, + Target: []string{"Succeeded"}, + Refresh: redisStateRefreshFunc(ctx, client, id.ResourceGroup, id.RediName), + MinTimeout: 15 * time.Second, + Timeout: d.Timeout(schema.TimeoutUpdate), + } + + if _, err = stateConf.WaitForState(); err != nil { + return fmt.Errorf("waiting for Redis Cache %q (Resource Group %q) to become available: %+v", id.RediName, id.ResourceGroup, err) + } + + patchSchedule := expandRedisPatchSchedule(d) + + if patchSchedule == nil || len(*patchSchedule.ScheduleEntries.ScheduleEntries) == 0 { + _, err = patchClient.Delete(ctx, id.ResourceGroup, id.RediName) + if err != nil { + return fmt.Errorf("deleting Redis Patch Schedule: %+v", err) + } + } else { + _, err = patchClient.CreateOrUpdate(ctx, id.ResourceGroup, id.RediName, *patchSchedule) + if err != nil { + return fmt.Errorf("setting Redis Patch Schedule: %+v", err) + } + } + + return resourceRedisCacheRead(d, meta) +} + +func resourceRedisCacheRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Redis.Client + patchSchedulesClient := meta.(*clients.Client).Redis.PatchSchedulesClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.CacheID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.RediName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + d.SetId("") + return nil + } + return fmt.Errorf("retrieving Redis Cache %q (Resource Group %q): %+v", id.RediName, id.ResourceGroup, err) + } + + keysResp, err := client.ListKeys(ctx, id.ResourceGroup, id.RediName) + if err != nil { + return fmt.Errorf("listing keys for Redis Cache %q (Resource Group %q): %+v", id.RediName, id.ResourceGroup, err) + } + + schedule, err := patchSchedulesClient.Get(ctx, id.ResourceGroup, id.RediName) + if err == nil { + patchSchedule := flattenRedisPatchSchedules(schedule) + if err = d.Set("patch_schedule", patchSchedule); err != nil { + return fmt.Errorf("setting `patch_schedule`: %+v", err) + } + } + + d.Set("name", id.RediName) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("location", location.NormalizeNilable(resp.Location)) + + if zones := resp.Zones; zones != nil { + d.Set("zones", zones) + } + + if sku := resp.Sku; sku != nil { + d.Set("capacity", sku.Capacity) + d.Set("family", sku.Family) + d.Set("sku_name", sku.Name) + } + + if props := resp.Properties; props != nil { + d.Set("ssl_port", props.SslPort) + d.Set("hostname", props.HostName) + d.Set("minimum_tls_version", string(props.MinimumTLSVersion)) + d.Set("port", props.Port) + d.Set("enable_non_ssl_port", props.EnableNonSslPort) + if props.ShardCount != nil { + d.Set("shard_count", props.ShardCount) + } + d.Set("private_static_ip_address", props.StaticIP) + + subnetId := "" + if 
props.SubnetID != nil {
+			parsed, err := networkParse.SubnetID(*props.SubnetID)
+			if err != nil {
+				return err
+			}
+
+			subnetId = parsed.ID()
+		}
+		d.Set("subnet_id", subnetId)
+	}
+
+	redisConfiguration, err := flattenRedisConfiguration(resp.RedisConfiguration)
+	if err != nil {
+		return fmt.Errorf("flattening `redis_configuration`: %+v", err)
+	}
+	if err := d.Set("redis_configuration", redisConfiguration); err != nil {
+		return fmt.Errorf("setting `redis_configuration`: %+v", err)
+	}
+
+	d.Set("primary_access_key", keysResp.PrimaryKey)
+	d.Set("secondary_access_key", keysResp.SecondaryKey)
+
+	if props := resp.Properties; props != nil {
+		enableSslPort := !*props.EnableNonSslPort
+		d.Set("primary_connection_string", getRedisConnectionString(*props.HostName, *props.SslPort, *keysResp.PrimaryKey, enableSslPort))
+		d.Set("secondary_connection_string", getRedisConnectionString(*props.HostName, *props.SslPort, *keysResp.SecondaryKey, enableSslPort))
+	}
+
+	return tags.FlattenAndSet(d, resp.Tags)
+}
+
+func resourceRedisCacheDelete(d *schema.ResourceData, meta interface{}) error {
+	client := meta.(*clients.Client).Redis.Client
+	ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d)
+	defer cancel()
+
+	id, err := parse.CacheID(d.Id())
+	if err != nil {
+		return err
+	}
+
+	read, err := client.Get(ctx, id.ResourceGroup, id.RediName)
+	if err != nil {
+		return fmt.Errorf("retrieving Redis Cache %q (Resource Group %q): %+v", id.RediName, id.ResourceGroup, err)
+	}
+	if read.Properties == nil {
+		return fmt.Errorf("retrieving Redis Cache %q (Resource Group %q): `properties` was nil", id.RediName, id.ResourceGroup)
+	}
+	props := *read.Properties
+	if subnetID := props.SubnetID; subnetID != nil {
+		parsed, parseErr := networkParse.SubnetID(*subnetID)
+		if parseErr != nil {
+			return parseErr
+		}
+
+		locks.ByName(parsed.VirtualNetworkName, network.VirtualNetworkResourceName)
+		defer locks.UnlockByName(parsed.VirtualNetworkName, network.VirtualNetworkResourceName)
+
+		locks.ByName(parsed.Name, network.SubnetResourceName)
+		defer locks.UnlockByName(parsed.Name, network.SubnetResourceName)
+	}
+
+	future, err := client.Delete(ctx, id.ResourceGroup, id.RediName)
+	if err != nil {
+		return fmt.Errorf("deleting Redis Cache %q (Resource Group %q): %+v", id.RediName, id.ResourceGroup, err)
+	}
+
+	if err = future.WaitForCompletionRef(ctx, client.Client); err != nil {
+		if !response.WasNotFound(future.Response()) {
+			return fmt.Errorf("waiting for deletion of Redis Cache %q (Resource Group %q): %+v", id.RediName, id.ResourceGroup, err)
+		}
+	}
+
+	return nil
+}
+
+func redisStateRefreshFunc(ctx context.Context, client *redis.Client, resourceGroupName string, sgName string) resource.StateRefreshFunc {
+	return func() (interface{}, string, error) {
+		res, err := client.Get(ctx, resourceGroupName, sgName)
+		if err != nil {
+			return nil, "", fmt.Errorf("polling for status of Redis Cache %q (Resource Group %q): %+v", sgName, resourceGroupName, err)
+		}
+
+		return res, string(res.ProvisioningState), nil
+	}
+}
+
+func expandRedisConfiguration(d *schema.ResourceData) (map[string]*string, error) {
+	output := make(map[string]*string)
+
+	input := d.Get("redis_configuration").([]interface{})
+	if len(input) == 0 || input[0] == nil {
+		return output, nil
+	}
+	raw := input[0].(map[string]interface{})
+
+	if v := raw["maxclients"].(int); v > 0 {
+		output["maxclients"] = utils.String(strconv.Itoa(v))
+	}
+
+	if v := raw["maxmemory_delta"].(int); v > 0 {
+		output["maxmemory-delta"] = 
utils.String(strconv.Itoa(v)) + } + + if v := raw["maxmemory_reserved"].(int); v > 0 { + output["maxmemory-reserved"] = utils.String(strconv.Itoa(v)) + } + + if v := raw["maxmemory_policy"].(string); v != "" { + output["maxmemory-policy"] = utils.String(v) + } + + if v := raw["maxfragmentationmemory_reserved"].(int); v > 0 { + output["maxfragmentationmemory-reserved"] = utils.String(strconv.Itoa(v)) + } + + // RDB Backup + if v := raw["rdb_backup_enabled"].(bool); v { + if connStr := raw["rdb_storage_connection_string"].(string); connStr == "" { + return nil, fmt.Errorf("The rdb_storage_connection_string property must be set when rdb_backup_enabled is true") + } + output["rdb-backup-enabled"] = utils.String(strconv.FormatBool(v)) + } + + if v := raw["rdb_backup_frequency"].(int); v > 0 { + output["rdb-backup-frequency"] = utils.String(strconv.Itoa(v)) + } + + if v := raw["rdb_backup_max_snapshot_count"].(int); v > 0 { + output["rdb-backup-max-snapshot-count"] = utils.String(strconv.Itoa(v)) + } + + if v := raw["rdb_storage_connection_string"].(string); v != "" { + output["rdb-storage-connection-string"] = utils.String(v) + } + + if v := raw["notify_keyspace_events"].(string); v != "" { + output["notify-keyspace-events"] = utils.String(v) + } + + // AOF Backup + if v := raw["aof_backup_enabled"].(bool); v { + output["aof-backup-enabled"] = utils.String(strconv.FormatBool(v)) + } + + if v := raw["aof_storage_connection_string_0"].(string); v != "" { + output["aof-storage-connection-string-0"] = utils.String(v) + } + + if v := raw["aof_storage_connection_string_1"].(string); v != "" { + output["aof-storage-connection-string-1"] = utils.String(v) + } + + authEnabled := raw["enable_authentication"].(bool) + // Redis authentication can only be disabled if it is launched inside a VNET. 
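The comment above captures an API constraint: authentication can only be disabled for caches deployed into a subnet. When subnet_id is set, the boolean enable_authentication value is translated into the API's inverted authnotrequired string. A small, self-contained sketch of that round trip, mirroring the isAuthNotRequiredAsString and isAuthRequiredAsBool helpers defined later in this file:

package main

import (
	"fmt"
	"strings"
)

// The API uses an inverted setting: authnotrequired = "yes" means clients may
// connect without authentication, i.e. enable_authentication = false.
func isAuthNotRequiredAsString(authRequired bool) string {
	return map[bool]string{true: "no", false: "yes"}[authRequired]
}

func isAuthRequiredAsBool(notRequired string) bool {
	return map[string]bool{"yes": false, "no": true}[strings.ToLower(notRequired)]
}

func main() {
	for _, enableAuth := range []bool{true, false} {
		wire := isAuthNotRequiredAsString(enableAuth)
		fmt.Printf("enable_authentication=%t -> authnotrequired=%q -> back to %t\n",
			enableAuth, wire, isAuthRequiredAsBool(wire))
	}
}

Running it shows that enable_authentication = false maps to authnotrequired = "yes" and back again, which is the same mapping the expand and flatten functions rely on.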
+ if _, isPrivate := d.GetOk("subnet_id"); !isPrivate { + if !authEnabled { + return nil, fmt.Errorf("Cannot set `enable_authentication` to `false` when `subnet_id` is not set") + } + } else { + value := isAuthNotRequiredAsString(authEnabled) + output["authnotrequired"] = utils.String(value) + } + return output, nil +} + +func expandRedisPatchSchedule(d *schema.ResourceData) *redis.PatchSchedule { + v, ok := d.GetOk("patch_schedule") + if !ok { + return nil + } + + scheduleValues := v.([]interface{}) + entries := make([]redis.ScheduleEntry, 0) + for _, scheduleValue := range scheduleValues { + vals := scheduleValue.(map[string]interface{}) + dayOfWeek := vals["day_of_week"].(string) + startHourUtc := vals["start_hour_utc"].(int) + + entry := redis.ScheduleEntry{ + DayOfWeek: redis.DayOfWeek(dayOfWeek), + StartHourUtc: utils.Int32(int32(startHourUtc)), + } + entries = append(entries, entry) + } + + schedule := redis.PatchSchedule{ + ScheduleEntries: &redis.ScheduleEntries{ + ScheduleEntries: &entries, + }, + } + return &schedule +} + +func flattenRedisConfiguration(input map[string]*string) ([]interface{}, error) { + outputs := make(map[string]interface{}, len(input)) + + if v := input["maxclients"]; v != nil { + i, err := strconv.Atoi(*v) + if err != nil { + return nil, fmt.Errorf("Error parsing `maxclients` %q: %+v", *v, err) + } + outputs["maxclients"] = i + } + if v := input["maxmemory-delta"]; v != nil { + i, err := strconv.Atoi(*v) + if err != nil { + return nil, fmt.Errorf("Error parsing `maxmemory-delta` %q: %+v", *v, err) + } + outputs["maxmemory_delta"] = i + } + if v := input["maxmemory-reserved"]; v != nil { + i, err := strconv.Atoi(*v) + if err != nil { + return nil, fmt.Errorf("Error parsing `maxmemory-reserved` %q: %+v", *v, err) + } + outputs["maxmemory_reserved"] = i + } + if v := input["maxmemory-policy"]; v != nil { + outputs["maxmemory_policy"] = *v + } + + if v := input["maxfragmentationmemory-reserved"]; v != nil { + i, err := strconv.Atoi(*v) + if err != nil { + return nil, fmt.Errorf("Error parsing `maxfragmentationmemory-reserved` %q: %+v", *v, err) + } + outputs["maxfragmentationmemory_reserved"] = i + } + + // delta, reserved, enabled, frequency,, count, + if v := input["rdb-backup-enabled"]; v != nil { + b, err := strconv.ParseBool(*v) + if err != nil { + return nil, fmt.Errorf("Error parsing `rdb-backup-enabled` %q: %+v", *v, err) + } + outputs["rdb_backup_enabled"] = b + } + if v := input["rdb-backup-frequency"]; v != nil { + i, err := strconv.Atoi(*v) + if err != nil { + return nil, fmt.Errorf("Error parsing `rdb-backup-frequency` %q: %+v", *v, err) + } + outputs["rdb_backup_frequency"] = i + } + if v := input["rdb-backup-max-snapshot-count"]; v != nil { + i, err := strconv.Atoi(*v) + if err != nil { + return nil, fmt.Errorf("Error parsing `rdb-backup-max-snapshot-count` %q: %+v", *v, err) + } + outputs["rdb_backup_max_snapshot_count"] = i + } + if v := input["rdb-storage-connection-string"]; v != nil { + outputs["rdb_storage_connection_string"] = *v + } + if v := input["notify-keyspace-events"]; v != nil { + outputs["notify_keyspace_events"] = *v + } + + if v := input["aof-backup-enabled"]; v != nil { + b, err := strconv.ParseBool(*v) + if err != nil { + return nil, fmt.Errorf("Error parsing `aof-backup-enabled` %q: %+v", *v, err) + } + outputs["aof_backup_enabled"] = b + } + if v := input["aof-storage-connection-string-0"]; v != nil { + outputs["aof_storage_connection_string_0"] = *v + } + if v := input["aof-storage-connection-string-1"]; v != nil { + 
outputs["aof_storage_connection_string_1"] = *v + } + + // `authnotrequired` is not set for instances launched outside a VNET + outputs["enable_authentication"] = true + if v := input["authnotrequired"]; v != nil { + outputs["enable_authentication"] = isAuthRequiredAsBool(*v) + } + + return []interface{}{outputs}, nil +} + +func isAuthRequiredAsBool(not_required string) bool { + value := strings.ToLower(not_required) + output := map[string]bool{ + "yes": false, + "no": true, + } + return output[value] +} + +func isAuthNotRequiredAsString(auth_required bool) string { + output := map[bool]string{ + true: "no", + false: "yes", + } + return output[auth_required] +} + +func flattenRedisPatchSchedules(schedule redis.PatchSchedule) []interface{} { + outputs := make([]interface{}, 0) + + for _, entry := range *schedule.ScheduleEntries.ScheduleEntries { + output := make(map[string]interface{}) + + output["day_of_week"] = string(entry.DayOfWeek) + output["start_hour_utc"] = int(*entry.StartHourUtc) + + outputs = append(outputs, output) + } + + return outputs +} + +func getRedisConnectionString(redisHostName string, sslPort int32, accessKey string, enableSslPort bool) string { + return fmt.Sprintf("%s:%d,password=%s,ssl=%t,abortConnect=False", redisHostName, sslPort, accessKey, enableSslPort) +} diff --git a/azurerm/internal/services/redis/redis_cache_resource_test.go b/azurerm/internal/services/redis/redis_cache_resource_test.go new file mode 100644 index 000000000000..9e7239b69e00 --- /dev/null +++ b/azurerm/internal/services/redis/redis_cache_resource_test.go @@ -0,0 +1,969 @@ +package redis_test + +import ( + "context" + "fmt" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/redis/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type RedisCacheResource struct { +} + +func TestAccRedisCache_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") + r := RedisCacheResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, true), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("minimum_tls_version").Exists(), + check.That(data.ResourceName).Key("primary_connection_string").Exists(), + check.That(data.ResourceName).Key("secondary_connection_string").Exists(), + testCheckSSLInConnectionString(data.ResourceName, "primary_connection_string", true), + testCheckSSLInConnectionString(data.ResourceName, "secondary_connection_string", true), + ), + }, + data.ImportStep(), + }) +} + +func TestAccRedisCache_withoutSSL(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") + r := RedisCacheResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, false), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + testCheckSSLInConnectionString(data.ResourceName, "primary_connection_string", false), + testCheckSSLInConnectionString(data.ResourceName, "secondary_connection_string", false), + ), + }, 
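The resource Create and Update functions earlier in this diff wait for the ARM operation to finish and then poll the cache's provisioning state with resource.StateChangeConf until it reports Succeeded. A minimal, self-contained sketch of that polling pattern, using a stubbed refresh function in place of redisStateRefreshFunc and much shorter timings (the three-attempt behaviour below is illustrative only):

package main

import (
	"fmt"
	"time"

	"github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)

func main() {
	// Stub standing in for redisStateRefreshFunc: the real function calls
	// client.Get and returns res.ProvisioningState as the state string.
	attempts := 0
	refresh := func() (interface{}, string, error) {
		attempts++
		if attempts < 3 {
			return "cache", "Creating", nil
		}
		return "cache", "Succeeded", nil
	}

	stateConf := &resource.StateChangeConf{
		Pending:    []string{"Scaling", "Updating", "Creating"},
		Target:     []string{"Succeeded"},
		Refresh:    refresh,
		MinTimeout: 10 * time.Millisecond,
		Timeout:    time.Minute,
	}

	if _, err := stateConf.WaitForState(); err != nil {
		fmt.Println("error waiting for provisioning:", err)
		return
	}
	fmt.Printf("reached Succeeded after %d polls\n", attempts)
}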
+ data.ImportStep(), + }) +} + +func TestAccRedisCache_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") + r := RedisCacheResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data, true), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccRedisCache_standard(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") + r := RedisCacheResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.standard(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccRedisCache_premium(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") + r := RedisCacheResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.premium(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccRedisCache_premiumSharded(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") + r := RedisCacheResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.premiumSharded(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccRedisCache_premiumShardedScaling(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") + r := RedisCacheResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.premiumSharded(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.premiumShardedScaled(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + }) +} + +func TestAccRedisCache_NonStandardCasing(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") + r := RedisCacheResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.nonStandardCasing(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.nonStandardCasing(data), + PlanOnly: true, + ExpectNonEmptyPlan: false, + }, + }) +} + +func TestAccRedisCache_BackupDisabled(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") + r := RedisCacheResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.backupDisabled(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + }) +} + +func TestAccRedisCache_BackupEnabled(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") + r := RedisCacheResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.backupEnabled(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + // `redis_configuration.0.rdb_storage_connection_string` is returned as: + // "...;AccountKey=[key hidden]" rather than "...;AccountKey=fsjfvjnfnf" + // TODO: remove this once the Bug's been fixed: + // https://github.com/Azure/azure-rest-api-specs/issues/3037 + ExpectNonEmptyPlan: true, + }, + data.ImportStep("redis_configuration.0.rdb_storage_connection_string"), + }) +} + +func 
TestAccRedisCache_BackupEnabledDisabled(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") + r := RedisCacheResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.backupEnabled(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + // `redis_configuration.0.rdb_storage_connection_string` is returned as: + // "...;AccountKey=[key hidden]" rather than "...;AccountKey=fsjfvjnfnf" + // TODO: remove this once the Bug's been fixed: + // https://github.com/Azure/azure-rest-api-specs/issues/3037 + ExpectNonEmptyPlan: true, + }, + { + Config: r.backupDisabled(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + // `redis_configuration.0.rdb_storage_connection_string` is returned as: + // "...;AccountKey=[key hidden]" rather than "...;AccountKey=fsjfvjnfnf" + // TODO: remove this once the Bug's been fixed: + // https://github.com/Azure/azure-rest-api-specs/issues/3037 + ExpectNonEmptyPlan: true, + }, + }) +} + +func TestAccRedisCache_AOFBackupEnabled(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") + r := RedisCacheResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.aofBackupEnabled(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + ExpectNonEmptyPlan: true, + }, + data.ImportStep("redis_configuration.0.aof_storage_connection_string_0", + "redis_configuration.0.aof_storage_connection_string_1"), + }) +} + +func TestAccRedisCache_AOFBackupEnabledDisabled(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") + r := RedisCacheResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.aofBackupEnabled(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + ExpectNonEmptyPlan: true, + }, + { + Config: r.aofBackupDisabled(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + ExpectNonEmptyPlan: true, + }, + }) +} + +func TestAccRedisCache_PatchSchedule(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") + r := RedisCacheResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.patchSchedule(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccRedisCache_PatchScheduleUpdated(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") + r := RedisCacheResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.patchSchedule(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.premium(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + }) +} + +func TestAccRedisCache_InternalSubnet(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") + r := RedisCacheResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.internalSubnet(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccRedisCache_InternalSubnetStaticIP(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") + r := RedisCacheResource{} + + data.ResourceTest(t, r, 
[]resource.TestStep{ + { + Config: r.internalSubnetStaticIP(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccRedisCache_InternalSubnet_withZone(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") + r := RedisCacheResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.internalSubnet_withZone(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("zones.#").HasValue("1"), + check.That(data.ResourceName).Key("zones.0").HasValue("1"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccRedisCache_SubscribeAllEvents(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") + r := RedisCacheResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.subscribeAllEvents(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + }) +} + +func TestAccRedisCache_WithoutAuth(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") + r := RedisCacheResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.withoutAuth(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("redis_configuration.0.enable_authentication").HasValue("false"), + ), + }, + }) +} + +func (t RedisCacheResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.CacheID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Redis.Client.Get(ctx, id.ResourceGroup, id.RediName) + if err != nil { + return nil, fmt.Errorf("reading Redis Cache (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.Properties != nil), nil +} + +func (RedisCacheResource) basic(data acceptance.TestData, requireSSL bool) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_redis_cache" "test" { + name = "acctestRedis-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + capacity = 1 + family = "C" + sku_name = "Basic" + enable_non_ssl_port = %t + minimum_tls_version = "1.2" + + redis_configuration { + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, !requireSSL) +} + +func (RedisCacheResource) requiresImport(data acceptance.TestData) string { + template := RedisCacheResource{}.basic(data, true) + return fmt.Sprintf(` +%s + +resource "azurerm_redis_cache" "import" { + name = azurerm_redis_cache.test.name + location = azurerm_redis_cache.test.location + resource_group_name = azurerm_redis_cache.test.resource_group_name + capacity = azurerm_redis_cache.test.capacity + family = azurerm_redis_cache.test.family + sku_name = azurerm_redis_cache.test.sku_name + enable_non_ssl_port = azurerm_redis_cache.test.enable_non_ssl_port + + redis_configuration { + } +} +`, template) +} + +func (RedisCacheResource) standard(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_redis_cache" "test" { + name = "acctestRedis-%d" + location = azurerm_resource_group.test.location + 
resource_group_name = azurerm_resource_group.test.name + capacity = 1 + family = "C" + sku_name = "Standard" + enable_non_ssl_port = false + redis_configuration { + } + + tags = { + environment = "production" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (RedisCacheResource) premium(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_redis_cache" "test" { + name = "acctestRedis-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + capacity = 1 + family = "P" + sku_name = "Premium" + enable_non_ssl_port = false + + redis_configuration { + maxmemory_reserved = 2 + maxfragmentationmemory_reserved = 2 + maxmemory_delta = 2 + maxmemory_policy = "allkeys-lru" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (RedisCacheResource) premiumSharded(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_redis_cache" "test" { + name = "acctestRedis-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + capacity = 1 + family = "P" + sku_name = "Premium" + enable_non_ssl_port = true + shard_count = 3 + + redis_configuration { + maxmemory_reserved = 2 + maxfragmentationmemory_reserved = 2 + maxmemory_delta = 2 + maxmemory_policy = "allkeys-lru" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (RedisCacheResource) premiumShardedScaled(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_redis_cache" "test" { + name = "acctestRedis-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + capacity = 2 + family = "P" + sku_name = "Premium" + enable_non_ssl_port = true + shard_count = 3 + + redis_configuration { + maxmemory_reserved = 2 + maxfragmentationmemory_reserved = 2 + maxmemory_delta = 2 + maxmemory_policy = "allkeys-lru" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (RedisCacheResource) nonStandardCasing(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_redis_cache" "test" { + name = "acctestRedis-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + capacity = 1 + family = "c" + sku_name = "basic" + enable_non_ssl_port = false + redis_configuration { + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (RedisCacheResource) backupDisabled(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_redis_cache" "test" { + name = "acctestRedis-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + capacity = 3 + family = "P" + sku_name = "Premium" + enable_non_ssl_port = false 
+ + redis_configuration { + rdb_backup_enabled = false + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (RedisCacheResource) backupEnabled(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "unlikely23exst2acct%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "GRS" + + tags = { + environment = "staging" + } +} + +resource "azurerm_redis_cache" "test" { + name = "acctestRedis-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + capacity = 3 + family = "P" + sku_name = "Premium" + enable_non_ssl_port = false + + redis_configuration { + rdb_backup_enabled = true + rdb_backup_frequency = 60 + rdb_backup_max_snapshot_count = 1 + rdb_storage_connection_string = "DefaultEndpointsProtocol=https;BlobEndpoint=${azurerm_storage_account.test.primary_blob_endpoint};AccountName=${azurerm_storage_account.test.name};AccountKey=${azurerm_storage_account.test.primary_access_key}" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) +} + +func (RedisCacheResource) aofBackupDisabled(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_redis_cache" "test" { + name = "acctestRedis-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + capacity = 3 + family = "P" + sku_name = "Premium" + enable_non_ssl_port = false + + redis_configuration { + aof_backup_enabled = false + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (RedisCacheResource) aofBackupEnabled(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "unlikely23exst2acct%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "GRS" + + tags = { + environment = "staging" + } +} + +resource "azurerm_redis_cache" "test" { + name = "acctestRedis-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + capacity = 1 + family = "P" + sku_name = "Premium" + enable_non_ssl_port = false + + redis_configuration { + aof_backup_enabled = true + aof_storage_connection_string_0 = "DefaultEndpointsProtocol=https;BlobEndpoint=${azurerm_storage_account.test.primary_blob_endpoint};AccountName=${azurerm_storage_account.test.name};AccountKey=${azurerm_storage_account.test.primary_access_key}" + aof_storage_connection_string_1 = "DefaultEndpointsProtocol=https;BlobEndpoint=${azurerm_storage_account.test.primary_blob_endpoint};AccountName=${azurerm_storage_account.test.name};AccountKey=${azurerm_storage_account.test.secondary_access_key}" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) +} + +func (RedisCacheResource) patchSchedule(data acceptance.TestData) string { + return fmt.Sprintf(` 
+provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_redis_cache" "test" { + name = "acctestRedis-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + capacity = 1 + family = "P" + sku_name = "Premium" + enable_non_ssl_port = false + + redis_configuration { + maxmemory_reserved = 2 + maxmemory_delta = 2 + maxmemory_policy = "allkeys-lru" + } + + patch_schedule { + day_of_week = "Tuesday" + start_hour_utc = 8 + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (RedisCacheResource) subscribeAllEvents(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "unlikely23exst2acct%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "GRS" + + tags = { + environment = "staging" + } +} + +resource "azurerm_redis_cache" "test" { + name = "acctestRedis-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + capacity = 3 + family = "P" + sku_name = "Premium" + enable_non_ssl_port = false + + redis_configuration { + notify_keyspace_events = "KAE" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) +} + +func (RedisCacheResource) internalSubnet(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestnw-%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "testsubnet" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.1.0/24" +} + +resource "azurerm_redis_cache" "test" { + name = "acctestRedis-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + capacity = 1 + family = "P" + sku_name = "Premium" + enable_non_ssl_port = false + subnet_id = azurerm_subnet.test.id + redis_configuration { + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (RedisCacheResource) internalSubnetStaticIP(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestnw-%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "testsubnet" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.1.0/24" +} + +resource "azurerm_redis_cache" "test" { + name = "acctestRedis-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + capacity = 1 + family = "P" + 
sku_name = "Premium" + enable_non_ssl_port = false + subnet_id = azurerm_subnet.test.id + private_static_ip_address = "10.0.1.20" + redis_configuration { + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (RedisCacheResource) internalSubnet_withZone(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestnw-%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "testsubnet" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.1.0/24" +} + +resource "azurerm_redis_cache" "test" { + name = "acctestRedis-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + capacity = 1 + family = "P" + sku_name = "Premium" + enable_non_ssl_port = false + subnet_id = azurerm_subnet.test.id + redis_configuration { + } + zones = ["1"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (RedisCacheResource) withoutAuth(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_virtual_network" "test" { + name = "acctestnw-%d" + address_space = ["10.0.0.0/16"] + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_subnet" "test" { + name = "testsubnet" + resource_group_name = azurerm_resource_group.test.name + virtual_network_name = azurerm_virtual_network.test.name + address_prefix = "10.0.1.0/24" +} + +resource "azurerm_redis_cache" "test" { + name = "acctestRedis-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + capacity = 1 + family = "P" + sku_name = "Premium" + enable_non_ssl_port = false + subnet_id = azurerm_subnet.test.id + redis_configuration { + enable_authentication = false + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func testCheckSSLInConnectionString(resourceName string, propertyName string, requireSSL bool) resource.TestCheckFunc { + return func(s *terraform.State) error { + // Ensure we have enough information in state to look up in API + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + + connectionString := rs.Primary.Attributes[propertyName] + if strings.Contains(connectionString, fmt.Sprintf("ssl=%t", requireSSL)) { + return nil + } + if strings.Contains(connectionString, fmt.Sprintf("ssl=%t", !requireSSL)) { + return fmt.Errorf("Bad: wrong SSL setting in connection string: %s", propertyName) + } + + return fmt.Errorf("Bad: missing SSL setting in connection string: %s", propertyName) + } +} diff --git a/azurerm/internal/services/redis/redis_firewall_rule_resource.go b/azurerm/internal/services/redis/redis_firewall_rule_resource.go new file mode 100644 index 000000000000..5677be7f8b4d --- /dev/null +++ b/azurerm/internal/services/redis/redis_firewall_rule_resource.go @@ -0,0 +1,166 @@ +package redis + +import ( + 
"fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/redis/mgmt/2018-03-01/redis" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/redis/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/redis/validate" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceRedisFirewallRule() *schema.Resource { + return &schema.Resource{ + Create: resourceRedisFirewallRuleCreateUpdate, + Read: resourceRedisFirewallRuleRead, + Update: resourceRedisFirewallRuleCreateUpdate, + Delete: resourceRedisFirewallRuleDelete, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.FirewallRuleID(id) + return err + }), + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.FirewallRuleName, + }, + + "redis_cache_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "start_ip": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.All( + validation.IsIPAddress, + validation.StringIsNotEmpty, + ), + }, + + "end_ip": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.All( + validation.IsIPAddress, + validation.StringIsNotEmpty, + ), + }, + }, + } +} + +func resourceRedisFirewallRuleCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Redis.FirewallRulesClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + log.Printf("[INFO] preparing arguments for AzureRM Redis Firewall Rule creation.") + + startIP := d.Get("start_ip").(string) + endIP := d.Get("end_ip").(string) + + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + resourceId := parse.NewFirewallRuleID(subscriptionId, d.Get("resource_group_name").(string), d.Get("redis_cache_name").(string), d.Get("name").(string)) + if d.IsNewResource() { + existing, err := client.Get(ctx, resourceId.ResourceGroup, resourceId.RediName, resourceId.Name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for presence of existing Firewall Rule %q (Redis Cache %q / Resource Group %q): %+v", resourceId.Name, resourceId.RediName, resourceId.ResourceGroup, err) + } + } + if !utils.ResponseWasNotFound(existing.Response) { + return tf.ImportAsExistsError("azurerm_redis_firewall_rule", resourceId.ID()) + } + } + + parameters := redis.FirewallRuleCreateParameters{ + FirewallRuleProperties: &redis.FirewallRuleProperties{ + StartIP: utils.String(startIP), + 
EndIP: utils.String(endIP), + }, + } + + if _, err := client.CreateOrUpdate(ctx, resourceId.ResourceGroup, resourceId.RediName, resourceId.Name, parameters); err != nil { + return fmt.Errorf("creating Firewall Rule %q (Redis Cache %q / Resource Group %q): %+v", resourceId.Name, resourceId.RediName, resourceId.ResourceGroup, err) + } + + // TODO: confirm if we need to re-add the poller here + + d.SetId(resourceId.ID()) + return resourceRedisFirewallRuleRead(d, meta) +} + +func resourceRedisFirewallRuleRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Redis.FirewallRulesClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.FirewallRuleID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.RediName, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[DEBUG] Firewall Rule %q was not found in Redis Cache %q / Resource Group %q - removing from state", id.Name, id.RediName, id.ResourceGroup) + d.SetId("") + return nil + } + + return fmt.Errorf("retrieving Firewall Rule %q (Redis Cache %q / Resource Group %q): %+v", id.Name, id.RediName, id.ResourceGroup, err) + } + + d.Set("name", id.Name) + d.Set("redis_cache_name", id.RediName) + d.Set("resource_group_name", id.ResourceGroup) + if props := resp.FirewallRuleProperties; props != nil { + d.Set("start_ip", props.StartIP) + d.Set("end_ip", props.EndIP) + } + + return nil +} + +func resourceRedisFirewallRuleDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Redis.FirewallRulesClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.FirewallRuleID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Delete(ctx, id.ResourceGroup, id.RediName, id.Name) + if err != nil { + if !utils.ResponseWasNotFound(resp) { + return fmt.Errorf("deleting Firewall Rule %q (Redis Cache %q / Resource Group %q): %+v", id.Name, id.RediName, id.ResourceGroup, err) + } + } + + return nil +} diff --git a/azurerm/internal/services/redis/redis_firewall_rule_resource_test.go b/azurerm/internal/services/redis/redis_firewall_rule_resource_test.go new file mode 100644 index 000000000000..b288df31a9d2 --- /dev/null +++ b/azurerm/internal/services/redis/redis_firewall_rule_resource_test.go @@ -0,0 +1,201 @@ +package redis_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/redis/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type RedisFirewallRuleResource struct { +} + +func TestAccRedisFirewallRule_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_redis_firewall_rule", "test") + r := RedisFirewallRuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccRedisFirewallRule_multi(t 
*testing.T) { + data := acceptance.BuildTestData(t, "azurerm_redis_firewall_rule", "test") + r := RedisFirewallRuleResource{} + ruleTwo := "azurerm_redis_firewall_rule.double" + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.multi(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(ruleTwo).ExistsInAzure(r), + ), + }, + data.ImportStep(), + data.ImportStepFor(ruleTwo), + }) +} + +func TestAccRedisFirewallRule_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_redis_firewall_rule", "test") + r := RedisFirewallRuleResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccRedisFirewallRule_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_redis_firewall_rule", "test") + r := RedisFirewallRuleResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + }) +} + +func (t RedisFirewallRuleResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.FirewallRuleID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Redis.FirewallRulesClient.Get(ctx, id.ResourceGroup, id.RediName, id.Name) + if err != nil { + return nil, fmt.Errorf("reading Redis Firewall Rule (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.FirewallRuleProperties != nil), nil +} + +func (RedisFirewallRuleResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_redis_cache" "test" { + name = "acctestRedis-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + capacity = 1 + family = "P" + sku_name = "Premium" + enable_non_ssl_port = false + + redis_configuration { + maxmemory_reserved = 2 + maxmemory_delta = 2 + maxmemory_policy = "allkeys-lru" + } +} + +resource "azurerm_redis_firewall_rule" "test" { + name = "fwrule%d" + redis_cache_name = azurerm_redis_cache.test.name + resource_group_name = azurerm_resource_group.test.name + start_ip = "1.2.3.4" + end_ip = "2.3.4.5" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r RedisFirewallRuleResource) multi(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_redis_firewall_rule" "double" { + name = "fwruletwo%d" + redis_cache_name = azurerm_redis_cache.test.name + resource_group_name = azurerm_resource_group.test.name + start_ip = "4.5.6.7" + end_ip = "8.9.0.1" +} +`, r.basic(data), data.RandomInteger) +} + +func (r RedisFirewallRuleResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_redis_firewall_rule" "import" { + name = azurerm_redis_firewall_rule.test.name + redis_cache_name = azurerm_redis_firewall_rule.test.redis_cache_name + resource_group_name = azurerm_redis_firewall_rule.test.resource_group_name + start_ip = 
azurerm_redis_firewall_rule.test.start_ip + end_ip = azurerm_redis_firewall_rule.test.end_ip +} +`, r.basic(data)) +} + +func (RedisFirewallRuleResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_redis_cache" "test" { + name = "acctestRedis-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + capacity = 1 + family = "P" + sku_name = "Premium" + enable_non_ssl_port = false + + redis_configuration { + maxmemory_reserved = 2 + maxmemory_delta = 2 + maxmemory_policy = "allkeys-lru" + } +} + +resource "azurerm_redis_firewall_rule" "test" { + name = "fwrule%d" + redis_cache_name = azurerm_redis_cache.test.name + resource_group_name = azurerm_resource_group.test.name + start_ip = "2.3.4.5" + end_ip = "6.7.8.9" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/redis/redis_linked_server_resource.go b/azurerm/internal/services/redis/redis_linked_server_resource.go new file mode 100644 index 000000000000..c5b9583a159f --- /dev/null +++ b/azurerm/internal/services/redis/redis_linked_server_resource.go @@ -0,0 +1,247 @@ +package redis + +import ( + "context" + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/redis/mgmt/2018-03-01/redis" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/redis/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/redis/validate" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceRedisLinkedServer() *schema.Resource { + return &schema.Resource{ + Create: resourceRedisLinkedServerCreate, + Read: resourceRedisLinkedServerRead, + Delete: resourceRedisLinkedServerDelete, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.LinkedServerID(id) + return err + }), + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "target_redis_cache_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "linked_redis_cache_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.CacheID, + }, + + "linked_redis_cache_location": azure.SchemaLocation(), + + "resource_group_name": azure.SchemaResourceGroupName(), + + "server_role": { + Type: schema.TypeString, + Required: 
true, + ForceNew: true, + ValidateFunc: validation.StringInSlice([]string{ + string(redis.ReplicationRolePrimary), + string(redis.ReplicationRoleSecondary), + // TODO: make this case-sensitive in 3.0 + }, true), + DiffSuppressFunc: suppress.CaseDifference, + }, + + "name": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} + +func resourceRedisLinkedServerCreate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Redis.LinkedServerClient + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + log.Printf("[INFO] preparing arguments for Redis Linked Server creation.") + + linkedRedisCacheId := d.Get("linked_redis_cache_id").(string) + linkedRedisCacheLocation := d.Get("linked_redis_cache_location").(string) + serverRole := redis.ReplicationRole(d.Get("server_role").(string)) + + // The name needs to match the linked_redis_cache_id + cacheId, err := parse.CacheID(linkedRedisCacheId) + if err != nil { + return err + } + + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + resourceId := parse.NewLinkedServerID(subscriptionId, d.Get("resource_group_name").(string), d.Get("target_redis_cache_name").(string), cacheId.RediName) + if d.IsNewResource() { + existing, err := client.Get(ctx, resourceId.ResourceGroup, resourceId.RediName, resourceId.Name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for presence of existing Linked Server %q (Redis Cache %q / Resource Group %q): %+v", resourceId.Name, resourceId.RediName, resourceId.ResourceGroup, err) + } + } + if !utils.ResponseWasNotFound(existing.Response) { + return tf.ImportAsExistsError("azurerm_redis_linked_server", resourceId.ID()) + } + } + + parameters := redis.LinkedServerCreateParameters{ + LinkedServerCreateProperties: &redis.LinkedServerCreateProperties{ + LinkedRedisCacheID: utils.String(linkedRedisCacheId), + LinkedRedisCacheLocation: utils.String(linkedRedisCacheLocation), + ServerRole: serverRole, + }, + } + + future, err := client.Create(ctx, resourceId.ResourceGroup, resourceId.RediName, resourceId.Name, parameters) + if err != nil { + return fmt.Errorf("waiting for creation of Linked Server %q (Redis Cache %q / Resource Group %q): %+v", resourceId.Name, resourceId.RediName, resourceId.ResourceGroup, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for the creation of Linked Server %q (Redis Cache %q / Resource Group %q): %+v", resourceId.Name, resourceId.RediName, resourceId.ResourceGroup, err) + } + + log.Printf("[DEBUG] Waiting for Linked Server %q (Redis Cache %q / Resource Group %q) to become available", resourceId.Name, resourceId.RediName, resourceId.ResourceGroup) + stateConf := &resource.StateChangeConf{ + Pending: []string{"Linking", "Updating", "Creating", "Syncing"}, + Target: []string{"Succeeded"}, + Refresh: redisLinkedServerStateRefreshFunc(ctx, client, resourceId), + MinTimeout: 15 * time.Second, + Timeout: d.Timeout(schema.TimeoutCreate), + } + + if _, err = stateConf.WaitForState(); err != nil { + return fmt.Errorf("waiting for Linked Server %q (Redis Cache %q / Resource Group %q) to become available: %+v", resourceId.Name, resourceId.RediName, resourceId.ResourceGroup, err) + } + + d.SetId(resourceId.ID()) + return resourceRedisLinkedServerRead(d, meta) +} + +func resourceRedisLinkedServerRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Redis.LinkedServerClient 
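	// Illustrative note: the generated ID parsers used below expose the cache
	// segment as `RediName` rather than `RedisName` (see the TODO in
	// resourceids.go later in this diff). Given the generator input there, an ID
	// of the form .../providers/Microsoft.Cache/Redis/redis1/linkedServers/linkedServer1
	// is expected to parse into ResourceGroup, RediName ("redis1") and Name
	// ("linkedServer1"), which is how those fields are used throughout this file.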
+ ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.LinkedServerID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.RediName, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[DEBUG] Linked Server %q (Redis Cache %q / Resource Group %q) was not found - removing from state!", id.Name, id.RediName, id.ResourceGroup) + d.SetId("") + return nil + } + + return fmt.Errorf("retrieving Linked Server %q (Redis Cache %q / Resource Group %q): %+v", id.Name, id.RediName, id.ResourceGroup, err) + } + + d.Set("name", id.Name) + d.Set("target_redis_cache_name", id.RediName) + d.Set("resource_group_name", id.ResourceGroup) + if props := resp.LinkedServerProperties; props != nil { + linkedRedisCacheId := "" + if props.LinkedRedisCacheID != nil { + cacheId, err := parse.CacheID(*props.LinkedRedisCacheID) + if err != nil { + return err + } + + linkedRedisCacheId = cacheId.ID() + } + d.Set("linked_redis_cache_id", linkedRedisCacheId) + + d.Set("linked_redis_cache_location", props.LinkedRedisCacheLocation) + d.Set("server_role", string(props.ServerRole)) + } + + return nil +} + +func resourceRedisLinkedServerDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Redis.LinkedServerClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.LinkedServerID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Delete(ctx, id.ResourceGroup, id.RediName, id.Name) + if err != nil { + if !utils.ResponseWasNotFound(resp) { + return fmt.Errorf("deleting Linked Server %q (Redis Cache %q / Resource Group %q): %+v", id.Name, id.RediName, id.ResourceGroup, err) + } + } + + // No LinkedServerDeleteFuture + // https://github.com/Azure/azure-sdk-for-go/issues/12159 + log.Printf("[DEBUG] Waiting for Linked Server %q (Redis Cache %q / Resource Group %q) to be eventually deleted", id.Name, id.RediName, id.ResourceGroup) + stateConf := &resource.StateChangeConf{ + Pending: []string{"Exists"}, + Target: []string{"NotFound"}, + Refresh: redisLinkedServerDeleteStateRefreshFunc(ctx, client, *id), + MinTimeout: 10 * time.Second, + ContinuousTargetOccurence: 10, + Timeout: d.Timeout(schema.TimeoutDelete), + } + + if _, err := stateConf.WaitForState(); err != nil { + return fmt.Errorf("waiting for Linked Server %q (Redis Cache %q / Resource Group %q) to be deleted: %+v", id.Name, id.RediName, id.ResourceGroup, err) + } + + return nil +} + +func redisLinkedServerStateRefreshFunc(ctx context.Context, client *redis.LinkedServerClient, id parse.LinkedServerId) resource.StateRefreshFunc { + return func() (interface{}, string, error) { + res, err := client.Get(ctx, id.ResourceGroup, id.RediName, id.Name) + if err != nil { + return nil, "", fmt.Errorf("retrieving status of Linked Server %q (Redis Cache %q / Resource Group %q): %+v", id.Name, id.RediName, id.ResourceGroup, err) + } + + return res, *res.LinkedServerProperties.ProvisioningState, nil + } +} + +func redisLinkedServerDeleteStateRefreshFunc(ctx context.Context, client *redis.LinkedServerClient, id parse.LinkedServerId) resource.StateRefreshFunc { + return func() (interface{}, string, error) { + res, err := client.Get(ctx, id.ResourceGroup, id.RediName, id.Name) + if err != nil { + if utils.ResponseWasNotFound(res.Response) { + return "NotFound", "NotFound", nil + } + + return nil, "", fmt.Errorf("retrieving status of Linked Server 
%q (Redis Cache %q / Resource Group %q): %+v", id.Name, id.RediName, id.ResourceGroup, err) + } + + return res, "Exists", nil + } +} diff --git a/azurerm/internal/services/redis/redis_linked_server_resource_test.go b/azurerm/internal/services/redis/redis_linked_server_resource_test.go new file mode 100644 index 000000000000..651ba79be7fb --- /dev/null +++ b/azurerm/internal/services/redis/redis_linked_server_resource_test.go @@ -0,0 +1,134 @@ +package redis_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/redis/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type RedisLinkedServerResource struct { +} + +func TestAccRedisLinkedServer_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_redis_linked_server", "test") + r := RedisLinkedServerResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccRedisLinkedServer_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_redis_linked_server", "test") + r := RedisLinkedServerResource{} + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (t RedisLinkedServerResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.LinkedServerID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Redis.LinkedServerClient.Get(ctx, id.ResourceGroup, id.RediName, id.Name) + if err != nil { + return nil, fmt.Errorf("reading Redis Linked Server (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.LinkedServerProperties != nil), nil +} + +func (RedisLinkedServerResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "pri" { + name = "acctestRG-redis-%d" + location = "%s" +} + +resource "azurerm_redis_cache" "pri" { + name = "acctestRedispri%d" + location = azurerm_resource_group.pri.location + resource_group_name = azurerm_resource_group.pri.name + capacity = 1 + family = "P" + sku_name = "Premium" + enable_non_ssl_port = false + + redis_configuration { + maxmemory_reserved = 2 + maxmemory_delta = 2 + maxmemory_policy = "allkeys-lru" + } +} + +resource "azurerm_resource_group" "sec" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_redis_cache" "sec" { + name = "acctestRedissec%d" + location = azurerm_resource_group.sec.location + resource_group_name = azurerm_resource_group.sec.name + capacity = 1 + family = "P" + sku_name = "Premium" + enable_non_ssl_port = false + + redis_configuration { + maxmemory_reserved = 2 + maxmemory_delta = 2 + maxmemory_policy = "allkeys-lru" + } +} + +resource "azurerm_redis_linked_server" "test" { + 
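  # Note (illustrative): no "name" is set here; as the resource code above
  # indicates, the linked server name must match the name segment of
  # linked_redis_cache_id, so the provider derives it and exposes it via the
  # computed "name" attribute.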
target_redis_cache_name = azurerm_redis_cache.pri.name + resource_group_name = azurerm_redis_cache.pri.resource_group_name + linked_redis_cache_id = azurerm_redis_cache.sec.id + linked_redis_cache_location = azurerm_redis_cache.sec.location + server_role = "Secondary" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, + data.RandomInteger, data.Locations.Secondary, data.RandomInteger) +} + +func (r RedisLinkedServerResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_redis_linked_server" "import" { + target_redis_cache_name = azurerm_redis_linked_server.test.target_redis_cache_name + resource_group_name = azurerm_redis_linked_server.test.resource_group_name + linked_redis_cache_id = azurerm_redis_linked_server.test.linked_redis_cache_id + linked_redis_cache_location = azurerm_redis_linked_server.test.linked_redis_cache_location + server_role = azurerm_redis_linked_server.test.server_role +} +`, r.basic(data)) +} diff --git a/azurerm/internal/services/redis/registration.go b/azurerm/internal/services/redis/registration.go index fae41cb4a954..beb18d14e8a7 100644 --- a/azurerm/internal/services/redis/registration.go +++ b/azurerm/internal/services/redis/registration.go @@ -21,15 +21,15 @@ func (r Registration) WebsiteCategories() []string { // SupportedDataSources returns the supported Data Sources supported by this Service func (r Registration) SupportedDataSources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_redis_cache": dataSourceArmRedisCache(), + "azurerm_redis_cache": dataSourceRedisCache(), } } // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_redis_cache": resourceArmRedisCache(), - "azurerm_redis_firewall_rule": resourceArmRedisFirewallRule(), - "azurerm_redis_linked_server": resourceArmRedisLinkedServer(), + "azurerm_redis_cache": resourceRedisCache(), + "azurerm_redis_firewall_rule": resourceRedisFirewallRule(), + "azurerm_redis_linked_server": resourceRedisLinkedServer(), } } diff --git a/azurerm/internal/services/redis/resource_arm_redis_cache.go b/azurerm/internal/services/redis/resource_arm_redis_cache.go deleted file mode 100644 index 3c582e93668a..000000000000 --- a/azurerm/internal/services/redis/resource_arm_redis_cache.go +++ /dev/null @@ -1,922 +0,0 @@ -package redis - -import ( - "context" - "fmt" - "log" - "net/http" - "strconv" - "strings" - "time" - - "github.com/Azure/azure-sdk-for-go/services/redis/mgmt/2018-03-01/redis" - "github.com/hashicorp/go-azure-helpers/response" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/locks" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/network" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmRedisCache() *schema.Resource { - return &schema.Resource{ - Create: resourceArmRedisCacheCreate, - Read: resourceArmRedisCacheRead, - Update: resourceArmRedisCacheUpdate, - Delete: resourceArmRedisCacheDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(90 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(90 * time.Minute), - Delete: schema.DefaultTimeout(90 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - }, - - "location": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - StateFunc: azure.NormalizeLocation, - }, - - "resource_group_name": azure.SchemaResourceGroupName(), - - "zones": azure.SchemaSingleZone(), - - "capacity": { - Type: schema.TypeInt, - Required: true, - }, - - "family": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validateRedisFamily, - DiffSuppressFunc: suppress.CaseDifference, - }, - - "sku_name": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringInSlice([]string{ - string(redis.Basic), - string(redis.Standard), - string(redis.Premium), - }, true), - DiffSuppressFunc: suppress.CaseDifference, - }, - - "minimum_tls_version": { - Type: schema.TypeString, - Optional: true, - Default: redis.OneFullStopZero, - ValidateFunc: validation.StringInSlice([]string{ - string(redis.OneFullStopZero), - string(redis.OneFullStopOne), - string(redis.OneFullStopTwo), - }, false), - }, - - "shard_count": { - Type: schema.TypeInt, - Optional: true, - }, - - "enable_non_ssl_port": { - Type: schema.TypeBool, - Default: false, - Optional: true, - }, - - "subnet_id": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - }, - - "private_static_ip_address": { - Type: schema.TypeString, - Optional: true, - Computed: true, - ForceNew: true, - }, - - "redis_configuration": { - Type: schema.TypeList, - Optional: true, - Computed: true, - MaxItems: 1, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "maxclients": { - Type: schema.TypeInt, - Computed: true, - }, - - "maxmemory_delta": { - Type: schema.TypeInt, - Optional: true, - Computed: true, - }, - - "maxmemory_reserved": { - Type: schema.TypeInt, - Optional: true, - Computed: true, - }, - - "maxmemory_policy": { - Type: schema.TypeString, - Optional: true, - Default: "volatile-lru", - ValidateFunc: validateRedisMaxMemoryPolicy, - }, - - "maxfragmentationmemory_reserved": { - Type: schema.TypeInt, - Optional: true, - Computed: true, - }, - - "rdb_backup_enabled": { - Type: schema.TypeBool, - Optional: true, - }, - - "rdb_backup_frequency": { - Type: schema.TypeInt, - Optional: true, - ValidateFunc: validateRedisBackupFrequency, - }, - - "rdb_backup_max_snapshot_count": { - Type: schema.TypeInt, - Optional: true, - }, - - "rdb_storage_connection_string": { - Type: schema.TypeString, - Optional: true, - Sensitive: true, - }, - - "notify_keyspace_events": { - Type: schema.TypeString, - Optional: true, - }, - - "aof_backup_enabled": { - Type: schema.TypeBool, - Optional: true, - }, - - "aof_storage_connection_string_0": { - Type: schema.TypeString, - Optional: true, - Sensitive: true, - }, - - "aof_storage_connection_string_1": { - Type: 
schema.TypeString, - Optional: true, - Sensitive: true, - }, - "enable_authentication": { - Type: schema.TypeBool, - Optional: true, - Default: true, - }, - }, - }, - }, - - "patch_schedule": { - Type: schema.TypeList, - Optional: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "day_of_week": { - Type: schema.TypeString, - Required: true, - DiffSuppressFunc: suppress.CaseDifference, - ValidateFunc: validation.IsDayOfTheWeek(true), - }, - "start_hour_utc": { - Type: schema.TypeInt, - Optional: true, - ValidateFunc: validation.IntBetween(0, 23), - }, - }, - }, - }, - - "hostname": { - Type: schema.TypeString, - Computed: true, - }, - - "port": { - Type: schema.TypeInt, - Computed: true, - }, - - "ssl_port": { - Type: schema.TypeInt, - Computed: true, - }, - - "primary_access_key": { - Type: schema.TypeString, - Computed: true, - Sensitive: true, - }, - - "secondary_access_key": { - Type: schema.TypeString, - Computed: true, - Sensitive: true, - }, - - "primary_connection_string": { - Type: schema.TypeString, - Computed: true, - Sensitive: true, - }, - - "secondary_connection_string": { - Type: schema.TypeString, - Computed: true, - Sensitive: true, - }, - - "tags": tags.Schema(), - }, - } -} - -func resourceArmRedisCacheCreate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Redis.Client - ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) - defer cancel() - log.Printf("[INFO] preparing arguments for Azure ARM Redis Cache creation.") - - name := d.Get("name").(string) - location := azure.NormalizeLocation(d.Get("location").(string)) - resGroup := d.Get("resource_group_name").(string) - - enableNonSSLPort := d.Get("enable_non_ssl_port").(bool) - - capacity := int32(d.Get("capacity").(int)) - family := redis.SkuFamily(d.Get("family").(string)) - sku := redis.SkuName(d.Get("sku_name").(string)) - - t := d.Get("tags").(map[string]interface{}) - expandedTags := tags.Expand(t) - - existing, err := client.Get(ctx, resGroup, name) - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing Redis Instance %s (resource group %s) ID", name, resGroup) - } - } - - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_redis_cache", *existing.ID) - } - - patchSchedule := expandRedisPatchSchedule(d) - redisConfiguration, err := expandRedisConfiguration(d) - if err != nil { - return fmt.Errorf("Error parsing Redis Configuration: %+v", err) - } - - parameters := redis.CreateParameters{ - Location: utils.String(location), - CreateProperties: &redis.CreateProperties{ - EnableNonSslPort: utils.Bool(enableNonSSLPort), - Sku: &redis.Sku{ - Capacity: utils.Int32(capacity), - Family: family, - Name: sku, - }, - MinimumTLSVersion: redis.TLSVersion(d.Get("minimum_tls_version").(string)), - RedisConfiguration: redisConfiguration, - }, - Tags: expandedTags, - } - - if v, ok := d.GetOk("shard_count"); ok { - shardCount := int32(v.(int)) - parameters.ShardCount = &shardCount - } - - if v, ok := d.GetOk("private_static_ip_address"); ok { - parameters.StaticIP = utils.String(v.(string)) - } - - if v, ok := d.GetOk("subnet_id"); ok { - parsed, parseErr := azure.ParseAzureResourceID(v.(string)) - if parseErr != nil { - return fmt.Errorf("Error parsing Azure Resource ID %q", v.(string)) - } - subnetName := parsed.Path["subnets"] - virtualNetworkName := parsed.Path["virtualNetworks"] - - locks.ByName(virtualNetworkName, 
network.VirtualNetworkResourceName) - defer locks.UnlockByName(virtualNetworkName, network.VirtualNetworkResourceName) - - locks.ByName(subnetName, network.SubnetResourceName) - defer locks.UnlockByName(subnetName, network.SubnetResourceName) - - parameters.SubnetID = utils.String(v.(string)) - } - - if v, ok := d.GetOk("zones"); ok { - parameters.Zones = azure.ExpandZones(v.([]interface{})) - } - - future, err := client.Create(ctx, resGroup, name, parameters) - if err != nil { - return fmt.Errorf("Error issuing create request for Redis Cache %s (resource group %s): %v", name, resGroup, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting for the create of Redis Cache %s (resource group %s): %v", name, resGroup, err) - } - - read, err := client.Get(ctx, resGroup, name) - if err != nil { - return fmt.Errorf("Error reading Redis Cache %s (resource group %s): %v", name, resGroup, err) - } - if read.ID == nil { - return fmt.Errorf("Cannot read Redis Cache %s (resource group %s) ID", name, resGroup) - } - - log.Printf("[DEBUG] Waiting for Redis Cache (%s) to become available", d.Get("name")) - stateConf := &resource.StateChangeConf{ - Pending: []string{"Scaling", "Updating", "Creating"}, - Target: []string{"Succeeded"}, - Refresh: redisStateRefreshFunc(ctx, client, resGroup, name), - MinTimeout: 15 * time.Second, - Timeout: d.Timeout(schema.TimeoutCreate), - } - - if _, err = stateConf.WaitForState(); err != nil { - return fmt.Errorf("Error waiting for Redis Cache (%s) to become available: %s", d.Get("name"), err) - } - - d.SetId(*read.ID) - - if schedule := patchSchedule; schedule != nil { - patchClient := meta.(*clients.Client).Redis.PatchSchedulesClient - _, err = patchClient.CreateOrUpdate(ctx, resGroup, name, *schedule) - if err != nil { - return fmt.Errorf("Error setting Redis Patch Schedule: %+v", err) - } - } - - return resourceArmRedisCacheRead(d, meta) -} - -func resourceArmRedisCacheUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Redis.Client - ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - log.Printf("[INFO] preparing arguments for Azure ARM Redis Cache update.") - - name := d.Get("name").(string) - resGroup := d.Get("resource_group_name").(string) - - enableNonSSLPort := d.Get("enable_non_ssl_port").(bool) - - capacity := int32(d.Get("capacity").(int)) - family := redis.SkuFamily(d.Get("family").(string)) - sku := redis.SkuName(d.Get("sku_name").(string)) - - t := d.Get("tags").(map[string]interface{}) - expandedTags := tags.Expand(t) - - parameters := redis.UpdateParameters{ - UpdateProperties: &redis.UpdateProperties{ - MinimumTLSVersion: redis.TLSVersion(d.Get("minimum_tls_version").(string)), - EnableNonSslPort: utils.Bool(enableNonSSLPort), - Sku: &redis.Sku{ - Capacity: utils.Int32(capacity), - Family: family, - Name: sku, - }, - }, - Tags: expandedTags, - } - - if v, ok := d.GetOk("shard_count"); ok { - if d.HasChange("shard_count") { - shardCount := int32(v.(int)) - parameters.ShardCount = &shardCount - } - } - - if d.HasChange("redis_configuration") { - redisConfiguration, err := expandRedisConfiguration(d) - if err != nil { - return fmt.Errorf("Error parsing Redis Configuration: %+v", err) - } - parameters.RedisConfiguration = redisConfiguration - } - - if _, err := client.Update(ctx, resGroup, name, parameters); err != nil { - return err - } - - read, err := client.Get(ctx, resGroup, name) - if err != nil { - return 
err - } - if read.ID == nil { - return fmt.Errorf("Cannot read Redis Instance %s (resource group %s) ID", name, resGroup) - } - - log.Printf("[DEBUG] Waiting for Redis Instance (%s) to become available", d.Get("name")) - stateConf := &resource.StateChangeConf{ - Pending: []string{"Scaling", "Updating", "Creating"}, - Target: []string{"Succeeded"}, - Refresh: redisStateRefreshFunc(ctx, client, resGroup, name), - MinTimeout: 15 * time.Second, - Timeout: d.Timeout(schema.TimeoutUpdate), - } - - if _, err = stateConf.WaitForState(); err != nil { - return fmt.Errorf("Error waiting for Redis Instance (%s) to become available: %s", d.Get("name"), err) - } - - d.SetId(*read.ID) - - patchSchedule := expandRedisPatchSchedule(d) - - patchClient := meta.(*clients.Client).Redis.PatchSchedulesClient - if patchSchedule == nil || len(*patchSchedule.ScheduleEntries.ScheduleEntries) == 0 { - _, err = patchClient.Delete(ctx, resGroup, name) - if err != nil { - return fmt.Errorf("Error deleting Redis Patch Schedule: %+v", err) - } - } else { - _, err = patchClient.CreateOrUpdate(ctx, resGroup, name, *patchSchedule) - if err != nil { - return fmt.Errorf("Error setting Redis Patch Schedule: %+v", err) - } - } - - return resourceArmRedisCacheRead(d, meta) -} - -func resourceArmRedisCacheRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Redis.Client - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - resGroup := id.ResourceGroup - name := id.Path["Redis"] - - resp, err := client.Get(ctx, resGroup, name) - - // covers if the resource has been deleted outside of TF, but is still in the state - if resp.StatusCode == http.StatusNotFound { - d.SetId("") - return nil - } - - if err != nil { - return fmt.Errorf("Error making Read request on Azure Redis Cache %s: %s", name, err) - } - - keysResp, err := client.ListKeys(ctx, resGroup, name) - if err != nil { - return fmt.Errorf("Error making ListKeys request on Azure Redis Cache %s: %s", name, err) - } - - patchSchedulesClient := meta.(*clients.Client).Redis.PatchSchedulesClient - - schedule, err := patchSchedulesClient.Get(ctx, resGroup, name) - if err == nil { - patchSchedule := flattenRedisPatchSchedules(schedule) - if err = d.Set("patch_schedule", patchSchedule); err != nil { - return fmt.Errorf("Error setting `patch_schedule`: %+v", err) - } - } - - d.Set("name", name) - d.Set("resource_group_name", resGroup) - if location := resp.Location; location != nil { - d.Set("location", azure.NormalizeLocation(*location)) - } - if zones := resp.Zones; zones != nil { - d.Set("zones", zones) - } - - if sku := resp.Sku; sku != nil { - d.Set("capacity", sku.Capacity) - d.Set("family", sku.Family) - d.Set("sku_name", sku.Name) - } - - props := resp.Properties - if props != nil { - d.Set("ssl_port", props.SslPort) - d.Set("hostname", props.HostName) - d.Set("minimum_tls_version", string(props.MinimumTLSVersion)) - d.Set("port", props.Port) - d.Set("enable_non_ssl_port", props.EnableNonSslPort) - if props.ShardCount != nil { - d.Set("shard_count", props.ShardCount) - } - d.Set("private_static_ip_address", props.StaticIP) - d.Set("subnet_id", props.SubnetID) - } - - redisConfiguration, err := flattenRedisConfiguration(resp.RedisConfiguration) - if err != nil { - return fmt.Errorf("Error flattening `redis_configuration`: %+v", err) - } - if err := d.Set("redis_configuration", redisConfiguration); err != nil { - return 
fmt.Errorf("Error setting `redis_configuration`: %+v", err) - } - - d.Set("primary_access_key", keysResp.PrimaryKey) - d.Set("secondary_access_key", keysResp.SecondaryKey) - - if props != nil { - enableSslPort := !*props.EnableNonSslPort - d.Set("primary_connection_string", getRedisConnectionString(*props.HostName, *props.SslPort, *keysResp.PrimaryKey, enableSslPort)) - d.Set("secondary_connection_string", getRedisConnectionString(*props.HostName, *props.SslPort, *keysResp.SecondaryKey, enableSslPort)) - } - - return tags.FlattenAndSet(d, resp.Tags) -} - -func resourceArmRedisCacheDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Redis.Client - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - resGroup := id.ResourceGroup - name := id.Path["Redis"] - - read, err := client.Get(ctx, resGroup, name) - if err != nil { - return fmt.Errorf("Error retrieving Redis Cache %q (Resource Group %q): %+v", name, resGroup, err) - } - if read.Properties == nil { - return fmt.Errorf("Error retrieving Redis Cache properties %q (Resource Group %q): `props` was nil", name, resGroup) - } - props := *read.Properties - if subnetID := props.SubnetID; subnetID != nil { - parsed, parseErr := azure.ParseAzureResourceID(*subnetID) - if parseErr != nil { - return fmt.Errorf("Error parsing Azure Resource ID %q", *subnetID) - } - subnetName := parsed.Path["subnets"] - virtualNetworkName := parsed.Path["virtualNetworks"] - - locks.ByName(virtualNetworkName, network.VirtualNetworkResourceName) - defer locks.UnlockByName(virtualNetworkName, network.VirtualNetworkResourceName) - - locks.ByName(subnetName, network.SubnetResourceName) - defer locks.UnlockByName(subnetName, network.SubnetResourceName) - } - future, err := client.Delete(ctx, resGroup, name) - if err != nil { - if response.WasNotFound(future.Response()) { - return nil - } - - return err - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - if response.WasNotFound(future.Response()) { - return nil - } - - return err - } - - return nil -} - -func redisStateRefreshFunc(ctx context.Context, client *redis.Client, resourceGroupName string, sgName string) resource.StateRefreshFunc { - return func() (interface{}, string, error) { - res, err := client.Get(ctx, resourceGroupName, sgName) - if err != nil { - return nil, "", fmt.Errorf("Error issuing read request in redisStateRefreshFunc to Azure ARM for Redis Cache Instance '%s' (RG: '%s'): %s", sgName, resourceGroupName, err) - } - - return res, string(res.ProvisioningState), nil - } -} - -func expandRedisConfiguration(d *schema.ResourceData) (map[string]*string, error) { - output := make(map[string]*string) - - input := d.Get("redis_configuration").([]interface{}) - if len(input) == 0 || input[0] == nil { - return output, nil - } - raw := input[0].(map[string]interface{}) - - if v := raw["maxclients"].(int); v > 0 { - output["maxclients"] = utils.String(strconv.Itoa(v)) - } - - if v := raw["maxmemory_delta"].(int); v > 0 { - output["maxmemory-delta"] = utils.String(strconv.Itoa(v)) - } - - if v := raw["maxmemory_reserved"].(int); v > 0 { - output["maxmemory-reserved"] = utils.String(strconv.Itoa(v)) - } - - if v := raw["maxmemory_policy"].(string); v != "" { - output["maxmemory-policy"] = utils.String(v) - } - - if v := raw["maxfragmentationmemory_reserved"].(int); v > 0 { - output["maxfragmentationmemory-reserved"] = 
utils.String(strconv.Itoa(v)) - } - - // RDB Backup - if v := raw["rdb_backup_enabled"].(bool); v { - if connStr := raw["rdb_storage_connection_string"].(string); connStr == "" { - return nil, fmt.Errorf("The rdb_storage_connection_string property must be set when rdb_backup_enabled is true") - } - output["rdb-backup-enabled"] = utils.String(strconv.FormatBool(v)) - } - - if v := raw["rdb_backup_frequency"].(int); v > 0 { - output["rdb-backup-frequency"] = utils.String(strconv.Itoa(v)) - } - - if v := raw["rdb_backup_max_snapshot_count"].(int); v > 0 { - output["rdb-backup-max-snapshot-count"] = utils.String(strconv.Itoa(v)) - } - - if v := raw["rdb_storage_connection_string"].(string); v != "" { - output["rdb-storage-connection-string"] = utils.String(v) - } - - if v := raw["notify_keyspace_events"].(string); v != "" { - output["notify-keyspace-events"] = utils.String(v) - } - - // AOF Backup - if v := raw["aof_backup_enabled"].(bool); v { - output["aof-backup-enabled"] = utils.String(strconv.FormatBool(v)) - } - - if v := raw["aof_storage_connection_string_0"].(string); v != "" { - output["aof-storage-connection-string-0"] = utils.String(v) - } - - if v := raw["aof_storage_connection_string_1"].(string); v != "" { - output["aof-storage-connection-string-1"] = utils.String(v) - } - - authEnabled := raw["enable_authentication"].(bool) - // Redis authentication can only be disabled if it is launched inside a VNET. - if _, isPrivate := d.GetOk("subnet_id"); !isPrivate { - if !authEnabled { - return nil, fmt.Errorf("Cannot set `enable_authentication` to `false` when `subnet_id` is not set") - } - } else { - value := isAuthNotRequiredAsString(authEnabled) - output["authnotrequired"] = utils.String(value) - } - return output, nil -} - -func expandRedisPatchSchedule(d *schema.ResourceData) *redis.PatchSchedule { - v, ok := d.GetOk("patch_schedule") - if !ok { - return nil - } - - scheduleValues := v.([]interface{}) - entries := make([]redis.ScheduleEntry, 0) - for _, scheduleValue := range scheduleValues { - vals := scheduleValue.(map[string]interface{}) - dayOfWeek := vals["day_of_week"].(string) - startHourUtc := vals["start_hour_utc"].(int) - - entry := redis.ScheduleEntry{ - DayOfWeek: redis.DayOfWeek(dayOfWeek), - StartHourUtc: utils.Int32(int32(startHourUtc)), - } - entries = append(entries, entry) - } - - schedule := redis.PatchSchedule{ - ScheduleEntries: &redis.ScheduleEntries{ - ScheduleEntries: &entries, - }, - } - return &schedule -} - -func flattenRedisConfiguration(input map[string]*string) ([]interface{}, error) { - outputs := make(map[string]interface{}, len(input)) - - if v := input["maxclients"]; v != nil { - i, err := strconv.Atoi(*v) - if err != nil { - return nil, fmt.Errorf("Error parsing `maxclients` %q: %+v", *v, err) - } - outputs["maxclients"] = i - } - if v := input["maxmemory-delta"]; v != nil { - i, err := strconv.Atoi(*v) - if err != nil { - return nil, fmt.Errorf("Error parsing `maxmemory-delta` %q: %+v", *v, err) - } - outputs["maxmemory_delta"] = i - } - if v := input["maxmemory-reserved"]; v != nil { - i, err := strconv.Atoi(*v) - if err != nil { - return nil, fmt.Errorf("Error parsing `maxmemory-reserved` %q: %+v", *v, err) - } - outputs["maxmemory_reserved"] = i - } - if v := input["maxmemory-policy"]; v != nil { - outputs["maxmemory_policy"] = *v - } - - if v := input["maxfragmentationmemory-reserved"]; v != nil { - i, err := strconv.Atoi(*v) - if err != nil { - return nil, fmt.Errorf("Error parsing `maxfragmentationmemory-reserved` %q: %+v", *v, err) - } - 
outputs["maxfragmentationmemory_reserved"] = i - } - - // delta, reserved, enabled, frequency,, count, - if v := input["rdb-backup-enabled"]; v != nil { - b, err := strconv.ParseBool(*v) - if err != nil { - return nil, fmt.Errorf("Error parsing `rdb-backup-enabled` %q: %+v", *v, err) - } - outputs["rdb_backup_enabled"] = b - } - if v := input["rdb-backup-frequency"]; v != nil { - i, err := strconv.Atoi(*v) - if err != nil { - return nil, fmt.Errorf("Error parsing `rdb-backup-frequency` %q: %+v", *v, err) - } - outputs["rdb_backup_frequency"] = i - } - if v := input["rdb-backup-max-snapshot-count"]; v != nil { - i, err := strconv.Atoi(*v) - if err != nil { - return nil, fmt.Errorf("Error parsing `rdb-backup-max-snapshot-count` %q: %+v", *v, err) - } - outputs["rdb_backup_max_snapshot_count"] = i - } - if v := input["rdb-storage-connection-string"]; v != nil { - outputs["rdb_storage_connection_string"] = *v - } - if v := input["notify-keyspace-events"]; v != nil { - outputs["notify_keyspace_events"] = *v - } - - if v := input["aof-backup-enabled"]; v != nil { - b, err := strconv.ParseBool(*v) - if err != nil { - return nil, fmt.Errorf("Error parsing `aof-backup-enabled` %q: %+v", *v, err) - } - outputs["aof_backup_enabled"] = b - } - if v := input["aof-storage-connection-string-0"]; v != nil { - outputs["aof_storage_connection_string_0"] = *v - } - if v := input["aof-storage-connection-string-1"]; v != nil { - outputs["aof_storage_connection_string_1"] = *v - } - - // `authnotrequired` is not set for instances launched outside a VNET - outputs["enable_authentication"] = true - if v := input["authnotrequired"]; v != nil { - outputs["enable_authentication"] = isAuthRequiredAsBool(*v) - } - - return []interface{}{outputs}, nil -} - -func isAuthRequiredAsBool(not_required string) bool { - value := strings.ToLower(not_required) - output := map[string]bool{ - "yes": false, - "no": true, - } - return output[value] -} - -func isAuthNotRequiredAsString(auth_required bool) string { - output := map[bool]string{ - true: "no", - false: "yes", - } - return output[auth_required] -} - -func flattenRedisPatchSchedules(schedule redis.PatchSchedule) []interface{} { - outputs := make([]interface{}, 0) - - for _, entry := range *schedule.ScheduleEntries.ScheduleEntries { - output := make(map[string]interface{}) - - output["day_of_week"] = string(entry.DayOfWeek) - output["start_hour_utc"] = int(*entry.StartHourUtc) - - outputs = append(outputs, output) - } - - return outputs -} - -func validateRedisFamily(v interface{}, _ string) (warnings []string, errors []error) { - value := strings.ToLower(v.(string)) - families := map[string]bool{ - "c": true, - "p": true, - } - - if !families[value] { - errors = append(errors, fmt.Errorf("Redis Family can only be C or P")) - } - return warnings, errors -} - -func validateRedisMaxMemoryPolicy(v interface{}, _ string) (warnings []string, errors []error) { - value := strings.ToLower(v.(string)) - families := map[string]bool{ - "noeviction": true, - "allkeys-lru": true, - "volatile-lru": true, - "allkeys-random": true, - "volatile-random": true, - "volatile-ttl": true, - "allkeys-lfu": true, - "volatile-lfu": true, - } - - if !families[value] { - errors = append(errors, fmt.Errorf("Redis Max Memory Policy can only be 'noeviction' / 'allkeys-lru' / 'volatile-lru' / 'allkeys-random' / 'volatile-random' / 'volatile-ttl' / 'allkeys-lfu' / 'volatile-lfu'")) - } - - return warnings, errors -} - -func validateRedisBackupFrequency(v interface{}, _ string) (warnings []string, errors 
[]error) { - value := v.(int) - families := map[int]bool{ - 15: true, - 30: true, - 60: true, - 360: true, - 720: true, - 1440: true, - } - - if !families[value] { - errors = append(errors, fmt.Errorf("Redis Backup Frequency can only be '15', '30', '60', '360', '720' or '1440'")) - } - - return warnings, errors -} - -func getRedisConnectionString(redisHostName string, sslPort int32, accessKey string, enableSslPort bool) string { - return fmt.Sprintf("%s:%d,password=%s,ssl=%t,abortConnect=False", redisHostName, sslPort, accessKey, enableSslPort) -} diff --git a/azurerm/internal/services/redis/resource_arm_redis_firewall_rule.go b/azurerm/internal/services/redis/resource_arm_redis_firewall_rule.go deleted file mode 100644 index 11419e70f000..000000000000 --- a/azurerm/internal/services/redis/resource_arm_redis_firewall_rule.go +++ /dev/null @@ -1,193 +0,0 @@ -package redis - -import ( - "fmt" - "log" - "regexp" - "time" - - "github.com/Azure/azure-sdk-for-go/services/redis/mgmt/2018-03-01/redis" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmRedisFirewallRule() *schema.Resource { - return &schema.Resource{ - Create: resourceArmRedisFirewallRuleCreateUpdate, - Read: resourceArmRedisFirewallRuleRead, - Update: resourceArmRedisFirewallRuleCreateUpdate, - Delete: resourceArmRedisFirewallRuleDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validateRedisFirewallRuleName, - }, - - "redis_cache_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - }, - - "resource_group_name": azure.SchemaResourceGroupName(), - - "start_ip": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.All( - validation.IsIPAddress, - validation.StringIsNotEmpty, - ), - }, - - "end_ip": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.All( - validation.IsIPAddress, - validation.StringIsNotEmpty, - ), - }, - }, - } -} - -func resourceArmRedisFirewallRuleCreateUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Redis.FirewallRulesClient - ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - log.Printf("[INFO] preparing arguments for AzureRM Redis Firewall Rule creation.") - - name := d.Get("name").(string) - cacheName := d.Get("redis_cache_name").(string) - resourceGroup := d.Get("resource_group_name").(string) - startIP := d.Get("start_ip").(string) - endIP := d.Get("end_ip").(string) - - if d.IsNewResource() { - existing, err := client.Get(ctx, resourceGroup, cacheName, name) - if err != nil { - if 
!utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing Redis Firewall Rule %q (cache %q / resource group %q) ID", name, cacheName, resourceGroup) - } - } - - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_redis_firewall_rule", *existing.ID) - } - } - - parameters := redis.FirewallRuleCreateParameters{ - FirewallRuleProperties: &redis.FirewallRuleProperties{ - StartIP: utils.String(startIP), - EndIP: utils.String(endIP), - }, - } - - return resource.Retry(d.Timeout(schema.TimeoutCreate), func() *resource.RetryError { - if _, err := client.CreateOrUpdate(ctx, resourceGroup, cacheName, name, parameters); err != nil { - return resource.NonRetryableError(fmt.Errorf("Error creating the rule: %s", err)) - } - - read, err := client.Get(ctx, resourceGroup, cacheName, name) - if err != nil { - return resource.RetryableError(fmt.Errorf("Expected instance to be created but was in non existent state, retrying")) - } - if read.ID == nil { - return resource.NonRetryableError(fmt.Errorf("Cannot read Redis Firewall Rule %q (cache %q / resource group %q) ID", name, cacheName, resourceGroup)) - } - - d.SetId(*read.ID) - - return resource.NonRetryableError(resourceArmRedisFirewallRuleRead(d, meta)) - }) -} - -func resourceArmRedisFirewallRuleRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Redis.FirewallRulesClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - resourceGroup := id.ResourceGroup - cacheName := id.Path["Redis"] - name := id.Path["firewallRules"] - - resp, err := client.Get(ctx, resourceGroup, cacheName, name) - - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[DEBUG] Redis Firewall Rule %q was not found in Cache %q / Resource Group %q - removing from state", name, cacheName, resourceGroup) - d.SetId("") - return nil - } - - return fmt.Errorf("Error making Read request on Azure Redis Firewall Rule %q: %+v", name, err) - } - - d.Set("name", name) - d.Set("redis_cache_name", cacheName) - d.Set("resource_group_name", resourceGroup) - if props := resp.FirewallRuleProperties; props != nil { - d.Set("start_ip", props.StartIP) - d.Set("end_ip", props.EndIP) - } - - return nil -} - -func resourceArmRedisFirewallRuleDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Redis.FirewallRulesClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - resourceGroup := id.ResourceGroup - cacheName := id.Path["Redis"] - name := id.Path["firewallRules"] - - resp, err := client.Delete(ctx, resourceGroup, cacheName, name) - - if err != nil { - if !utils.ResponseWasNotFound(resp) { - return fmt.Errorf("Error issuing AzureRM delete request of Redis Firewall Rule %q (cache %q / resource group %q): %+v", name, cacheName, resourceGroup, err) - } - } - - return nil -} - -func validateRedisFirewallRuleName(v interface{}, k string) (warnings []string, errors []error) { - value := v.(string) - - if matched := regexp.MustCompile(`^\w+$`).Match([]byte(value)); !matched { - errors = append(errors, fmt.Errorf("%q may only contain alphanumeric characters and underscores", k)) - } - - return warnings, errors -} diff --git 
a/azurerm/internal/services/redis/resource_arm_redis_linked_server.go b/azurerm/internal/services/redis/resource_arm_redis_linked_server.go deleted file mode 100644 index 97fa6b9ab187..000000000000 --- a/azurerm/internal/services/redis/resource_arm_redis_linked_server.go +++ /dev/null @@ -1,252 +0,0 @@ -package redis - -import ( - "context" - "fmt" - "log" - "time" - - "github.com/Azure/azure-sdk-for-go/services/redis/mgmt/2018-03-01/redis" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmRedisLinkedServer() *schema.Resource { - return &schema.Resource{ - Create: resourceArmRedisLinkedServerCreate, - Read: resourceArmRedisLinkedServerRead, - Delete: resourceArmRedisLinkedServerDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "target_redis_cache_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - - "linked_redis_cache_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azure.ValidateResourceID, - }, - - "linked_redis_cache_location": azure.SchemaLocation(), - - "resource_group_name": azure.SchemaResourceGroupName(), - - "server_role": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringInSlice([]string{ - string(redis.ReplicationRolePrimary), - string(redis.ReplicationRoleSecondary), - }, true), - DiffSuppressFunc: suppress.CaseDifference, - }, - - "name": { - Type: schema.TypeString, - Computed: true, - }, - }, - } -} - -func resourceArmRedisLinkedServerCreate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Redis.LinkedServerClient - ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) - defer cancel() - log.Printf("[INFO] preparing arguments for AzureRM Redis Linked Server creation.") - - redisCacheName := d.Get("target_redis_cache_name").(string) - linkedRedisCacheId := d.Get("linked_redis_cache_id").(string) - linkedRedisCacheLocation := d.Get("linked_redis_cache_location").(string) - resourceGroup := d.Get("resource_group_name").(string) - serverRole := redis.ReplicationRole(d.Get("server_role").(string)) - - // The name needs to match the linked_redis_cache_id - id, err := azure.ParseAzureResourceID(linkedRedisCacheId) - if err != nil { - return err - } - name := id.Path["Redis"] - - if d.IsNewResource() { - existing, err := client.Get(ctx, resourceGroup, redisCacheName, name) - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing 
Redis Linked Server %q (cache %q / resource group %q) ID", name, redisCacheName, resourceGroup) - } - } - - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_redis_linked_server", *existing.ID) - } - } - - parameters := redis.LinkedServerCreateParameters{ - LinkedServerCreateProperties: &redis.LinkedServerCreateProperties{ - LinkedRedisCacheID: utils.String(linkedRedisCacheId), - LinkedRedisCacheLocation: utils.String(linkedRedisCacheLocation), - ServerRole: serverRole, - }, - } - - return resource.Retry(d.Timeout(schema.TimeoutCreate), func() *resource.RetryError { - future, err := client.Create(ctx, resourceGroup, redisCacheName, name, parameters) - if err != nil { - return resource.NonRetryableError(fmt.Errorf("Error issuing for the create of Redis Linked Server %s (resource group %s): %v", name, resourceGroup, err)) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return resource.NonRetryableError(fmt.Errorf("Error waiting for the create of Redis Linked Server %s (resource group %s): %v", name, resourceGroup, err)) - } - - read, err := client.Get(ctx, resourceGroup, redisCacheName, name) - if err != nil { - return resource.RetryableError(fmt.Errorf("Expected instance to be created but was in non existent state, retrying")) - } - if read.ID == nil { - return resource.NonRetryableError(fmt.Errorf("Cannot read Redis Linked Server %q (cache %q / resource group %q) ID", name, redisCacheName, resourceGroup)) - } - - log.Printf("[DEBUG] Waiting for Redis Linked Server (%s) to become available", d.Get("name")) - stateConf := &resource.StateChangeConf{ - Pending: []string{"Linking", "Updating", "Creating", "Syncing"}, - Target: []string{"Succeeded"}, - Refresh: redisLinkedServerStateRefreshFunc(ctx, client, resourceGroup, redisCacheName, name), - MinTimeout: 15 * time.Second, - Timeout: d.Timeout(schema.TimeoutCreate), - } - - if _, err = stateConf.WaitForState(); err != nil { - return resource.NonRetryableError(fmt.Errorf("Error waiting for Redis Linked Server (%s) to become available: %s", d.Get("name"), err)) - } - - d.SetId(*read.ID) - - return resource.NonRetryableError(resourceArmRedisLinkedServerRead(d, meta)) - }) -} - -func resourceArmRedisLinkedServerRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Redis.LinkedServerClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - resourceGroup := id.ResourceGroup - redisCacheName := id.Path["Redis"] - name := id.Path["linkedServers"] - - resp, err := client.Get(ctx, resourceGroup, redisCacheName, name) - - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[DEBUG] Redis Linked Server %q was not found in Cache %q / Resource Group %q - removing from state", name, redisCacheName, resourceGroup) - d.SetId("") - return nil - } - - return fmt.Errorf("Error making Read request on Azure Redis Linked Server %q: %+v", name, err) - } - - d.Set("name", name) - d.Set("target_redis_cache_name", redisCacheName) - d.Set("resource_group_name", resourceGroup) - if props := resp.LinkedServerProperties; props != nil { - d.Set("linked_redis_cache_id", props.LinkedRedisCacheID) - d.Set("linked_redis_cache_location", props.LinkedRedisCacheLocation) - d.Set("server_role", string(props.ServerRole)) - } - - return nil -} - -func resourceArmRedisLinkedServerDelete(d *schema.ResourceData, meta interface{}) 
error { - client := meta.(*clients.Client).Redis.LinkedServerClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - resourceGroup := id.ResourceGroup - redisCacheName := id.Path["Redis"] - name := id.Path["linkedServers"] - - resp, err := client.Delete(ctx, resourceGroup, redisCacheName, name) - if err != nil { - if !utils.ResponseWasNotFound(resp) { - return fmt.Errorf("Error issuing AzureRM delete request of Redis Linked Server %q (cache %q / resource group %q): %+v", name, redisCacheName, resourceGroup, err) - } - } - - // No LinkedServerDeleteFuture - // https://github.com/Azure/azure-sdk-for-go/issues/12159 - log.Printf("[DEBUG] Waiting for Redis Linked Server %q (cache %q / Resource Group %q) to be eventually deleted", name, redisCacheName, resourceGroup) - stateConf := &resource.StateChangeConf{ - Pending: []string{"Exists"}, - Target: []string{"NotFound"}, - Refresh: redisLinkedServerDeleteStateRefreshFunc(ctx, client, resourceGroup, redisCacheName, name), - MinTimeout: 10 * time.Second, - ContinuousTargetOccurence: 10, - Timeout: d.Timeout(schema.TimeoutDelete), - } - - if _, err := stateConf.WaitForState(); err != nil { - return fmt.Errorf("failed to wait for Redis Linked Server %q (cache %q / resource group %q) to be deleted: %+v", name, redisCacheName, resourceGroup, err) - } - - return nil -} - -func redisLinkedServerStateRefreshFunc(ctx context.Context, client *redis.LinkedServerClient, resourceGroupName string, redisCacheName string, name string) resource.StateRefreshFunc { - return func() (interface{}, string, error) { - res, err := client.Get(ctx, resourceGroupName, redisCacheName, name) - if err != nil { - return nil, "", fmt.Errorf("Error issuing read request in redisStateRefreshFunc to Azure ARM for Redis Linked Server Instance '%s' (RG: '%s'): %s", name, resourceGroupName, err) - } - - return res, *res.LinkedServerProperties.ProvisioningState, nil - } -} - -func redisLinkedServerDeleteStateRefreshFunc(ctx context.Context, client *redis.LinkedServerClient, resourceGroupName string, redisCacheName string, name string) resource.StateRefreshFunc { - return func() (interface{}, string, error) { - res, err := client.Get(ctx, resourceGroupName, redisCacheName, name) - if err != nil { - if utils.ResponseWasNotFound(res.Response) { - return "NotFound", "NotFound", nil - } - - return nil, "", fmt.Errorf("failed to poll to check if the Linked Server has been deleted: %+v", err) - } - - return res, "Exists", nil - } -} diff --git a/azurerm/internal/services/redis/resourceids.go b/azurerm/internal/services/redis/resourceids.go new file mode 100644 index 000000000000..8a5d09de6662 --- /dev/null +++ b/azurerm/internal/services/redis/resourceids.go @@ -0,0 +1,7 @@ +package redis + +// TODO: fix the generator bug making this `Redi` rather than `Redis` + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Cache -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/Redis/redis1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=FirewallRule -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/Redis/redis1/firewallRules/firewallRule1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=LinkedServer 
-id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/Redis/redis1/linkedServers/linkedServer1 diff --git a/azurerm/internal/services/redis/tests/data_source_redis_cache_test.go b/azurerm/internal/services/redis/tests/data_source_redis_cache_test.go deleted file mode 100644 index b2d763652a32..000000000000 --- a/azurerm/internal/services/redis/tests/data_source_redis_cache_test.go +++ /dev/null @@ -1,48 +0,0 @@ -package tests - -import ( - "fmt" - - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" -) - -func TestAccDataSourceAzureRMRedisCache_standard(t *testing.T) { - data := acceptance.BuildTestData(t, "data.azurerm_redis_cache", "test") - - name := fmt.Sprintf("acctestRedis-%d", data.RandomInteger) - resourceGroupName := fmt.Sprintf("acctestRG-%d", data.RandomInteger) - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRedisCacheDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDataSourceAzureRMRedisCache_standardWithDataSource(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "name", name), - resource.TestCheckResourceAttr(data.ResourceName, "resource_group_name", resourceGroupName), - resource.TestCheckResourceAttr(data.ResourceName, "ssl_port", "6380"), - resource.TestCheckResourceAttr(data.ResourceName, "tags.environment", "production"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_connection_string"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_connection_string"), - ), - }, - }, - }) -} - -func testAccDataSourceAzureRMRedisCache_standardWithDataSource(data acceptance.TestData) string { - config := testAccAzureRMRedisCache_standard(data) - return fmt.Sprintf(` -%s - -data "azurerm_redis_cache" "test" { - name = azurerm_redis_cache.test.name - resource_group_name = azurerm_redis_cache.test.resource_group_name -} -`, config) -} diff --git a/azurerm/internal/services/redis/tests/resource_arm_redis_cache_test.go b/azurerm/internal/services/redis/tests/resource_arm_redis_cache_test.go deleted file mode 100644 index fb8c4df2f126..000000000000 --- a/azurerm/internal/services/redis/tests/resource_arm_redis_cache_test.go +++ /dev/null @@ -1,1084 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "strings" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMRedisCache_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRedisCacheDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRedisCache_basic(data, true), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisCacheExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "minimum_tls_version"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_connection_string"), - 
resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_connection_string"), - testCheckSSLInConnectionString(data.ResourceName, "primary_connection_string", true), - testCheckSSLInConnectionString(data.ResourceName, "secondary_connection_string", true), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMRedisCache_withoutSSL(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRedisCacheDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRedisCache_basic(data, false), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisCacheExists(data.ResourceName), - testCheckSSLInConnectionString(data.ResourceName, "primary_connection_string", false), - testCheckSSLInConnectionString(data.ResourceName, "secondary_connection_string", false), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMRedisCache_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRedisCacheDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRedisCache_basic(data, true), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisCacheExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMRedisCache_requiresImport), - }, - }) -} - -func TestAccAzureRMRedisCache_standard(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRedisCacheDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRedisCache_standard(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisCacheExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMRedisCache_premium(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRedisCacheDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRedisCache_premium(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisCacheExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMRedisCache_premiumSharded(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRedisCacheDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRedisCache_premiumSharded(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisCacheExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMRedisCache_premiumShardedScaling(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - 
CheckDestroy: testCheckAzureRMRedisCacheDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRedisCache_premiumSharded(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisCacheExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMRedisCache_premiumShardedScaled(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisCacheExists(data.ResourceName), - ), - }, - }, - }) -} - -func TestAccAzureRMRedisCache_NonStandardCasing(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRedisCacheDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRedisCacheNonStandardCasing(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisCacheExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMRedisCacheNonStandardCasing(data), - PlanOnly: true, - ExpectNonEmptyPlan: false, - }, - }, - }) -} - -func TestAccAzureRMRedisCache_BackupDisabled(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRedisCacheDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRedisCacheBackupDisabled(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisCacheExists(data.ResourceName), - ), - }, - }, - }) -} - -func TestAccAzureRMRedisCache_BackupEnabled(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRedisCacheDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRedisCacheBackupEnabled(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisCacheExists(data.ResourceName), - ), - // `redis_configuration.0.rdb_storage_connection_string` is returned as: - // "...;AccountKey=[key hidden]" rather than "...;AccountKey=fsjfvjnfnf" - // TODO: remove this once the Bug's been fixed: - // https://github.com/Azure/azure-rest-api-specs/issues/3037 - ExpectNonEmptyPlan: true, - }, - data.ImportStep("redis_configuration.0.rdb_storage_connection_string"), - }, - }) -} - -func TestAccAzureRMRedisCache_BackupEnabledDisabled(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRedisCacheDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRedisCacheBackupEnabled(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisCacheExists(data.ResourceName), - ), - // `redis_configuration.0.rdb_storage_connection_string` is returned as: - // "...;AccountKey=[key hidden]" rather than "...;AccountKey=fsjfvjnfnf" - // TODO: remove this once the Bug's been fixed: - // https://github.com/Azure/azure-rest-api-specs/issues/3037 - ExpectNonEmptyPlan: true, - }, - { - Config: testAccAzureRMRedisCacheBackupDisabled(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisCacheExists(data.ResourceName), - ), - // `redis_configuration.0.rdb_storage_connection_string` is returned 
as: - // "...;AccountKey=[key hidden]" rather than "...;AccountKey=fsjfvjnfnf" - // TODO: remove this once the Bug's been fixed: - // https://github.com/Azure/azure-rest-api-specs/issues/3037 - ExpectNonEmptyPlan: true, - }, - }, - }) -} - -func TestAccAzureRMRedisCache_AOFBackupEnabled(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRedisCacheDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRedisCacheAOFBackupEnabled(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisCacheExists(data.ResourceName), - ), - ExpectNonEmptyPlan: true, - }, - data.ImportStep("redis_configuration.0.aof_storage_connection_string_0", - "redis_configuration.0.aof_storage_connection_string_1"), - }, - }) -} - -func TestAccAzureRMRedisCache_AOFBackupEnabledDisabled(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRedisCacheDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRedisCacheAOFBackupEnabled(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisCacheExists(data.ResourceName), - ), - ExpectNonEmptyPlan: true, - }, - { - Config: testAccAzureRMRedisCacheAOFBackupDisabled(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisCacheExists(data.ResourceName), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} -func TestAccAzureRMRedisCache_PatchSchedule(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRedisCacheDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRedisCachePatchSchedule(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisCacheExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMRedisCache_PatchScheduleUpdated(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRedisCacheDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRedisCachePatchSchedule(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisCacheExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMRedisCache_premium(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisCacheExists(data.ResourceName), - ), - }, - }, - }) -} - -func TestAccAzureRMRedisCache_InternalSubnet(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRedisCacheDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRedisCache_internalSubnet(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisCacheExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func 
TestAccAzureRMRedisCache_InternalSubnetStaticIP(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRedisCacheDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRedisCache_internalSubnetStaticIP(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisCacheExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMRedisCache_InternalSubnet_withZone(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRedisCacheDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRedisCache_internalSubnet_withZone(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisCacheExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "zones.#", "1"), - resource.TestCheckResourceAttr(data.ResourceName, "zones.0", "1"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMRedisCache_SubscribeAllEvents(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRedisCacheDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRedisCacheSubscribeAllEvents(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisCacheExists(data.ResourceName), - ), - }, - }, - }) -} - -func TestAccAzureRMRedisCache_WithoutAuth(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_redis_cache", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRedisCacheDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRedisCacheWithoutAuth(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisCacheExists(data.ResourceName), - resource.TestCheckResourceAttr(data.ResourceName, "redis_configuration.0.enable_authentication", "false"), - ), - }, - }, - }) -} - -func testCheckAzureRMRedisCacheExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - conn := acceptance.AzureProvider.Meta().(*clients.Client).Redis.Client - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - redisName := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Redis Instance: %s", redisName) - } - - resp, err := conn.Get(ctx, resourceGroup, redisName) - if err != nil { - return fmt.Errorf("Bad: Get on redis.Client: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Redis Instance %q (resource group: %q) does not exist", redisName, resourceGroup) - } - - return nil - } -} - -func testCheckAzureRMRedisCacheDestroy(s *terraform.State) error { - conn := 
acceptance.AzureProvider.Meta().(*clients.Client).Redis.Client - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_redis_cache" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := conn.Get(ctx, resourceGroup, name) - - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Redis Instance still exists:\n%#v", resp) - } - } - - return nil -} - -func testAccAzureRMRedisCache_basic(data acceptance.TestData, requireSSL bool) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_redis_cache" "test" { - name = "acctestRedis-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - capacity = 1 - family = "C" - sku_name = "Basic" - enable_non_ssl_port = %t - minimum_tls_version = "1.2" - - redis_configuration { - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, !requireSSL) -} - -func testAccAzureRMRedisCache_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMRedisCache_basic(data, true) - return fmt.Sprintf(` -%s - -resource "azurerm_redis_cache" "import" { - name = azurerm_redis_cache.test.name - location = azurerm_redis_cache.test.location - resource_group_name = azurerm_redis_cache.test.resource_group_name - capacity = azurerm_redis_cache.test.capacity - family = azurerm_redis_cache.test.family - sku_name = azurerm_redis_cache.test.sku_name - enable_non_ssl_port = azurerm_redis_cache.test.enable_non_ssl_port - - redis_configuration { - } -} -`, template) -} - -func testAccAzureRMRedisCache_standard(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_redis_cache" "test" { - name = "acctestRedis-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - capacity = 1 - family = "C" - sku_name = "Standard" - enable_non_ssl_port = false - redis_configuration { - } - - tags = { - environment = "production" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMRedisCache_premium(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_redis_cache" "test" { - name = "acctestRedis-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - capacity = 1 - family = "P" - sku_name = "Premium" - enable_non_ssl_port = false - - redis_configuration { - maxmemory_reserved = 2 - maxfragmentationmemory_reserved = 2 - maxmemory_delta = 2 - maxmemory_policy = "allkeys-lru" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMRedisCache_premiumSharded(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_redis_cache" "test" { - name = "acctestRedis-%d" - location = azurerm_resource_group.test.location - 
resource_group_name = azurerm_resource_group.test.name - capacity = 1 - family = "P" - sku_name = "Premium" - enable_non_ssl_port = true - shard_count = 3 - - redis_configuration { - maxmemory_reserved = 2 - maxfragmentationmemory_reserved = 2 - maxmemory_delta = 2 - maxmemory_policy = "allkeys-lru" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMRedisCache_premiumShardedScaled(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_redis_cache" "test" { - name = "acctestRedis-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - capacity = 2 - family = "P" - sku_name = "Premium" - enable_non_ssl_port = true - shard_count = 3 - - redis_configuration { - maxmemory_reserved = 2 - maxfragmentationmemory_reserved = 2 - maxmemory_delta = 2 - maxmemory_policy = "allkeys-lru" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMRedisCacheNonStandardCasing(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_redis_cache" "test" { - name = "acctestRedis-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - capacity = 1 - family = "c" - sku_name = "basic" - enable_non_ssl_port = false - redis_configuration { - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMRedisCacheBackupDisabled(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_redis_cache" "test" { - name = "acctestRedis-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - capacity = 3 - family = "P" - sku_name = "Premium" - enable_non_ssl_port = false - - redis_configuration { - rdb_backup_enabled = false - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMRedisCacheBackupEnabled(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_storage_account" "test" { - name = "unlikely23exst2acct%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "GRS" - - tags = { - environment = "staging" - } -} - -resource "azurerm_redis_cache" "test" { - name = "acctestRedis-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - capacity = 3 - family = "P" - sku_name = "Premium" - enable_non_ssl_port = false - - redis_configuration { - rdb_backup_enabled = true - rdb_backup_frequency = 60 - rdb_backup_max_snapshot_count = 1 - rdb_storage_connection_string = "DefaultEndpointsProtocol=https;BlobEndpoint=${azurerm_storage_account.test.primary_blob_endpoint};AccountName=${azurerm_storage_account.test.name};AccountKey=${azurerm_storage_account.test.primary_access_key}" - } -} -`, data.RandomInteger, 
data.Locations.Primary, data.RandomString, data.RandomInteger) -} - -func testAccAzureRMRedisCacheAOFBackupDisabled(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_redis_cache" "test" { - name = "acctestRedis-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - capacity = 3 - family = "P" - sku_name = "Premium" - enable_non_ssl_port = false - - redis_configuration { - aof_backup_enabled = false - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMRedisCacheAOFBackupEnabled(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_storage_account" "test" { - name = "unlikely23exst2acct%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "GRS" - - tags = { - environment = "staging" - } -} - -resource "azurerm_redis_cache" "test" { - name = "acctestRedis-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - capacity = 1 - family = "P" - sku_name = "Premium" - enable_non_ssl_port = false - - redis_configuration { - aof_backup_enabled = true - aof_storage_connection_string_0 = "DefaultEndpointsProtocol=https;BlobEndpoint=${azurerm_storage_account.test.primary_blob_endpoint};AccountName=${azurerm_storage_account.test.name};AccountKey=${azurerm_storage_account.test.primary_access_key}" - aof_storage_connection_string_1 = "DefaultEndpointsProtocol=https;BlobEndpoint=${azurerm_storage_account.test.primary_blob_endpoint};AccountName=${azurerm_storage_account.test.name};AccountKey=${azurerm_storage_account.test.secondary_access_key}" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) -} - -func testAccAzureRMRedisCachePatchSchedule(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_redis_cache" "test" { - name = "acctestRedis-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - capacity = 1 - family = "P" - sku_name = "Premium" - enable_non_ssl_port = false - - redis_configuration { - maxmemory_reserved = 2 - maxmemory_delta = 2 - maxmemory_policy = "allkeys-lru" - } - - patch_schedule { - day_of_week = "Tuesday" - start_hour_utc = 8 - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMRedisCacheSubscribeAllEvents(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_storage_account" "test" { - name = "unlikely23exst2acct%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "GRS" - - tags = { - environment = "staging" - } -} - -resource "azurerm_redis_cache" "test" { - name = "acctestRedis-%d" - location = azurerm_resource_group.test.location - 
resource_group_name = azurerm_resource_group.test.name - capacity = 3 - family = "P" - sku_name = "Premium" - enable_non_ssl_port = false - - redis_configuration { - notify_keyspace_events = "KAE" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) -} - -func testAccAzureRMRedisCache_internalSubnet(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctestnw-%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "testsubnet" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.1.0/24" -} - -resource "azurerm_redis_cache" "test" { - name = "acctestRedis-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - capacity = 1 - family = "P" - sku_name = "Premium" - enable_non_ssl_port = false - subnet_id = azurerm_subnet.test.id - redis_configuration { - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMRedisCache_internalSubnetStaticIP(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctestnw-%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "testsubnet" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.1.0/24" -} - -resource "azurerm_redis_cache" "test" { - name = "acctestRedis-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - capacity = 1 - family = "P" - sku_name = "Premium" - enable_non_ssl_port = false - subnet_id = azurerm_subnet.test.id - private_static_ip_address = "10.0.1.20" - redis_configuration { - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMRedisCache_internalSubnet_withZone(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctestnw-%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "testsubnet" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.1.0/24" -} - -resource "azurerm_redis_cache" "test" { - name = "acctestRedis-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - capacity = 1 - family = "P" - sku_name = "Premium" - enable_non_ssl_port = false - subnet_id = azurerm_subnet.test.id - redis_configuration { - } - zones = ["1"] -} -`, data.RandomInteger, 
data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMRedisCacheWithoutAuth(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_virtual_network" "test" { - name = "acctestnw-%d" - address_space = ["10.0.0.0/16"] - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name -} - -resource "azurerm_subnet" "test" { - name = "testsubnet" - resource_group_name = azurerm_resource_group.test.name - virtual_network_name = azurerm_virtual_network.test.name - address_prefix = "10.0.1.0/24" -} - -resource "azurerm_redis_cache" "test" { - name = "acctestRedis-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - capacity = 1 - family = "P" - sku_name = "Premium" - enable_non_ssl_port = false - subnet_id = azurerm_subnet.test.id - redis_configuration { - enable_authentication = false - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testCheckSSLInConnectionString(resourceName string, propertyName string, requireSSL bool) resource.TestCheckFunc { - return func(s *terraform.State) error { - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - connectionString := rs.Primary.Attributes[propertyName] - if strings.Contains(connectionString, fmt.Sprintf("ssl=%t", requireSSL)) { - return nil - } - if strings.Contains(connectionString, fmt.Sprintf("ssl=%t", !requireSSL)) { - return fmt.Errorf("Bad: wrong SSL setting in connection string: %s", propertyName) - } - - return fmt.Errorf("Bad: missing SSL setting in connection string: %s", propertyName) - } -} diff --git a/azurerm/internal/services/redis/tests/resource_arm_redis_firewall_rule_test.go b/azurerm/internal/services/redis/tests/resource_arm_redis_firewall_rule_test.go deleted file mode 100644 index 497f8cf0668d..000000000000 --- a/azurerm/internal/services/redis/tests/resource_arm_redis_firewall_rule_test.go +++ /dev/null @@ -1,255 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func TestAccAzureRMRedisFirewallRule_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_redis_firewall_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRedisFirewallRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRedisFirewallRule_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisFirewallRuleExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMRedisFirewallRule_multi(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_redis_firewall_rule", "test") - ruleTwo := "azurerm_redis_firewall_rule.double" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { 
acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRedisFirewallRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRedisFirewallRule_multi(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisFirewallRuleExists(data.ResourceName), - testCheckAzureRMRedisFirewallRuleExists(ruleTwo), - ), - }, - data.ImportStep(), - { - ResourceName: ruleTwo, - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func TestAccAzureRMRedisFirewallRule_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_redis_firewall_rule", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRedisFirewallRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRedisFirewallRule_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisFirewallRuleExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMRedisFirewallRule_requiresImport), - }, - }) -} - -func TestAccAzureRMRedisFirewallRule_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_redis_firewall_rule", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRedisFirewallRuleDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRedisFirewallRule_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisFirewallRuleExists(data.ResourceName), - ), - }, - { - Config: testAccAzureRMRedisFirewallRule_update(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisFirewallRuleExists(data.ResourceName), - ), - }, - }, - }) -} - -func testCheckAzureRMRedisFirewallRuleExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Redis.FirewallRulesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %q", resourceName) - } - - name := rs.Primary.Attributes["name"] - cacheName := rs.Primary.Attributes["redis_cache_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, cacheName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Firewall Rule %q (cache %q resource group: %q) does not exist", name, cacheName, resourceGroup) - } - return fmt.Errorf("Bad: Get on redis.FirewallRulesClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMRedisFirewallRuleDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Resource.GroupsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_redis_firewall_rule" { - continue - } - - resourceGroup := rs.Primary.ID - - resp, err := client.Get(ctx, resourceGroup) - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Firewall Rule still exists:\n%#v", resp.Properties) - } - } - - return nil -} - -func testAccAzureRMRedisFirewallRule_basic(data acceptance.TestData) string { - return 
fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_redis_cache" "test" { - name = "acctestRedis-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - capacity = 1 - family = "P" - sku_name = "Premium" - enable_non_ssl_port = false - - redis_configuration { - maxmemory_reserved = 2 - maxmemory_delta = 2 - maxmemory_policy = "allkeys-lru" - } -} - -resource "azurerm_redis_firewall_rule" "test" { - name = "fwrule%d" - redis_cache_name = azurerm_redis_cache.test.name - resource_group_name = azurerm_resource_group.test.name - start_ip = "1.2.3.4" - end_ip = "2.3.4.5" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMRedisFirewallRule_multi(data acceptance.TestData) string { - template := testAccAzureRMRedisFirewallRule_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_redis_firewall_rule" "double" { - name = "fwruletwo%d" - redis_cache_name = azurerm_redis_cache.test.name - resource_group_name = azurerm_resource_group.test.name - start_ip = "4.5.6.7" - end_ip = "8.9.0.1" -} -`, template, data.RandomInteger) -} - -func testAccAzureRMRedisFirewallRule_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMRedisFirewallRule_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_redis_firewall_rule" "import" { - name = azurerm_redis_firewall_rule.test.name - redis_cache_name = azurerm_redis_firewall_rule.test.redis_cache_name - resource_group_name = azurerm_redis_firewall_rule.test.resource_group_name - start_ip = azurerm_redis_firewall_rule.test.start_ip - end_ip = azurerm_redis_firewall_rule.test.end_ip -} -`, template) -} - -func testAccAzureRMRedisFirewallRule_update(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_redis_cache" "test" { - name = "acctestRedis-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - capacity = 1 - family = "P" - sku_name = "Premium" - enable_non_ssl_port = false - - redis_configuration { - maxmemory_reserved = 2 - maxmemory_delta = 2 - maxmemory_policy = "allkeys-lru" - } -} - -resource "azurerm_redis_firewall_rule" "test" { - name = "fwrule%d" - redis_cache_name = azurerm_redis_cache.test.name - resource_group_name = azurerm_resource_group.test.name - start_ip = "2.3.4.5" - end_ip = "6.7.8.9" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} diff --git a/azurerm/internal/services/redis/tests/resource_arm_redis_linked_server_test.go b/azurerm/internal/services/redis/tests/resource_arm_redis_linked_server_test.go deleted file mode 100644 index 55c76402c184..000000000000 --- a/azurerm/internal/services/redis/tests/resource_arm_redis_linked_server_test.go +++ /dev/null @@ -1,177 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func 
TestAccAzureRMRedisLinkedServer_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_redis_linked_server", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRedisLinkedServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRedisLinkedServer_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisLinkedServerExists(data.ResourceName), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMRedisLinkedServer_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_redis_linked_server", "test") - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRedisLinkedServerDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRedisLinkedServer_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRedisLinkedServerExists(data.ResourceName), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMRedisLinkedServer_requiresImport), - }, - }) -} - -func testCheckAzureRMRedisLinkedServerExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Redis.LinkedServerClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %q", resourceName) - } - - name := rs.Primary.Attributes["name"] - cacheName := rs.Primary.Attributes["target_redis_cache_name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, cacheName, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Linked Server %q (cache %q resource group: %q) does not exist", name, cacheName, resourceGroup) - } - return fmt.Errorf("Bad: Get on redis.LinkedServersClient: %+v", err) - } - - return nil - } -} - -func testCheckAzureRMRedisLinkedServerDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Redis.LinkedServerClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_redis_linked_server" { - continue - } - - redisCacheName := rs.Primary.Attributes["target_redis_cache_name"] - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - - resp, err := client.Get(ctx, resourceGroup, redisCacheName, name) - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Linked Server still exists:\n%#v", resp) - } - } - - return nil -} - -func testAccAzureRMRedisLinkedServer_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "pri" { - name = "acctestRG-redis-%d" - location = "%s" -} - -resource "azurerm_redis_cache" "pri" { - name = "acctestRedispri%d" - location = azurerm_resource_group.pri.location - resource_group_name = azurerm_resource_group.pri.name - capacity = 1 - family = "P" - sku_name = "Premium" - enable_non_ssl_port = false - - redis_configuration { - maxmemory_reserved = 2 - maxmemory_delta = 2 - 
maxmemory_policy = "allkeys-lru" - } -} - -resource "azurerm_resource_group" "sec" { - name = "accsecRG-%d" - location = "%s" -} - -resource "azurerm_redis_cache" "sec" { - name = "acctestRedissec%d" - location = azurerm_resource_group.sec.location - resource_group_name = azurerm_resource_group.sec.name - capacity = 1 - family = "P" - sku_name = "Premium" - enable_non_ssl_port = false - - redis_configuration { - maxmemory_reserved = 2 - maxmemory_delta = 2 - maxmemory_policy = "allkeys-lru" - } -} - -resource "azurerm_redis_linked_server" "test" { - target_redis_cache_name = azurerm_redis_cache.pri.name - resource_group_name = azurerm_redis_cache.pri.resource_group_name - linked_redis_cache_id = azurerm_redis_cache.sec.id - linked_redis_cache_location = azurerm_redis_cache.sec.location - server_role = "Secondary" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, - data.RandomInteger, data.Locations.Secondary, data.RandomInteger) -} - -func testAccAzureRMRedisLinkedServer_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMRedisLinkedServer_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_redis_linked_server" "import" { - target_redis_cache_name = azurerm_redis_linked_server.test.target_redis_cache_name - resource_group_name = azurerm_redis_linked_server.test.resource_group_name - linked_redis_cache_id = azurerm_redis_linked_server.test.linked_redis_cache_id - linked_redis_cache_location = azurerm_redis_linked_server.test.linked_redis_cache_location - server_role = azurerm_redis_linked_server.test.server_role -} -`, template) -} diff --git a/azurerm/internal/services/redis/validate/cache_backup_frequency.go b/azurerm/internal/services/redis/validate/cache_backup_frequency.go new file mode 100644 index 000000000000..cd1d378e4d4b --- /dev/null +++ b/azurerm/internal/services/redis/validate/cache_backup_frequency.go @@ -0,0 +1,21 @@ +package validate + +import "fmt" + +func CacheBackupFrequency(v interface{}, _ string) (warnings []string, errors []error) { + value := v.(int) + families := map[int]bool{ + 15: true, + 30: true, + 60: true, + 360: true, + 720: true, + 1440: true, + } + + if !families[value] { + errors = append(errors, fmt.Errorf("Redis Backup Frequency can only be '15', '30', '60', '360', '720' or '1440'")) + } + + return warnings, errors +} diff --git a/azurerm/internal/services/redis/validate/cache_backup_frequency_test.go b/azurerm/internal/services/redis/validate/cache_backup_frequency_test.go new file mode 100644 index 000000000000..8b98fe22bcd4 --- /dev/null +++ b/azurerm/internal/services/redis/validate/cache_backup_frequency_test.go @@ -0,0 +1,29 @@ +package validate + +import "testing" + +func TestCacheBackupFrequency_validation(t *testing.T) { + cases := []struct { + Value int + ErrCount int + }{ + {Value: 1, ErrCount: 1}, + {Value: 15, ErrCount: 0}, + {Value: 30, ErrCount: 0}, + {Value: 45, ErrCount: 1}, + {Value: 60, ErrCount: 0}, + {Value: 120, ErrCount: 1}, + {Value: 240, ErrCount: 1}, + {Value: 360, ErrCount: 0}, + {Value: 720, ErrCount: 0}, + {Value: 1440, ErrCount: 0}, + } + + for _, tc := range cases { + _, errors := CacheBackupFrequency(tc.Value, "azurerm_redis_cache") + + if len(errors) != tc.ErrCount { + t.Fatalf("Expected the AzureRM Redis Cache Backup Frequency to trigger a validation error for '%d'", tc.Value) + } + } +} diff --git a/azurerm/internal/services/redis/validate/cache_family.go b/azurerm/internal/services/redis/validate/cache_family.go new file mode 100644 index 
000000000000..63e6ebb2b7eb --- /dev/null +++ b/azurerm/internal/services/redis/validate/cache_family.go @@ -0,0 +1,19 @@ +package validate + +import ( + "fmt" + "strings" +) + +func CacheFamily(v interface{}, _ string) (warnings []string, errors []error) { + value := strings.ToLower(v.(string)) + families := map[string]bool{ + "c": true, + "p": true, + } + + if !families[value] { + errors = append(errors, fmt.Errorf("Redis Family can only be C or P")) + } + return warnings, errors +} diff --git a/azurerm/internal/services/redis/validate/cache_family_test.go b/azurerm/internal/services/redis/validate/cache_family_test.go new file mode 100644 index 000000000000..88c4202bfaa5 --- /dev/null +++ b/azurerm/internal/services/redis/validate/cache_family_test.go @@ -0,0 +1,47 @@ +package validate + +import "testing" + +func TestCacheFamily_validation(t *testing.T) { + cases := []struct { + Value string + ErrCount int + }{ + { + Value: "C", + ErrCount: 0, + }, + { + Value: "P", + ErrCount: 0, + }, + { + Value: "c", + ErrCount: 0, + }, + { + Value: "p", + ErrCount: 0, + }, + { + Value: "a", + ErrCount: 1, + }, + { + Value: "b", + ErrCount: 1, + }, + { + Value: "D", + ErrCount: 1, + }, + } + + for _, tc := range cases { + _, errors := CacheFamily(tc.Value, "azurerm_redis_cache") + + if len(errors) != tc.ErrCount { + t.Fatalf("Expected the Azure RM Redis Cache Family to trigger a validation error") + } + } +} diff --git a/azurerm/internal/services/redis/validate/cache_id.go b/azurerm/internal/services/redis/validate/cache_id.go new file mode 100644 index 000000000000..f6acd1415a56 --- /dev/null +++ b/azurerm/internal/services/redis/validate/cache_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/redis/parse" +) + +func CacheID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.CacheID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/redis/validate/cache_id_test.go b/azurerm/internal/services/redis/validate/cache_id_test.go new file mode 100644 index 000000000000..5564986ca3e1 --- /dev/null +++ b/azurerm/internal/services/redis/validate/cache_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestCacheID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing RediName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/", + Valid: false, + }, + + { + // missing value for RediName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/Redis/", + Valid: false, + }, + + { + // 
valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/Redis/redis1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.CACHE/REDIS/REDIS1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := CacheID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/redis/validate/firewall_rule_id.go b/azurerm/internal/services/redis/validate/firewall_rule_id.go new file mode 100644 index 000000000000..0856904f8c97 --- /dev/null +++ b/azurerm/internal/services/redis/validate/firewall_rule_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/redis/parse" +) + +func FirewallRuleID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.FirewallRuleID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/redis/validate/firewall_rule_id_test.go b/azurerm/internal/services/redis/validate/firewall_rule_id_test.go new file mode 100644 index 000000000000..197b128ad66d --- /dev/null +++ b/azurerm/internal/services/redis/validate/firewall_rule_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestFirewallRuleID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing RediName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/", + Valid: false, + }, + + { + // missing value for RediName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/Redis/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/Redis/redis1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/Redis/redis1/firewallRules/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/Redis/redis1/firewallRules/firewallRule1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.CACHE/REDIS/REDIS1/FIREWALLRULES/FIREWALLRULE1", + Valid: false, + }, + } + for _, tc := range 
cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := FirewallRuleID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/redis/validate/firewall_rule_name.go b/azurerm/internal/services/redis/validate/firewall_rule_name.go new file mode 100644 index 000000000000..cf1868adeacc --- /dev/null +++ b/azurerm/internal/services/redis/validate/firewall_rule_name.go @@ -0,0 +1,16 @@ +package validate + +import ( + "fmt" + "regexp" +) + +func FirewallRuleName(v interface{}, k string) (warnings []string, errors []error) { + value := v.(string) + + if matched := regexp.MustCompile(`^\w+$`).Match([]byte(value)); !matched { + errors = append(errors, fmt.Errorf("%q may only contain alphanumeric characters and underscores", k)) + } + + return warnings, errors +} diff --git a/azurerm/internal/services/redis/validate/firewall_rule_name_test.go b/azurerm/internal/services/redis/validate/firewall_rule_name_test.go new file mode 100644 index 000000000000..b54f71cf6a89 --- /dev/null +++ b/azurerm/internal/services/redis/validate/firewall_rule_name_test.go @@ -0,0 +1,45 @@ +package validate + +import ( + "testing" +) + +func TestRedisFirewallRuleName_validation(t *testing.T) { + cases := []struct { + Value string + ErrCount int + }{ + { + Value: "ab", + ErrCount: 0, + }, + { + Value: "abc", + ErrCount: 0, + }, + { + Value: "webapp1", + ErrCount: 0, + }, + { + Value: "hello-world", + ErrCount: 1, + }, + { + Value: "hello_world", + ErrCount: 0, + }, + { + Value: "helloworld21!", + ErrCount: 1, + }, + } + + for _, tc := range cases { + _, errors := FirewallRuleName(tc.Value, "azurerm_redis_firewall_rule") + + if len(errors) != tc.ErrCount { + t.Fatalf("Expected the Redis Firewall Rule Name to trigger a validation error for '%s'", tc.Value) + } + } +} diff --git a/azurerm/internal/services/redis/validate/linked_server_id.go b/azurerm/internal/services/redis/validate/linked_server_id.go new file mode 100644 index 000000000000..c3bd5952cec2 --- /dev/null +++ b/azurerm/internal/services/redis/validate/linked_server_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/redis/parse" +) + +func LinkedServerID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.LinkedServerID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/redis/validate/linked_server_id_test.go b/azurerm/internal/services/redis/validate/linked_server_id_test.go new file mode 100644 index 000000000000..1a774b4f80e8 --- /dev/null +++ b/azurerm/internal/services/redis/validate/linked_server_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestLinkedServerID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing RediName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/", + Valid: false, + }, + + { + // missing value for RediName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/Redis/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/Redis/redis1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/Redis/redis1/linkedServers/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Cache/Redis/redis1/linkedServers/linkedServer1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.CACHE/REDIS/REDIS1/LINKEDSERVERS/LINKEDSERVER1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := LinkedServerID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/redis/validate/max_memory_policy.go b/azurerm/internal/services/redis/validate/max_memory_policy.go new file mode 100644 index 000000000000..91bb9ccd40b1 --- /dev/null +++ b/azurerm/internal/services/redis/validate/max_memory_policy.go @@ -0,0 +1,27 @@ +package validate + +import ( + "fmt" + "strings" +) + +func MaxMemoryPolicy(v interface{}, _ string) (warnings []string, errors []error) { + // TODO: in time this can be replaced with a generic function, moving for now + value := strings.ToLower(v.(string)) + families := map[string]bool{ + "noeviction": true, + "allkeys-lru": true, + "volatile-lru": true, + "allkeys-random": true, + "volatile-random": true, + "volatile-ttl": true, + "allkeys-lfu": true, + "volatile-lfu": true, + } + + if !families[value] { + errors = append(errors, fmt.Errorf("Redis Max Memory Policy can only be 'noeviction' / 'allkeys-lru' / 'volatile-lru' / 'allkeys-random' / 'volatile-random' / 'volatile-ttl' / 'allkeys-lfu' / 'volatile-lfu'")) + } + + return warnings, errors +} diff --git a/azurerm/internal/services/redis/validate/max_memory_policy_test.go b/azurerm/internal/services/redis/validate/max_memory_policy_test.go new file mode 100644 index 000000000000..9259bc1aa993 --- /dev/null +++ b/azurerm/internal/services/redis/validate/max_memory_policy_test.go @@ -0,0 +1,28 @@ +package validate + +import "testing" + +func TestMaxMemoryPolicy_validation(t *testing.T) { + cases := []struct { + Value string + ErrCount int + }{ + {Value: "noeviction", ErrCount: 0}, + {Value: "allkeys-lru", ErrCount: 0}, + {Value: "volatile-lru", ErrCount: 0}, + {Value: "allkeys-random", ErrCount: 0}, + {Value: "volatile-random", ErrCount: 0}, + {Value: "volatile-ttl", ErrCount: 0}, + {Value: "allkeys-lfu", ErrCount: 0}, + {Value: "volatile-lfu", ErrCount: 0}, + {Value: "something-else", ErrCount: 1}, + } + + for _, tc := range cases { + _, errors := MaxMemoryPolicy(tc.Value, "azurerm_redis_cache") + + 
if len(errors) != tc.ErrCount { + t.Fatalf("Expected the Azure RM Redis Cache Max Memory Policy to trigger a validation error") + } + } +} diff --git a/azurerm/internal/services/redis/validation_test.go b/azurerm/internal/services/redis/validation_test.go deleted file mode 100644 index f11b8caf08ac..000000000000 --- a/azurerm/internal/services/redis/validation_test.go +++ /dev/null @@ -1,138 +0,0 @@ -package redis - -import "testing" - -func TestAccAzureRMRedisCacheFamily_validation(t *testing.T) { - cases := []struct { - Value string - ErrCount int - }{ - { - Value: "C", - ErrCount: 0, - }, - { - Value: "P", - ErrCount: 0, - }, - { - Value: "c", - ErrCount: 0, - }, - { - Value: "p", - ErrCount: 0, - }, - { - Value: "a", - ErrCount: 1, - }, - { - Value: "b", - ErrCount: 1, - }, - { - Value: "D", - ErrCount: 1, - }, - } - - for _, tc := range cases { - _, errors := validateRedisFamily(tc.Value, "azurerm_redis_cache") - - if len(errors) != tc.ErrCount { - t.Fatalf("Expected the Azure RM Redis Cache Family to trigger a validation error") - } - } -} - -func TestAccAzureRMRedisCacheMaxMemoryPolicy_validation(t *testing.T) { - cases := []struct { - Value string - ErrCount int - }{ - {Value: "noeviction", ErrCount: 0}, - {Value: "allkeys-lru", ErrCount: 0}, - {Value: "volatile-lru", ErrCount: 0}, - {Value: "allkeys-random", ErrCount: 0}, - {Value: "volatile-random", ErrCount: 0}, - {Value: "volatile-ttl", ErrCount: 0}, - {Value: "allkeys-lfu", ErrCount: 0}, - {Value: "volatile-lfu", ErrCount: 0}, - {Value: "something-else", ErrCount: 1}, - } - - for _, tc := range cases { - _, errors := validateRedisMaxMemoryPolicy(tc.Value, "azurerm_redis_cache") - - if len(errors) != tc.ErrCount { - t.Fatalf("Expected the Azure RM Redis Cache Max Memory Policy to trigger a validation error") - } - } -} - -func TestAccAzureRMRedisCacheBackupFrequency_validation(t *testing.T) { - cases := []struct { - Value int - ErrCount int - }{ - {Value: 1, ErrCount: 1}, - {Value: 15, ErrCount: 0}, - {Value: 30, ErrCount: 0}, - {Value: 45, ErrCount: 1}, - {Value: 60, ErrCount: 0}, - {Value: 120, ErrCount: 1}, - {Value: 240, ErrCount: 1}, - {Value: 360, ErrCount: 0}, - {Value: 720, ErrCount: 0}, - {Value: 1440, ErrCount: 0}, - } - - for _, tc := range cases { - _, errors := validateRedisBackupFrequency(tc.Value, "azurerm_redis_cache") - - if len(errors) != tc.ErrCount { - t.Fatalf("Expected the AzureRM Redis Cache Backup Frequency to trigger a validation error for '%d'", tc.Value) - } - } -} - -func TestAzureRMRedisFirewallRuleName_validation(t *testing.T) { - cases := []struct { - Value string - ErrCount int - }{ - { - Value: "ab", - ErrCount: 0, - }, - { - Value: "abc", - ErrCount: 0, - }, - { - Value: "webapp1", - ErrCount: 0, - }, - { - Value: "hello-world", - ErrCount: 1, - }, - { - Value: "hello_world", - ErrCount: 0, - }, - { - Value: "helloworld21!", - ErrCount: 1, - }, - } - - for _, tc := range cases { - _, errors := validateRedisFirewallRuleName(tc.Value, "azurerm_redis_firewall_rule") - - if len(errors) != tc.ErrCount { - t.Fatalf("Expected the Redis Firewall Rule Name to trigger a validation error for '%s'", tc.Value) - } - } -} diff --git a/azurerm/internal/services/relay/hybrid_connection.go b/azurerm/internal/services/relay/hybrid_connection.go deleted file mode 100644 index ed3f4aced222..000000000000 --- a/azurerm/internal/services/relay/hybrid_connection.go +++ /dev/null @@ -1,51 +0,0 @@ -package relay - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) 
- -type HybridConnectionResourceID struct { - ResourceGroup string - Name string - NamespaceName string -} - -func ParseHybridConnectionID(input string) (*HybridConnectionResourceID, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Hybrid Connection ID %q: %+v", input, err) - } - hybridConnection := HybridConnectionResourceID{ - ResourceGroup: id.ResourceGroup, - } - - if hybridConnection.Name, err = id.PopSegment("hybridConnections"); err != nil { - return nil, err - } - - if hybridConnection.NamespaceName, err = id.PopSegment("namespaces"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &hybridConnection, nil -} - -// ValidateHybridConnectionID validates that the specified ID is a valid Relay Hybrid Connection ID -func ValidateHybridConnectionID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - } - - if _, err := ParseHybridConnectionID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a resource id: %v", v, err)) - } - - return warnings, errors -} diff --git a/azurerm/internal/services/relay/hybrid_connection_test.go b/azurerm/internal/services/relay/hybrid_connection_test.go deleted file mode 100644 index 16404cf95cec..000000000000 --- a/azurerm/internal/services/relay/hybrid_connection_test.go +++ /dev/null @@ -1,110 +0,0 @@ -package relay - -import ( - "testing" -) - -func TestParseHybridConnectionID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *HybridConnectionResourceID - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing namespace value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/providers/Microsoft.Relay/namespaces/", - Expected: nil, - }, - { - Name: "App Service Hybrid Connection Resource ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.Relay/namespaces/relay1/hybridConnections/hconn1", - Expected: &HybridConnectionResourceID{ - ResourceGroup: "mygroup1", - Name: "hconn1", - NamespaceName: "relay1", - }, - }, - } - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := ParseHybridConnectionID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} - -func TestValidateHybridConnectionID(t *testing.T) { - cases := []struct { - ID string - Valid bool - }{ - { - ID: "", - Valid: false, - }, - { - ID: "/subscriptions/00000000-0000-0000-0000-000000000000", - Valid: false, - }, - { - ID: 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Valid: false, - }, - { - ID: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Valid: false, - }, - { - ID: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/providers/Microsoft.Relay/namespaces/", - Valid: false, - }, - { - ID: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.Relay/namespaces/relay1/hybridConnections/hconn1", - Valid: true, - }, - } - - for _, tc := range cases { - t.Logf("[DEBUG] Testing Value %q", tc.ID) - _, errors := ValidateHybridConnectionID(tc.ID, "test") - valid := len(errors) == 0 - - if tc.Valid != valid { - t.Fatalf("Expected %t but got %t", tc.Valid, valid) - } - } -} diff --git a/azurerm/internal/services/relay/parse/hybrid_connection.go b/azurerm/internal/services/relay/parse/hybrid_connection.go new file mode 100644 index 000000000000..634002756a33 --- /dev/null +++ b/azurerm/internal/services/relay/parse/hybrid_connection.go @@ -0,0 +1,75 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type HybridConnectionId struct { + SubscriptionId string + ResourceGroup string + NamespaceName string + Name string +} + +func NewHybridConnectionID(subscriptionId, resourceGroup, namespaceName, name string) HybridConnectionId { + return HybridConnectionId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + NamespaceName: namespaceName, + Name: name, + } +} + +func (id HybridConnectionId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Namespace Name %q", id.NamespaceName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Hybrid Connection", segmentsStr) +} + +func (id HybridConnectionId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Relay/namespaces/%s/hybridConnections/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.NamespaceName, id.Name) +} + +// HybridConnectionID parses a HybridConnection ID into an HybridConnectionId struct +func HybridConnectionID(input string) (*HybridConnectionId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := HybridConnectionId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.NamespaceName, err = id.PopSegment("namespaces"); err != nil { + return nil, err + } + if resourceId.Name, err = id.PopSegment("hybridConnections"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/relay/parse/hybrid_connection_test.go b/azurerm/internal/services/relay/parse/hybrid_connection_test.go new file mode 100644 index 000000000000..66f78a1fd946 --- /dev/null +++ b/azurerm/internal/services/relay/parse/hybrid_connection_test.go @@ -0,0 +1,128 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes 
will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = HybridConnectionId{} + +func TestHybridConnectionIDFormatter(t *testing.T) { + actual := NewHybridConnectionID("12345678-1234-9876-4563-123456789012", "resGroup1", "namespace1", "hybridConnection1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Relay/namespaces/namespace1/hybridConnections/hybridConnection1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestHybridConnectionID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *HybridConnectionId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing NamespaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Relay/", + Error: true, + }, + + { + // missing value for NamespaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Relay/namespaces/", + Error: true, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Relay/namespaces/namespace1/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Relay/namespaces/namespace1/hybridConnections/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Relay/namespaces/namespace1/hybridConnections/hybridConnection1", + Expected: &HybridConnectionId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + NamespaceName: "namespace1", + Name: "hybridConnection1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.RELAY/NAMESPACES/NAMESPACE1/HYBRIDCONNECTIONS/HYBRIDCONNECTION1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := HybridConnectionID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.NamespaceName != v.Expected.NamespaceName { + t.Fatalf("Expected %q but got %q for NamespaceName", v.Expected.NamespaceName, actual.NamespaceName) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git 
a/azurerm/internal/services/relay/parse/namespace.go b/azurerm/internal/services/relay/parse/namespace.go new file mode 100644 index 000000000000..94f6fa0ad0bb --- /dev/null +++ b/azurerm/internal/services/relay/parse/namespace.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type NamespaceId struct { + SubscriptionId string + ResourceGroup string + Name string +} + +func NewNamespaceID(subscriptionId, resourceGroup, name string) NamespaceId { + return NamespaceId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + Name: name, + } +} + +func (id NamespaceId) String() string { + segments := []string{ + fmt.Sprintf("Name %q", id.Name), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Namespace", segmentsStr) +} + +func (id NamespaceId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Relay/namespaces/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) +} + +// NamespaceID parses a Namespace ID into an NamespaceId struct +func NamespaceID(input string) (*NamespaceId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := NamespaceId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.Name, err = id.PopSegment("namespaces"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/relay/parse/namespace_test.go b/azurerm/internal/services/relay/parse/namespace_test.go new file mode 100644 index 000000000000..44c8646af92e --- /dev/null +++ b/azurerm/internal/services/relay/parse/namespace_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = NamespaceId{} + +func TestNamespaceIDFormatter(t *testing.T) { + actual := NewNamespaceID("12345678-1234-9876-4563-123456789012", "resGroup1", "namespace1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Relay/namespaces/namespace1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestNamespaceID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *NamespaceId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing Name + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Relay/", + Error: true, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Relay/namespaces/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Relay/namespaces/namespace1", + Expected: &NamespaceId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "resGroup1", + Name: "namespace1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.RELAY/NAMESPACES/NAMESPACE1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := NamespaceID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/relay/registration.go b/azurerm/internal/services/relay/registration.go index 1e7c334402e0..0581cf8c7bb4 100644 --- a/azurerm/internal/services/relay/registration.go +++ b/azurerm/internal/services/relay/registration.go @@ -26,7 +26,7 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource { // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_relay_hybrid_connection": resourceArmHybridConnection(), - "azurerm_relay_namespace": resourceArmRelayNamespace(), + "azurerm_relay_hybrid_connection": resourceArmRelayHybridConnection(), + "azurerm_relay_namespace": resourceRelayNamespace(), } } diff --git a/azurerm/internal/services/relay/relay_hybrid_connection_resource.go b/azurerm/internal/services/relay/relay_hybrid_connection_resource.go new file mode 100644 index 000000000000..cbe4f0978e4e --- /dev/null +++ b/azurerm/internal/services/relay/relay_hybrid_connection_resource.go @@ -0,0 +1,194 @@ +package relay + +import ( + "context" + "fmt" + "log" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/relay/parse" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + + "github.com/Azure/azure-sdk-for-go/services/relay/mgmt/2017-04-01/relay" + "github.com/hashicorp/go-azure-helpers/response" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmRelayHybridConnection() *schema.Resource { + return &schema.Resource{ + Create: resourceArmRelayHybridConnectionCreateUpdate, + Read: resourceArmRelayHybridConnectionRead, + Update: resourceArmRelayHybridConnectionCreateUpdate, + Delete: resourceArmRelayHybridConnectionDelete, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.HybridConnectionID(id) + return err + }), + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "relay_namespace_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "requires_client_authorization": { + Type: schema.TypeBool, + Default: true, + ForceNew: true, + Optional: true, + }, + "user_metadata": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + } +} + +func resourceArmRelayHybridConnectionCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Relay.HybridConnectionsClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + log.Printf("[INFO] preparing arguments for Relay Hybrid Connection creation.") + + resourceId := parse.NewHybridConnectionID(subscriptionId, d.Get("resource_group_name").(string), d.Get("relay_namespace_name").(string), d.Get("name").(string)) + if d.IsNewResource() { + existing, err := client.Get(ctx, resourceId.ResourceGroup, resourceId.NamespaceName, resourceId.Name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for presence of existing Hybrid Connection %q (Namespace %q / Resource Group %q): %+v", resourceId.Name, resourceId.NamespaceName, resourceId.ResourceGroup, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_relay_hybrid_connection", resourceId.ID()) + } + } + + requireClientAuthorization := d.Get("requires_client_authorization").(bool) + userMetadata := d.Get("user_metadata").(string) + + parameters := relay.HybridConnection{ + HybridConnectionProperties: &relay.HybridConnectionProperties{ + RequiresClientAuthorization: &requireClientAuthorization, + UserMetadata: &userMetadata, + }, + } + + if _, err := client.CreateOrUpdate(ctx, resourceId.ResourceGroup, resourceId.NamespaceName, resourceId.Name, parameters); err != nil { + return fmt.Errorf("creating/updating Hybrid Connection %q (Namespace %q Resource Group %q): %+v", resourceId.Name, resourceId.NamespaceName, resourceId.ResourceGroup, err) + } + + d.SetId(resourceId.ID()) + return resourceArmRelayHybridConnectionRead(d, meta) +} + +func resourceArmRelayHybridConnectionRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Relay.HybridConnectionsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, 
d) + defer cancel() + + id, err := parse.HybridConnectionID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.NamespaceName, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + d.SetId("") + return nil + } + + return fmt.Errorf("retrieving Hybrid Connection %q (Namespace %q / Resource Group %q): %+v", id.Name, id.NamespaceName, id.ResourceGroup, err) + } + + d.Set("name", id.Name) + d.Set("relay_namespace_name", id.NamespaceName) + d.Set("resource_group_name", id.ResourceGroup) + + if props := resp.HybridConnectionProperties; props != nil { + d.Set("requires_client_authorization", props.RequiresClientAuthorization) + d.Set("user_metadata", props.UserMetadata) + } + + return nil +} + +func resourceArmRelayHybridConnectionDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Relay.HybridConnectionsClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.HybridConnectionID(d.Id()) + if err != nil { + return err + } + + rc, err := client.Delete(ctx, id.ResourceGroup, id.NamespaceName, id.Name) + if err != nil { + if response.WasNotFound(rc.Response) { + return nil + } + + return fmt.Errorf("deleting Hybrid Connection %q (Namespace %q / Resource Group %q): %+v", id.NamespaceName, id.NamespaceName, id.ResourceGroup, err) + } + + log.Printf("[INFO] Waiting for Hybrid Connection %q (Namespace %q / Resource Group %q) to be deleted", id.Name, id.NamespaceName, id.ResourceGroup) + stateConf := &resource.StateChangeConf{ + Pending: []string{"Pending"}, + Target: []string{"Deleted"}, + Refresh: hybridConnectionDeleteRefreshFunc(ctx, client, id.ResourceGroup, id.NamespaceName, id.Name), + MinTimeout: 15 * time.Second, + Timeout: d.Timeout(schema.TimeoutDelete), + } + + if _, err := stateConf.WaitForState(); err != nil { + return fmt.Errorf("waiting for Relay Hybrid Connection %q (Namespace %q Resource Group %q) to be deleted: %+v", id.Name, id.NamespaceName, id.ResourceGroup, err) + } + + return nil +} + +func hybridConnectionDeleteRefreshFunc(ctx context.Context, client *relay.HybridConnectionsClient, resourceGroupName string, relayNamespace string, name string) resource.StateRefreshFunc { + return func() (interface{}, string, error) { + res, err := client.Get(ctx, resourceGroupName, relayNamespace, name) + if err != nil { + if utils.ResponseWasNotFound(res.Response) { + return res, "Deleted", nil + } + + return nil, "Error", fmt.Errorf("Error issuing read request in relayNamespaceDeleteRefreshFunc to Relay Hybrid Connection %q (Namespace %q Resource Group %q): %s", name, relayNamespace, resourceGroupName, err) + } + + return res, "Pending", nil + } +} diff --git a/azurerm/internal/services/relay/relay_hybrid_connection_resource_test.go b/azurerm/internal/services/relay/relay_hybrid_connection_resource_test.go new file mode 100644 index 000000000000..3f6be9425280 --- /dev/null +++ b/azurerm/internal/services/relay/relay_hybrid_connection_resource_test.go @@ -0,0 +1,199 @@ +package relay_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/relay/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type RelayHybridConnectionResource struct { +} + +func TestAccRelayHybridConnection_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_relay_hybrid_connection", "test") + r := RelayHybridConnectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("requires_client_authorization").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func TestAccRelayHybridConnection_full(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_relay_hybrid_connection", "test") + r := RelayHybridConnectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.full(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("requires_client_authorization").Exists(), + check.That(data.ResourceName).Key("user_metadata").HasValue("metadatatest"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccRelayHybridConnection_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_relay_hybrid_connection", "test") + r := RelayHybridConnectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("requires_client_authorization").Exists(), + ), + }, + { + Config: r.update(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).Key("requires_client_authorization").HasValue("false"), + check.That(data.ResourceName).Key("user_metadata").HasValue("metadataupdated"), + ), + }, + }) +} + +func TestAccRelayHybridConnection_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_relay_hybrid_connection", "test") + r := RelayHybridConnectionResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("requires_client_authorization").Exists(), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (t RelayHybridConnectionResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.HybridConnectionID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Relay.HybridConnectionsClient.Get(ctx, id.ResourceGroup, id.NamespaceName, id.Name) + if err != nil { + return nil, fmt.Errorf("reading Relay Hybrid Connection (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.HybridConnectionProperties != nil), nil +} + +func (RelayHybridConnectionResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_relay_namespace" "test" { + name = "acctestrn-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku_name = "Standard" +} + +resource "azurerm_relay_hybrid_connection" "test" { + name = "acctestrnhc-%d" + resource_group_name = azurerm_resource_group.test.name + relay_namespace_name 
= azurerm_relay_namespace.test.name +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (RelayHybridConnectionResource) full(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_relay_namespace" "test" { + name = "acctestrn-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku_name = "Standard" +} + +resource "azurerm_relay_hybrid_connection" "test" { + name = "acctestrnhc-%d" + resource_group_name = azurerm_resource_group.test.name + relay_namespace_name = azurerm_relay_namespace.test.name + user_metadata = "metadatatest" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (RelayHybridConnectionResource) update(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_relay_namespace" "test" { + name = "acctestrn-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku_name = "Standard" +} + +resource "azurerm_relay_hybrid_connection" "test" { + name = "acctestrnhc-%d" + resource_group_name = azurerm_resource_group.test.name + relay_namespace_name = azurerm_relay_namespace.test.name + requires_client_authorization = false + user_metadata = "metadataupdated" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (r RelayHybridConnectionResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_relay_hybrid_connection" "import" { + name = azurerm_relay_hybrid_connection.test.name + resource_group_name = azurerm_relay_hybrid_connection.test.resource_group_name + relay_namespace_name = azurerm_relay_hybrid_connection.test.relay_namespace_name +} +`, r.basic(data)) +} diff --git a/azurerm/internal/services/relay/relay_namespace.go b/azurerm/internal/services/relay/relay_namespace.go deleted file mode 100644 index 1ee9b81228df..000000000000 --- a/azurerm/internal/services/relay/relay_namespace.go +++ /dev/null @@ -1,47 +0,0 @@ -package relay - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type NamespaceResourceID struct { - ResourceGroup string - Name string -} - -func ParseNamespaceID(input string) (*NamespaceResourceID, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Relay Namespace ID %q: %+v", input, err) - } - nameSpace := NamespaceResourceID{ - ResourceGroup: id.ResourceGroup, - } - - if nameSpace.Name, err = id.PopSegment("namespaces"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &nameSpace, nil -} - -// ValidateNamespaceID validates that the specified ID is a valid Relay Namespace ID -func ValidateNamespaceID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - } - - if _, err := ParseNamespaceID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a resource id: %v", k, err)) - return - } - - return 
warnings, errors -} diff --git a/azurerm/internal/services/relay/relay_namespace_resource.go b/azurerm/internal/services/relay/relay_namespace_resource.go new file mode 100644 index 000000000000..d116e0927784 --- /dev/null +++ b/azurerm/internal/services/relay/relay_namespace_resource.go @@ -0,0 +1,241 @@ +package relay + +import ( + "context" + "fmt" + "log" + "time" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/relay/parse" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + + "github.com/Azure/azure-sdk-for-go/services/relay/mgmt/2017-04-01/relay" + "github.com/hashicorp/go-azure-helpers/response" + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceRelayNamespace() *schema.Resource { + return &schema.Resource{ + Create: resourceRelayNamespaceCreateUpdate, + Read: resourceRelayNamespaceRead, + Update: resourceRelayNamespaceCreateUpdate, + Delete: resourceRelayNamespaceDelete, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.NamespaceID(id) + return err + }), + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(60 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringLenBetween(6, 50), + }, + + "location": azure.SchemaLocation(), + + "resource_group_name": azure.SchemaResourceGroupName(), + + "sku_name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{ + string(relay.Standard), + }, false), + }, + + "metric_id": { + Type: schema.TypeString, + Computed: true, + }, + + "primary_connection_string": { + Type: schema.TypeString, + Computed: true, + Sensitive: true, + }, + + "secondary_connection_string": { + Type: schema.TypeString, + Computed: true, + Sensitive: true, + }, + + "primary_key": { + Type: schema.TypeString, + Computed: true, + Sensitive: true, + }, + + "secondary_key": { + Type: schema.TypeString, + Computed: true, + Sensitive: true, + }, + + "tags": tags.Schema(), + }, + } +} + +func resourceRelayNamespaceCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Relay.NamespacesClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + log.Printf("[INFO] preparing arguments for Relay Namespace create/update.") + + location := azure.NormalizeLocation(d.Get("location").(string)) + t := d.Get("tags").(map[string]interface{}) + expandedTags := 
tags.Expand(t) + + resourceId := parse.NewNamespaceID(subscriptionId, d.Get("resource_group_name").(string), d.Get("name").(string)) + if d.IsNewResource() { + existing, err := client.Get(ctx, resourceId.ResourceGroup, resourceId.Name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for presence of existing Relay Namespace %q (Resource Group %q): %+v", resourceId.Name, resourceId.ResourceGroup, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_relay_namespace", resourceId.ID()) + } + } + + parameters := relay.Namespace{ + Location: utils.String(location), + Sku: &relay.Sku{ + Name: utils.String(d.Get("sku_name").(string)), + Tier: relay.SkuTier(d.Get("sku_name").(string)), + }, + NamespaceProperties: &relay.NamespaceProperties{}, + Tags: expandedTags, + } + + future, err := client.CreateOrUpdate(ctx, resourceId.ResourceGroup, resourceId.Name, parameters) + if err != nil { + return fmt.Errorf("creating/updating Relay Namespace %q (Resource Group %q): %+v", resourceId.Name, resourceId.ResourceGroup, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for create/update of Relay Namespace %q (Resource Group %q) creation: %+v", resourceId.Name, resourceId.ResourceGroup, err) + } + + d.SetId(resourceId.ID()) + return resourceRelayNamespaceRead(d, meta) +} + +func resourceRelayNamespaceRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Relay.NamespacesClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.NamespaceID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + d.SetId("") + return nil + } + + return fmt.Errorf("retrieving Relay Namespace %q (Resource Group %q): %s", id.Name, id.ResourceGroup, err) + } + + keysResp, err := client.ListKeys(ctx, id.ResourceGroup, id.Name, "RootManageSharedAccessKey") + if err != nil { + return fmt.Errorf("listing keys for Relay Namespace %q (Resource Group %q): %s", id.Name, id.ResourceGroup, err) + } + + d.Set("name", id.Name) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("location", location.NormalizeNilable(resp.Location)) + + if sku := resp.Sku; sku != nil { + d.Set("sku_name", sku.Name) + } + + if props := resp.NamespaceProperties; props != nil { + d.Set("metric_id", props.MetricID) + } + + d.Set("primary_connection_string", keysResp.PrimaryConnectionString) + d.Set("primary_key", keysResp.PrimaryKey) + d.Set("secondary_connection_string", keysResp.SecondaryConnectionString) + d.Set("secondary_key", keysResp.SecondaryKey) + + return tags.FlattenAndSet(d, resp.Tags) +} + +func resourceRelayNamespaceDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Relay.NamespacesClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.NamespaceID(d.Id()) + if err != nil { + return err + } + + future, err := client.Delete(ctx, id.ResourceGroup, id.Name) + if err != nil { + if response.WasNotFound(future.Response()) { + return nil + } + + return fmt.Errorf("") + } + + // we can't make use of the Future here due to a bug where 404 isn't tracked as Successful + log.Printf("[DEBUG] Waiting for Relay Namespace %q (Resource Group %q) to be deleted", id.Name, id.ResourceGroup) + 
stateConf := &resource.StateChangeConf{ + Pending: []string{"Pending"}, + Target: []string{"Deleted"}, + Refresh: relayNamespaceDeleteRefreshFunc(ctx, client, id.ResourceGroup, id.Name), + MinTimeout: 15 * time.Second, + Timeout: d.Timeout(schema.TimeoutDelete), + } + + if _, err := stateConf.WaitForState(); err != nil { + return fmt.Errorf("Error waiting for Relay Namespace %q (Resource Group %q) to be deleted: %s", id.Name, id.ResourceGroup, err) + } + + return nil +} + +func relayNamespaceDeleteRefreshFunc(ctx context.Context, client *relay.NamespacesClient, resourceGroupName string, name string) resource.StateRefreshFunc { + return func() (interface{}, string, error) { + res, err := client.Get(ctx, resourceGroupName, name) + if err != nil { + if utils.ResponseWasNotFound(res.Response) { + return res, "Deleted", nil + } + + return nil, "Error", fmt.Errorf("Error issuing read request in relayNamespaceDeleteRefreshFunc to Relay Namespace %q (Resource Group %q): %s", name, resourceGroupName, err) + } + + return res, "Pending", nil + } +} diff --git a/azurerm/internal/services/relay/relay_namespace_resource_test.go b/azurerm/internal/services/relay/relay_namespace_resource_test.go new file mode 100644 index 000000000000..fc3f4c7f432b --- /dev/null +++ b/azurerm/internal/services/relay/relay_namespace_resource_test.go @@ -0,0 +1,153 @@ +package relay_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/relay/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type RelayNamespaceResource struct { +} + +func TestAccRelayNamespace_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_relay_namespace", "test") + r := RelayNamespaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("metric_id").Exists(), + check.That(data.ResourceName).Key("primary_connection_string").Exists(), + check.That(data.ResourceName).Key("secondary_connection_string").Exists(), + check.That(data.ResourceName).Key("primary_key").Exists(), + check.That(data.ResourceName).Key("secondary_key").Exists(), + check.That(data.ResourceName).Key("sku_name").HasValue("Standard"), + ), + }, + data.ImportStep(), + }) +} + +func TestAccRelayNamespace_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_relay_namespace", "test") + r := RelayNamespaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("metric_id").Exists(), + check.That(data.ResourceName).Key("primary_connection_string").Exists(), + check.That(data.ResourceName).Key("secondary_connection_string").Exists(), + check.That(data.ResourceName).Key("primary_key").Exists(), + check.That(data.ResourceName).Key("secondary_key").Exists(), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func 
TestAccRelayNamespace_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_relay_namespace", "test") + r := RelayNamespaceResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.complete(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("metric_id").Exists(), + check.That(data.ResourceName).Key("primary_connection_string").Exists(), + check.That(data.ResourceName).Key("secondary_connection_string").Exists(), + check.That(data.ResourceName).Key("primary_key").Exists(), + check.That(data.ResourceName).Key("secondary_key").Exists(), + ), + }, + data.ImportStep(), + }) +} + +func (t RelayNamespaceResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := parse.NamespaceID(state.ID) + if err != nil { + return nil, err + } + + resp, err := clients.Relay.NamespacesClient.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return nil, fmt.Errorf("reading Relay Namespace (%s): %+v", id.String(), err) + } + + return utils.Bool(resp.NamespaceProperties != nil), nil +} + +func (RelayNamespaceResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_relay_namespace" "test" { + name = "acctestrn-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku_name = "Standard" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r RelayNamespaceResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_relay_namespace" "import" { + name = azurerm_relay_namespace.test.name + location = azurerm_relay_namespace.test.location + resource_group_name = azurerm_relay_namespace.test.resource_group_name + + sku_name = "Standard" +} +`, r.basic(data)) +} + +func (RelayNamespaceResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_relay_namespace" "test" { + name = "acctestrn-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + + sku_name = "Standard" + + tags = { + Hello = "World" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} diff --git a/azurerm/internal/services/relay/relay_namespace_test.go b/azurerm/internal/services/relay/relay_namespace_test.go deleted file mode 100644 index e014cf408a98..000000000000 --- a/azurerm/internal/services/relay/relay_namespace_test.go +++ /dev/null @@ -1,122 +0,0 @@ -package relay - -import ( - "testing" -) - -func TestParseRelayNamespaceID(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *NamespaceResourceID - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Resource Group ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: nil, - }, - { - Name: "Missing namespaces 
Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.Relay/namespaces/", - Expected: nil, - }, - { - Name: "Wrong Casing", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.Relay/Namespaces/mynamespace", - Expected: nil, - }, - { - Name: "Relay Namespace Resource ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.Relay/namespaces/mynamespace", - Expected: &NamespaceResourceID{ - ResourceGroup: "mygroup1", - Name: "mynamespace", - }, - }, - } - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := ParseNamespaceID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for Resource Group", v.Expected.ResourceGroup, actual.ResourceGroup) - } - } -} - -func TestValidateNamespaceID(t *testing.T) { - cases := []struct { - ID string - Valid bool - }{ - { - ID: "", - Valid: false, - }, - { - ID: "nonsense", - Valid: false, - }, - { - ID: "/subscriptions/00000000-0000-0000-0000-000000000000", - Valid: false, - }, - { - ID: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups", - Valid: false, - }, - { - ID: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo", - Valid: false, - }, - { - ID: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/providers/Microsoft.Relay/namespaces", - Valid: false, - }, - { - ID: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/providers/Microsoft.Relay/Namespaces/relay1", - Valid: false, - }, - { - ID: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/providers/Microsoft.Relay/namespaces/relay1", - Valid: true, - }, - } - - for _, tc := range cases { - t.Logf("[DEBUG] Testing value %s", tc.ID) - _, errors := ValidateNamespaceID(tc.ID, "test") - valid := len(errors) == 0 - - if tc.Valid != valid { - t.Fatalf("Expected %t but got %t", tc.Valid, valid) - } - } -} diff --git a/azurerm/internal/services/relay/resource_arm_relay_hybrid_connection.go b/azurerm/internal/services/relay/resource_arm_relay_hybrid_connection.go deleted file mode 100644 index e0dafd1ede20..000000000000 --- a/azurerm/internal/services/relay/resource_arm_relay_hybrid_connection.go +++ /dev/null @@ -1,212 +0,0 @@ -package relay - -import ( - "context" - "fmt" - "log" - "time" - - "github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" - - "github.com/Azure/azure-sdk-for-go/services/relay/mgmt/2017-04-01/relay" - "github.com/hashicorp/go-azure-helpers/response" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func 
resourceArmHybridConnection() *schema.Resource { - return &schema.Resource{ - Create: resourceArmHybridConnectionCreateUpdate, - Read: resourceArmHybridConnectionRead, - Update: resourceArmHybridConnectionCreateUpdate, - Delete: resourceArmHybridConnectionDelete, - Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := ParseHybridConnectionID(id) - return err - }), - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(30 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - - "resource_group_name": azure.SchemaResourceGroupName(), - - "relay_namespace_name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - - "requires_client_authorization": { - Type: schema.TypeBool, - Default: true, - ForceNew: true, - Optional: true, - }, - "user_metadata": { - Type: schema.TypeString, - Optional: true, - ValidateFunc: validation.StringIsNotEmpty, - }, - }, - } -} - -func resourceArmHybridConnectionCreateUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Relay.HybridConnectionsClient - ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - - log.Printf("[INFO] preparing arguments for Relay Hybrid Connection creation.") - - name := d.Get("name").(string) - resourceGroup := d.Get("resource_group_name").(string) - relayNamespace := d.Get("relay_namespace_name").(string) - - if d.IsNewResource() { - existing, err := client.Get(ctx, resourceGroup, relayNamespace, name) - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing Hybrid Connection %q (Resource Group %q, Namespace: %q): %s", name, resourceGroup, relayNamespace, err) - } - } - - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_relay_hybrid_connection", *existing.ID) - } - } - - requireClientAuthorization := d.Get("requires_client_authorization").(bool) - userMetadata := d.Get("user_metadata").(string) - - parameters := relay.HybridConnection{ - HybridConnectionProperties: &relay.HybridConnectionProperties{ - RequiresClientAuthorization: &requireClientAuthorization, - UserMetadata: &userMetadata, - }, - } - - _, err := client.CreateOrUpdate(ctx, resourceGroup, relayNamespace, name, parameters) - if err != nil { - return fmt.Errorf("Error creating Relay Hybrid Connection %q (Namespace %q Resource Group %q): %+v", name, relayNamespace, resourceGroup, err) - } - - read, err := client.Get(ctx, resourceGroup, relayNamespace, name) - if err != nil { - return fmt.Errorf("Error issuing get request for Relay Hybrid Connection %q (Namespace %q Resource Group %q): %+v", name, relayNamespace, resourceGroup, err) - } - if read.ID == nil { - return fmt.Errorf("Cannot read Relay Hybrid Connection %q (Namespace %q Resource group %s) ID", name, relayNamespace, resourceGroup) - } - - d.SetId(*read.ID) - - return resourceArmHybridConnectionRead(d, meta) -} - -func resourceArmHybridConnectionRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Relay.HybridConnectionsClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - 
defer cancel() - - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - resourceGroup := id.ResourceGroup - relayNamespace := id.Path["namespaces"] - name := id.Path["hybridConnections"] - - resp, err := client.Get(ctx, resourceGroup, relayNamespace, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - d.SetId("") - return nil - } - - return fmt.Errorf("Error making Read request on Relay Hybrid Connection %q (Namespace %q Resource Group %q): %s", name, relayNamespace, resourceGroup, err) - } - - d.Set("name", name) - d.Set("resource_group_name", resourceGroup) - d.Set("relay_namespace_name", relayNamespace) - - if props := resp.HybridConnectionProperties; props != nil { - d.Set("requires_client_authorization", props.RequiresClientAuthorization) - d.Set("user_metadata", props.UserMetadata) - } - - return nil -} - -func resourceArmHybridConnectionDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Relay.HybridConnectionsClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - resourceGroup := id.ResourceGroup - relayNamespace := id.Path["namespaces"] - name := id.Path["hybridConnections"] - - log.Printf("[INFO] Waiting for Relay Hybrid Connection %q (Namespace %q Resource Group %q) to be deleted", name, relayNamespace, resourceGroup) - rc, err := client.Delete(ctx, resourceGroup, relayNamespace, name) - - if err != nil { - if response.WasNotFound(rc.Response) { - return nil - } - - return err - } - - stateConf := &resource.StateChangeConf{ - Pending: []string{"Pending"}, - Target: []string{"Deleted"}, - Refresh: hybridConnectionDeleteRefreshFunc(ctx, client, resourceGroup, relayNamespace, name), - MinTimeout: 15 * time.Second, - Timeout: d.Timeout(schema.TimeoutDelete), - } - - if _, err := stateConf.WaitForState(); err != nil { - return fmt.Errorf("Error waiting for Relay Hybrid Connection %q (Namespace %q Resource Group %q) to be deleted: %s", name, relayNamespace, resourceGroup, err) - } - - return nil -} - -func hybridConnectionDeleteRefreshFunc(ctx context.Context, client *relay.HybridConnectionsClient, resourceGroupName string, relayNamespace string, name string) resource.StateRefreshFunc { - return func() (interface{}, string, error) { - res, err := client.Get(ctx, resourceGroupName, relayNamespace, name) - if err != nil { - if utils.ResponseWasNotFound(res.Response) { - return res, "Deleted", nil - } - - return nil, "Error", fmt.Errorf("Error issuing read request in relayNamespaceDeleteRefreshFunc to Relay Hybrid Connection %q (Namespace %q Resource Group %q): %s", name, relayNamespace, resourceGroupName, err) - } - - return res, "Pending", nil - } -} diff --git a/azurerm/internal/services/relay/resource_arm_relay_namespace.go b/azurerm/internal/services/relay/resource_arm_relay_namespace.go deleted file mode 100644 index d70646698a47..000000000000 --- a/azurerm/internal/services/relay/resource_arm_relay_namespace.go +++ /dev/null @@ -1,261 +0,0 @@ -package relay - -import ( - "context" - "fmt" - "log" - "time" - - azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" - - "github.com/Azure/azure-sdk-for-go/services/relay/mgmt/2017-04-01/relay" - "github.com/hashicorp/go-azure-helpers/response" - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - 
"github.com/hashicorp/terraform-plugin-sdk/helper/validation" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmRelayNamespace() *schema.Resource { - return &schema.Resource{ - Create: resourceArmRelayNamespaceCreateUpdate, - Read: resourceArmRelayNamespaceRead, - Update: resourceArmRelayNamespaceCreateUpdate, - Delete: resourceArmRelayNamespaceDelete, - Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := ParseNamespaceID(id) - return err - }), - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(30 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(30 * time.Minute), - Delete: schema.DefaultTimeout(60 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringLenBetween(6, 50), - }, - - "location": azure.SchemaLocation(), - - "resource_group_name": azure.SchemaResourceGroupName(), - - "sku_name": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringInSlice([]string{ - string(relay.Standard), - }, false), - }, - - "metric_id": { - Type: schema.TypeString, - Computed: true, - }, - - "primary_connection_string": { - Type: schema.TypeString, - Computed: true, - Sensitive: true, - }, - - "secondary_connection_string": { - Type: schema.TypeString, - Computed: true, - Sensitive: true, - }, - - "primary_key": { - Type: schema.TypeString, - Computed: true, - Sensitive: true, - }, - - "secondary_key": { - Type: schema.TypeString, - Computed: true, - Sensitive: true, - }, - - "tags": tags.Schema(), - }, - } -} - -func resourceArmRelayNamespaceCreateUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Relay.NamespacesClient - ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - - sku := relay.Sku{ - Name: utils.String(d.Get("sku_name").(string)), - Tier: relay.SkuTier(d.Get("sku_name").(string)), - } - - log.Printf("[INFO] preparing arguments for Relay Namespace creation.") - - name := d.Get("name").(string) - location := azure.NormalizeLocation(d.Get("location").(string)) - resourceGroup := d.Get("resource_group_name").(string) - - t := d.Get("tags").(map[string]interface{}) - expandedTags := tags.Expand(t) - - if d.IsNewResource() { - existing, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing Relay Namespace %q (Resource Group %q): %+v", name, resourceGroup, err) - } - } - - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_relay_namespace", *existing.ID) - } - } - - parameters := relay.Namespace{ - Location: utils.String(location), - Sku: &sku, - NamespaceProperties: &relay.NamespaceProperties{}, - Tags: expandedTags, - } - - future, err := client.CreateOrUpdate(ctx, resourceGroup, name, parameters) - if err != nil { - return fmt.Errorf("Error creating Relay 
Namespace %q (Resource Group %q): %+v", name, resourceGroup, err) - } - - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("Error waiting on future for Relay Namespace %q (Resource Group %q) creation: %+v", name, resourceGroup, err) - } - - read, err := client.Get(ctx, resourceGroup, name) - if err != nil { - return fmt.Errorf("Error issuing get request for Relay Namespace %q (Resource Group %q): %+v", name, resourceGroup, err) - } - if read.ID == nil { - return fmt.Errorf("Cannot read Relay Namespace %q (resource group %s) ID", name, resourceGroup) - } - - d.SetId(*read.ID) - - return resourceArmRelayNamespaceRead(d, meta) -} - -func resourceArmRelayNamespaceRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Relay.NamespacesClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - resourceGroup := id.ResourceGroup - name := id.Path["namespaces"] - - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - d.SetId("") - return nil - } - - return fmt.Errorf("Error making Read request on Relay Namespace %q (Resource Group %q): %s", name, resourceGroup, err) - } - - d.Set("name", name) - d.Set("resource_group_name", resourceGroup) - if location := resp.Location; location != nil { - d.Set("location", azure.NormalizeLocation(*location)) - } - - if sku := resp.Sku; sku != nil { - if err := d.Set("sku_name", sku.Name); err != nil { - return fmt.Errorf("Error setting 'sku_name': %+v", err) - } - } else { - return fmt.Errorf("Error making Read request on Relay Namespace %q (Resource Group %q): Unable to retrieve 'sku' value", name, resourceGroup) - } - - if props := resp.NamespaceProperties; props != nil { - d.Set("metric_id", props.MetricID) - } - - keysResp, err := client.ListKeys(ctx, resourceGroup, name, "RootManageSharedAccessKey") - if err != nil { - return fmt.Errorf("Error making ListKeys request on Relay Namespace %q (Resource Group %q): %s", name, resourceGroup, err) - } - - d.Set("primary_connection_string", keysResp.PrimaryConnectionString) - d.Set("primary_key", keysResp.PrimaryKey) - d.Set("secondary_connection_string", keysResp.SecondaryConnectionString) - d.Set("secondary_key", keysResp.SecondaryKey) - - return tags.FlattenAndSet(d, resp.Tags) -} - -func resourceArmRelayNamespaceDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Relay.NamespacesClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := azure.ParseAzureResourceID(d.Id()) - if err != nil { - return err - } - resourceGroup := id.ResourceGroup - name := id.Path["namespaces"] - - future, err := client.Delete(ctx, resourceGroup, name) - if err != nil { - if response.WasNotFound(future.Response()) { - return nil - } - - return err - } - - // we can't make use of the Future here due to a bug where 404 isn't tracked as Successful - log.Printf("[DEBUG] Waiting for Relay Namespace %q (Resource Group %q) to be deleted", name, resourceGroup) - stateConf := &resource.StateChangeConf{ - Pending: []string{"Pending"}, - Target: []string{"Deleted"}, - Refresh: relayNamespaceDeleteRefreshFunc(ctx, client, resourceGroup, name), - MinTimeout: 15 * time.Second, - Timeout: d.Timeout(schema.TimeoutDelete), - } - - if _, err := stateConf.WaitForState(); err != nil { - return 
fmt.Errorf("Error waiting for Relay Namespace %q (Resource Group %q) to be deleted: %s", name, resourceGroup, err) - } - - return nil -} - -func relayNamespaceDeleteRefreshFunc(ctx context.Context, client *relay.NamespacesClient, resourceGroupName string, name string) resource.StateRefreshFunc { - return func() (interface{}, string, error) { - res, err := client.Get(ctx, resourceGroupName, name) - if err != nil { - if utils.ResponseWasNotFound(res.Response) { - return res, "Deleted", nil - } - - return nil, "Error", fmt.Errorf("Error issuing read request in relayNamespaceDeleteRefreshFunc to Relay Namespace %q (Resource Group %q): %s", name, resourceGroupName, err) - } - - return res, "Pending", nil - } -} diff --git a/azurerm/internal/services/relay/resourceids.go b/azurerm/internal/services/relay/resourceids.go new file mode 100644 index 000000000000..be36df0cde22 --- /dev/null +++ b/azurerm/internal/services/relay/resourceids.go @@ -0,0 +1,4 @@ +package relay + +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=HybridConnection -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Relay/namespaces/namespace1/hybridConnections/hybridConnection1 +//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Namespace -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Relay/namespaces/namespace1 diff --git a/azurerm/internal/services/relay/tests/resource_arm_relay_hybrid_connection_test.go b/azurerm/internal/services/relay/tests/resource_arm_relay_hybrid_connection_test.go deleted file mode 100644 index 1cad9e445efc..000000000000 --- a/azurerm/internal/services/relay/tests/resource_arm_relay_hybrid_connection_test.go +++ /dev/null @@ -1,252 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMRelayHybridConnection_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_relay_hybrid_connection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRelayHybridConnectionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRelayHybridConnection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRelayHybridConnectionExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "requires_client_authorization"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMRelayHybridConnection_full(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_relay_hybrid_connection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRelayHybridConnectionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRelayHybridConnection_full(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRelayHybridConnectionExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "requires_client_authorization"), - resource.TestCheckResourceAttr(data.ResourceName, 
"user_metadata", "metadatatest"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMRelayHybridConnection_update(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_relay_hybrid_connection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRelayHybridConnectionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRelayHybridConnection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRelayHybridConnectionExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "requires_client_authorization"), - ), - }, - { - Config: testAccAzureRMRelayHybridConnection_update(data), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(data.ResourceName, "requires_client_authorization", "false"), - resource.TestCheckResourceAttr(data.ResourceName, "user_metadata", "metadataupdated"), - ), - }, - }, - }) -} - -func TestAccAzureRMRelayHybridConnection_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_relay_hybrid_connection", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRelayHybridConnectionDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRelayHybridConnection_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRelayHybridConnectionExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "requires_client_authorization"), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMRelayHybridConnection_requiresImport), - }, - }) -} - -func testAccAzureRMRelayHybridConnection_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_relay_namespace" "test" { - name = "acctestrn-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku_name = "Standard" -} - -resource "azurerm_relay_hybrid_connection" "test" { - name = "acctestrnhc-%d" - resource_group_name = azurerm_resource_group.test.name - relay_namespace_name = azurerm_relay_namespace.test.name -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMRelayHybridConnection_full(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_relay_namespace" "test" { - name = "acctestrn-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku_name = "Standard" -} - -resource "azurerm_relay_hybrid_connection" "test" { - name = "acctestrnhc-%d" - resource_group_name = azurerm_resource_group.test.name - relay_namespace_name = azurerm_relay_namespace.test.name - user_metadata = "metadatatest" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMRelayHybridConnection_update(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource 
"azurerm_relay_namespace" "test" { - name = "acctestrn-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku_name = "Standard" -} - -resource "azurerm_relay_hybrid_connection" "test" { - name = "acctestrnhc-%d" - resource_group_name = azurerm_resource_group.test.name - relay_namespace_name = azurerm_relay_namespace.test.name - requires_client_authorization = false - user_metadata = "metadataupdated" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) -} - -func testAccAzureRMRelayHybridConnection_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMRelayHybridConnection_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_relay_hybrid_connection" "import" { - name = azurerm_relay_hybrid_connection.test.name - resource_group_name = azurerm_relay_hybrid_connection.test.resource_group_name - relay_namespace_name = azurerm_relay_hybrid_connection.test.relay_namespace_name -} -`, template) -} - -func testCheckAzureRMRelayHybridConnectionExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Relay.HybridConnectionsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - relayNamespace := rs.Primary.Attributes["relay_namespace_name"] - - // Ensure resource group exists in API - - resp, err := client.Get(ctx, resourceGroup, relayNamespace, name) - if err != nil { - return fmt.Errorf("Bad: Get on relayHybridConnectionsClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Relay Hybrid Connection %q in Namespace %q (Resource Group: %q) does not exist", name, relayNamespace, resourceGroup) - } - - return nil - } -} - -func testCheckAzureRMRelayHybridConnectionDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Relay.HybridConnectionsClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_relay_hybrid_connection" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - relayNamespace := rs.Primary.Attributes["relay_namespace_name"] - - resp, err := client.Get(ctx, resourceGroup, relayNamespace, name) - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Relay Hybrid Connection still exists:\n%#v", resp) - } - } - - return nil -} diff --git a/azurerm/internal/services/relay/tests/resource_arm_relay_namespace_test.go b/azurerm/internal/services/relay/tests/resource_arm_relay_namespace_test.go deleted file mode 100644 index 0e2e493af8ca..000000000000 --- a/azurerm/internal/services/relay/tests/resource_arm_relay_namespace_test.go +++ /dev/null @@ -1,200 +0,0 @@ -package tests - -import ( - "fmt" - "net/http" - "testing" - - "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" -) - -func TestAccAzureRMRelayNamespace_basic(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_relay_namespace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRelayNamespaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRelayNamespace_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRelayNamespaceExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "metric_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_connection_string"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_connection_string"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_key"), - resource.TestCheckResourceAttr(data.ResourceName, "sku_name", "Standard"), - ), - }, - data.ImportStep(), - }, - }) -} - -func TestAccAzureRMRelayNamespace_requiresImport(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_relay_namespace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRelayNamespaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRelayNamespace_basic(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRelayNamespaceExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "metric_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_connection_string"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_connection_string"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_key"), - ), - }, - data.RequiresImportErrorStep(testAccAzureRMRelayNamespace_requiresImport), - }, - }) -} - -func TestAccAzureRMRelayNamespace_complete(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_relay_namespace", "test") - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acceptance.PreCheck(t) }, - Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMRelayNamespaceDestroy, - Steps: []resource.TestStep{ - { - Config: testAccAzureRMRelayNamespace_complete(data), - Check: resource.ComposeTestCheckFunc( - testCheckAzureRMRelayNamespaceExists(data.ResourceName), - resource.TestCheckResourceAttrSet(data.ResourceName, "metric_id"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_connection_string"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_connection_string"), - resource.TestCheckResourceAttrSet(data.ResourceName, "primary_key"), - resource.TestCheckResourceAttrSet(data.ResourceName, "secondary_key"), - ), - }, - data.ImportStep(), - }, - }) -} - -func testCheckAzureRMRelayNamespaceExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Relay.NamespacesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not found: %s", resourceName) - } - - resourceGroup := 
rs.Primary.Attributes["resource_group_name"] - name := rs.Primary.Attributes["name"] - - // Ensure resource group exists in API - - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - return fmt.Errorf("Bad: Get on relayNamespacesClient: %+v", err) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("Bad: Relay Namespace %q (Resource Group: %q) does not exist", name, resourceGroup) - } - - return nil - } -} - -func testCheckAzureRMRelayNamespaceDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).Relay.NamespacesClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_relay_namespace" { - continue - } - - resourceGroup := rs.Primary.Attributes["resource_group_name"] - name := rs.Primary.Attributes["name"] - - resp, err := client.Get(ctx, resourceGroup, name) - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Relay Namespace still exists:\n%#v", resp) - } - } - - return nil -} - -func testAccAzureRMRelayNamespace_basic(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_relay_namespace" "test" { - name = "acctestrn-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku_name = "Standard" -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} - -func testAccAzureRMRelayNamespace_requiresImport(data acceptance.TestData) string { - template := testAccAzureRMRelayNamespace_basic(data) - return fmt.Sprintf(` -%s - -resource "azurerm_relay_namespace" "import" { - name = azurerm_relay_namespace.test.name - location = azurerm_relay_namespace.test.location - resource_group_name = azurerm_relay_namespace.test.resource_group_name - - sku_name = "Standard" -} -`, template) -} - -func testAccAzureRMRelayNamespace_complete(data acceptance.TestData) string { - return fmt.Sprintf(` -provider "azurerm" { - features {} -} - -resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" -} - -resource "azurerm_relay_namespace" "test" { - name = "acctestrn-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - - sku_name = "Standard" - - tags = { - Hello = "World" - } -} -`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) -} diff --git a/azurerm/internal/services/relay/validate/hybrid_connection_id.go b/azurerm/internal/services/relay/validate/hybrid_connection_id.go new file mode 100644 index 000000000000..03b7bc42383c --- /dev/null +++ b/azurerm/internal/services/relay/validate/hybrid_connection_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/relay/parse" +) + +func HybridConnectionID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.HybridConnectionID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/relay/validate/hybrid_connection_id_test.go 
b/azurerm/internal/services/relay/validate/hybrid_connection_id_test.go new file mode 100644 index 000000000000..914237a013cb --- /dev/null +++ b/azurerm/internal/services/relay/validate/hybrid_connection_id_test.go @@ -0,0 +1,88 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import "testing" + +func TestHybridConnectionID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing NamespaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Relay/", + Valid: false, + }, + + { + // missing value for NamespaceName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Relay/namespaces/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Relay/namespaces/namespace1/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Relay/namespaces/namespace1/hybridConnections/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Relay/namespaces/namespace1/hybridConnections/hybridConnection1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.RELAY/NAMESPACES/NAMESPACE1/HYBRIDCONNECTIONS/HYBRIDCONNECTION1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := HybridConnectionID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/relay/validate/namespace_id.go b/azurerm/internal/services/relay/validate/namespace_id.go new file mode 100644 index 000000000000..1fa3b280cd33 --- /dev/null +++ b/azurerm/internal/services/relay/validate/namespace_id.go @@ -0,0 +1,23 @@ +package validate + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/relay/parse" +) + +func NamespaceID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := parse.NamespaceID(v); err != nil { + errors = append(errors, err) + } + + return +} diff --git a/azurerm/internal/services/relay/validate/namespace_id_test.go b/azurerm/internal/services/relay/validate/namespace_id_test.go new file mode 100644 index 000000000000..47c226550818 --- /dev/null +++ b/azurerm/internal/services/relay/validate/namespace_id_test.go @@ -0,0 +1,76 @@ +package validate + +// NOTE: this file is generated via 
'go:generate' - manual changes will be overwritten + +import "testing" + +func TestNamespaceID(t *testing.T) { + cases := []struct { + Input string + Valid bool + }{ + + { + // empty + Input: "", + Valid: false, + }, + + { + // missing SubscriptionId + Input: "/", + Valid: false, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Valid: false, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Valid: false, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Valid: false, + }, + + { + // missing Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Relay/", + Valid: false, + }, + + { + // missing value for Name + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Relay/namespaces/", + Valid: false, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Relay/namespaces/namespace1", + Valid: true, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.RELAY/NAMESPACES/NAMESPACE1", + Valid: false, + }, + } + for _, tc := range cases { + t.Logf("[DEBUG] Testing Value %s", tc.Input) + _, errors := NamespaceID(tc.Input, "test") + valid := len(errors) == 0 + + if tc.Valid != valid { + t.Fatalf("Expected %t but got %t", tc.Valid, valid) + } + } +} diff --git a/azurerm/internal/services/resource/group_parser.go b/azurerm/internal/services/resource/group_parser.go deleted file mode 100644 index ac0d9a01a444..000000000000 --- a/azurerm/internal/services/resource/group_parser.go +++ /dev/null @@ -1,32 +0,0 @@ -package resource - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type ResourceGroupResourceID struct { - Name string -} - -func ParseResourceGroupID(input string) (*ResourceGroupResourceID, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, fmt.Errorf("[ERROR] Unable to parse Resource Group ID %q: %+v", input, err) - } - - group := ResourceGroupResourceID{ - Name: id.ResourceGroup, - } - - if group.Name == "" { - return nil, fmt.Errorf("ID contained no `resourceGroups` segment!") - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &group, nil -} diff --git a/azurerm/internal/services/resource/group_parser_test.go b/azurerm/internal/services/resource/group_parser_test.go deleted file mode 100644 index 67d2a1a2b7fe..000000000000 --- a/azurerm/internal/services/resource/group_parser_test.go +++ /dev/null @@ -1,63 +0,0 @@ -package resource - -import ( - "testing" -) - -func TestParseResourceGroup(t *testing.T) { - testData := []struct { - Name string - Input string - Expected *ResourceGroupResourceID - }{ - { - Name: "Empty", - Input: "", - Expected: nil, - }, - { - Name: "No Resource Groups Segment", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000", - Expected: nil, - }, - { - Name: "No Resource Groups Value", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", - Expected: nil, - }, - { - Name: "Completed", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", - Expected: &ResourceGroupResourceID{ - Name: "foo", - }, - }, - { - Name: "App Service Resource ID", - 
Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.Web/sites/instance1", - Expected: nil, - }, - { - Name: "Virtual Machine Resource ID", - Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/microsoft.compute/virtualMachines/machine1", - Expected: nil, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Name) - - actual, err := ParseResourceGroupID(v.Input) - if err != nil { - if v.Expected == nil { - continue - } - - t.Fatalf("Expected a value but got an error: %s", err) - } - - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - } -} diff --git a/azurerm/internal/services/resource/group_validator.go b/azurerm/internal/services/resource/group_validator.go deleted file mode 100644 index 0880297419f8..000000000000 --- a/azurerm/internal/services/resource/group_validator.go +++ /dev/null @@ -1,34 +0,0 @@ -package resource - -import "fmt" - -// ValidateResourceGroupID validates that the specified Resource Group ID is Valid -func ValidateResourceGroupID(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if _, err := ParseResourceGroupID(v); err != nil { - errors = append(errors, fmt.Errorf("Can not parse %q as a resource id: %v", k, err)) - return - } - - return warnings, errors -} - -// ValidateResourceGroupIDOrEmpty validates that the specified ID is either Empty or a Valid Resource Group ID -func ValidateResourceGroupIDOrEmpty(i interface{}, k string) (_ []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) - return - } - - if v == "" { - return - } - - return ValidateResourceGroupID(i, k) -} diff --git a/azurerm/internal/services/resource/group_validator_test.go b/azurerm/internal/services/resource/group_validator_test.go deleted file mode 100644 index d8fef9e6cfd8..000000000000 --- a/azurerm/internal/services/resource/group_validator_test.go +++ /dev/null @@ -1,92 +0,0 @@ -package resource - -import "testing" - -func TestValidateResourceGroupID(t *testing.T) { - cases := []struct { - ID string - Errors int - }{ - { - ID: "", - Errors: 1, - }, - { - ID: "nonsense", - Errors: 1, - }, - { - ID: "/slash", - Errors: 1, - }, - { - ID: "/path/to/nothing", - Errors: 1, - }, - { - ID: "/subscriptions", - Errors: 1, - }, - { - ID: "/providers", - Errors: 1, - }, - { - ID: "/subscriptions/not-a-guid", - Errors: 0, - }, - { - ID: "/providers/test", - Errors: 0, - }, - { - ID: "/subscriptions/00000000-0000-0000-0000-00000000000/", - Errors: 0, - }, - { - ID: "/providers/provider.name/", - Errors: 0, - }, - } - - for _, tc := range cases { - t.Run(tc.ID, func(t *testing.T) { - _, errors := ValidateResourceGroupID(tc.ID, "test") - - if len(errors) < tc.Errors { - t.Fatalf("Expected ValidateResourceGroupID to have %d not %d errors for %q", tc.Errors, len(errors), tc.ID) - } - }) - } -} - -func TestValidateResourceGroupIDOrEmpty(t *testing.T) { - cases := []struct { - ID string - Errors int - }{ - { - ID: "", - Errors: 0, - }, - { - ID: "nonsense", - Errors: 1, - }, - // as this function just calls TestAzureResourceGroupId lets not be as comprehensive - { - ID: "/providers/provider.name/", - Errors: 0, - }, - } - - for _, tc := range cases { - t.Run(tc.ID, func(t *testing.T) { - _, errors := 
ValidateResourceGroupIDOrEmpty(tc.ID, "test") - - if len(errors) < tc.Errors { - t.Fatalf("Expected TestAzureResourceGroupIdOrEmpty to have %d not %d errors for %q", tc.Errors, len(errors), tc.ID) - } - }) - } -} diff --git a/azurerm/internal/services/resource/resource_arm_management_lock.go b/azurerm/internal/services/resource/management_lock_resource.go similarity index 100% rename from azurerm/internal/services/resource/resource_arm_management_lock.go rename to azurerm/internal/services/resource/management_lock_resource.go diff --git a/azurerm/internal/services/resource/management_lock_resource_test.go b/azurerm/internal/services/resource/management_lock_resource_test.go new file mode 100644 index 000000000000..a7488dea0c86 --- /dev/null +++ b/azurerm/internal/services/resource/management_lock_resource_test.go @@ -0,0 +1,425 @@ +package resource_test + +import ( + "fmt" + "os" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func TestAccAzureRMManagementLock_resourceGroupReadOnlyBasic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_management_lock", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMManagementLockDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMManagementLock_resourceGroupReadOnlyBasic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMManagementLockExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMManagementLock_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_management_lock", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMManagementLockDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMManagementLock_resourceGroupReadOnlyBasic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMManagementLockExists(data.ResourceName), + ), + }, + data.RequiresImportErrorStep(testAccAzureRMManagementLock_requiresImport), + }, + }) +} + +func TestAccAzureRMManagementLock_resourceGroupReadOnlyComplete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_management_lock", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMManagementLockDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMManagementLock_resourceGroupReadOnlyComplete(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMManagementLockExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMManagementLock_resourceGroupCanNotDeleteBasic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_management_lock", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMManagementLockDestroy, + Steps: []resource.TestStep{ + { + Config: 
testAccAzureRMManagementLock_resourceGroupCanNotDeleteBasic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMManagementLockExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMManagementLock_resourceGroupCanNotDeleteComplete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_management_lock", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMManagementLockDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMManagementLock_resourceGroupCanNotDeleteComplete(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMManagementLockExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMManagementLock_publicIPReadOnlyBasic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_management_lock", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMManagementLockDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMManagementLock_publicIPReadOnlyBasic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMManagementLockExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMManagementLock_publicIPCanNotDeleteBasic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_management_lock", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMManagementLockDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMManagementLock_publicIPCanNotDeleteBasic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMManagementLockExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMManagementLock_subscriptionReadOnlyBasic(t *testing.T) { + _, exists := os.LookupEnv("TF_ACC_SUBSCRIPTION_PARALLEL_LOCK") + if !exists { + t.Skip("`TF_ACC_SUBSCRIPTION_PARALLEL_LOCK` isn't specified - skipping since this test can't be run in Parallel") + } + + data := acceptance.BuildTestData(t, "azurerm_management_lock", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMManagementLockDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMManagementLock_subscriptionReadOnlyBasic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMManagementLockExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMManagementLock_subscriptionCanNotDeleteBasic(t *testing.T) { + _, exists := os.LookupEnv("TF_ACC_SUBSCRIPTION_PARALLEL_LOCK") + if !exists { + t.Skip("`TF_ACC_SUBSCRIPTION_PARALLEL_LOCK` isn't specified - skipping since this test can't be run in Parallel") + } + + data := acceptance.BuildTestData(t, "azurerm_management_lock", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMManagementLockDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMManagementLock_subscriptionCanNotDeleteBasic(data), + Check: resource.ComposeTestCheckFunc( + 
testCheckAzureRMManagementLockExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func testCheckAzureRMManagementLockExists(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).Resource.LocksClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + // Ensure we have enough information in state to look up in API + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Not found: %q", resourceName) + } + + name := rs.Primary.Attributes["name"] + scope := rs.Primary.Attributes["scope"] + + resp, err := client.GetByScope(ctx, scope, name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Bad: Management Lock %q (Scope %q) does not exist", name, scope) + } + + return fmt.Errorf("Bad: Get on managementLocksClient: %+v", err) + } + + return nil + } +} + +func testCheckAzureRMManagementLockDestroy(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).Resource.LocksClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + for _, rs := range s.RootModule().Resources { + if rs.Type != "azurerm_management_lock" { + continue + } + + name := rs.Primary.Attributes["name"] + scope := rs.Primary.Attributes["scope"] + + resp, err := client.GetByScope(ctx, scope, name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return nil + } + + return err + } + } + + return nil +} + +func testAccAzureRMManagementLock_resourceGroupReadOnlyBasic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_management_lock" "test" { + name = "acctestlock-%d" + scope = azurerm_resource_group.test.id + lock_level = "ReadOnly" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func testAccAzureRMManagementLock_requiresImport(data acceptance.TestData) string { + template := testAccAzureRMManagementLock_resourceGroupReadOnlyBasic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_management_lock" "import" { + name = azurerm_management_lock.test.name + scope = azurerm_management_lock.test.scope + lock_level = azurerm_management_lock.test.lock_level +} +`, template) +} + +func testAccAzureRMManagementLock_resourceGroupReadOnlyComplete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_management_lock" "test" { + name = "acctestlock-%d" + scope = azurerm_resource_group.test.id + lock_level = "ReadOnly" + notes = "Hello, World!" 
+} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func testAccAzureRMManagementLock_resourceGroupCanNotDeleteBasic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_management_lock" "test" { + name = "acctestlock-%d" + scope = azurerm_resource_group.test.id + lock_level = "CanNotDelete" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func testAccAzureRMManagementLock_resourceGroupCanNotDeleteComplete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_management_lock" "test" { + name = "acctestlock-%d" + scope = azurerm_resource_group.test.id + lock_level = "CanNotDelete" + notes = "Hello, World!" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func testAccAzureRMManagementLock_publicIPReadOnlyBasic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_public_ip" "test" { + name = "acctestpublicip-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" + idle_timeout_in_minutes = 30 +} + +resource "azurerm_management_lock" "test" { + name = "acctestlock-%d" + scope = azurerm_public_ip.test.id + lock_level = "ReadOnly" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func testAccAzureRMManagementLock_publicIPCanNotDeleteBasic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_public_ip" "test" { + name = "acctestpublicip-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + allocation_method = "Static" + idle_timeout_in_minutes = 30 +} + +resource "azurerm_management_lock" "test" { + name = "acctestlock-%d" + scope = azurerm_public_ip.test.id + lock_level = "CanNotDelete" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func testAccAzureRMManagementLock_subscriptionReadOnlyBasic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_subscription" "current" { +} + +resource "azurerm_management_lock" "test" { + name = "acctestlock-%d" + scope = data.azurerm_subscription.current.id + lock_level = "ReadOnly" +} +`, data.RandomInteger) +} + +func testAccAzureRMManagementLock_subscriptionCanNotDeleteBasic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +data "azurerm_subscription" "current" { +} + +resource "azurerm_management_lock" "test" { + name = "acctestlock-%d" + scope = data.azurerm_subscription.current.id + lock_level = "CanNotDelete" +} +`, data.RandomInteger) +} diff --git a/azurerm/internal/services/resource/parse/resource_group.go b/azurerm/internal/services/resource/parse/resource_group.go new file mode 100644 index 000000000000..c4daf961fdcc --- /dev/null +++ b/azurerm/internal/services/resource/parse/resource_group.go @@ -0,0 +1,62 @@ 
+package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ResourceGroupId struct { + SubscriptionId string + ResourceGroup string +} + +func NewResourceGroupID(subscriptionId, resourceGroup string) ResourceGroupId { + return ResourceGroupId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + } +} + +func (id ResourceGroupId) String() string { + segments := []string{ + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Resource Group", segmentsStr) +} + +func (id ResourceGroupId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup) +} + +// ResourceGroupID parses a ResourceGroup ID into an ResourceGroupId struct +func ResourceGroupID(input string) (*ResourceGroupId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ResourceGroupId{ + SubscriptionId: id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/resource/parse/resource_group_template_deployment.go b/azurerm/internal/services/resource/parse/resource_group_template_deployment.go new file mode 100644 index 000000000000..0dc8e407c849 --- /dev/null +++ b/azurerm/internal/services/resource/parse/resource_group_template_deployment.go @@ -0,0 +1,69 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ResourceGroupTemplateDeploymentId struct { + SubscriptionId string + ResourceGroup string + DeploymentName string +} + +func NewResourceGroupTemplateDeploymentID(subscriptionId, resourceGroup, deploymentName string) ResourceGroupTemplateDeploymentId { + return ResourceGroupTemplateDeploymentId{ + SubscriptionId: subscriptionId, + ResourceGroup: resourceGroup, + DeploymentName: deploymentName, + } +} + +func (id ResourceGroupTemplateDeploymentId) String() string { + segments := []string{ + fmt.Sprintf("Deployment Name %q", id.DeploymentName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Resource Group Template Deployment", segmentsStr) +} + +func (id ResourceGroupTemplateDeploymentId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Resources/deployments/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.DeploymentName) +} + +// ResourceGroupTemplateDeploymentID parses a ResourceGroupTemplateDeployment ID into an ResourceGroupTemplateDeploymentId struct +func ResourceGroupTemplateDeploymentID(input string) (*ResourceGroupTemplateDeploymentId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ResourceGroupTemplateDeploymentId{ + SubscriptionId: 
id.SubscriptionID, + ResourceGroup: id.ResourceGroup, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceGroup == "" { + return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") + } + + if resourceId.DeploymentName, err = id.PopSegment("deployments"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/resource/parse/resource_group_template_deployment_test.go b/azurerm/internal/services/resource/parse/resource_group_template_deployment_test.go new file mode 100644 index 000000000000..69a887cb6122 --- /dev/null +++ b/azurerm/internal/services/resource/parse/resource_group_template_deployment_test.go @@ -0,0 +1,112 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ResourceGroupTemplateDeploymentId{} + +func TestResourceGroupTemplateDeploymentIDFormatter(t *testing.T) { + actual := NewResourceGroupTemplateDeploymentID("12345678-1234-9876-4563-123456789012", "group1", "deploy1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Resources/deployments/deploy1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestResourceGroupTemplateDeploymentID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ResourceGroupTemplateDeploymentId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // missing DeploymentName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Resources/", + Error: true, + }, + + { + // missing value for DeploymentName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Resources/deployments/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Resources/deployments/deploy1", + Expected: &ResourceGroupTemplateDeploymentId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "group1", + DeploymentName: "deploy1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1/PROVIDERS/MICROSOFT.RESOURCES/DEPLOYMENTS/DEPLOY1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ResourceGroupTemplateDeploymentID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, 
actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + if actual.DeploymentName != v.Expected.DeploymentName { + t.Fatalf("Expected %q but got %q for DeploymentName", v.Expected.DeploymentName, actual.DeploymentName) + } + } +} diff --git a/azurerm/internal/services/resource/parse/resource_group_test.go b/azurerm/internal/services/resource/parse/resource_group_test.go new file mode 100644 index 000000000000..2b47a46e594a --- /dev/null +++ b/azurerm/internal/services/resource/parse/resource_group_test.go @@ -0,0 +1,96 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ResourceGroupId{} + +func TestResourceGroupIDFormatter(t *testing.T) { + actual := NewResourceGroupID("12345678-1234-9876-4563-123456789012", "group1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestResourceGroupID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ResourceGroupId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for ResourceGroup + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1", + Expected: &ResourceGroupId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceGroup: "group1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/GROUP1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ResourceGroupID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + } +} diff --git a/azurerm/internal/services/resource/parse/resource_provider.go b/azurerm/internal/services/resource/parse/resource_provider.go new file mode 100644 index 000000000000..78da3dc16d8b --- /dev/null +++ b/azurerm/internal/services/resource/parse/resource_provider.go @@ -0,0 +1,51 @@ +package parse + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type ResourceProviderId struct { + SubscriptionId string + ResourceProvider string +} + +func NewResourceProviderID(subscriptionId, resourceProvider string) ResourceProviderId { + return ResourceProviderId{ + SubscriptionId: subscriptionId, + ResourceProvider: resourceProvider, + } +} 
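+
+// e.g. (illustrative only) NewResourceProviderID("12345678-1234-9876-4563-123456789012", "Microsoft.Network").ID()
+// would return "/subscriptions/12345678-1234-9876-4563-123456789012/providers/Microsoft.Network".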
+ +func (id ResourceProviderId) ID() string { + fmtString := "/subscriptions/%s/providers/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceProvider) +} + +// ResourceProviderID parses a ResourceProvider ID into an ResourceProviderId struct +func ResourceProviderID(input string) (*ResourceProviderId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := ResourceProviderId{ + SubscriptionId: id.SubscriptionID, + ResourceProvider: id.Provider, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.ResourceProvider == "" { + return nil, fmt.Errorf("ID was missing the 'providers' element") + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/resource/parse/resource_provider_test.go b/azurerm/internal/services/resource/parse/resource_provider_test.go new file mode 100644 index 000000000000..114e577e6c47 --- /dev/null +++ b/azurerm/internal/services/resource/parse/resource_provider_test.go @@ -0,0 +1,96 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = ResourceProviderId{} + +func TestResourceProviderIDFormatter(t *testing.T) { + actual := NewResourceProviderID("12345678-1234-9876-4563-123456789012", "Instruments.Didgeridoo").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/providers/Instruments.Didgeridoo" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestResourceProviderID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *ResourceProviderId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing Providers + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", + Error: true, + }, + + { + // missing value for Providers + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/providers/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/providers/Instruments.Didgeridoo", + Expected: &ResourceProviderId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + ResourceProvider: "Instruments.Didgeridoo", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/PROVIDERS/INSTRUMENTS.DIDGERIDOO", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := ResourceProviderID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.ResourceProvider != v.Expected.ResourceProvider { + t.Fatalf("Expected %q but got %q for ResourceProvider", v.Expected.ResourceProvider, actual.ResourceProvider) + } + } +} diff --git a/azurerm/internal/services/resource/parse/subscription_template_deployment.go 
b/azurerm/internal/services/resource/parse/subscription_template_deployment.go new file mode 100644 index 000000000000..d2412204251b --- /dev/null +++ b/azurerm/internal/services/resource/parse/subscription_template_deployment.go @@ -0,0 +1,61 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "fmt" + "strings" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type SubscriptionTemplateDeploymentId struct { + SubscriptionId string + DeploymentName string +} + +func NewSubscriptionTemplateDeploymentID(subscriptionId, deploymentName string) SubscriptionTemplateDeploymentId { + return SubscriptionTemplateDeploymentId{ + SubscriptionId: subscriptionId, + DeploymentName: deploymentName, + } +} + +func (id SubscriptionTemplateDeploymentId) String() string { + segments := []string{ + fmt.Sprintf("Deployment Name %q", id.DeploymentName), + } + segmentsStr := strings.Join(segments, " / ") + return fmt.Sprintf("%s: (%s)", "Subscription Template Deployment", segmentsStr) +} + +func (id SubscriptionTemplateDeploymentId) ID() string { + fmtString := "/subscriptions/%s/providers/Microsoft.Resources/deployments/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.DeploymentName) +} + +// SubscriptionTemplateDeploymentID parses a SubscriptionTemplateDeployment ID into an SubscriptionTemplateDeploymentId struct +func SubscriptionTemplateDeploymentID(input string) (*SubscriptionTemplateDeploymentId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, err + } + + resourceId := SubscriptionTemplateDeploymentId{ + SubscriptionId: id.SubscriptionID, + } + + if resourceId.SubscriptionId == "" { + return nil, fmt.Errorf("ID was missing the 'subscriptions' element") + } + + if resourceId.DeploymentName, err = id.PopSegment("deployments"); err != nil { + return nil, err + } + + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &resourceId, nil +} diff --git a/azurerm/internal/services/resource/parse/subscription_template_deployment_test.go b/azurerm/internal/services/resource/parse/subscription_template_deployment_test.go new file mode 100644 index 000000000000..acc6a74d43b8 --- /dev/null +++ b/azurerm/internal/services/resource/parse/subscription_template_deployment_test.go @@ -0,0 +1,96 @@ +package parse + +// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten + +import ( + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" +) + +var _ resourceid.Formatter = SubscriptionTemplateDeploymentId{} + +func TestSubscriptionTemplateDeploymentIDFormatter(t *testing.T) { + actual := NewSubscriptionTemplateDeploymentID("12345678-1234-9876-4563-123456789012", "deploy1").ID() + expected := "/subscriptions/12345678-1234-9876-4563-123456789012/providers/Microsoft.Resources/deployments/deploy1" + if actual != expected { + t.Fatalf("Expected %q but got %q", expected, actual) + } +} + +func TestSubscriptionTemplateDeploymentID(t *testing.T) { + testData := []struct { + Input string + Error bool + Expected *SubscriptionTemplateDeploymentId + }{ + + { + // empty + Input: "", + Error: true, + }, + + { + // missing SubscriptionId + Input: "/", + Error: true, + }, + + { + // missing value for SubscriptionId + Input: "/subscriptions/", + Error: true, + }, + + { + // missing DeploymentName + Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/providers/Microsoft.Resources/", + Error: true, + }, + + { + // missing value for DeploymentName + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/providers/Microsoft.Resources/deployments/", + Error: true, + }, + + { + // valid + Input: "/subscriptions/12345678-1234-9876-4563-123456789012/providers/Microsoft.Resources/deployments/deploy1", + Expected: &SubscriptionTemplateDeploymentId{ + SubscriptionId: "12345678-1234-9876-4563-123456789012", + DeploymentName: "deploy1", + }, + }, + + { + // upper-cased + Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/PROVIDERS/MICROSOFT.RESOURCES/DEPLOYMENTS/DEPLOY1", + Error: true, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.Input) + + actual, err := SubscriptionTemplateDeploymentID(v.Input) + if err != nil { + if v.Error { + continue + } + + t.Fatalf("Expect a value but got an error: %s", err) + } + if v.Error { + t.Fatal("Expect an error but didn't get one") + } + + if actual.SubscriptionId != v.Expected.SubscriptionId { + t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) + } + if actual.DeploymentName != v.Expected.DeploymentName { + t.Fatalf("Expected %q but got %q for DeploymentName", v.Expected.DeploymentName, actual.DeploymentName) + } + } +} diff --git a/azurerm/internal/services/resource/parse/template_deployment_resource_group.go b/azurerm/internal/services/resource/parse/template_deployment_resource_group.go deleted file mode 100644 index 0fb612a06d4b..000000000000 --- a/azurerm/internal/services/resource/parse/template_deployment_resource_group.go +++ /dev/null @@ -1,44 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type ResourceGroupTemplateDeploymentId struct { - ResourceGroup string - Name string -} - -func NewResourceGroupTemplateDeploymentID(resourceGroup, name string) ResourceGroupTemplateDeploymentId { - return ResourceGroupTemplateDeploymentId{ - ResourceGroup: resourceGroup, - Name: name, - } -} - -func ResourceGroupTemplateDeploymentID(input string) (*ResourceGroupTemplateDeploymentId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, err - } - - deploymentId := ResourceGroupTemplateDeploymentId{ - ResourceGroup: id.ResourceGroup, - } - - if deploymentId.Name, err = id.PopSegment("deployments"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &deploymentId, nil -} - -func (id ResourceGroupTemplateDeploymentId) ID(subscriptionId string) string { - return fmt.Sprintf("/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Resources/deployments/%s", subscriptionId, id.ResourceGroup, id.Name) -} diff --git a/azurerm/internal/services/resource/parse/template_deployment_resource_group_test.go b/azurerm/internal/services/resource/parse/template_deployment_resource_group_test.go deleted file mode 100644 index f4b8a00e1352..000000000000 --- a/azurerm/internal/services/resource/parse/template_deployment_resource_group_test.go +++ /dev/null @@ -1,60 +0,0 @@ -package parse - -import ( - "testing" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" -) - -var _ resourceid.Formatter = ResourceGroupTemplateDeploymentId{} - -func TestResourceGroupTemplateDeploymentIDFormatter(t *testing.T) { - subscriptionId := 
"12345678-1234-5678-1234-123456789012" - actual := NewResourceGroupTemplateDeploymentID("group1", "deploy1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Resources/deployments/deploy1" - if actual != expected { - t.Fatalf("Expected %q but got %q", expected, actual) - } -} - -func TestResourceGroupTemplateDeploymentIDParser(t *testing.T) { - testData := []struct { - input string - expected *ResourceGroupTemplateDeploymentId - }{ - { - // camel case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Resources/deployments/deploy1", - expected: &ResourceGroupTemplateDeploymentId{ - ResourceGroup: "group1", - Name: "deploy1", - }, - }, - { - // title case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/resourceGroups/group1/providers/Microsoft.Resources/Deployments/deploy1", - expected: nil, - }, - } - for _, test := range testData { - t.Logf("Testing %q..", test.input) - actual, err := ResourceGroupTemplateDeploymentID(test.input) - if err != nil && test.expected == nil { - continue - } else { - if err == nil && test.expected == nil { - t.Fatalf("Expected an error but didn't get one") - } else if err != nil && test.expected != nil { - t.Fatalf("Expected no error but got: %+v", err) - } - } - - if actual.ResourceGroup != test.expected.ResourceGroup { - t.Fatalf("Expected ResourceGroup to be %q but was %q", test.expected.ResourceGroup, actual.ResourceGroup) - } - - if actual.Name != test.expected.Name { - t.Fatalf("Expected name to be %q but was %q", test.expected.Name, actual.Name) - } - } -} diff --git a/azurerm/internal/services/resource/parse/template_deployment_subscription.go b/azurerm/internal/services/resource/parse/template_deployment_subscription.go deleted file mode 100644 index 784a372567a8..000000000000 --- a/azurerm/internal/services/resource/parse/template_deployment_subscription.go +++ /dev/null @@ -1,40 +0,0 @@ -package parse - -import ( - "fmt" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" -) - -type SubscriptionTemplateDeploymentId struct { - Name string -} - -func NewSubscriptionTemplateDeploymentID(name string) SubscriptionTemplateDeploymentId { - return SubscriptionTemplateDeploymentId{ - Name: name, - } -} - -func SubscriptionTemplateDeploymentID(input string) (*SubscriptionTemplateDeploymentId, error) { - id, err := azure.ParseAzureResourceID(input) - if err != nil { - return nil, err - } - - deploymentId := SubscriptionTemplateDeploymentId{} - - if deploymentId.Name, err = id.PopSegment("deployments"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &deploymentId, nil -} - -func (id SubscriptionTemplateDeploymentId) ID(subscriptionId string) string { - return fmt.Sprintf("/subscriptions/%s/providers/Microsoft.Resources/deployments/%s", subscriptionId, id.Name) -} diff --git a/azurerm/internal/services/resource/parse/template_deployment_subscription_test.go b/azurerm/internal/services/resource/parse/template_deployment_subscription_test.go deleted file mode 100644 index 0ff26b9951a4..000000000000 --- a/azurerm/internal/services/resource/parse/template_deployment_subscription_test.go +++ /dev/null @@ -1,55 +0,0 @@ -package parse - -import ( - "testing" - - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/resourceid" -) - -var _ resourceid.Formatter = 
SubscriptionTemplateDeploymentId{} - -func TestSubscriptionTemplateDeploymentIDFormatter(t *testing.T) { - subscriptionId := "12345678-1234-5678-1234-123456789012" - actual := NewSubscriptionTemplateDeploymentID("deploy1").ID(subscriptionId) - expected := "/subscriptions/12345678-1234-5678-1234-123456789012/providers/Microsoft.Resources/deployments/deploy1" - if actual != expected { - t.Fatalf("Expected %q but got %q", expected, actual) - } -} - -func TestSubscriptionTemplateDeploymentIDParser(t *testing.T) { - testData := []struct { - input string - expected *SubscriptionTemplateDeploymentId - }{ - { - // camel case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/providers/Microsoft.Resources/deployments/deploy1", - expected: &SubscriptionTemplateDeploymentId{ - Name: "deploy1", - }, - }, - { - // title case - input: "/subscriptions/12345678-1234-5678-1234-123456789012/providers/Microsoft.Resources/Deployments/deploy1", - expected: nil, - }, - } - for _, test := range testData { - t.Logf("Testing %q..", test.input) - actual, err := SubscriptionTemplateDeploymentID(test.input) - if err != nil && test.expected == nil { - continue - } else { - if err == nil && test.expected == nil { - t.Fatalf("Expected an error but didn't get one") - } else if err != nil && test.expected != nil { - t.Fatalf("Expected no error but got: %+v", err) - } - } - - if actual.Name != test.expected.Name { - t.Fatalf("Expected name to be %q but was %q", test.expected.Name, actual.Name) - } - } -} diff --git a/azurerm/internal/services/resource/registration.go b/azurerm/internal/services/resource/registration.go index fd0eeb2ef0ce..ff139f3eeab4 100644 --- a/azurerm/internal/services/resource/registration.go +++ b/azurerm/internal/services/resource/registration.go @@ -2,6 +2,7 @@ package resource import ( "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/sdk" ) type Registration struct{} @@ -31,11 +32,27 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource { // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*schema.Resource { return map[string]*schema.Resource{ - "azurerm_management_lock": resourceArmManagementLock(), - "azurerm_resource_group": resourceArmResourceGroup(), - "azurerm_template_deployment": resourceArmTemplateDeployment(), - + "azurerm_management_lock": resourceArmManagementLock(), + "azurerm_resource_group": resourceArmResourceGroup(), "azurerm_resource_group_template_deployment": resourceGroupTemplateDeploymentResource(), "azurerm_subscription_template_deployment": subscriptionTemplateDeploymentResource(), + "azurerm_template_deployment": resourceArmTemplateDeployment(), + } +} + +// PackagePath is the relative path to this package +func (r Registration) PackagePath() string { + return "TODO: do we need this?" 
+} + +// DataSources returns a list of Data Sources supported by this Service +func (r Registration) DataSources() []sdk.DataSource { + return []sdk.DataSource{} +} + +// Resources returns a list of Resources supported by this Service +func (r Registration) Resources() []sdk.Resource { + return []sdk.Resource{ + ResourceProviderRegistrationResource{}, } } diff --git a/azurerm/internal/services/resource/resource_arm_resource_group.go b/azurerm/internal/services/resource/resource_arm_resource_group.go deleted file mode 100644 index bd3fc3056f84..000000000000 --- a/azurerm/internal/services/resource/resource_arm_resource_group.go +++ /dev/null @@ -1,145 +0,0 @@ -package resource - -import ( - "fmt" - "log" - "time" - - "github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2020-06-01/resources" - "github.com/hashicorp/go-azure-helpers/response" - "github.com/hashicorp/terraform-plugin-sdk/helper/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" - azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" -) - -func resourceArmResourceGroup() *schema.Resource { - return &schema.Resource{ - Create: resourceArmResourceGroupCreateUpdate, - Read: resourceArmResourceGroupRead, - Update: resourceArmResourceGroupCreateUpdate, - Delete: resourceArmResourceGroupDelete, - Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { - _, err := ParseResourceGroupID(id) - return err - }), - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(90 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(90 * time.Minute), - Delete: schema.DefaultTimeout(90 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "name": azure.SchemaResourceGroupName(), - - "location": azure.SchemaLocation(), - - "tags": tags.Schema(), - }, - } -} - -func resourceArmResourceGroupCreateUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Resource.GroupsClient - ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) - defer cancel() - - name := d.Get("name").(string) - location := location.Normalize(d.Get("location").(string)) - t := d.Get("tags").(map[string]interface{}) - - if d.IsNewResource() { - existing, err := client.Get(ctx, name) - if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("Error checking for presence of existing resource group: %+v", err) - } - } - - if existing.ID != nil && *existing.ID != "" { - return tf.ImportAsExistsError("azurerm_resource_group", *existing.ID) - } - } - - parameters := resources.Group{ - Location: utils.String(location), - Tags: tags.Expand(t), - } - - if _, err := client.CreateOrUpdate(ctx, name, parameters); err != nil { - return fmt.Errorf("Error creating Resource Group %q: %+v", name, err) - } - - resp, err := client.Get(ctx, name) - if err != nil { - return fmt.Errorf("Error retrieving Resource Group %q: %+v", name, err) - } - - 
d.SetId(*resp.ID) - - return resourceArmResourceGroupRead(d, meta) -} - -func resourceArmResourceGroupRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Resource.GroupsClient - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := ParseResourceGroupID(d.Id()) - if err != nil { - return err - } - - resp, err := client.Get(ctx, id.Name) - if err != nil { - if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[INFO] Error reading resource group %q - removing from state", d.Id()) - d.SetId("") - return nil - } - - return fmt.Errorf("Error reading resource group: %+v", err) - } - - d.Set("name", resp.Name) - d.Set("location", location.NormalizeNilable(resp.Location)) - return tags.FlattenAndSet(d, resp.Tags) -} - -func resourceArmResourceGroupDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).Resource.GroupsClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() - - id, err := ParseResourceGroupID(d.Id()) - if err != nil { - return err - } - - deleteFuture, err := client.Delete(ctx, id.Name) - if err != nil { - if response.WasNotFound(deleteFuture.Response()) { - return nil - } - - return fmt.Errorf("Error deleting Resource Group %q: %+v", id.Name, err) - } - - err = deleteFuture.WaitForCompletionRef(ctx, client.Client) - if err != nil { - if response.WasNotFound(deleteFuture.Response()) { - return nil - } - - return fmt.Errorf("Error deleting Resource Group %q: %+v", id.Name, err) - } - - return nil -} diff --git a/azurerm/internal/services/resource/data_source_resource_group.go b/azurerm/internal/services/resource/resource_group_data_source.go similarity index 100% rename from azurerm/internal/services/resource/data_source_resource_group.go rename to azurerm/internal/services/resource/resource_group_data_source.go diff --git a/azurerm/internal/services/resource/resource_group_data_source_test.go b/azurerm/internal/services/resource/resource_group_data_source_test.go new file mode 100644 index 000000000000..92d69beea316 --- /dev/null +++ b/azurerm/internal/services/resource/resource_group_data_source_test.go @@ -0,0 +1,51 @@ +package resource_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" +) + +func TestAccDataSourceAzureRMResourceGroup_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_resource_group", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + Steps: []resource.TestStep{ + { + Config: testAccDataSourceAzureRMResourceGroupBasic(data), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(data.ResourceName, "name", fmt.Sprintf("acctestRg-%d", data.RandomInteger)), + resource.TestCheckResourceAttr(data.ResourceName, "location", azure.NormalizeLocation(data.Locations.Primary)), + resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), + resource.TestCheckResourceAttr(data.ResourceName, "tags.env", "test"), + ), + }, + }, + }) +} + +func testAccDataSourceAzureRMResourceGroupBasic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = 
"acctestRg-%d" + location = "%s" + + tags = { + env = "test" + } +} + +data "azurerm_resource_group" "test" { + name = azurerm_resource_group.test.name +} +`, data.RandomInteger, data.Locations.Primary) +} diff --git a/azurerm/internal/services/resource/resource_group_resource.go b/azurerm/internal/services/resource/resource_group_resource.go new file mode 100644 index 000000000000..a03c892b38df --- /dev/null +++ b/azurerm/internal/services/resource/resource_group_resource.go @@ -0,0 +1,146 @@ +package resource + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2020-06-01/resources" + "github.com/hashicorp/go-azure-helpers/response" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/resource/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmResourceGroup() *schema.Resource { + return &schema.Resource{ + Create: resourceArmResourceGroupCreateUpdate, + Read: resourceArmResourceGroupRead, + Update: resourceArmResourceGroupCreateUpdate, + Delete: resourceArmResourceGroupDelete, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.ResourceGroupID(id) + return err + }), + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(90 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(90 * time.Minute), + Delete: schema.DefaultTimeout(90 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": azure.SchemaResourceGroupName(), + + "location": azure.SchemaLocation(), + + "tags": tags.Schema(), + }, + } +} + +func resourceArmResourceGroupCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Resource.GroupsClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + location := location.Normalize(d.Get("location").(string)) + t := d.Get("tags").(map[string]interface{}) + + if d.IsNewResource() { + existing, err := client.Get(ctx, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("Error checking for presence of existing resource group: %+v", err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_resource_group", *existing.ID) + } + } + + parameters := resources.Group{ + Location: utils.String(location), + Tags: tags.Expand(t), + } + + if _, err := client.CreateOrUpdate(ctx, name, parameters); err != nil { + return fmt.Errorf("Error creating Resource Group %q: %+v", name, err) + } + + resp, err := client.Get(ctx, name) + if err != nil { + return fmt.Errorf("Error retrieving Resource Group %q: %+v", name, err) + } + + d.SetId(*resp.ID) + + return resourceArmResourceGroupRead(d, meta) 
+} + +func resourceArmResourceGroupRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Resource.GroupsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.ResourceGroupID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[INFO] Error reading resource group %q - removing from state", d.Id()) + d.SetId("") + return nil + } + + return fmt.Errorf("Error reading resource group: %+v", err) + } + + d.Set("name", resp.Name) + d.Set("location", location.NormalizeNilable(resp.Location)) + return tags.FlattenAndSet(d, resp.Tags) +} + +func resourceArmResourceGroupDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Resource.GroupsClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.ResourceGroupID(d.Id()) + if err != nil { + return err + } + + deleteFuture, err := client.Delete(ctx, id.ResourceGroup) + if err != nil { + if response.WasNotFound(deleteFuture.Response()) { + return nil + } + + return fmt.Errorf("Error deleting Resource Group %q: %+v", id.ResourceGroup, err) + } + + err = deleteFuture.WaitForCompletionRef(ctx, client.Client) + if err != nil { + if response.WasNotFound(deleteFuture.Response()) { + return nil + } + + return fmt.Errorf("Error deleting Resource Group %q: %+v", id.ResourceGroup, err) + } + + return nil +} diff --git a/azurerm/internal/services/resource/resource_group_resource_test.go b/azurerm/internal/services/resource/resource_group_resource_test.go new file mode 100644 index 000000000000..15a003c20a94 --- /dev/null +++ b/azurerm/internal/services/resource/resource_group_resource_test.go @@ -0,0 +1,173 @@ +package resource_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type ResourceGroupResource struct { +} + +func TestAccResourceGroup_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_resource_group", "test") + testResource := ResourceGroupResource{} + data.ResourceTest(t, testResource, []resource.TestStep{ + { + Config: testResource.basicConfig(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(testResource), + ), + }, + data.ImportStep(), + }) +} + +func TestAccResourceGroup_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_resource_group", "test") + testResource := ResourceGroupResource{} + data.ResourceTest(t, testResource, []resource.TestStep{ + { + Config: testResource.basicConfig(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(testResource), + ), + }, + data.RequiresImportErrorStep(testResource.requiresImportConfig), + }) +} + +func TestAccResourceGroup_disappears(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_resource_group", "test") + + testResource := ResourceGroupResource{} + data.ResourceTest(t, testResource, 
[]resource.TestStep{ + data.DisappearsStep(acceptance.DisappearsStepData{ + Config: testResource.basicConfig, + TestResource: testResource, + }), + }) +} + +func TestAccResourceGroup_withTags(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_resource_group", "test") + + testResource := ResourceGroupResource{} + assert := check.That(data.ResourceName) + data.ResourceTest(t, testResource, []resource.TestStep{ + { + Config: testResource.withTagsConfig(data), + Check: resource.ComposeTestCheckFunc( + assert.ExistsInAzure(testResource), + assert.Key("tags.%").HasValue("2"), + assert.Key("tags.cost_center").HasValue("MSFT"), + assert.Key("tags.environment").HasValue("Production"), + ), + }, + data.ImportStep(), + { + Config: testResource.withTagsUpdatedConfig(data), + Check: resource.ComposeTestCheckFunc( + assert.ExistsInAzure(testResource), + assert.Key("tags.%").HasValue("1"), + assert.Key("tags.environment").HasValue("staging"), + ), + }, + data.ImportStep(), + }) +} + +func (t ResourceGroupResource) Destroy(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error) { + resourceGroup := state.Attributes["name"] + + groupsClient := client.Resource.GroupsClient + deleteFuture, err := groupsClient.Delete(ctx, resourceGroup) + if err != nil { + return nil, fmt.Errorf("deleting Resource Group %q: %+v", resourceGroup, err) + } + + err = deleteFuture.WaitForCompletionRef(ctx, groupsClient.Client) + if err != nil { + return nil, fmt.Errorf("waiting for deletion of Resource Group %q: %+v", resourceGroup, err) + } + + return utils.Bool(true), nil +} + +func (t ResourceGroupResource) Exists(ctx context.Context, client *clients.Client, state *terraform.InstanceState) (*bool, error) { + name := state.Attributes["name"] + + resp, err := client.Resource.GroupsClient.Get(ctx, name) + if err != nil { + return nil, fmt.Errorf("retrieving Resource Group %q: %+v", name, err) + } + + return utils.Bool(resp.Properties != nil), nil +} + +func (t ResourceGroupResource) basicConfig(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (t ResourceGroupResource) requiresImportConfig(data acceptance.TestData) string { + template := t.basicConfig(data) + return fmt.Sprintf(` +%s + +resource "azurerm_resource_group" "import" { + name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location +} +`, template) +} + +func (t ResourceGroupResource) withTagsConfig(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" + + tags = { + environment = "Production" + cost_center = "MSFT" + } +} +`, data.RandomInteger, data.Locations.Primary) +} + +func (t ResourceGroupResource) withTagsUpdatedConfig(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" + + tags = { + environment = "staging" + } +} +`, data.RandomInteger, data.Locations.Primary) +} diff --git a/azurerm/internal/services/resource/resource_group_template_deployment_resource.go b/azurerm/internal/services/resource/resource_group_template_deployment_resource.go new file mode 100644 index 000000000000..eb93e6c71845 --- /dev/null +++ 
b/azurerm/internal/services/resource/resource_group_template_deployment_resource.go @@ -0,0 +1,368 @@ +package resource + +import ( + "context" + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2020-06-01/resources" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/resource/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/resource/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceGroupTemplateDeploymentResource() *schema.Resource { + return &schema.Resource{ + Create: resourceGroupTemplateDeploymentResourceCreate, + Read: resourceGroupTemplateDeploymentResourceRead, + Update: resourceGroupTemplateDeploymentResourceUpdate, + Delete: resourceGroupTemplateDeploymentResourceDelete, + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.ResourceGroupTemplateDeploymentID(id) + return err + }), + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(180 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(180 * time.Minute), + Delete: schema.DefaultTimeout(180 * time.Minute), + }, + + // (@jackofallops - lintignore needed as we need to make sure the JSON is usable in `output_content`) + + //lintignore:S033 + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.TemplateDeploymentName, + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "deployment_mode": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{ + string(resources.Complete), + string(resources.Incremental), + }, false), + }, + + "template_content": { + Type: schema.TypeString, + Required: true, + StateFunc: utils.NormalizeJson, + }, + + // Optional + "debug_level": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringInSlice(templateDeploymentDebugLevels, false), + }, + + "parameters_content": { + Type: schema.TypeString, + Optional: true, + Computed: true, + StateFunc: utils.NormalizeJson, + }, + + "tags": tags.Schema(), + + // Computed + "output_content": { + Type: schema.TypeString, + Computed: true, + StateFunc: utils.NormalizeJson, + // NOTE: outputs can be strings, ints, objects etc - whilst using a nested object was considered + // parsing the JSON using `jsondecode` allows the users to interact with/map objects as required + }, + }, + } +} + +func resourceGroupTemplateDeploymentResourceCreate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Resource.DeploymentsClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + 
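+	// Create flow, in summary: build the deployment ID, return an import-as-exists error if a
+	// deployment with this name already exists, expand the template/parameters JSON, validate
+	// the deployment, then provision it and wait for completion.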
+	id := parse.NewResourceGroupTemplateDeploymentID(subscriptionId, d.Get("resource_group_name").(string), d.Get("name").(string))
+
+	existing, err := client.Get(ctx, id.ResourceGroup, id.DeploymentName)
+	if err != nil {
+		if !utils.ResponseWasNotFound(existing.Response) {
+			return fmt.Errorf("checking for presence of existing Template Deployment %q (Resource Group %q): %+v", id.DeploymentName, id.ResourceGroup, err)
+		}
+	}
+	if existing.Properties != nil {
+		return tf.ImportAsExistsError("azurerm_resource_group_template_deployment", id.ID())
+	}
+
+	template, err := expandTemplateDeploymentBody(d.Get("template_content").(string))
+	if err != nil {
+		return fmt.Errorf("expanding `template_content`: %+v", err)
+	}
+	deployment := resources.Deployment{
+		Properties: &resources.DeploymentProperties{
+			DebugSetting: expandTemplateDeploymentDebugSetting(d.Get("debug_level").(string)),
+			Mode:         resources.DeploymentMode(d.Get("deployment_mode").(string)),
+			Template:     template,
+		},
+		Tags: tags.Expand(d.Get("tags").(map[string]interface{})),
+	}
+
+	if v, ok := d.GetOk("parameters_content"); ok && v != "" {
+		parameters, err := expandTemplateDeploymentBody(v.(string))
+		if err != nil {
+			return fmt.Errorf("expanding `parameters_content`: %+v", err)
+		}
+		deployment.Properties.Parameters = parameters
+	}
+
+	log.Printf("[DEBUG] Running validation of Template Deployment %q (Resource Group %q)..", id.DeploymentName, id.ResourceGroup)
+	if err := validateResourceGroupTemplateDeployment(ctx, id, deployment, client); err != nil {
+		return fmt.Errorf("validating Template Deployment %q (Resource Group %q): %+v", id.DeploymentName, id.ResourceGroup, err)
+	}
+	log.Printf("[DEBUG] Validated Template Deployment %q (Resource Group %q)..", id.DeploymentName, id.ResourceGroup)
+
+	log.Printf("[DEBUG] Provisioning Template Deployment %q (Resource Group %q)..", id.DeploymentName, id.ResourceGroup)
+	future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.DeploymentName, deployment)
+	if err != nil {
+		return fmt.Errorf("creating Template Deployment %q (Resource Group %q): %+v", id.DeploymentName, id.ResourceGroup, err)
+	}
+
+	log.Printf("[DEBUG] Waiting for deployment of Template Deployment %q (Resource Group %q)..", id.DeploymentName, id.ResourceGroup)
+	if err := future.WaitForCompletionRef(ctx, client.Client); err != nil {
+		return fmt.Errorf("waiting for creation of Template Deployment %q (Resource Group %q): %+v", id.DeploymentName, id.ResourceGroup, err)
+	}
+
+	d.SetId(id.ID())
+	return resourceGroupTemplateDeploymentResourceRead(d, meta)
+}
+
+func resourceGroupTemplateDeploymentResourceUpdate(d *schema.ResourceData, meta interface{}) error {
+	client := meta.(*clients.Client).Resource.DeploymentsClient
+	ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d)
+	defer cancel()
+
+	id, err := parse.ResourceGroupTemplateDeploymentID(d.Id())
+	if err != nil {
+		return err
+	}
+
+	log.Printf("[DEBUG] Retrieving Template Deployment %q (Resource Group %q)..", id.DeploymentName, id.ResourceGroup)
+	template, err := client.Get(ctx, id.ResourceGroup, id.DeploymentName)
+	if err != nil {
+		return fmt.Errorf("retrieving Template Deployment %q (Resource Group %q): %+v", id.DeploymentName, id.ResourceGroup, err)
+	}
+	if template.Properties == nil {
+		return fmt.Errorf("retrieving Template Deployment %q (Resource Group %q): `properties` was nil", id.DeploymentName, id.ResourceGroup)
+	}
+
+	// the API doesn't have a Patch operation, so we'll need to build one
+	deployment := 
resources.Deployment{ + Properties: &resources.DeploymentProperties{ + DebugSetting: template.Properties.DebugSetting, + Mode: template.Properties.Mode, + }, + Tags: template.Tags, + } + + if d.HasChange("debug_level") { + deployment.Properties.DebugSetting = expandTemplateDeploymentDebugSetting(d.Get("debug_level").(string)) + } + + if d.HasChange("deployment_mode") { + deployment.Properties.Mode = resources.DeploymentMode(d.Get("deployment_mode").(string)) + } + + if d.HasChange("parameters_content") { + parameters, err := expandTemplateDeploymentBody(d.Get("parameters_content").(string)) + if err != nil { + return fmt.Errorf("expanding `parameters_content`: %+v", err) + } + deployment.Properties.Parameters = parameters + } + + if d.HasChange("template_content") { + templateContents, err := expandTemplateDeploymentBody(d.Get("template_content").(string)) + if err != nil { + return fmt.Errorf("expanding `template_content`: %+v", err) + } + + deployment.Properties.Template = templateContents + } else { + // retrieve the existing content and reuse that + exportedTemplate, err := client.ExportTemplate(ctx, id.ResourceGroup, id.DeploymentName) + if err != nil { + return fmt.Errorf("retrieving Contents for Template Deployment %q (Resource Group %q): %+v", id.DeploymentName, id.ResourceGroup, err) + } + + deployment.Properties.Template = exportedTemplate.Template + } + + if d.HasChange("tags") { + deployment.Tags = tags.Expand(d.Get("tags").(map[string]interface{})) + } + + log.Printf("[DEBUG] Running validation of Template Deployment %q (Resource Group %q)..", id.DeploymentName, id.ResourceGroup) + if err := validateResourceGroupTemplateDeployment(ctx, *id, deployment, client); err != nil { + return fmt.Errorf("validating Template Deployment %q (Resource Group %q): %+v", id.DeploymentName, id.ResourceGroup, err) + } + log.Printf("[DEBUG] Validated Template Deployment %q (Resource Group %q)..", id.DeploymentName, id.ResourceGroup) + + log.Printf("[DEBUG] Provisioning Template Deployment %q (Resource Group %q)..", id.DeploymentName, id.ResourceGroup) + future, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.DeploymentName, deployment) + if err != nil { + return fmt.Errorf("creating Template Deployment %q (Resource Group %q): %+v", id.DeploymentName, id.ResourceGroup, err) + } + + log.Printf("[DEBUG] Waiting for deployment of Template Deployment %q (Resource Group %q)..", id.DeploymentName, id.ResourceGroup) + if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for creation of Template Deployment %q (Resource Group %q): %+v", id.DeploymentName, id.ResourceGroup, err) + } + + return resourceGroupTemplateDeploymentResourceRead(d, meta) +} + +func resourceGroupTemplateDeploymentResourceRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).Resource.DeploymentsClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.ResourceGroupTemplateDeploymentID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.DeploymentName) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[DEBUG] Template Deployment %q (Resource Group %q) was not found - removing from state", id.DeploymentName, id.ResourceGroup) + d.SetId("") + return nil + } + + return fmt.Errorf("retrieving Template Deployment %q (Resource Group %q): %+v", id.DeploymentName, id.ResourceGroup, err) + } + + templateContents, err := 
client.ExportTemplate(ctx, id.ResourceGroup, id.DeploymentName)
+	if err != nil {
+		return fmt.Errorf("retrieving Template Content for Template Deployment %q (Resource Group %q): %+v", id.DeploymentName, id.ResourceGroup, err)
+	}
+
+	d.Set("name", id.DeploymentName)
+	d.Set("resource_group_name", id.ResourceGroup)
+
+	if props := resp.Properties; props != nil {
+		d.Set("debug_level", flattenTemplateDeploymentDebugSetting(props.DebugSetting))
+		d.Set("deployment_mode", string(props.Mode))
+
+		filteredParams := filterOutTemplateDeploymentParameters(props.Parameters)
+		flattenedParams, err := flattenTemplateDeploymentBody(filteredParams)
+		if err != nil {
+			return fmt.Errorf("flattening `parameters_content`: %+v", err)
+		}
+		d.Set("parameters_content", flattenedParams)
+
+		flattenedOutputs, err := flattenTemplateDeploymentBody(props.Outputs)
+		if err != nil {
+			return fmt.Errorf("flattening `output_content`: %+v", err)
+		}
+		d.Set("output_content", flattenedOutputs)
+	}
+
+	flattenedTemplate, err := flattenTemplateDeploymentBody(templateContents.Template)
+	if err != nil {
+		return fmt.Errorf("flattening `template_content`: %+v", err)
+	}
+	d.Set("template_content", flattenedTemplate)
+
+	return tags.FlattenAndSet(d, resp.Tags)
+}
+
+func resourceGroupTemplateDeploymentResourceDelete(d *schema.ResourceData, meta interface{}) error {
+	client := meta.(*clients.Client).Resource.DeploymentsClient
+	ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d)
+	defer cancel()
+
+	id, err := parse.ResourceGroupTemplateDeploymentID(d.Id())
+	if err != nil {
+		return err
+	}
+
+	log.Printf("[DEBUG] Retrieving Template Deployment %q (Resource Group %q)..", id.DeploymentName, id.ResourceGroup)
+	template, err := client.Get(ctx, id.ResourceGroup, id.DeploymentName)
+	if err != nil {
+		if utils.ResponseWasNotFound(template.Response) {
+			return nil
+		}
+
+		return fmt.Errorf("retrieving Template Deployment %q (Resource Group %q): %+v", id.DeploymentName, id.ResourceGroup, err)
+	}
+	if template.Properties == nil {
+		return fmt.Errorf("`properties` was nil for template")
+	}
+
+	deleteItemsInTemplate := meta.(*clients.Client).Features.TemplateDeployment.DeleteNestedItemsDuringDeletion
+	if deleteItemsInTemplate {
+		resourceClient := meta.(*clients.Client).Resource
+		log.Printf("[DEBUG] Removing items provisioned by the Template Deployment %q (Resource Group %q)..", id.DeploymentName, id.ResourceGroup)
+		if err := deleteItemsProvisionedByTemplate(ctx, resourceClient, *template.Properties); err != nil {
+			return fmt.Errorf("removing items provisioned by this Template Deployment: %+v", err)
+		}
+		log.Printf("[DEBUG] Removed items provisioned by the Template Deployment %q (Resource Group %q)..", id.DeploymentName, id.ResourceGroup)
+	} else {
+		log.Printf("[DEBUG] Skipping removing items provisioned by the Template Deployment %q (Resource Group %q) as the feature is disabled", id.DeploymentName, id.ResourceGroup)
+	}
+
+	log.Printf("[DEBUG] Deleting Template Deployment %q (Resource Group %q)..", id.DeploymentName, id.ResourceGroup)
+	future, err := client.Delete(ctx, id.ResourceGroup, id.DeploymentName)
+	if err != nil {
+		return fmt.Errorf("deleting Template Deployment %q (Resource Group %q): %+v", id.DeploymentName, id.ResourceGroup, err)
+	}
+
+	log.Printf("[DEBUG] Waiting for deletion of Template Deployment %q (Resource Group %q)..", id.DeploymentName, id.ResourceGroup)
+	if err := future.WaitForCompletionRef(ctx, client.Client); err != nil {
+		return fmt.Errorf("waiting for deletion of 
Template Deployment %q (Resource Group %q): %+v", id.DeploymentName, id.ResourceGroup, err) + } + log.Printf("[DEBUG] Deleted Template Deployment %q (Resource Group %q).", id.DeploymentName, id.ResourceGroup) + + return nil +} + +func validateResourceGroupTemplateDeployment(ctx context.Context, id parse.ResourceGroupTemplateDeploymentId, deployment resources.Deployment, client *resources.DeploymentsClient) error { + validationFuture, err := client.Validate(ctx, id.ResourceGroup, id.DeploymentName, deployment) + if err != nil { + return fmt.Errorf("requesting validating: %+v", err) + } + if err := validationFuture.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for validation: %+v", err) + } + validationResult, err := validationFuture.Result(*client) + if err != nil { + return fmt.Errorf("retrieving validation result: %+v", err) + } + if validationResult.Error != nil { + if validationResult.Error.Message != nil { + return fmt.Errorf("%s", *validationResult.Error.Message) + } + return fmt.Errorf("%+v", *validationResult.Error) + } + + return nil +} diff --git a/azurerm/internal/services/resource/resource_group_template_deployment_resource_test.go b/azurerm/internal/services/resource/resource_group_template_deployment_resource_test.go new file mode 100644 index 000000000000..46e381d4895c --- /dev/null +++ b/azurerm/internal/services/resource/resource_group_template_deployment_resource_test.go @@ -0,0 +1,624 @@ +package resource_test + +import ( + "fmt" + "net/http" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" +) + +func TestAccResourceGroupTemplateDeployment_empty(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_resource_group_template_deployment", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckResourceGroupTemplateDeploymentDestroyed, + Steps: []resource.TestStep{ + { + Config: resourceGroupTemplateDeployment_emptyConfig(data, "Complete"), + Check: resource.ComposeTestCheckFunc( + testCheckResourceGroupTemplateDeploymentExists(data.ResourceName), + ), + }, + data.ImportStep(), + { + // set some tags + Config: resourceGroupTemplateDeployment_emptyWithTagsConfig(data, "Complete"), + Check: resource.ComposeTestCheckFunc( + testCheckResourceGroupTemplateDeploymentExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccResourceGroupTemplateDeployment_incremental(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_resource_group_template_deployment", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckResourceGroupTemplateDeploymentDestroyed, + Steps: []resource.TestStep{ + { + Config: resourceGroupTemplateDeployment_emptyConfig(data, "Incremental"), + Check: resource.ComposeTestCheckFunc( + testCheckResourceGroupTemplateDeploymentExists(data.ResourceName), + ), + }, + data.ImportStep(), + { + // set some tags + Config: resourceGroupTemplateDeployment_emptyWithTagsConfig(data, "Incremental"), + Check: resource.ComposeTestCheckFunc( + testCheckResourceGroupTemplateDeploymentExists(data.ResourceName), + ), + }, 
+			data.ImportStep(),
+		},
+	})
+}
+
+func TestAccResourceGroupTemplateDeployment_singleItemUpdatingParams(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_resource_group_template_deployment", "test")
+
+	resource.ParallelTest(t, resource.TestCase{
+		PreCheck:     func() { acceptance.PreCheck(t) },
+		Providers:    acceptance.SupportedProviders,
+		CheckDestroy: testCheckResourceGroupTemplateDeploymentDestroyed,
+		Steps: []resource.TestStep{
+			{
+				Config: resourceGroupTemplateDeployment_singleItemWithParameterConfig(data, "first"),
+				Check: resource.ComposeTestCheckFunc(
+					testCheckResourceGroupTemplateDeploymentExists(data.ResourceName),
+				),
+			},
+			data.ImportStep(),
+			{
+				Config: resourceGroupTemplateDeployment_singleItemWithParameterConfig(data, "second"),
+				Check: resource.ComposeTestCheckFunc(
+					testCheckResourceGroupTemplateDeploymentExists(data.ResourceName),
+				),
+			},
+			data.ImportStep(),
+		},
+	})
+}
+
+func TestAccResourceGroupTemplateDeployment_singleItemUpdatingTemplate(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_resource_group_template_deployment", "test")
+
+	resource.ParallelTest(t, resource.TestCase{
+		PreCheck:     func() { acceptance.PreCheck(t) },
+		Providers:    acceptance.SupportedProviders,
+		CheckDestroy: testCheckResourceGroupTemplateDeploymentDestroyed,
+		Steps: []resource.TestStep{
+			{
+				Config: resourceGroupTemplateDeployment_singleItemWithPublicIPConfig(data, "first"),
+				Check: resource.ComposeTestCheckFunc(
+					testCheckResourceGroupTemplateDeploymentExists(data.ResourceName),
+				),
+			},
+			data.ImportStep(),
+			{
+				Config: resourceGroupTemplateDeployment_singleItemWithPublicIPConfig(data, "second"),
+				Check: resource.ComposeTestCheckFunc(
+					testCheckResourceGroupTemplateDeploymentExists(data.ResourceName),
+				),
+			},
+			data.ImportStep(),
+		},
+	})
+}
+
+func TestAccResourceGroupTemplateDeployment_withOutputs(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_resource_group_template_deployment", "test")
+
+	resource.ParallelTest(t, resource.TestCase{
+		PreCheck:     func() { acceptance.PreCheck(t) },
+		Providers:    acceptance.SupportedProviders,
+		CheckDestroy: testCheckResourceGroupTemplateDeploymentDestroyed,
+		Steps: []resource.TestStep{
+			{
+				Config: resourceGroupTemplateDeployment_withOutputsConfig(data),
+				Check: resource.ComposeTestCheckFunc(
+					testCheckResourceGroupTemplateDeploymentExists(data.ResourceName),
+					resource.TestCheckResourceAttr(data.ResourceName, "output_content", "{\"testOutput\":{\"type\":\"String\",\"value\":\"some-value\"}}"),
+				),
+			},
+			data.ImportStep(),
+		},
+	})
+}
+
+func TestAccResourceGroupTemplateDeployment_multipleItems(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_resource_group_template_deployment", "test")
+
+	resource.ParallelTest(t, resource.TestCase{
+		PreCheck:     func() { acceptance.PreCheck(t) },
+		Providers:    acceptance.SupportedProviders,
+		CheckDestroy: testCheckResourceGroupTemplateDeploymentDestroyed,
+		Steps: []resource.TestStep{
+			{
+				Config: resourceGroupTemplateDeployment_multipleItemsConfig(data, "first"),
+				Check: resource.ComposeTestCheckFunc(
+					testCheckResourceGroupTemplateDeploymentExists(data.ResourceName),
+				),
+			},
+			data.ImportStep(),
+			{
+				Config: resourceGroupTemplateDeployment_multipleItemsConfig(data, "second"),
+				Check: resource.ComposeTestCheckFunc(
+					testCheckResourceGroupTemplateDeploymentExists(data.ResourceName),
+				),
+			},
+			data.ImportStep(),
+		},
+	})
+}
+
+func TestAccResourceGroupTemplateDeployment_multipleNestedItems(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_resource_group_template_deployment", "test")
+
+	resource.ParallelTest(t, resource.TestCase{
+		PreCheck:     func() { acceptance.PreCheck(t) },
+		Providers:    acceptance.SupportedProviders,
+		CheckDestroy: testCheckResourceGroupTemplateDeploymentDestroyed,
+		Steps: []resource.TestStep{
+			{
+				Config: resourceGroupTemplateDeployment_multipleNestedItemsConfig(data, "first"),
+				Check: resource.ComposeTestCheckFunc(
+					testCheckResourceGroupTemplateDeploymentExists(data.ResourceName),
+				),
+			},
+			data.ImportStep(),
+			{
+				Config: resourceGroupTemplateDeployment_multipleNestedItemsConfig(data, "second"),
+				Check: resource.ComposeTestCheckFunc(
+					testCheckResourceGroupTemplateDeploymentExists(data.ResourceName),
+				),
+			},
+			data.ImportStep(),
+		},
+	})
+}
+
+func TestAccResourceGroupTemplateDeployment_childItems(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_resource_group_template_deployment", "test")
+
+	resource.ParallelTest(t, resource.TestCase{
+		PreCheck:     func() { acceptance.PreCheck(t) },
+		Providers:    acceptance.SupportedProviders,
+		CheckDestroy: testCheckResourceGroupTemplateDeploymentDestroyed,
+		Steps: []resource.TestStep{
+			{
+				Config: resourceGroupTemplateDeployment_childItemsConfig(data),
+				Check: resource.ComposeTestCheckFunc(
+					testCheckResourceGroupTemplateDeploymentExists(data.ResourceName),
+				),
+			},
+			data.ImportStep(),
+			{
+				Config: resourceGroupTemplateDeployment_childItemsConfig(data),
+				Check: resource.ComposeTestCheckFunc(
+					testCheckResourceGroupTemplateDeploymentExists(data.ResourceName),
+				),
+			},
+			data.ImportStep(),
+		},
+	})
+}
+
+func testCheckResourceGroupTemplateDeploymentExists(resourceName string) resource.TestCheckFunc {
+	return func(s *terraform.State) error {
+		client := acceptance.AzureProvider.Meta().(*clients.Client).Resource.DeploymentsClient
+		ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext
+
+		// Ensure we have enough information in state to look up in API
+		rs, ok := s.RootModule().Resources[resourceName]
+		if !ok {
+			return fmt.Errorf("Not found: %s", resourceName)
+		}
+
+		name := rs.Primary.Attributes["name"]
+		resourceGroup := rs.Primary.Attributes["resource_group_name"]
+		resp, err := client.Get(ctx, resourceGroup, name)
+		if err != nil {
+			return fmt.Errorf("bad: Get on deploymentsClient: %s", err)
+		}
+
+		if resp.StatusCode == http.StatusNotFound {
+			return fmt.Errorf("bad: Resource Group Template Deployment %q does not exist", name)
+		}
+
+		return nil
+	}
+}
+
+func testCheckResourceGroupTemplateDeploymentDestroyed(s *terraform.State) error {
+	client := acceptance.AzureProvider.Meta().(*clients.Client).Resource.DeploymentsClient
+	ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext
+
+	for _, rs := range s.RootModule().Resources {
+		if rs.Type != "azurerm_resource_group_template_deployment" {
+			continue
+		}
+
+		name := rs.Primary.Attributes["name"]
+		resourceGroup := rs.Primary.Attributes["resource_group_name"]
+
+		resp, err := client.Get(ctx, resourceGroup, name)
+		if err != nil {
+			return nil
+		}
+
+		if resp.StatusCode != http.StatusNotFound {
+			return fmt.Errorf("Resource Group Template Deployment still exists:\n%#v", resp.Properties)
+		}
+	}
+
+	return nil
+}
+
+func resourceGroupTemplateDeployment_emptyConfig(data acceptance.TestData, deploymentMode string) string {
+	return fmt.Sprintf(`
+provider "azurerm" {
+  features {}
+}
+
+resource "azurerm_resource_group" "test" {
+  name     = "acctestrg-%d"
+  location = %q
+}
+
+resource "azurerm_resource_group_template_deployment" "test" {
+  name                = "acctest"
+  resource_group_name = azurerm_resource_group.test.name
+  deployment_mode     = %q
+
+  template_content = <